From 4e151ba0fbf1312cbddcbf31305b501a07970162 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Tue, 8 Oct 2024 13:28:57 +0200 Subject: [PATCH 01/80] Core: Move current parameter implementation Move it to a folder, such that we can implement the new version in the parameter namespace. --- src/faebryk/core/{parameter.py => parameter/__init__.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/faebryk/core/{parameter.py => parameter/__init__.py} (100%) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter/__init__.py similarity index 100% rename from src/faebryk/core/parameter.py rename to src/faebryk/core/parameter/__init__.py From 048df645cb6ad914b25db39c4e5262583ee7fcc3 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Tue, 8 Oct 2024 13:29:11 +0200 Subject: [PATCH 02/80] Core: Add minimal Range implementation for new params --- src/faebryk/core/parameter/future.py | 42 ++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 src/faebryk/core/parameter/future.py diff --git a/src/faebryk/core/parameter/future.py b/src/faebryk/core/parameter/future.py new file mode 100644 index 00000000..599471d1 --- /dev/null +++ b/src/faebryk/core/parameter/future.py @@ -0,0 +1,42 @@ +from faebryk.libs.units import Quantity, Unit +import math + +from typing import Protocol + +class _SupportsRangeOps(Protocol): + def __le__(self, __value) -> bool: ... + def __lt__(self, __value) -> bool: ... + def __ge__(self, __value) -> bool: ... + def __gt__(self, __value) -> bool: ... + + +class RangeInclusive[T: _SupportsRangeOps]: + def __init__(self, min: T | None = None, max: T | None = None): + self.min = min + self.max = max + if min is not None and max is not None and not min <= max: + raise ValueError("min must be less than or equal to max") + + def __contains__(self, item: T): + if self.min is not None and not self.min <= item: + return False + if self.max is not None and not item <= self.max: + return False + return True + + def intersection(self, other: "RangeInclusive[T]") -> "RangeInclusive[T]": + if self.min is None: + _min = other.min + elif other.min is None: + _min = self.min + else: + _min = max(self.min, other.min) + + if self.max is None: + _max = other.max + elif other.max is None: + _max = self.max + else: + _max = min(self.max, other.max) + + return RangeInclusive(_min, _max) \ No newline at end of file From b5f0a8ba64b035c1803ba262b3bbd7ce1f76fac8 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Tue, 8 Oct 2024 13:29:18 +0200 Subject: [PATCH 03/80] Core: Add mostly empty new Parameter class --- src/faebryk/core/parameter/future.py | 12 +++++++++++- src/faebryk/libs/library/L.py | 1 + 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/faebryk/core/parameter/future.py b/src/faebryk/core/parameter/future.py index 599471d1..f205df0b 100644 --- a/src/faebryk/core/parameter/future.py +++ b/src/faebryk/core/parameter/future.py @@ -1,3 +1,4 @@ +from faebryk.core.node import Node, f_field from faebryk.libs.units import Quantity, Unit import math @@ -39,4 +40,13 @@ def intersection(self, other: "RangeInclusive[T]") -> "RangeInclusive[T]": else: _max = min(self.max, other.max) - return RangeInclusive(_min, _max) \ No newline at end of file + return RangeInclusive(_min, _max) + +class Parameter(Node): + def __init__(self, unit: Unit, within: RangeInclusive[Quantity]): + super().__init__() + assert 
unit.is_compatible(within.min.unit) + self.unit = unit + self.within = within + +p_field = f_field(Parameter) \ No newline at end of file diff --git a/src/faebryk/libs/library/L.py b/src/faebryk/libs/library/L.py index 12181080..f6fd36eb 100644 --- a/src/faebryk/libs/library/L.py +++ b/src/faebryk/libs/library/L.py @@ -13,6 +13,7 @@ list_field, rt_field, ) +from faebryk.core.parameter.future import p_field # noqa: F401 from faebryk.core.reference import reference # noqa: F401 From cb3a25f0c6f700e530e3b5fa845e3418a5f25978 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 8 Oct 2024 13:54:36 +0200 Subject: [PATCH 04/80] Sets.py; Delte old params; Tests --- src/faebryk/core/parameter.py | 22 ++ src/faebryk/core/parameter/__init__.py | 493 ------------------------- src/faebryk/core/parameter/future.py | 52 --- src/faebryk/libs/sets.py | 100 +++++ test/core/test_parameters.py | 6 + test/libs/test_sets.py | 35 ++ 6 files changed, 163 insertions(+), 545 deletions(-) create mode 100644 src/faebryk/core/parameter.py delete mode 100644 src/faebryk/core/parameter/__init__.py delete mode 100644 src/faebryk/core/parameter/future.py create mode 100644 src/faebryk/libs/sets.py create mode 100644 test/libs/test_sets.py diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py new file mode 100644 index 00000000..46a12f08 --- /dev/null +++ b/src/faebryk/core/parameter.py @@ -0,0 +1,22 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + +import logging + +from faebryk.core.node import Node, f_field +from faebryk.libs.sets import Range +from faebryk.libs.units import Quantity, Unit + +logger = logging.getLogger(__name__) + + +class Parameter(Node): + def __init__(self, unit: Unit, within: Range[Quantity]): + super().__init__() + if not within.is_compatible_with_unit(unit): + raise ValueError("incompatible units") + self.unit = unit + self.within = within + + +p_field = f_field(Parameter) diff --git a/src/faebryk/core/parameter/__init__.py b/src/faebryk/core/parameter/__init__.py deleted file mode 100644 index 607075de..00000000 --- a/src/faebryk/core/parameter/__init__.py +++ /dev/null @@ -1,493 +0,0 @@ -# This file is part of the faebryk project -# SPDX-License-Identifier: MIT -import logging -from typing import ( - Callable, - Concatenate, - Optional, - Sequence, -) - -from typing_extensions import Self - -from faebryk.core.graphinterface import GraphInterface -from faebryk.core.node import Node -from faebryk.core.trait import Trait -from faebryk.libs.units import Quantity, UnitsContainer -from faebryk.libs.util import Tree, TwistArgs, is_type_pair, try_avoid_endless_recursion - -logger = logging.getLogger(__name__) - - -def _resolved[PV, O]( - func: Callable[["Parameter[PV]", "Parameter[PV]"], O], -) -> Callable[ - [ - "PV | set[PV] | tuple[PV, PV] | Parameter[PV]", - "PV | set[PV] | tuple[PV, PV] | Parameter[PV]", - ], - O, -]: - def wrap(*args): - args = [Parameter.from_literal(arg).get_most_narrow() for arg in args] - return func(*args) - - return wrap - - -def _resolved_self[PV, O, **P]( - func: Callable[Concatenate["Parameter[PV]", P], O], -) -> Callable[Concatenate["PV | set[PV] | tuple[PV, PV] | Parameter[PV]", P], O]: - def wrap( - p: "PV | set[PV] | tuple[PV, PV] | Parameter[PV]", - *args: P.args, - **kwargs: P.kwargs, - ): - return func(Parameter.from_literal(p).get_most_narrow(), *args, **kwargs) - - return wrap - - -class Parameter[PV](Node): - type LIT = PV | set[PV] | tuple[PV, PV] - type LIT_OR_PARAM = LIT | "Parameter[PV]" - - class 
TraitT(Trait): ... - - narrowed_by: GraphInterface - narrows: GraphInterface - - class MergeException(Exception): ... - - class SupportsSetOps: - def __contains__(self, other: "Parameter[PV].LIT_OR_PARAM") -> bool: ... - - def try_compress(self) -> "Parameter[PV]": - return self - - @classmethod - def from_literal(cls, value: LIT_OR_PARAM) -> '"Parameter[PV]"': - from faebryk.library.Constant import Constant - from faebryk.library.Range import Range - from faebryk.library.Set import Set - - if isinstance(value, Parameter): - return value - elif isinstance(value, set): - return Set(value) - elif isinstance(value, tuple): - return Range(*value) - else: - return Constant(value) - - def _merge(self, other: "Parameter[PV]") -> "Parameter[PV]": - from faebryk.library.ANY import ANY - from faebryk.library.Operation import Operation - from faebryk.library.Set import Set - from faebryk.library.TBD import TBD - - def _is_pair[T, U](type1: type[T], type2: type[U]) -> Optional[tuple[T, U]]: - return is_type_pair(self, other, type1, type2) - - if self is other: - return self - - try: - if self == other: - return self - except ValueError: - ... - - if pair := _is_pair(Parameter[PV], TBD): - return pair[0] - - if pair := _is_pair(Parameter[PV], ANY): - return pair[0] - - # TODO remove as soon as possible - if pair := _is_pair(Parameter[PV], Operation): - # TODO make MergeOperation that inherits from Operation - # and return that instead, application can check if result is MergeOperation - # if it was checking mergeability - raise self.MergeException("cant merge range with operation") - - if pair := _is_pair(Parameter[PV], Parameter[PV].SupportsSetOps): - out = self.intersect(*pair) - if isinstance(out, Operation): - raise self.MergeException("not resolvable") - if out == Set([]) and not pair[0] == pair[1] == Set([]): - raise self.MergeException( - f"conflicting sets/ranges: {self!r} {other!r}" - ) - return out - - raise NotImplementedError - - def _narrowed(self, other: "Parameter[PV]"): - if self is other: - return - - if self.narrowed_by.is_connected(other.narrows): - return - self.narrowed_by.connect(other.narrows) - - @_resolved - def is_mergeable_with(self: "Parameter[PV]", other: "Parameter[PV]") -> bool: - try: - self._merge(other) - return True - except self.MergeException: - return False - except NotImplementedError: - return False - - @_resolved - def is_subset_of(self: "Parameter[PV]", other: "Parameter[PV]") -> bool: - from faebryk.library.ANY import ANY - from faebryk.library.Operation import Operation - from faebryk.library.TBD import TBD - - lhs = self - rhs = other - - def is_either_instance(t: type["Parameter[PV]"]): - return isinstance(lhs, t) or isinstance(rhs, t) - - # Not resolveable - if isinstance(rhs, ANY): - return True - if isinstance(lhs, ANY): - return False - if is_either_instance(TBD): - return False - if is_either_instance(Operation): - return False - - # Sets - return lhs & rhs == lhs - - @_resolved - def merge(self: "Parameter[PV]", other: "Parameter[PV]") -> "Parameter[PV]": - out = self._merge(other) - - self._narrowed(out) - other._narrowed(out) - - return out - - @_resolved - def override(self: "Parameter[PV]", other: "Parameter[PV]") -> "Parameter[PV]": - if not other.is_subset_of(self): - raise self.MergeException("override not possible") - - self._narrowed(other) - return other - - # TODO: replace with graph-based - @staticmethod - def arithmetic_op( - op1: "Parameter[PV]", op2: "Parameter[PV]", op: Callable - ) -> "Parameter[PV]": - from faebryk.library.ANY import 
ANY - from faebryk.library.Constant import Constant - from faebryk.library.Operation import Operation - from faebryk.library.Range import Range - from faebryk.library.Set import Set - from faebryk.library.TBD import TBD - - def _is_pair[T, U]( - type1: type[T], type2: type[U] - ) -> Optional[tuple[T, U, Callable]]: - if isinstance(op1, type1) and isinstance(op2, type2): - return op1, op2, op - if isinstance(op1, type2) and isinstance(op2, type1): - return op2, op1, TwistArgs(op) - - return None - - if pair := _is_pair(Constant, Constant): - return Constant(op(pair[0].value, pair[1].value)) - - if pair := _is_pair(Range, Range): - try: - p0_min, p0_max = pair[0].min, pair[0].max - p1_min, p1_max = pair[1].min, pair[1].max - except Range.MinMaxError: - return Operation(pair[:2], op) - return Range( - *( - op(lhs, rhs) - for lhs, rhs in [ - (p0_min, p1_min), - (p0_max, p1_max), - (p0_min, p1_max), - (p0_max, p1_min), - ] - ) - ) - - if pair := _is_pair(Constant, Range): - sop = pair[2] - try: - return Range(*(sop(pair[0], bound) for bound in pair[1].bounds)) - except Range.MinMaxError: - return Operation(pair[:2], op) - - if pair := _is_pair(Parameter, ANY): - sop = pair[2] - return Operation(pair[:2], sop) - - if pair := _is_pair(Parameter, Operation): - sop = pair[2] - return Operation(pair[:2], sop) - - if pair := _is_pair(Parameter, TBD): - sop = pair[2] - return Operation(pair[:2], sop) - - if pair := _is_pair(Parameter, Set): - sop = pair[2] - return Set( - Parameter.arithmetic_op(nested, pair[0], sop) - for nested in pair[1].params - ) - - raise NotImplementedError - - @staticmethod - def intersect(op1: "Parameter[PV]", op2: "Parameter[PV]") -> "Parameter[PV]": - from faebryk.library.Constant import Constant - from faebryk.library.Operation import Operation - from faebryk.library.Range import Range - from faebryk.library.Set import Set - - if op1 == op2: - return op1 - - def _is_pair[T, U]( - type1: type[T], type2: type[U] - ) -> Optional[tuple[T, U, Callable]]: - if isinstance(op1, type1) and isinstance(op2, type2): - return op1, op2, op - if isinstance(op1, type2) and isinstance(op2, type1): - return op2, op1, TwistArgs(op) - - return None - - def op(a, b): - return a & b - - # same types - if pair := _is_pair(Constant, Constant): - return Set([]) - if pair := _is_pair(Set, Set): - return Set(pair[0].params.intersection(pair[1].params)) - if pair := _is_pair(Range, Range): - try: - min_ = max(pair[0].min, pair[1].min) - max_ = min(pair[0].max, pair[1].max) - if min_ > max_: - return Set([]) - if min_ == max_: - return Constant(min_) - return Range(max_, min_) - except Range.MinMaxError: - return Operation(pair[:2], op) - - # diff types - if pair := _is_pair(Constant, Range): - try: - if pair[0] in pair[1]: - return pair[0] - else: - return Set([]) - except Range.MinMaxError: - return Operation(pair[:2], op) - if pair := _is_pair(Constant, Set): - if pair[0] in pair[1]: - return pair[0] - else: - return Set([]) - if pair := _is_pair(Range, Set): - try: - return Set(i for i in pair[1].params if i in pair[0]) - except Range.MinMaxError: - return Operation(pair[:2], op) - - return Operation((op1, op2), op) - - @_resolved - def __add__(self: "Parameter[PV]", other: "Parameter[PV]"): - return self.arithmetic_op(self, other, lambda a, b: a + b) - - @_resolved - def __radd__(self: "Parameter[PV]", other: "Parameter[PV]"): - return self.arithmetic_op(self, other, lambda a, b: b + a) - - @_resolved - def __sub__(self: "Parameter[PV]", other: "Parameter[PV]"): - return self.arithmetic_op(self, 
other, lambda a, b: a - b) - - @_resolved - def __rsub__(self: "Parameter[PV]", other: "Parameter[PV]"): - return self.arithmetic_op(self, other, lambda a, b: b - a) - - # TODO PV | float - @_resolved - def __mul__(self: "Parameter[PV]", other: "Parameter[PV]"): - return self.arithmetic_op(self, other, lambda a, b: a * b) - - @_resolved - def __rmul__(self: "Parameter[PV]", other: "Parameter[PV]"): - return self.arithmetic_op(self, other, lambda a, b: b * a) - - # TODO PV | float - @_resolved - def __truediv__(self: "Parameter[PV]", other: "Parameter[PV]"): - return self.arithmetic_op(self, other, lambda a, b: a / b) - - @_resolved - def __rtruediv__(self: "Parameter[PV]", other: "Parameter[PV]"): - return self.arithmetic_op(self, other, lambda a, b: b / a) - - @_resolved - def __pow__(self: "Parameter[PV]", other: "Parameter[PV]") -> "Parameter[PV]": - return self.arithmetic_op(self, other, lambda a, b: a**b) - - @_resolved - def __rpow__(self: "Parameter[PV]", other: "Parameter[PV]") -> "Parameter[PV]": - return self.arithmetic_op(self, other, lambda a, b: b**a) - - @_resolved - def __and__(self: "Parameter[PV]", other: "Parameter[PV]") -> "Parameter[PV]": - return self.intersect(self, other) - - @_resolved - def __rand__(self: "Parameter[PV]", other: "Parameter[PV]") -> "Parameter[PV]": - return self.intersect(other, self) - - def get_most_narrow(self) -> "Parameter[PV]": - out = self.get_narrowing_chain()[-1] - - com = out.try_compress() - if com is not out: - com = com.get_most_narrow() - out._narrowed(com) - out = com - - return out - - @staticmethod - def resolve_all(params: "Sequence[Parameter[PV]]") -> "Parameter[PV]": - from faebryk.library.TBD import TBD - - params_set = list(params) - if not params_set: - return TBD[PV]() - it = iter(params_set) - most_specific = next(it) - for param in it: - most_specific = most_specific.merge(param) - - return most_specific - - @try_avoid_endless_recursion - def __str__(self) -> str: - narrowest = self.get_most_narrow() - if narrowest is self: - return super().__str__() - return str(narrowest) - - # @try_avoid_endless_recursion - # def __repr__(self) -> str: - # narrowest = self.get_most_narrow() - # if narrowest is self: - # return super().__repr__() - # # return f"{super().__repr__()} -> {repr(narrowest)}" - # return repr(narrowest) - - def get_narrowing_chain(self) -> list["Parameter"]: - out: list[Parameter] = [self] - narrowers = self.narrowed_by.get_connected_nodes(Parameter) - if narrowers: - assert len(narrowers) == 1, "Narrowing tree diverged" - out += next(iter(narrowers)).get_narrowing_chain() - assert id(self) not in map(id, out[1:]), "Narrowing tree cycle" - return out - - def get_narrowed_siblings(self) -> set["Parameter"]: - return self.narrows.get_connected_nodes(Parameter) - - def __copy__(self) -> Self: - return type(self)() - - def __deepcopy__(self, memo) -> Self: - return self.__copy__() - - def get_tree_param(self, include_root: bool = True) -> Tree["Parameter"]: - out = Tree[Parameter]( - {p: p.get_tree_param() for p in self.get_narrowed_siblings()} - ) - if include_root: - out = Tree[Parameter]({self: out}) - return out - - # util functions ------------------------------------------------------------------- - @_resolved_self - def enum_parameter_representation( - self: "Parameter[PV]", required: bool = False - ) -> str: - return self._enum_parameter_representation(required=required) - - def _enum_parameter_representation(self, required: bool = False) -> str: - return self.as_unit("", required=required) - - 
@_resolved_self - def as_unit( - self: "Parameter[PV]", - unit: UnitsContainer, - base: int = 1000, - required: bool = False, - ) -> str: - if base != 1000: - raise NotImplementedError("Only base 1000 supported") - - return self._as_unit(unit, base=base, required=required) - - def _as_unit(self, unit: UnitsContainer, base: int, required: bool) -> str: - raise ValueError(f"Unsupported {self}") - - @_resolved_self - def as_unit_with_tolerance( - self: "Parameter[PV]", - unit: UnitsContainer, - base: int = 1000, - required: bool = False, - ) -> str: - return self._as_unit_with_tolerance(unit, base=base, required=required) - - def _as_unit_with_tolerance( - self, unit: UnitsContainer, base: int, required: bool - ) -> str: - return self._as_unit(unit, base=base, required=required) - - @_resolved_self - def get_max(self: "Parameter[PV]") -> PV: - return self._max() - - def _max(self): - raise ValueError(f"Can't get max for {self}") - - def with_same_unit( - self: "Quantity | float | int | LIT_OR_PARAM", - to_convert: float | int, - ): - from faebryk.library.Constant import Constant - - if isinstance(self, Constant) and isinstance(self.value, Quantity): - return Quantity(to_convert, self.value.units) - if isinstance(self, Quantity): - return Quantity(to_convert, self.units) - if isinstance(self, (float, int)): - return to_convert - raise NotImplementedError(f"Unsupported {self=}") diff --git a/src/faebryk/core/parameter/future.py b/src/faebryk/core/parameter/future.py deleted file mode 100644 index f205df0b..00000000 --- a/src/faebryk/core/parameter/future.py +++ /dev/null @@ -1,52 +0,0 @@ -from faebryk.core.node import Node, f_field -from faebryk.libs.units import Quantity, Unit -import math - -from typing import Protocol - -class _SupportsRangeOps(Protocol): - def __le__(self, __value) -> bool: ... - def __lt__(self, __value) -> bool: ... - def __ge__(self, __value) -> bool: ... - def __gt__(self, __value) -> bool: ... - - -class RangeInclusive[T: _SupportsRangeOps]: - def __init__(self, min: T | None = None, max: T | None = None): - self.min = min - self.max = max - if min is not None and max is not None and not min <= max: - raise ValueError("min must be less than or equal to max") - - def __contains__(self, item: T): - if self.min is not None and not self.min <= item: - return False - if self.max is not None and not item <= self.max: - return False - return True - - def intersection(self, other: "RangeInclusive[T]") -> "RangeInclusive[T]": - if self.min is None: - _min = other.min - elif other.min is None: - _min = self.min - else: - _min = max(self.min, other.min) - - if self.max is None: - _max = other.max - elif other.max is None: - _max = self.max - else: - _max = min(self.max, other.max) - - return RangeInclusive(_min, _max) - -class Parameter(Node): - def __init__(self, unit: Unit, within: RangeInclusive[Quantity]): - super().__init__() - assert unit.is_compatible(within.min.unit) - self.unit = unit - self.within = within - -p_field = f_field(Parameter) \ No newline at end of file diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py new file mode 100644 index 00000000..5c037bcd --- /dev/null +++ b/src/faebryk/libs/sets.py @@ -0,0 +1,100 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + +from abc import ABC, abstractmethod +from typing import Any, Protocol + +from faebryk.libs.units import Quantity, Unit + + +class _SupportsRangeOps(Protocol): + def __le__(self, __value) -> bool: ... + def __lt__(self, __value) -> bool: ... 
+ def __ge__(self, __value) -> bool: ... + def __gt__(self, __value) -> bool: ... + + +class Set_[T](ABC): + def __init__(self): + pass + + @abstractmethod + def __contains__(self, item: T): + pass + + @abstractmethod + def is_compatible_with_unit(self, unit: Unit | Quantity) -> bool: + pass + + +class Range[T: _SupportsRangeOps](Set_[T]): + def __init__(self, min: T | None = None, max: T | None = None, empty: bool = False): + self.empty = empty + self.min = min + self.max = max + if empty and (min is not None or max is not None): + raise ValueError("empty range cannot have min or max") + if min is not None and max is not None and not min <= max: + raise ValueError("min must be less than or equal to max") + + def __contains__(self, item: T): + if self.min is not None and not self.min <= item: + return False + if self.max is not None and not item <= self.max: + return False + return True + + def is_compatible_with_unit(self, unit: Unit | Quantity) -> bool: + for m in [self.min, self.max]: + if isinstance(m, Quantity) and not unit.is_compatible_with(m.units): + return False + + return True + + def intersection(self, other: "Range[T]") -> "Range[T]": + if self.empty or other.empty: + return Range(empty=True) + + if self.min is None: + _min = other.min + elif other.min is None: + _min = self.min + else: + _min = max(self.min, other.min) + + if self.max is None: + _max = other.max + elif other.max is None: + _max = self.max + else: + _max = min(self.max, other.max) + + if (_min is not None and (_min not in self or _min not in other)) or ( + _max is not None and (_max not in self or _max not in other) + ): + return Range(empty=True) + + return Range(_min, _max) + + def __eq__(self, value: Any) -> bool: + if not isinstance(value, Range): + return False + if self.empty or value.empty: + return self.empty and value.empty + return self.min == value.min and self.max == value.max + + +class Single[T](Set_[T]): + def __init__(self, value: T): + self.value = value + + def __contains__(self, item: T): + return item == self.value + + +class Set[T](Set_[T]): + def __init__(self, *elements: T): + self.elements = set(elements) + + def __contains__(self, item: T): + return item in self.elements diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index 9db7050d..cefa762f 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -365,6 +365,12 @@ def __preinit__(self) -> None: def test_units(self): self.assertEqual(F.Constant(1e-9 * P.F), 1 * P.nF) + def test_new_definitions(self): + pass + + def test_constant_sets(self): + pass + if __name__ == "__main__": unittest.main() diff --git a/test/libs/test_sets.py b/test/libs/test_sets.py new file mode 100644 index 00000000..a4632e82 --- /dev/null +++ b/test/libs/test_sets.py @@ -0,0 +1,35 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + +import pytest +from pint import DimensionalityError + +from faebryk.libs.sets import Range +from faebryk.libs.units import P + + +def test_range_intersection_simple(): + x = Range(0, 10) + y = x.intersection(Range(5, 15)) + assert y == Range(5, 10) + + +def test_range_intersection_empty(): + x = Range(0, 10) + y = x.intersection(Range(15, 20)) + assert y == Range(empty=True) + + +def test_range_unit_none(): + x = Range(0, 10) + assert x.is_compatible_with_unit(P.V) + + +def test_range_unit_same(): + y = Range(0 * P.V, 10 * P.V) + assert y.is_compatible_with_unit(P.V) + + +def test_range_unit_different(): + with pytest.raises(DimensionalityError): + Range(0 * 
P.V, 10 * P.A) From c0f04181533e0da8c9d55d72986b500ad37445d6 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 8 Oct 2024 14:12:02 +0200 Subject: [PATCH 05/80] Add parameter constructor --- src/faebryk/core/parameter.py | 59 ++++++++++++++++++++++++++++++++++- src/faebryk/libs/library/L.py | 2 +- 2 files changed, 59 insertions(+), 2 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 46a12f08..cc1f4ef1 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: MIT import logging +from enum import Enum from faebryk.core.node import Node, f_field from faebryk.libs.sets import Range @@ -10,13 +11,69 @@ logger = logging.getLogger(__name__) +class Domain: + pass + + +class Domains: + class ESeries(Domain): + class E96(Domain): + pass + + class E24(Domain): + pass + + class Numbers(Domain): + def __init__(self, *, negative: bool = True, zero_allowed: bool = True) -> None: + super().__init__() + self.negative = negative + self.zero_allowed = zero_allowed + + class Integers(Domain): + class Positive(Domain): + pass + + class Reals(Domain): + class Positive(Domain): + pass + + class Boolean(Domain): + pass + + class Enum(Domain): + def __init__(self, enum_t: type[Enum]): + super().__init__() + self.enum_t = enum_t + + class Parameter(Node): - def __init__(self, unit: Unit, within: Range[Quantity]): + def __init__( + self, + *, + unit: Unit | Quantity, + # hard constraints + within: Range[Quantity] | None = None, + domain: Domain = Domains.Numbers.Reals.Positive(), + # soft constraints + soft_set: Range[Quantity] | None = None, + guess: Quantity | None = None, + tolerance_guess: Quantity | None = None, + # hints + likely_constrained: bool = False, + ): super().__init__() + if within is None: + within = Range() if not within.is_compatible_with_unit(unit): raise ValueError("incompatible units") + self.unit = unit self.within = within + self.domain = domain + self.soft_set = soft_set + self.guess = guess + self.tolerance_guess = tolerance_guess + self.likely_constrained = likely_constrained p_field = f_field(Parameter) diff --git a/src/faebryk/libs/library/L.py b/src/faebryk/libs/library/L.py index f6fd36eb..f5423d22 100644 --- a/src/faebryk/libs/library/L.py +++ b/src/faebryk/libs/library/L.py @@ -13,7 +13,7 @@ list_field, rt_field, ) -from faebryk.core.parameter.future import p_field # noqa: F401 +from faebryk.core.parameter import p_field # noqa: F401 from faebryk.core.reference import reference # noqa: F401 From 56754899986ae68565170e90bbc2e0ff76ad2929 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Tue, 8 Oct 2024 15:25:30 +0200 Subject: [PATCH 06/80] Core: Add some arithmetic expressions --- src/faebryk/core/parameter.py | 59 ++++++++++++++++++++++++++++++----- 1 file changed, 51 insertions(+), 8 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index cc1f4ef1..9ed2abd8 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -2,18 +2,63 @@ # SPDX-License-Identifier: MIT import logging +import math from enum import Enum from faebryk.core.node import Node, f_field from faebryk.libs.sets import Range -from faebryk.libs.units import Quantity, Unit +from faebryk.libs.units import Quantity, Unit, P +from typing import Protocol logger = logging.getLogger(__name__) +from typing import runtime_checkable -class Domain: +@runtime_checkable +class HasUnit(Protocol): + unit: Unit + +#TODO: 
prohibit instantiation +class Expression: pass +class Arithmetic(Expression): + def __init__(self, *operands): + types = [ int, float, Quantity, Parameter, Expression ] + if any(not type(op) in types for op in operands): + raise ValueError("operands must be int, float, Quantity, Parameter, or Expression") + self.operands = operands + +class Additive(Arithmetic): + def __init__(self, *operands): + super().__init__(*operands) + units = [ op.unit if isinstance(op, HasUnit) else P.dimensionless for op in operands ] + # Check if all units are compatible + self.unit = units[0] + if not all(u.is_compatible_with(self.unit) for u in units): + raise ValueError("All operands must have compatible units") + +class Add(Additive): + def __init__(self, *operands): + super().__init__(*operands) + +class Subtract(Additive): + def __init__(self, *operands): + super().__init__(*operands) + +class Multiply(Arithmetic): + def __init__(self, *operands): + super().__init__(*operands) + self.unit = math.prod([ op.unit if isinstance(op, HasUnit) else P.dimensionless for op in operands ]) + +class Divide(Multiply): + def __init__(self, numerator, denominator): + super().__init__(numerator, denominator) + self.unit = numerator.unit / denominator.unit + + +class Domain: + pass class Domains: class ESeries(Domain): @@ -33,10 +78,6 @@ class Integers(Domain): class Positive(Domain): pass - class Reals(Domain): - class Positive(Domain): - pass - class Boolean(Domain): pass @@ -52,10 +93,10 @@ def __init__( *, unit: Unit | Quantity, # hard constraints - within: Range[Quantity] | None = None, + within: Range | None = None, domain: Domain = Domains.Numbers.Reals.Positive(), # soft constraints - soft_set: Range[Quantity] | None = None, + soft_set: Range| None = None, guess: Quantity | None = None, tolerance_guess: Quantity | None = None, # hints @@ -67,6 +108,8 @@ def __init__( if not within.is_compatible_with_unit(unit): raise ValueError("incompatible units") + if not isinstance(unit, Unit): + raise TypeError("unit must be a Unit") self.unit = unit self.within = within self.domain = domain From 066bad8c2224421958f96a878d8c2af8c7a0639a Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 8 Oct 2024 15:30:44 +0200 Subject: [PATCH 07/80] Namespace --- src/faebryk/core/parameter.py | 189 ++++++++++++++--- src/faebryk/library/Battery.py | 19 +- src/faebryk/libs/library/L.py | 7 +- test/core/test_parameters.py | 375 +-------------------------------- 4 files changed, 188 insertions(+), 402 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 9ed2abd8..7c86b4ed 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -3,53 +3,68 @@ import logging import math -from enum import Enum +from enum import Enum, auto +from typing import Protocol, runtime_checkable +from deprecated import deprecated + +from faebryk.core.core import Namespace from faebryk.core.node import Node, f_field from faebryk.libs.sets import Range -from faebryk.libs.units import Quantity, Unit, P -from typing import Protocol +from faebryk.libs.units import P, Quantity, Unit logger = logging.getLogger(__name__) -from typing import runtime_checkable @runtime_checkable class HasUnit(Protocol): unit: Unit -#TODO: prohibit instantiation + +# TODO: prohibit instantiation class Expression: pass + class Arithmetic(Expression): def __init__(self, *operands): - types = [ int, float, Quantity, Parameter, Expression ] - if any(not type(op) in types for op in operands): - raise ValueError("operands must be int, 
float, Quantity, Parameter, or Expression") + types = [int, float, Quantity, Parameter, Expression] + if any(type(op) not in types for op in operands): + raise ValueError( + "operands must be int, float, Quantity, Parameter, or Expression" + ) self.operands = operands + class Additive(Arithmetic): def __init__(self, *operands): super().__init__(*operands) - units = [ op.unit if isinstance(op, HasUnit) else P.dimensionless for op in operands ] + units = [ + op.unit if isinstance(op, HasUnit) else P.dimensionless for op in operands + ] # Check if all units are compatible self.unit = units[0] if not all(u.is_compatible_with(self.unit) for u in units): raise ValueError("All operands must have compatible units") + class Add(Additive): def __init__(self, *operands): super().__init__(*operands) + class Subtract(Additive): def __init__(self, *operands): super().__init__(*operands) + class Multiply(Arithmetic): def __init__(self, *operands): super().__init__(*operands) - self.unit = math.prod([ op.unit if isinstance(op, HasUnit) else P.dimensionless for op in operands ]) + self.unit = math.prod( + [op.unit if isinstance(op, HasUnit) else P.dimensionless for op in operands] + ) + class Divide(Multiply): def __init__(self, numerator, denominator): @@ -60,31 +75,116 @@ def __init__(self, numerator, denominator): class Domain: pass -class Domains: - class ESeries(Domain): - class E96(Domain): - pass - class E24(Domain): - pass +class ESeries(Domain): + class SeriesType(Enum): + E6 = auto() + E12 = auto() + E24 = auto() + E48 = auto() + E96 = auto() + E192 = auto() - class Numbers(Domain): - def __init__(self, *, negative: bool = True, zero_allowed: bool = True) -> None: - super().__init__() - self.negative = negative - self.zero_allowed = zero_allowed + def __init__(self, series: SeriesType): + self.series = series - class Integers(Domain): - class Positive(Domain): - pass - class Boolean(Domain): - pass +class Numbers(Domain): + def __init__( + self, *, negative: bool = True, zero_allowed: bool = True, integer: bool = False + ) -> None: + super().__init__() + self.negative = negative + self.zero_allowed = zero_allowed + self.integer = integer + + +class Boolean(Domain): + pass - class Enum(Domain): - def __init__(self, enum_t: type[Enum]): - super().__init__() - self.enum_t = enum_t + +class EnumDomain(Domain): + def __init__(self, enum_t: type[Enum]): + super().__init__() + self.enum_t = enum_t + + +class Predicate(Node): + pass + + +class LessThan(Predicate): + pass + + +class GreaterThan(Predicate): + pass + + +class LessOrEqual(Predicate): + pass + + +class GreaterOrEqual(Predicate): + pass + + +class NotEqual(Predicate): + pass + + +class IsSubset(Predicate): + pass + + +class Alias(Node): + pass + + +class Is(Alias): + pass + + +class Aliases(Namespace): + IS = Is + + +# TODO rename? +class R(Namespace): + """ + Namespace holding Expressions, Domains and Predicates for Parameters. 
+ R = paRameters + """ + + class Predicates(Namespace): + class Element(Namespace): + LT = LessThan + GT = GreaterThan + LE = LessOrEqual + GE = GreaterOrEqual + NE = NotEqual + + class Set(Namespace): + IS_SUBSET = IsSubset + + class Domains(Namespace): + class ESeries(Namespace): + E6 = lambda: ESeries(ESeries.SeriesType.E6) + E12 = lambda: ESeries(ESeries.SeriesType.E12) + E24 = lambda: ESeries(ESeries.SeriesType.E24) + E48 = lambda: ESeries(ESeries.SeriesType.E48) + E96 = lambda: ESeries(ESeries.SeriesType.E96) + E192 = lambda: ESeries(ESeries.SeriesType.E192) + + class Numbers(Namespace): + REAL = Numbers + NATURAL = lambda: Numbers(integer=True, negative=False) + + BOOL = Boolean + ENUM = Enum + + class Expressions(Namespace): + pass class Parameter(Node): @@ -94,9 +194,9 @@ def __init__( unit: Unit | Quantity, # hard constraints within: Range | None = None, - domain: Domain = Domains.Numbers.Reals.Positive(), + domain: Domain = Numbers(negative=False), # soft constraints - soft_set: Range| None = None, + soft_set: Range | None = None, guess: Quantity | None = None, tolerance_guess: Quantity | None = None, # hints @@ -118,5 +218,30 @@ def __init__( self.tolerance_guess = tolerance_guess self.likely_constrained = likely_constrained + def alias_is(self, other: "Parameter"): + pass + + def constrain_le(self, other: "Parameter"): + pass + + def constrain_ge(self, other: "Parameter"): + pass + + def constrain_lt(self, other: "Parameter"): + pass + + def constrain_gt(self, other: "Parameter"): + pass + + def constrain_ne(self, other: "Parameter"): + pass + + def constrain_subset(self, other: "Parameter"): + pass + + @deprecated("use alias_is instead") + def merge(self, other: "Parameter"): + return self.alias_is(other) + p_field = f_field(Parameter) diff --git a/src/faebryk/library/Battery.py b/src/faebryk/library/Battery.py index 60ec3372..c5b49c9e 100644 --- a/src/faebryk/library/Battery.py +++ b/src/faebryk/library/Battery.py @@ -5,12 +5,25 @@ import faebryk.library._F as F import faebryk.libs.library.L as L from faebryk.core.module import Module -from faebryk.libs.units import Quantity +from faebryk.libs.sets import Range +from faebryk.libs.units import P class Battery(Module): - voltage: F.TBD[Quantity] - capacity: F.TBD[Quantity] + voltage = L.p_field( + unit=P.V, + soft_set=Range(0 * P.V, 100 * P.V), + guess=3.7 * P.V, + tolerance_guess=5 * P.percent, + likely_constrained=True, + ) + capacity = L.p_field( + unit=P.Ah, + soft_set=Range(100 * P.mAh, 100 * P.Ah), + guess=1 * P.Ah, + tolerance_guess=5 * P.percent, + likely_constrained=True, + ) power: F.ElectricPower diff --git a/src/faebryk/libs/library/L.py b/src/faebryk/libs/library/L.py index f5423d22..d0efa67a 100644 --- a/src/faebryk/libs/library/L.py +++ b/src/faebryk/libs/library/L.py @@ -13,7 +13,7 @@ list_field, rt_field, ) -from faebryk.core.parameter import p_field # noqa: F401 +from faebryk.core.parameter import R, p_field # noqa: F401 from faebryk.core.reference import reference # noqa: F401 @@ -21,3 +21,8 @@ class AbstractclassError(Exception): ... 
logger = logging.getLogger(__name__) + + +Predicates = R.Predicates +Domains = R.Domains +Expressions = R.Expressions diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index cefa762f..e253a911 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -2,375 +2,18 @@ # SPDX-License-Identifier: MIT import logging -import unittest -from operator import add -import faebryk.library._F as F -from faebryk.core.core import logger as core_logger -from faebryk.core.module import Module -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import Domains, Parameter +from faebryk.libs.sets import Range from faebryk.libs.units import P logger = logging.getLogger(__name__) -core_logger.setLevel(logger.getEffectiveLevel()) -class TestParameters(unittest.TestCase): - def test_operations(self): - def assertIsInstance[T: Parameter](obj: Parameter, cls: type[T]) -> T: - obj = obj.get_most_narrow() - self.assertIsInstance(obj, cls) - assert isinstance(obj, cls) - return obj - - # Constant - ONE = F.Constant(1) - self.assertEqual(ONE.value, 1) - - TWO = F.Constant(2) - self.assertEqual(assertIsInstance(ONE + TWO, F.Constant).value, 3) - self.assertEqual(assertIsInstance(ONE - TWO, F.Constant).value, -1) - - self.assertEqual(assertIsInstance((ONE / TWO) / TWO, F.Constant).value, 1 / 4) - - # Range - R_ONE_TEN = F.Range(1, 10) - self.assertEqual(assertIsInstance(R_ONE_TEN + TWO, F.Range), F.Range(3, 12)) - - R_TWO_THREE = F.Range(2, 3) - self.assertEqual( - assertIsInstance(R_ONE_TEN + R_TWO_THREE, F.Range), F.Range(3, 13) - ) - self.assertEqual( - assertIsInstance(R_ONE_TEN * R_TWO_THREE, F.Range), F.Range(2, 30) - ) - self.assertEqual( - assertIsInstance(R_ONE_TEN - R_TWO_THREE, F.Range), F.Range(-2, 8) - ) - self.assertEqual( - assertIsInstance(R_ONE_TEN / R_TWO_THREE, F.Range), F.Range(1 / 3, 10 / 2) - ) - - # TBD Range - a = F.TBD[int]() - b = F.TBD[int]() - R_TBD = F.Range(a, b) - add = R_ONE_TEN + R_TBD - mul = R_ONE_TEN * R_TBD - sub = R_ONE_TEN - R_TBD - div = R_ONE_TEN / R_TBD - a.merge(F.Constant(2)) - b.merge(F.Constant(3)) - self.assertEqual(assertIsInstance(add, F.Range), F.Range(3, 13)) - self.assertEqual(assertIsInstance(mul, F.Range), F.Range(2, 30)) - self.assertEqual(assertIsInstance(sub, F.Range), F.Range(-2, 8)) - self.assertEqual(assertIsInstance(div, F.Range), F.Range(1 / 3, 10 / 2)) - - # Set - S_FIVE_NINE = F.Set(set(F.Constant(x) for x in range(5, 10))) - self.assertEqual( - assertIsInstance(S_FIVE_NINE + ONE, F.Set).params, - set(F.Constant(x) for x in range(6, 11)), - ) - - S_TEN_TWENTY_THIRTY = F.Set(set(F.Constant(x) for x in [10, 20, 30])) - self.assertEqual( - assertIsInstance(S_FIVE_NINE + S_TEN_TWENTY_THIRTY, F.Set), - F.Set(F.Constant(x + y) for x in range(5, 10) for y in [10, 20, 30]), - ) - - # conjunctions - # with static values - R_ONE_TEN = F.Range(1, 10) - R_TWO_THREE = F.Range(2, 3) - self.assertEqual(R_ONE_TEN & R_TWO_THREE, F.Range(2, 3)) - self.assertEqual(R_ONE_TEN & F.Range(5, 20), F.Range(5, 10)) - self.assertEqual(R_ONE_TEN & 5, F.Constant(5)) - self.assertEqual(R_ONE_TEN & F.Constant(5), F.Constant(5)) - self.assertEqual(R_ONE_TEN & F.Set([1, 5, 8, 12]), F.Set([1, 5, 8])) - self.assertEqual(F.Set([1, 2, 3]) & F.Set([2, 3, 4]), F.Set([2, 3])) - self.assertEqual(F.Set([1, 2, 3]) & 3, F.Constant(3)) - self.assertEqual(F.Constant(3) & 3, F.Constant(3)) - self.assertEqual(F.Constant(2) & 3, F.Set([])) - self.assertEqual(R_ONE_TEN & {1, 2, 11}, F.Set([1, 2])) - self.assertEqual(R_ONE_TEN & F.Range(12, 
13), F.Set([])) - # with tbd - a = F.TBD[int]() - b = F.TBD[int]() - RTBD = F.Range(a, b) - r_one_ten_con_tbd = R_ONE_TEN & RTBD - assertIsInstance(r_one_ten_con_tbd, F.Operation) - a.merge(2) - b.merge(20) - self.assertEqual(assertIsInstance(r_one_ten_con_tbd, F.Range), F.Range(2, 10)) - - # TODO disjunctions - - # F.Operation - token = F.TBD() - op = assertIsInstance(ONE + token, F.Operation) - op2 = assertIsInstance(op + 10, F.Operation) - - self.assertEqual(op.operands, (ONE, F.TBD())) - self.assertEqual(op.operation(1, 2), 3) - - token.merge(F.Constant(2)) - self.assertEqual(op.get_most_narrow(), F.Constant(3)) - - self.assertEqual(op + 5, F.Constant(8)) - self.assertEqual(op2.get_most_narrow(), F.Constant(13)) - - # Any - assertIsInstance(ONE + F.ANY(), F.Operation) - assertIsInstance(F.TBD() + F.ANY(), F.Operation) - assertIsInstance((F.TBD() + F.TBD()) + F.ANY(), F.Operation) - - # Test quantities - self.assertEqual(F.Constant(1 * P.baud), 1 * P.baud) - self.assertEqual(F.Constant(1) * P.baud, 1 * P.baud) - self.assertEqual(F.Range(1, 10) * P.baud, F.Range(1 * P.baud, 10 * P.baud)) - self.assertEqual(F.Set([1, 2]) * P.baud, F.Set([1 * P.baud, 2 * P.baud])) - - def test_resolution(self): - def assertIsInstance[T](obj, cls: type[T]) -> T: - self.assertIsInstance(obj, cls) - assert isinstance(obj, cls) - return obj - - ONE = F.Constant(1) - self.assertEqual( - assertIsInstance(Parameter.resolve_all([ONE, ONE]), F.Constant).value, 1 - ) - - TWO = F.Constant(2) - self.assertEqual( - assertIsInstance( - Parameter.resolve_all([F.Operation([ONE, ONE], add), TWO]), F.Constant - ).value, - 2, - ) - - self.assertEqual(F.TBD(), F.TBD()) - self.assertEqual(F.ANY(), F.ANY()) - - def test_merge( - a: Parameter[int] | set[int] | int | tuple[int, int], - b: Parameter[int] | set[int] | int | tuple[int, int], - expected, - ): - a = Parameter[int].from_literal(a) - expected = Parameter[int].from_literal(expected) - self.assertEqual(a.merge(b), expected) - - def fail_merge(a, b): - a = Parameter[int].from_literal(a) - self.assertRaises(Parameter.MergeException, lambda: a.merge(b)) - - # F.Sets ---- - - # F.Ranges - test_merge((0, 10), (5, 15), (5, 10)) - test_merge((0, 10), (5, 8), (5, 8)) - fail_merge((0, 10), (11, 15)) - test_merge((5, 10), 5, 5) - fail_merge((0, 10), 11) - test_merge((5, 10), {5, 6, 12}, {5, 6}) - - # Empty set - fail_merge({1, 2}, set()) - fail_merge((1, 5), set()) - fail_merge(5, set()) - test_merge(set(), set(), set()) - test_merge(F.TBD(), set(), set()) - test_merge(F.ANY(), set(), set()) - - test_merge({1, 2}, {2, 3}, {2}) - fail_merge({1, 2}, {3, 4}) - test_merge({1, 2}, 2, 2) - - # F.TBD/F.ANY -- - - test_merge(F.TBD(), F.TBD(), F.TBD()) - test_merge(F.ANY(), F.ANY(), F.ANY()) - test_merge(F.TBD(), F.ANY(), F.ANY()) - - def test_specific(self): - def test_spec( - a: Parameter[int] | set[int] | int | tuple[int, int], - b: Parameter[int] | set[int] | int | tuple[int, int], - expected: bool = True, - ): - b = Parameter[int].from_literal(b) - if expected: - self.assertTrue(b.is_subset_of(a)) - else: - self.assertFalse(b.is_subset_of(a)) - - test_spec(1, 1) - test_spec(1, 2, False) - - test_spec((1, 2), 1) - test_spec(1, (1, 2), False) - - test_spec({1, 2}, 1) - test_spec(1, {1, 2}, False) - test_spec(1, {1}) - - test_spec((1, 2), (1, 2)) - test_spec((1, 2), (1, 3), False) - test_spec((1, 10), (1, 3)) - - test_spec(1, F.ANY(), False) - test_spec(F.ANY(), 1) - test_spec(F.TBD(), 1, False) - test_spec(F.ANY(), F.Operation((1, 2), add)) - test_spec(F.ANY(), F.Operation((1, F.TBD()), 
add)) - - test_spec(F.Operation((1, 2), add), 3) - test_spec(F.Operation((1, F.TBD()), add), F.TBD(), False) - - def test_compress(self): - def test_comp( - a: Parameter[int].LIT_OR_PARAM, - expected: Parameter[int].LIT_OR_PARAM, - ): - a = Parameter[int].from_literal(a) - expected = Parameter[int].from_literal(expected) - self.assertEqual(a.get_most_narrow(), expected) - - test_comp(1, 1) - test_comp(F.Constant(F.Constant(1)), 1) - test_comp(F.Constant(F.Constant(F.Constant(1))), 1) - test_comp({1}, 1) - test_comp(F.Range(1), 1) - test_comp(F.Range(F.Range(1)), 1) - test_comp(F.Constant(F.Set([F.Range(F.Range(1))])), 1) - - def test_modules(self): - def assertIsInstance[T](obj, cls: type[T]) -> T: - self.assertIsInstance(obj, cls) - assert isinstance(obj, cls) - return obj - - class Modules(Module): - UART_A: F.UART_Base - UART_B: F.UART_Base - UART_C: F.UART_Base - - m = Modules() - - UART_A = m.UART_A - UART_B = m.UART_B - UART_C = m.UART_C - - UART_A.connect(UART_B) - - UART_A.baud.merge(F.Constant(9600 * P.baud)) - - for uart in [UART_A, UART_B]: - self.assertEqual( - assertIsInstance(uart.baud.get_most_narrow(), F.Constant).value, - 9600 * P.baud, - ) - - UART_C.baud.merge(F.Range(1200 * P.baud, 115200 * P.baud)) - UART_A.connect(UART_C) - - for uart in [UART_A, UART_B, UART_C]: - self.assertEqual( - assertIsInstance(uart.baud.get_most_narrow(), F.Constant).value, - 9600 * P.baud, - ) - - resistor = F.Resistor() - - assertIsInstance( - resistor.get_current_flow_by_voltage_resistance(F.Constant(0.5)), - F.Operation, - ) - - def test_comparisons(self): - # same type - self.assertGreater(F.Constant(2), F.Constant(1)) - self.assertLess(F.Constant(1), F.Constant(2)) - self.assertLessEqual(F.Constant(2), F.Constant(2)) - self.assertGreaterEqual(F.Constant(2), F.Constant(2)) - self.assertLess(F.Range(1, 2), F.Range(3, 4)) - self.assertEqual( - min(F.Range(1, 2), F.Range(3, 4), F.Range(5, 6)), F.Range(1, 2) - ) - - # mixed - self.assertLess(F.Constant(1), F.Range(2, 3)) - self.assertGreater(F.Constant(4), F.Range(2, 3)) - self.assertFalse(F.Constant(3) < F.Range(2, 4)) - self.assertFalse(F.Constant(3) > F.Range(2, 4)) - self.assertFalse(F.Constant(3) == F.Range(2, 4)) - self.assertEqual( - min(F.Constant(3), F.Range(5, 6), F.Constant(4)), F.Constant(3) - ) - - # nested - self.assertLess(F.Constant(1), F.Set([F.Constant(2), F.Constant(3)])) - self.assertLess(F.Range(1, 2), F.Range(F.Constant(3), F.Constant(4))) - self.assertLess(F.Range(1, 2), F.Set([F.Constant(4), F.Constant(3)])) - self.assertLess(F.Constant(F.Constant(F.Constant(1))), 2) - self.assertEqual( - min(F.Constant(F.Constant(F.Constant(1))), F.Constant(F.Constant(2))), - F.Constant(F.Constant(F.Constant(1))), - ) - - def test_specialize(self): - import faebryk.library._F as F - from faebryk.libs.brightness import TypicalLuminousIntensity - - for i in range(10): - - class App(Module): - led: F.PoweredLED - battery: F.Battery - - def __preinit__(self) -> None: - self.led.power.connect(self.battery.power) - - # Parametrize - self.led.led.color.merge(F.LED.Color.YELLOW) - self.led.led.brightness.merge( - TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value - ) - - app = App() - - bcell = app.battery.specialize(F.ButtonCell()) - bcell.voltage.merge(3 * P.V) - bcell.capacity.merge(F.Range.from_center(225 * P.mAh, 50 * P.mAh)) - bcell.material.merge(F.ButtonCell.Material.Lithium) - bcell.size.merge(F.ButtonCell.Size.N_2032) - bcell.shape.merge(F.ButtonCell.Shape.Round) - - app.led.led.color.merge(F.LED.Color.YELLOW) - 
app.led.led.max_brightness.merge(500 * P.millicandela) - app.led.led.forward_voltage.merge(1.2 * P.V) - app.led.led.max_current.merge(20 * P.mA) - - v = app.battery.voltage - # vbcell = bcell.voltage - # print(pretty_param_tree_top(v)) - # print(pretty_param_tree_top(vbcell)) - self.assertEqual(v.get_most_narrow(), 3 * P.V) - r = app.led.current_limiting_resistor.resistance - r = r.get_most_narrow() - self.assertIsInstance(r, F.Range, f"{type(r)}") - - def test_units(self): - self.assertEqual(F.Constant(1e-9 * P.F), 1 * P.nF) - - def test_new_definitions(self): - pass - - def test_constant_sets(self): - pass - - -if __name__ == "__main__": - unittest.main() +def test_new_definitions(): + _ = Parameter( + unit=P.ohm, + domain=Domains.Numbers.Reals.Positive(), + soft_set=Range(1 * P.ohm, 10 * P.Mohm), + likely_constrained=True, + ) From 8da05dd09317ab69846d45bf857f4ef0758e3f83 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 8 Oct 2024 15:31:13 +0200 Subject: [PATCH 08/80] lambda warnings --- src/faebryk/core/parameter.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 7c86b4ed..3e7cc313 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -169,16 +169,16 @@ class Set(Namespace): class Domains(Namespace): class ESeries(Namespace): - E6 = lambda: ESeries(ESeries.SeriesType.E6) - E12 = lambda: ESeries(ESeries.SeriesType.E12) - E24 = lambda: ESeries(ESeries.SeriesType.E24) - E48 = lambda: ESeries(ESeries.SeriesType.E48) - E96 = lambda: ESeries(ESeries.SeriesType.E96) - E192 = lambda: ESeries(ESeries.SeriesType.E192) + E6 = lambda: ESeries(ESeries.SeriesType.E6) # noqa: E731 + E12 = lambda: ESeries(ESeries.SeriesType.E12) # noqa: E731 + E24 = lambda: ESeries(ESeries.SeriesType.E24) # noqa: E731 + E48 = lambda: ESeries(ESeries.SeriesType.E48) # noqa: E731 + E96 = lambda: ESeries(ESeries.SeriesType.E96) # noqa: E731 + E192 = lambda: ESeries(ESeries.SeriesType.E192) # noqa: E731 class Numbers(Namespace): REAL = Numbers - NATURAL = lambda: Numbers(integer=True, negative=False) + NATURAL = lambda: Numbers(integer=True, negative=False) # noqa: E731 BOOL = Boolean ENUM = Enum From 0bd8b29469b2303bfaa4818f0056f05b1f8bf838 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Tue, 8 Oct 2024 15:47:52 +0200 Subject: [PATCH 09/80] Core: Add more arithmetic expressions --- src/faebryk/core/parameter.py | 66 +++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 3e7cc313..69ed4072 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -72,6 +72,72 @@ def __init__(self, numerator, denominator): self.unit = numerator.unit / denominator.unit +class Sqrt(Arithmetic): + def __init__(self, operand): + super().__init__(operand) + self.unit = operand.unit**0.5 + + +class Power(Arithmetic): + def __init__(self, base, exponent: int): + super().__init__(base, exponent) + if isinstance(exponent, HasUnit) and not exponent.unit.is_compatible_with( + P.dimensionless + ): + raise ValueError("exponent must have dimensionless unit") + self.unit = ( + base.unit**exponent if isinstance(base, HasUnit) else P.dimensionless + ) + + +class Log(Arithmetic): + def __init__(self, operand): + super().__init__(operand) + if not operand.unit.is_compatible_with(P.dimensionless): + raise ValueError("operand must have dimensionless unit") + 
self.unit = P.dimensionless + + +class Sin(Arithmetic): + def __init__(self, operand): + super().__init__(operand) + if not operand.unit.is_compatible_with(P.dimensionless): + raise ValueError("operand must have dimensionless unit") + self.unit = P.dimensionless + + +class Cos(Arithmetic): + def __init__(self, operand): + super().__init__(operand) + if not operand.unit.is_compatible_with(P.dimensionless): + raise ValueError("operand must have dimensionless unit") + self.unit = P.dimensionless + + +class Abs(Arithmetic): + def __init__(self, operand): + super().__init__(operand) + self.unit = operand.unit + + +class Round(Arithmetic): + def __init__(self, operand): + super().__init__(operand) + self.unit = operand.unit + + +class Floor(Arithmetic): + def __init__(self, operand): + super().__init__(operand) + self.unit = operand.unit + + +class Ceil(Arithmetic): + def __init__(self, operand): + super().__init__(operand) + self.unit = operand.unit + + class Domain: pass From 0e5209ddf16aa9b69d88c6379ef0c28cc5fef3a7 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 8 Oct 2024 15:52:16 +0200 Subject: [PATCH 10/80] functions for ops --- src/faebryk/core/parameter.py | 179 +++++++++++++++++++++++++++++++-- src/faebryk/library/Battery.py | 2 +- 2 files changed, 174 insertions(+), 7 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 69ed4072..7822e065 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -6,8 +6,6 @@ from enum import Enum, auto from typing import Protocol, runtime_checkable -from deprecated import deprecated - from faebryk.core.core import Namespace from faebryk.core.node import Node, f_field from faebryk.libs.sets import Range @@ -138,6 +136,50 @@ def __init__(self, operand): self.unit = operand.unit +class Logic(Expression): + pass + + +class And(Logic): + pass + + +class Or(Logic): + pass + + +class Not(Logic): + pass + + +class Xor(Logic): + pass + + +class Implies(Logic): + pass + + +class Set(Expression): + pass + + +class Union(Set): + pass + + +class Intersection(Set): + pass + + +class Difference(Set): + pass + + +class SymmetricDifference(Set): + pass + + class Domain: pass @@ -250,7 +292,34 @@ class Numbers(Namespace): ENUM = Enum class Expressions(Namespace): - pass + class Arithmetic(Namespace): + ADD = Add + SUBTRACT = Subtract + MULTIPLY = Multiply + DIVIDE = Divide + POWER = Power + LOG = Log + SQRT = Sqrt + LOG = Log + ABS = Abs + FLOOR = Floor + CEIL = Ceil + ROUND = Round + SIN = Sin + COS = Cos + + class Logic(Namespace): + AND = And + OR = Or + NOT = Not + XOR = Xor + IMPLIES = Implies + + class Set(Namespace): + UNION = Union + INTERSECTION = Intersection + DIFFERENCE = Difference + SYMMETRIC_DIFFERENCE = SymmetricDifference class Parameter(Node): @@ -284,6 +353,8 @@ def __init__( self.tolerance_guess = tolerance_guess self.likely_constrained = likely_constrained + # ---------------------------------------------------------------------------------- + def alias_is(self, other: "Parameter"): pass @@ -305,9 +376,105 @@ def constrain_ne(self, other: "Parameter"): def constrain_subset(self, other: "Parameter"): pass - @deprecated("use alias_is instead") - def merge(self, other: "Parameter"): - return self.alias_is(other) + def operation_add(self, other: "Parameter"): + pass + + def operation_subtract(self, other: "Parameter"): + pass + + def operation_multiply(self, other: "Parameter"): + pass + + def operation_divide(self, other: "Parameter"): + pass + + def operation_power(self, other: 
"Parameter"): + pass + + def operation_log(self): + pass + + def operation_sqrt(self): + pass + + def operation_abs(self): + pass + + def operation_floor(self): + pass + + def operation_ceil(self): + pass + + def operation_round(self): + pass + + def operation_sin(self): + pass + + def operation_cos(self): + pass + + def operation_union(self, other: "Parameter"): + pass + + def operation_intersection(self, other: "Parameter"): + pass + + def operation_difference(self, other: "Parameter"): + pass + + def operation_symmetric_difference(self, other: "Parameter"): + pass + + def operation_and(self, other: "Parameter"): + pass + + def operation_or(self, other: "Parameter"): + pass + + def operation_not(self): + pass + + def operation_xor(self, other: "Parameter"): + pass + + def operation_implies(self, other: "Parameter"): + pass + + # ---------------------------------------------------------------------------------- + def __add__(self, other: "Parameter"): + return self.operation_add(other) + + def __sub__(self, other: "Parameter"): + # TODO could be set difference + return self.operation_subtract(other) + + def __mul__(self, other: "Parameter"): + return self.operation_multiply(other) + + def __truediv__(self, other: "Parameter"): + return self.operation_divide(other) + + def __pow__(self, other: "Parameter"): + return self.operation_power(other) + + def __abs__(self): + return self.operation_abs() + + def __round__(self): + return self.operation_round() + + def __and__(self, other: "Parameter"): + # TODO could be set intersection + return self.operation_and(other) + + def __or__(self, other: "Parameter"): + # TODO could be set union + return self.operation_or(other) + + def __xor__(self, other: "Parameter"): + return self.operation_xor(other) p_field = f_field(Parameter) diff --git a/src/faebryk/library/Battery.py b/src/faebryk/library/Battery.py index c5b49c9e..11385210 100644 --- a/src/faebryk/library/Battery.py +++ b/src/faebryk/library/Battery.py @@ -28,7 +28,7 @@ class Battery(Module): power: F.ElectricPower def __preinit__(self) -> None: - self.power.voltage.merge(self.voltage) + self.power.voltage.alias_is(self.voltage) @L.rt_field def single_electric_reference(self): From a869db8410bd903ba8c17ba2dedb310719c0a910 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 8 Oct 2024 15:53:38 +0200 Subject: [PATCH 11/80] L sets --- src/faebryk/library/Battery.py | 5 ++--- src/faebryk/libs/library/L.py | 1 + 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/faebryk/library/Battery.py b/src/faebryk/library/Battery.py index 11385210..92306c08 100644 --- a/src/faebryk/library/Battery.py +++ b/src/faebryk/library/Battery.py @@ -5,21 +5,20 @@ import faebryk.library._F as F import faebryk.libs.library.L as L from faebryk.core.module import Module -from faebryk.libs.sets import Range from faebryk.libs.units import P class Battery(Module): voltage = L.p_field( unit=P.V, - soft_set=Range(0 * P.V, 100 * P.V), + soft_set=L.Range(0 * P.V, 100 * P.V), guess=3.7 * P.V, tolerance_guess=5 * P.percent, likely_constrained=True, ) capacity = L.p_field( unit=P.Ah, - soft_set=Range(100 * P.mAh, 100 * P.Ah), + soft_set=L.Range(100 * P.mAh, 100 * P.Ah), guess=1 * P.Ah, tolerance_guess=5 * P.percent, likely_constrained=True, diff --git a/src/faebryk/libs/library/L.py b/src/faebryk/libs/library/L.py index d0efa67a..725834f9 100644 --- a/src/faebryk/libs/library/L.py +++ b/src/faebryk/libs/library/L.py @@ -15,6 +15,7 @@ ) from faebryk.core.parameter import R, p_field # noqa: F401 from 
faebryk.core.reference import reference # noqa: F401 +from faebryk.libs.sets import Range, Set, Single # noqa: F401 class AbstractclassError(Exception): ... From 4566a02751750abb77152b92a634c523df716809 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Tue, 8 Oct 2024 16:42:06 +0200 Subject: [PATCH 12/80] Core: Make expression node and add check --- src/faebryk/core/parameter.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 7822e065..846f7ddb 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -20,17 +20,23 @@ class HasUnit(Protocol): # TODO: prohibit instantiation -class Expression: +class Expression(Node): pass class Arithmetic(Expression): def __init__(self, *operands): - types = [int, float, Quantity, Parameter, Expression] + types = [int, float, Quantity, Parameter, Arithmetic] if any(type(op) not in types for op in operands): raise ValueError( "operands must be int, float, Quantity, Parameter, or Expression" ) + if any( + param.domain not in [Numbers, ESeries] + for param in operands + if isinstance(param, Parameter) + ): + raise ValueError("parameters must have domain Numbers or ESeries") self.operands = operands From 10c3e6787d1c7685bf418d6f8c50b596cdc516ec Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 8 Oct 2024 18:57:23 +0200 Subject: [PATCH 13/80] WIP: Library refactor --- src/faebryk/core/parameter.py | 229 ++++++++++++++---- src/faebryk/library/B0505S_1WR3.py | 14 +- src/faebryk/library/BH1750FVI_TR.py | 24 +- src/faebryk/library/Battery.py | 4 - src/faebryk/library/Button.py | 9 +- src/faebryk/library/ButtonCell.py | 12 +- .../library/CBM9002A_56ILG_ReferenceDesign.py | 18 +- src/faebryk/library/CH340x.py | 4 +- src/faebryk/library/CH342.py | 10 +- src/faebryk/library/CH344.py | 2 +- src/faebryk/library/CH344Q_ReferenceDesign.py | 34 +-- src/faebryk/library/Capacitor.py | 20 +- src/faebryk/library/Common_Mode_Filter.py | 35 ++- src/faebryk/library/Comparator.py | 40 ++- src/faebryk/library/Crystal.py | 57 ++++- src/faebryk/library/Crystal_Oscillator.py | 2 +- src/faebryk/library/DifferentialPair.py | 12 +- src/faebryk/library/Diode.py | 37 ++- .../library/Diodes_Incorporated_AP2552W6_7.py | 29 ++- src/faebryk/library/EEPROM.py | 9 +- src/faebryk/library/ElectricLogic.py | 4 +- src/faebryk/library/ElectricPower.py | 23 +- src/faebryk/library/Electrical.py | 4 +- src/faebryk/library/Filter.py | 19 +- src/faebryk/library/Fuse.py | 17 +- src/faebryk/library/GDT.py | 14 +- src/faebryk/library/HLK_LD2410B_P.py | 7 +- src/faebryk/library/Header.py | 36 ++- src/faebryk/library/I2C.py | 10 +- src/faebryk/library/Inductor.py | 29 ++- src/faebryk/library/LDO.py | 47 +++- src/faebryk/library/LED.py | 9 +- src/faebryk/library/Logic74xx.py | 2 +- src/faebryk/library/MOSFET.py | 14 +- src/faebryk/library/OLED_Module.py | 6 +- src/faebryk/library/OpAmp.py | 16 +- src/faebryk/library/PM1006.py | 2 +- src/faebryk/library/Potentiometer.py | 11 +- src/faebryk/library/RJ45_Receptacle.py | 2 +- src/faebryk/library/Relay.py | 14 +- src/faebryk/library/Resistor.py | 8 +- src/faebryk/library/ResistorVoltageDivider.py | 4 +- .../library/Resistor_Voltage_Divider.py | 6 +- src/faebryk/library/SCD40.py | 4 +- src/faebryk/library/SPIFlash.py | 7 +- src/faebryk/library/TVS.py | 5 +- src/faebryk/library/UART_Base.py | 6 +- src/faebryk/library/UART_RS485.py | 8 +- src/faebryk/library/USB2_0_ESD_Protection.py | 4 +- 
src/faebryk/library/XL_3528RGBW_WS2812B.py | 3 +- .../library/can_switch_power_defined.py | 2 +- src/faebryk/libs/sets.py | 21 +- 52 files changed, 698 insertions(+), 267 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 846f7ddb..3d0fe716 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -8,7 +8,7 @@ from faebryk.core.core import Namespace from faebryk.core.node import Node, f_field -from faebryk.libs.sets import Range +from faebryk.libs.sets import Range, Set_ from faebryk.libs.units import P, Quantity, Unit logger = logging.getLogger(__name__) @@ -19,8 +19,139 @@ class HasUnit(Protocol): unit: Unit +class ParameterOperatable(Protocol): + type PE = ParameterOperatable | int | float | Quantity | Set_ + + def alias_is(self, other: PE): + pass + + def constrain_le(self, other: PE): + pass + + def constrain_ge(self, other: PE): + pass + + def constrain_lt(self, other: PE): + pass + + def constrain_gt(self, other: PE): + pass + + def constrain_ne(self, other: PE): + pass + + def constrain_subset(self, other: PE): + pass + + def constrain_superset(self, other: PE): + pass + + def operation_add(self, other: PE) -> Expression: + pass + + def operation_subtract(self, other: PE) -> Expression: + pass + + def operation_multiply(self, other: PE) -> Expression: + pass + + def operation_divide(self, other: PE) -> Expression: + pass + + def operation_power(self, other: PE) -> Expression: + pass + + def operation_log(self) -> Expression: + pass + + def operation_sqrt(self) -> Expression: + pass + + def operation_abs(self) -> Expression: + pass + + def operation_floor(self) -> Expression: + pass + + def operation_ceil(self) -> Expression: + pass + + def operation_round(self) -> Expression: + pass + + def operation_sin(self) -> Expression: + pass + + def operation_cos(self) -> Expression: + pass + + def operation_union(self, other: PE) -> Expression: + pass + + def operation_intersection(self, other: PE) -> Expression: + pass + + def operation_difference(self, other: PE) -> Expression: + pass + + def operation_symmetric_difference(self, other: PE) -> Expression: + pass + + def operation_and(self, other: PE) -> Expression: + pass + + def operation_or(self, other: PE) -> Expression: + pass + + def operation_not(self) -> Expression: + pass + + def operation_xor(self, other: PE) -> Expression: + pass + + def operation_implies(self, other: PE) -> Expression: + pass + + # ---------------------------------------------------------------------------------- + def __add__(self, other: PE): + return self.operation_add(other) + + def __sub__(self, other: PE): + # TODO could be set difference + return self.operation_subtract(other) + + def __mul__(self, other: PE): + return self.operation_multiply(other) + + def __truediv__(self, other: PE): + return self.operation_divide(other) + + def __rtruediv__(self, other: PE): + return self.operation_divide(other) + + def __pow__(self, other: PE): + return self.operation_power(other) + + def __abs__(self): + return self.operation_abs() + + def __round__(self): + return self.operation_round() + + def __and__(self, other: PE): + # TODO could be set intersection + return self.operation_and(other) + + def __or__(self, other: PE): + # TODO could be set union + return self.operation_or(other) + + def __xor__(self, other: PE): + return self.operation_xor(other) + + # TODO: prohibit instantiation -class Expression(Node): +class Expression(Node, ParameterOperatable): pass @@ -251,6 +382,10 @@ class 
IsSubset(Predicate): pass +class IsSuperset(Predicate): + pass + + class Alias(Node): pass @@ -280,6 +415,7 @@ class Element(Namespace): class Set(Namespace): IS_SUBSET = IsSubset + IS_SUPERSET = IsSuperset class Domains(Namespace): class ESeries(Namespace): @@ -295,7 +431,7 @@ class Numbers(Namespace): NATURAL = lambda: Numbers(integer=True, negative=False) # noqa: E731 BOOL = Boolean - ENUM = Enum + ENUM = EnumDomain class Expressions(Namespace): class Arithmetic(Namespace): @@ -328,17 +464,20 @@ class Set(Namespace): SYMMETRIC_DIFFERENCE = SymmetricDifference -class Parameter(Node): +class Parameter(Node, ParameterOperatable): def __init__( self, *, - unit: Unit | Quantity, + unit: Unit | Quantity | None = None, # hard constraints within: Range | None = None, domain: Domain = Numbers(negative=False), # soft constraints soft_set: Range | None = None, - guess: Quantity | None = None, + guess: Quantity + | int + | float + | None = None, # TODO actually allowed to be anything from domain tolerance_guess: Quantity | None = None, # hints likely_constrained: bool = False, @@ -360,109 +499,113 @@ def __init__( self.likely_constrained = likely_constrained # ---------------------------------------------------------------------------------- + type PE = ParameterOperatable.PE - def alias_is(self, other: "Parameter"): + def alias_is(self, other: PE): pass - def constrain_le(self, other: "Parameter"): + def constrain_le(self, other: PE): pass - def constrain_ge(self, other: "Parameter"): + def constrain_ge(self, other: PE): pass - def constrain_lt(self, other: "Parameter"): + def constrain_lt(self, other: PE): pass - def constrain_gt(self, other: "Parameter"): + def constrain_gt(self, other: PE): pass - def constrain_ne(self, other: "Parameter"): + def constrain_ne(self, other: PE): pass - def constrain_subset(self, other: "Parameter"): + def constrain_subset(self, other: PE): pass - def operation_add(self, other: "Parameter"): + def operation_add(self, other: PE) -> Expression: pass - def operation_subtract(self, other: "Parameter"): + def operation_subtract(self, other: PE) -> Expression: pass - def operation_multiply(self, other: "Parameter"): + def operation_multiply(self, other: PE) -> Expression: pass - def operation_divide(self, other: "Parameter"): + def operation_divide(self, other: PE) -> Expression: pass - def operation_power(self, other: "Parameter"): + def operation_power(self, other: PE) -> Expression: pass - def operation_log(self): + def operation_log(self) -> Expression: pass - def operation_sqrt(self): + def operation_sqrt(self) -> Expression: pass - def operation_abs(self): + def operation_abs(self) -> Expression: pass - def operation_floor(self): + def operation_floor(self) -> Expression: pass - def operation_ceil(self): + def operation_ceil(self) -> Expression: pass - def operation_round(self): + def operation_round(self) -> Expression: pass - def operation_sin(self): + def operation_sin(self) -> Expression: pass - def operation_cos(self): + def operation_cos(self) -> Expression: pass - def operation_union(self, other: "Parameter"): + def operation_union(self, other: PE) -> Expression: pass - def operation_intersection(self, other: "Parameter"): + def operation_intersection(self, other: PE) -> Expression: pass - def operation_difference(self, other: "Parameter"): + def operation_difference(self, other: PE) -> Expression: pass - def operation_symmetric_difference(self, other: "Parameter"): + def operation_symmetric_difference(self, other: PE) -> Expression: pass - def 
operation_and(self, other: "Parameter"): + def operation_and(self, other: PE) -> Expression: pass - def operation_or(self, other: "Parameter"): + def operation_or(self, other: PE) -> Expression: pass - def operation_not(self): + def operation_not(self) -> Expression: pass - def operation_xor(self, other: "Parameter"): + def operation_xor(self, other: PE) -> Expression: pass - def operation_implies(self, other: "Parameter"): + def operation_implies(self, other: PE) -> Expression: pass # ---------------------------------------------------------------------------------- - def __add__(self, other: "Parameter"): + def __add__(self, other: PE): return self.operation_add(other) - def __sub__(self, other: "Parameter"): + def __sub__(self, other: PE): # TODO could be set difference return self.operation_subtract(other) - def __mul__(self, other: "Parameter"): + def __mul__(self, other: PE): return self.operation_multiply(other) - def __truediv__(self, other: "Parameter"): + def __truediv__(self, other: PE): + return self.operation_divide(other) + + def __rtruediv__(self, other: PE): return self.operation_divide(other) - def __pow__(self, other: "Parameter"): + def __pow__(self, other: PE): return self.operation_power(other) def __abs__(self): @@ -471,15 +614,15 @@ def __abs__(self): def __round__(self): return self.operation_round() - def __and__(self, other: "Parameter"): + def __and__(self, other: PE): # TODO could be set intersection return self.operation_and(other) - def __or__(self, other: "Parameter"): + def __or__(self, other: PE): # TODO could be set union return self.operation_or(other) - def __xor__(self, other: "Parameter"): + def __xor__(self, other: PE): return self.operation_xor(other) diff --git a/src/faebryk/library/B0505S_1WR3.py b/src/faebryk/library/B0505S_1WR3.py index 03d54ee7..9f1e49ac 100644 --- a/src/faebryk/library/B0505S_1WR3.py +++ b/src/faebryk/library/B0505S_1WR3.py @@ -63,15 +63,19 @@ def __preinit__(self): # ---------------------------------------- # parametrization # ---------------------------------------- - self.power_in.get_trait(F.can_be_decoupled).decouple().capacitance.merge( - F.Range.from_center_rel(4.7 * P.uF, 0.1) + self.power_in.get_trait( + F.can_be_decoupled + ).decouple().capacitance.constrain_subset( + L.Range.from_center_rel(4.7 * P.uF, 0.1) ) - self.power_out.get_trait(F.can_be_decoupled).decouple().capacitance.merge( + self.power_out.get_trait( + F.can_be_decoupled + ).decouple().capacitance.constrain_subset( F.Range.from_center_rel(10 * P.uF, 0.1) ) # ---------------------------------------- # connections # ---------------------------------------- - self.power_in.voltage.merge(F.Range(4.3 * P.V, 9 * P.V)) - self.power_out.voltage.merge(F.Range.from_center(5 * P.V, 0.5 * P.V)) + self.power_in.voltage.constrain_subset(L.Range(4.3 * P.V, 9 * P.V)) + self.power_out.voltage.constrain_superset(L.Range.from_center_rel(5 * P.V, 0.1)) diff --git a/src/faebryk/library/BH1750FVI_TR.py b/src/faebryk/library/BH1750FVI_TR.py index e08eb0cc..972f02a6 100644 --- a/src/faebryk/library/BH1750FVI_TR.py +++ b/src/faebryk/library/BH1750FVI_TR.py @@ -6,14 +6,18 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P logger = logging.getLogger(__name__) class BH1750FVI_TR(Module): class _bh1750_esphome_config(F.has_esphome_config.impl()): - update_interval: F.TBD[Quantity] + update_interval = L.p_field( + unit=P.s, + soft_set=L.Range(100 * 
P.ms, 1 * P.day), + guess=1 * P.s, + ) def get_config(self) -> dict: val = self.update_interval.get_most_narrow() @@ -59,19 +63,25 @@ def set_address(self, addr: int): esphome_config: _bh1750_esphome_config def __preinit__(self): - self.dvi_capacitor.capacitance.merge(1 * P.uF) - self.dvi_resistor.resistance.merge(1 * P.kohm) + self.dvi_capacitor.capacitance.constrain_subset( + L.Range.from_center_rel(1 * P.uF, 0.1) + ) + self.dvi_resistor.resistance.constrain_subset( + L.Range.from_center_rel(1 * P.kohm, 0.1) + ) self.i2c.terminate() - self.i2c.frequency.merge( + self.i2c.frequency.constrain_le( F.I2C.define_max_frequency_capability(F.I2C.SpeedMode.fast_speed) ) # set constraints - self.power.voltage.merge(F.Range(2.4 * P.V, 3.6 * P.V)) + self.power.voltage.constrain_subset(F.Range(2.4 * P.V, 3.6 * P.V)) - self.power.decoupled.decouple().capacitance.merge(100 * P.nF) + self.power.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(100 * P.nF, 0.1) + ) # TODO: self.dvi.low_pass(self.dvi_capacitor, self.dvi_resistor) self.dvi.signal.connect_via(self.dvi_capacitor, self.power.lv) self.dvi.signal.connect_via(self.dvi_resistor, self.power.hv) diff --git a/src/faebryk/library/Battery.py b/src/faebryk/library/Battery.py index 92306c08..820d1b8e 100644 --- a/src/faebryk/library/Battery.py +++ b/src/faebryk/library/Battery.py @@ -12,15 +12,11 @@ class Battery(Module): voltage = L.p_field( unit=P.V, soft_set=L.Range(0 * P.V, 100 * P.V), - guess=3.7 * P.V, - tolerance_guess=5 * P.percent, likely_constrained=True, ) capacity = L.p_field( unit=P.Ah, soft_set=L.Range(100 * P.mAh, 100 * P.Ah), - guess=1 * P.Ah, - tolerance_guess=5 * P.percent, likely_constrained=True, ) diff --git a/src/faebryk/library/Button.py b/src/faebryk/library/Button.py index a74cf6d4..21922bf4 100644 --- a/src/faebryk/library/Button.py +++ b/src/faebryk/library/Button.py @@ -6,14 +6,19 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P logger = logging.getLogger(__name__) class Button(Module): unnamed = L.list_field(2, F.Electrical) - height: F.TBD[Quantity] + height = L.p_field( + unit=P.mm, + likely_constrained=False, + soft_set=L.Range(1 * P.mm, 10 * P.mm), + tolerance_guess=10 * P.percent, + ) designator_prefix = L.f_field(F.has_designator_prefix_defined)( F.has_designator_prefix.Prefix.S diff --git a/src/faebryk/library/ButtonCell.py b/src/faebryk/library/ButtonCell.py index 30810149..4e183c1b 100644 --- a/src/faebryk/library/ButtonCell.py +++ b/src/faebryk/library/ButtonCell.py @@ -53,9 +53,15 @@ class Size(IntEnum): N_2430 = 2430 N_2450 = 2450 - material: F.TBD[Material] - shape: F.TBD[Shape] - size: F.TBD[Size] + material = L.p_field( + domain=L.Domains.ENUM(Material), + ) + shape = L.p_field( + domain=L.Domains.ENUM(Shape), + ) + size = L.p_field( + domain=L.Domains.ENUM(Size), + ) designator_prefix = L.f_field(F.has_designator_prefix_defined)( F.has_designator_prefix.Prefix.B diff --git a/src/faebryk/library/CBM9002A_56ILG_ReferenceDesign.py b/src/faebryk/library/CBM9002A_56ILG_ReferenceDesign.py index 104d4898..8983eb7a 100644 --- a/src/faebryk/library/CBM9002A_56ILG_ReferenceDesign.py +++ b/src/faebryk/library/CBM9002A_56ILG_ReferenceDesign.py @@ -24,12 +24,16 @@ def __preinit__(self): self.logic.pulled.pull(up=True) self.logic.signal.connect_via(self.cap, self.logic.reference.lv) - self.cap.capacitance.merge(F.Range.from_center_rel(1 * P.uF, 0.05)) + 
self.cap.capacitance.constrain_subset( + L.Range.from_center_rel(1 * P.uF, 0.05) + ) - self.diode.forward_voltage.merge(F.Range(715 * P.mV, 1.5 * P.V)) - self.diode.reverse_leakage_current.merge(F.Range.upper_bound(1 * P.uA)) - self.diode.current.merge(F.Range.from_center_rel(300 * P.mA, 0.05)) - self.diode.max_current.merge(F.Range.lower_bound(1 * P.A)) + self.diode.forward_voltage.constrain_subset(F.Range(715 * P.mV, 1.5 * P.V)) + self.diode.reverse_leakage_current.constrain_le(1 * P.uA) + self.diode.current.constrain_subset( + F.Range.from_center_rel(300 * P.mA, 0.05) + ) + self.diode.current.constrain_ge(1 * P.A) # ---------------------------------------- # modules, interfaces, parameters @@ -81,9 +85,9 @@ def __preinit__(self): # Parameters # ---------------------------------------- - self.oscillator.crystal.frequency.merge( + self.oscillator.crystal.frequency.constrain_subset( F.Range.from_center_rel(24 * P.Mhertz, 0.05) ) - self.oscillator.crystal.frequency_tolerance.merge( + self.oscillator.crystal.frequency_tolerance.constrain_subset( F.Range(0 * P.ppm, 20 * P.ppm) ) diff --git a/src/faebryk/library/CH340x.py b/src/faebryk/library/CH340x.py index 2d375d73..8b638dab 100644 --- a/src/faebryk/library/CH340x.py +++ b/src/faebryk/library/CH340x.py @@ -27,9 +27,9 @@ class CH340x(Module): def __preinit__(self): self.gpio_power.lv.connect(self.usb.usb_if.buspower.lv) - self.gpio_power.voltage.merge(F.Range(0 * P.V, 5.3 * P.V)) + self.gpio_power.voltage.constrain_subset(L.Range(0 * P.V, 5.3 * P.V)) self.gpio_power.decoupled.decouple() - self.usb.usb_if.buspower.voltage.merge(F.Range(4 * P.V, 5.3 * P.V)) + self.usb.usb_if.buspower.voltage.constrain_subset(L.Range(4 * P.V, 5.3 * P.V)) self.usb.usb_if.buspower.decoupled.decouple() diff --git a/src/faebryk/library/CH342.py b/src/faebryk/library/CH342.py index 810643b7..91fec50d 100644 --- a/src/faebryk/library/CH342.py +++ b/src/faebryk/library/CH342.py @@ -25,8 +25,10 @@ class IntegratedLDO(Module): def __preinit__(self): F.ElectricLogic.connect_all_module_references(self, gnd_only=True) - self.power_out.voltage.merge(F.Range.from_center(3.3 * P.V, 0.3 * P.V)) - self.power_in.voltage.merge(F.Range(4 * P.V, 5.5 * P.V)) + self.power_out.voltage.constrain_superset( + L.Range.from_center_rel(3.3 * P.V, 0.1) + ) + self.power_in.voltage.constrain_subset(L.Range(4 * P.V, 5.5 * P.V)) @L.rt_field def bridge(self): @@ -124,8 +126,8 @@ def __preinit__(self): # ---------------------------------------- # parametrization # ---------------------------------------- - self.power_3v.voltage.merge(F.Range.from_center(3.3 * P.V, 0.3 * P.V)) - self.power_io.voltage.merge(F.Range(1.7 * P.V, 5.5 * P.V)) + self.power_3v.voltage.constrain_subset(L.Range.from_center_rel(3.3 * P.V, 0.1)) + self.power_io.voltage.constrain_subset(L.Range(1.7 * P.V, 5.5 * P.V)) # ---------------------------------------- # connections diff --git a/src/faebryk/library/CH344.py b/src/faebryk/library/CH344.py index e5d16b3c..bd8f6984 100644 --- a/src/faebryk/library/CH344.py +++ b/src/faebryk/library/CH344.py @@ -75,4 +75,4 @@ def __preinit__(self): # ------------------------------------ # parametrization # ------------------------------------ - self.power.voltage.merge(F.Range.from_center(3.3 * P.V, 0.3 * P.V)) + self.power.voltage.constrain_subset(L.Range.from_center_rel(3.3 * P.V, 0.1)) diff --git a/src/faebryk/library/CH344Q_ReferenceDesign.py b/src/faebryk/library/CH344Q_ReferenceDesign.py index d3a23aed..f025de8f 100644 --- a/src/faebryk/library/CH344Q_ReferenceDesign.py +++ 
b/src/faebryk/library/CH344Q_ReferenceDesign.py @@ -87,7 +87,7 @@ def __preinit__(self): # ------------------------------------ # connections # ------------------------------------ - self.usb_uart_converter.power.decoupled.decouple().capacitance.merge( + self.usb_uart_converter.power.decoupled.decouple().capacitance.constrain_subset( F.Range.from_center_rel(1 * P.uF, 0.05) ) # TODO: per pin self.vbus_fused.connect_via(self.ldo, pwr_3v3) @@ -116,25 +116,27 @@ def __preinit__(self): # ------------------------------------ self.usb_uart_converter.enable_status_or_modem_signals() - self.oscillator.crystal.frequency.merge( - F.Range.from_center_rel(8 * P.MHz, 0.001) + self.oscillator.crystal.frequency.constrain_subset( + L.Range.from_center_rel(8 * P.MHz, 0.001) ) - self.oscillator.crystal.frequency_tolerance.merge( - F.Range.upper_bound(40 * P.ppm) - ) - self.oscillator.crystal.load_capacitance.merge( - F.Range.from_center(8 * P.pF, 10 * P.pF) + self.oscillator.crystal.frequency_tolerance.constrain_le(40 * P.ppm) + self.oscillator.crystal.load_capacitance.constrain_subset( + L.Range.from_center(8 * P.pF, 10 * P.pF) ) # TODO: should be property of crystal when picked - self.usb.usb_if.buspower.max_current.merge( - F.Range.from_center_rel(500 * P.mA, 0.1) - ) + # self.usb.usb_if.buspower.max_current.constrain_subset( + # L.Range.from_center_rel(500 * P.mA, 0.1) + # ) - self.ldo.output_current.merge(F.Range.lower_bound(500 * P.mA)) - self.ldo.output_voltage.merge(F.Range.from_center_rel(3.3 * P.V, 0.05)) + self.ldo.output_current.constrain_ge(500 * P.mA) + self.ldo.output_voltage.constrain_subset( + L.Range.from_center_rel(3.3 * P.V, 0.05) + ) # reset lowpass - self.reset_lowpass.response.merge(F.Filter.Response.LOWPASS) - self.reset_lowpass.cutoff_frequency.merge( - F.Range.from_center_rel(100 * P.Hz, 0.1) + self.reset_lowpass.response.constrain_subset( + L.Single(F.Filter.Response.LOWPASS) + ) + self.reset_lowpass.cutoff_frequency.constrain_subset( + L.Range.from_center_rel(100 * P.Hz, 0.1) ) diff --git a/src/faebryk/library/Capacitor.py b/src/faebryk/library/Capacitor.py index 14107527..c88eadbb 100644 --- a/src/faebryk/library/Capacitor.py +++ b/src/faebryk/library/Capacitor.py @@ -7,7 +7,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P from faebryk.libs.util import join_if_non_empty logger = logging.getLogger(__name__) @@ -26,9 +26,21 @@ class TemperatureCoefficient(IntEnum): unnamed = L.list_field(2, F.Electrical) - capacitance: F.TBD[Quantity] - rated_voltage: F.TBD[Quantity] - temperature_coefficient: F.TBD[TemperatureCoefficient] + capacitance = L.p_field( + unit=P.F, + likely_constrained=True, + soft_set=L.Range(100 * P.pF, 1 * P.F), + tolerance_guess=10 * P.percent, + ) + rated_voltage = L.p_field( + unit=P.V, + likely_constrained=True, + soft_set=L.Range(10 * P.V, 100 * P.V), + tolerance_guess=10 * P.percent, + ) + temperature_coefficient = L.p_field( + domain=L.Domains.ENUM(TemperatureCoefficient), + ) attach_to_footprint: F.can_attach_to_footprint_symmetrically designator_prefix = L.f_field(F.has_designator_prefix_defined)( diff --git a/src/faebryk/library/Common_Mode_Filter.py b/src/faebryk/library/Common_Mode_Filter.py index b36ba92b..94d9b35a 100644 --- a/src/faebryk/library/Common_Mode_Filter.py +++ b/src/faebryk/library/Common_Mode_Filter.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from 
faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -15,10 +15,27 @@ class Common_Mode_Filter(Module): coil_a: F.Inductor coil_b: F.Inductor - inductance: F.TBD[Quantity] - self_resonant_frequency: F.TBD[Quantity] - rated_current: F.TBD[Quantity] - dc_resistance: F.TBD[Quantity] + inductance = L.p_field( + unit=P.H, + likely_constrained=True, + soft_set=L.Range(1 * P.µH, 10 * P.mH), + tolerance_guess=10 * P.percent, + ) + self_resonant_frequency = L.p_field( + unit=P.Hz, + likely_constrained=True, + soft_set=L.Range(100 * P.Hz, 1 * P.MHz), + tolerance_guess=10 * P.percent, + ) + rated_current = L.p_field( + unit=P.A, + likely_constrained=True, + soft_set=L.Range(1 * P.A, 10 * P.A), + tolerance_guess=10 * P.percent, + ) + dc_resistance = L.p_field( + unit=P.Ω, + ) designator_prefix = L.f_field(F.has_designator_prefix_defined)( F.has_designator_prefix.Prefix.FL @@ -29,7 +46,7 @@ def __preinit__(self): # parametrization # ---------------------------------------- for coil in [self.coil_a, self.coil_b]: - coil.inductance.merge(self.inductance) - coil.self_resonant_frequency.merge(self.self_resonant_frequency) - coil.rated_current.merge(self.rated_current) - coil.dc_resistance.merge(self.dc_resistance) + coil.inductance.alias_is(self.inductance) + coil.self_resonant_frequency.alias_is(self.self_resonant_frequency) + coil.rated_current.alias_is(self.rated_current) + coil.dc_resistance.alias_is(self.dc_resistance) diff --git a/src/faebryk/library/Comparator.py b/src/faebryk/library/Comparator.py index b8f25f9d..d7141794 100644 --- a/src/faebryk/library/Comparator.py +++ b/src/faebryk/library/Comparator.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P class Comparator(Module): @@ -15,12 +15,38 @@ class OutputType(Enum): PushPull = auto() OpenDrain = auto() - common_mode_rejection_ratio: F.TBD[Quantity] - input_bias_current: F.TBD[Quantity] - input_hysteresis_voltage: F.TBD[Quantity] - input_offset_voltage: F.TBD[Quantity] - propagation_delay: F.TBD[Quantity] - output_type: F.TBD[OutputType] + common_mode_rejection_ratio = L.p_field( + unit=P.dB, + likely_constrained=True, + soft_set=L.Range(60 * P.dB, 120 * P.dB), + tolerance_guess=10 * P.percent, + ) + input_bias_current = L.p_field( + unit=P.A, + likely_constrained=True, + soft_set=L.Range(1 * P.pA, 1 * P.µA), + tolerance_guess=20 * P.percent, + ) + input_hysteresis_voltage = L.p_field( + unit=P.V, + likely_constrained=True, + soft_set=L.Range(1 * P.mV, 100 * P.mV), + tolerance_guess=15 * P.percent, + ) + input_offset_voltage = L.p_field( + unit=P.V, + soft_set=L.Range(10 * P.µV, 10 * P.mV), + tolerance_guess=20 * P.percent, + ) + propagation_delay = L.p_field( + unit=P.s, + soft_set=L.Range(10 * P.ns, 1 * P.ms), + tolerance_guess=15 * P.percent, + ) + output_type = L.p_field( + domain=L.Domains.ENUM(OutputType), + likely_constrained=True, + ) power: F.ElectricPower inverting_input: F.Electrical diff --git a/src/faebryk/library/Crystal.py b/src/faebryk/library/Crystal.py index 96f56058..b985d197 100644 --- a/src/faebryk/library/Crystal.py +++ b/src/faebryk/library/Crystal.py @@ -4,7 +4,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P class Crystal(Module): @@ -17,13 +17,54 
@@ class Crystal(Module): # ---------------------------------------- # parameters # ---------------------------------------- - frequency: F.TBD[Quantity] - frequency_tolerance: F.TBD[F.Range] - frequency_temperature_tolerance: F.TBD[F.Range] - frequency_ageing: F.TBD[F.Range] - equivalent_series_resistance: F.TBD[Quantity] - shunt_capacitance: F.TBD[Quantity] - load_capacitance: F.TBD[Quantity] + frequency = L.p_field( + unit=P.Hz, + likely_constrained=True, + soft_set=L.Range(32.768 * P.kHz, 100 * P.MHz), + tolerance_guess=50 * P.ppm, + ) + + frequency_tolerance = L.p_field( + unit=P.ppm, + likely_constrained=True, + soft_set=L.Range(10 * P.ppm, 100 * P.ppm), + tolerance_guess=10 * P.percent, + ) + + frequency_temperature_tolerance = L.p_field( + unit=P.ppm, + likely_constrained=True, + soft_set=L.Range(1 * P.ppm, 50 * P.ppm), + tolerance_guess=10 * P.percent, + ) + + frequency_ageing = L.p_field( + unit=P.ppm, + likely_constrained=True, + soft_set=L.Range(1 * P.ppm, 10 * P.ppm), + tolerance_guess=20 * P.percent, + ) + + equivalent_series_resistance = L.p_field( + unit=P.Ω, + likely_constrained=True, + soft_set=L.Range(10 * P.Ω, 200 * P.Ω), + tolerance_guess=10 * P.percent, + ) + + shunt_capacitance = L.p_field( + unit=P.F, + likely_constrained=True, + soft_set=L.Range(1 * P.pF, 10 * P.pF), + tolerance_guess=20 * P.percent, + ) + + load_capacitance = L.p_field( + unit=P.F, + likely_constrained=True, + soft_set=L.Range(8 * P.pF, 30 * P.pF), + tolerance_guess=10 * P.percent, + ) # ---------------------------------------- # traits diff --git a/src/faebryk/library/Crystal_Oscillator.py b/src/faebryk/library/Crystal_Oscillator.py index 527f8caa..ccdd1afd 100644 --- a/src/faebryk/library/Crystal_Oscillator.py +++ b/src/faebryk/library/Crystal_Oscillator.py @@ -34,7 +34,7 @@ def capacitance(self): def __preinit__(self): for cap in self.capacitors: - cap.capacitance.merge(self.capacitance) + cap.capacitance.alias_is(self.capacitance) # ---------------------------------------- # traits diff --git a/src/faebryk/library/DifferentialPair.py b/src/faebryk/library/DifferentialPair.py index 668cbe1b..3bd374b2 100644 --- a/src/faebryk/library/DifferentialPair.py +++ b/src/faebryk/library/DifferentialPair.py @@ -5,20 +5,26 @@ import faebryk.library._F as F from faebryk.core.moduleinterface import ModuleInterface -from faebryk.libs.units import Quantity +from faebryk.libs.library import L +from faebryk.libs.units import P class DifferentialPair(ModuleInterface): p: F.SignalElectrical n: F.SignalElectrical - impedance: F.TBD[Quantity] + impedance = L.p_field( + unit=P.Ω, + likely_constrained=True, + soft_set=L.Range(10 * P.Ω, 100 * P.Ω), + tolerance_guess=10 * P.percent, + ) def terminated(self) -> Self: terminated_bus = type(self)() rs = terminated_bus.add_to_container(2, F.Resistor) for r in rs: - r.resistance.merge(self.impedance) + r.resistance.alias_is(self.impedance) terminated_bus.p.signal.connect_via(rs[0], self.p.signal) terminated_bus.n.signal.connect_via(rs[1], self.n.signal) diff --git a/src/faebryk/library/Diode.py b/src/faebryk/library/Diode.py index 9342d5cd..14993350 100644 --- a/src/faebryk/library/Diode.py +++ b/src/faebryk/library/Diode.py @@ -3,17 +3,36 @@ import faebryk.library._F as F from faebryk.core.module import Module -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import ParameterOperatable from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P class Diode(Module): - forward_voltage: F.TBD[Quantity] 
- max_current: F.TBD[Quantity] - current: F.TBD[Quantity] - reverse_working_voltage: F.TBD[Quantity] - reverse_leakage_current: F.TBD[Quantity] + forward_voltage = L.p_field( + unit=P.V, + likely_constrained=True, + soft_set=L.Range(0.1 * P.V, 1 * P.V), + tolerance_guess=10 * P.percent, + ) + current = L.p_field( + unit=P.A, + likely_constrained=True, + soft_set=L.Range(0.1 * P.mA, 100 * P.A), + tolerance_guess=10 * P.percent, + ) + reverse_working_voltage = L.p_field( + unit=P.V, + likely_constrained=True, + soft_set=L.Range(10 * P.V, 100 * P.V), + tolerance_guess=10 * P.percent, + ) + reverse_leakage_current = L.p_field( + unit=P.A, + likely_constrained=True, + soft_set=L.Range(0.1 * P.nA, 1 * P.µA), + tolerance_guess=10 * P.percent, + ) anode: F.Electrical cathode: F.Electrical @@ -45,6 +64,6 @@ def pin_association_heuristic(self): ) def get_needed_series_resistance_for_current_limit( - self, input_voltage_V: Parameter[Quantity] - ) -> Parameter[Quantity]: + self, input_voltage_V: ParameterOperatable + ): return (input_voltage_V - self.forward_voltage) / self.current diff --git a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py index e770e0c0..5b49960a 100644 --- a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py +++ b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py @@ -5,14 +5,14 @@ import faebryk.library._F as F # noqa: F401 from faebryk.core.module import Module, ModuleException -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import ParameterOperatable from faebryk.exporters.pcb.layout.absolute import LayoutAbsolute from faebryk.exporters.pcb.layout.extrude import LayoutExtrude from faebryk.exporters.pcb.layout.typehierarchy import LayoutTypeHierarchy from faebryk.library.has_pcb_position import has_pcb_position from faebryk.libs.library import L # noqa: F401 from faebryk.libs.picker.picker import DescriptiveProperties -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P from faebryk.libs.util import assert_once logger = logging.getLogger(__name__) @@ -25,8 +25,8 @@ class Diodes_Incorporated_AP2552W6_7(Module): """ @assert_once - def set_current_limit(self, current: Parameter[Quantity]) -> None: - self.current_limit.merge(current) + def set_current_limit(self, current: ParameterOperatable) -> None: + self.current_limit.alias_is(current) current_limit_setting_resistor = self.add(F.Resistor()) @@ -41,14 +41,14 @@ def set_current_limit(self, current: Parameter[Quantity]) -> None: # Rlim_max = (20.08 / (self.current_limit * P.mA)) ^ (1 / 0.904) * P.kohm # Rlim = Range(Rlim_min, Rlim_max) - Rlim = F.Constant(51 * P.kohm) # TODO: remove: ~0.52A typical current limit - if not Rlim.is_subset_of(F.Range(10 * P.kohm, 210 * P.kohm)): - raise ModuleException( - self, - f"Rlim must be in the range 10kOhm to 210kOhm but is {Rlim.get_most_narrow()}", # noqa: E501 - ) + # Rlim = F.Constant(51 * P.kohm) # TODO: remove: ~0.52A typical current limit + # if not Rlim.is_subset_of(F.Range(10 * P.kohm, 210 * P.kohm)): + # raise ModuleException( + # self, + # f"Rlim must be in the range 10kOhm to 210kOhm but is {Rlim.get_most_narrow()}", # noqa: E501 + # ) - current_limit_setting_resistor.resistance.merge(Rlim) + # current_limit_setting_resistor.resistance.constrain_subset(Rlim) # ---------------------------------------- # modules, interfaces, parameters @@ -59,7 +59,12 @@ def set_current_limit(self, current: Parameter[Quantity]) -> None: fault: F.ElectricLogic ilim: F.SignalElectrical 
- current_limit: F.TBD[Quantity] + current_limit = L.p_field( + unit=P.A, + likely_constrained=True, + soft_set=L.Range(100 * P.mA, 2.1 * P.A), + tolerance_guess=10 * P.percent, + ) # ---------------------------------------- # traits # ---------------------------------------- diff --git a/src/faebryk/library/EEPROM.py b/src/faebryk/library/EEPROM.py index e89b050f..00f7acd0 100644 --- a/src/faebryk/library/EEPROM.py +++ b/src/faebryk/library/EEPROM.py @@ -4,7 +4,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P class EEPROM(Module): @@ -25,7 +25,12 @@ def set_address(self, addr: int): # modules, interfaces, parameters # ---------------------------------------- - memory_size: F.TBD[Quantity] + memory_size = L.p_field( + unit=P.bit, + likely_constrained=True, + domain=L.Domains.Numbers.NATURAL(), + soft_set=L.Range(128 * P.bit, 1024 * P.kbit), + ) power: F.ElectricPower i2c: F.I2C diff --git a/src/faebryk/library/ElectricLogic.py b/src/faebryk/library/ElectricLogic.py index a0733cf2..436e0171 100644 --- a/src/faebryk/library/ElectricLogic.py +++ b/src/faebryk/library/ElectricLogic.py @@ -90,7 +90,9 @@ class PushPull(Enum): # ---------------------------------------- # modules, interfaces, parameters # ---------------------------------------- - push_pull: F.TBD[PushPull] + push_pull = L.p_field( + domain=L.Domains.ENUM(PushPull), + ) # ---------------------------------------- # traits diff --git a/src/faebryk/library/ElectricPower.py b/src/faebryk/library/ElectricPower.py index f8f03927..68138229 100644 --- a/src/faebryk/library/ElectricPower.py +++ b/src/faebryk/library/ElectricPower.py @@ -2,14 +2,13 @@ # SPDX-License-Identifier: MIT -import math from typing import Self import faebryk.library._F as F from faebryk.core.moduleinterface import ModuleInterface from faebryk.core.node import Node from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P from faebryk.libs.util import RecursionGuard @@ -26,11 +25,7 @@ def decouple(self): return ( super() .decouple() - .builder( - lambda c: c.rated_voltage.merge( - F.Range(obj.voltage * 2.0, math.inf * P.V) - ) - ) + .builder(lambda c: c.rated_voltage.constrain_ge(obj.voltage * 2.0)) ) class can_be_surge_protected_power(F.can_be_surge_protected_defined): @@ -50,8 +45,14 @@ def protect(self): hv: F.Electrical lv: F.Electrical - voltage: F.TBD[Quantity] - max_current: F.TBD[Quantity] + voltage = L.p_field( + unit=P.V, + likely_constrained=True, + domain=L.Domains.Numbers.REAL(), + soft_set=L.Range(0 * P.V, 1000 * P.V), + tolerance_guess=5 * P.percent, + ) + # max_current= L.p_field(unit=P.A) """ Only for this particular power interface Does not propagate to connections @@ -73,7 +74,7 @@ def fused(self, attach_to: Node | None = None): self.connect_shallow(fused_power) - fuse.trip_current.merge(F.Constant(self.max_current)) + # fuse.trip_current.merge(F.Constant(self.max_current)) # fused_power.max_current.merge(F.Range(0 * P.A, fuse.trip_current)) if attach_to is not None: @@ -93,7 +94,7 @@ def _on_connect(self, other: ModuleInterface) -> None: if not isinstance(other, ElectricPower): return - self.voltage.merge(other.voltage) + self.voltage.alias_is(other.voltage) # TODO remove with lazy mifs def connect(self: Self, *other: Self, linkcls=None) -> Self: diff --git a/src/faebryk/library/Electrical.py b/src/faebryk/library/Electrical.py index 6fce6013..69ac0b1e 100644 
--- a/src/faebryk/library/Electrical.py +++ b/src/faebryk/library/Electrical.py @@ -1,13 +1,11 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -import faebryk.library._F as F from faebryk.core.moduleinterface import ModuleInterface -from faebryk.libs.units import Quantity class Electrical(ModuleInterface): - potential: F.TBD[Quantity] + # potential= L.p_field(unit=P.dimensionless) def get_net(self): from faebryk.library.Net import Net diff --git a/src/faebryk/library/Filter.py b/src/faebryk/library/Filter.py index b3ddec65..50d6b871 100644 --- a/src/faebryk/library/Filter.py +++ b/src/faebryk/library/Filter.py @@ -5,6 +5,8 @@ import faebryk.library._F as F from faebryk.core.module import Module +from faebryk.libs.library import L +from faebryk.libs.units import P class Filter(Module): @@ -15,9 +17,20 @@ class Response(Enum): BANDSTOP = auto() OTHER = auto() - cutoff_frequency: F.TBD[float] - order: F.TBD[int] - response: F.TBD[Response] + cutoff_frequency = L.p_field( + unit=P.Hz, + likely_constrained=True, + domain=L.Domains.Numbers.REAL(), + soft_set=L.Range(0 * P.Hz, 1000 * P.Hz), + ) + order = L.p_field( + domain=L.Domains.Numbers.NATURAL(), + soft_set=L.Range(2, 10), + guess=2, + ) + response = L.p_field( + domain=L.Domains.ENUM(Response), + ) in_: F.Signal out: F.Signal diff --git a/src/faebryk/library/Fuse.py b/src/faebryk/library/Fuse.py index 8a957956..aaafd43a 100644 --- a/src/faebryk/library/Fuse.py +++ b/src/faebryk/library/Fuse.py @@ -7,7 +7,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -22,9 +22,18 @@ class ResponseType(Enum): FAST = auto() unnamed = L.list_field(2, F.Electrical) - fuse_type: F.TBD[FuseType] - response_type: F.TBD[ResponseType] - trip_current: F.TBD[Quantity] + fuse_type = L.p_field( + domain=L.Domains.ENUM(FuseType), + ) + response_type = L.p_field( + domain=L.Domains.ENUM(ResponseType), + ) + trip_current = L.p_field( + unit=P.A, + likely_constrained=True, + domain=L.Domains.Numbers.REAL(), + soft_set=L.Range(100 * P.mA, 100 * P.A), + ) attach_to_footprint: F.can_attach_to_footprint_symmetrically diff --git a/src/faebryk/library/GDT.py b/src/faebryk/library/GDT.py index 46149b66..42e47172 100644 --- a/src/faebryk/library/GDT.py +++ b/src/faebryk/library/GDT.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -16,8 +16,16 @@ class GDT(Module): tube_1: F.Electrical tube_2: F.Electrical - dc_breakdown_voltage: F.TBD[Quantity] - impulse_discharge_current: F.TBD[Quantity] + dc_breakdown_voltage = L.p_field( + unit=P.V, + likely_constrained=True, + soft_set=L.Range(100 * P.V, 1000 * P.V), + ) + impulse_discharge_current = L.p_field( + unit=P.A, + likely_constrained=True, + soft_set=L.Range(100 * P.mA, 100 * P.A), + ) @L.rt_field def can_bridge(self): diff --git a/src/faebryk/library/HLK_LD2410B_P.py b/src/faebryk/library/HLK_LD2410B_P.py index 88e7c620..6512ad60 100644 --- a/src/faebryk/library/HLK_LD2410B_P.py +++ b/src/faebryk/library/HLK_LD2410B_P.py @@ -4,12 +4,15 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P class 
HLK_LD2410B_P(Module): class _ld2410b_esphome_config(F.has_esphome_config.impl()): - throttle: F.TBD[Quantity] + throttle = L.p_field( + unit=P.ms, + soft_set=L.Range(10 * P.ms, 1000 * P.ms), + ) def get_config(self) -> dict: val = self.throttle.get_most_narrow() diff --git a/src/faebryk/library/Header.py b/src/faebryk/library/Header.py index 18f45a06..0d80148a 100644 --- a/src/faebryk/library/Header.py +++ b/src/faebryk/library/Header.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P from faebryk.libs.util import times @@ -34,16 +34,32 @@ def __init__( self._vertical_pin_count = vertical_pin_count def __preinit__(self): - self.pin_count_horizonal.merge(self._horizontal_pin_count) - self.pin_count_vertical.merge(self._vertical_pin_count) + self.pin_count_horizonal.alias_is(self._horizontal_pin_count) + self.pin_count_vertical.alias_is(self._vertical_pin_count) - pin_pitch: F.TBD[Quantity] - pin_type: F.TBD[PinType] - pad_type: F.TBD[PadType] - angle: F.TBD[Angle] - - pin_count_horizonal: F.TBD[int] - pin_count_vertical: F.TBD[int] + pin_pitch = L.p_field( + unit=P.mm, + likely_constrained=True, + domain=L.Domains.Numbers.REAL(), + soft_set=L.Range(1 * P.mm, 10 * P.mm), + ) + pin_type = L.p_field( + domain=L.Domains.ENUM(PinType), + ) + pad_type = L.p_field( + domain=L.Domains.ENUM(PadType), + ) + angle = L.p_field( + domain=L.Domains.ENUM(Angle), + ) + pin_count_horizonal = L.p_field( + domain=L.Domains.Numbers.NATURAL(), + soft_set=L.Range(2, 100), + ) + pin_count_vertical = L.p_field( + domain=L.Domains.Numbers.NATURAL(), + soft_set=L.Range(2, 100), + ) @L.rt_field def unnamed(self): diff --git a/src/faebryk/library/I2C.py b/src/faebryk/library/I2C.py index 7db966d9..e47c1ae9 100644 --- a/src/faebryk/library/I2C.py +++ b/src/faebryk/library/I2C.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.moduleinterface import ModuleInterface from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -15,7 +15,11 @@ class I2C(ModuleInterface): scl: F.ElectricLogic sda: F.ElectricLogic - frequency: F.TBD[Quantity] + frequency = L.p_field( + unit=P.Hz, + likely_constrained=True, + soft_set=L.Range(10 * P.kHz, 3.4 * P.MHz), + ) @L.rt_field def single_electric_reference(self): @@ -32,7 +36,7 @@ def terminate(self): def _on_connect(self, other: "I2C"): super()._on_connect(other) - self.frequency.merge(other.frequency) + self.frequency.alias_is(other.frequency) class SpeedMode(Enum): low_speed = 10 * P.khertz diff --git a/src/faebryk/library/Inductor.py b/src/faebryk/library/Inductor.py index 9120dc78..5de3f3fb 100644 --- a/src/faebryk/library/Inductor.py +++ b/src/faebryk/library/Inductor.py @@ -5,17 +5,36 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P from faebryk.libs.util import join_if_non_empty class Inductor(Module): unnamed = L.list_field(2, F.Electrical) - inductance: F.TBD[Quantity] - self_resonant_frequency: F.TBD[Quantity] - rated_current: F.TBD[Quantity] - dc_resistance: F.TBD[Quantity] + inductance = L.p_field( + unit=P.H, + likely_constrained=True, + soft_set=L.Range(100 * P.nH, 1 * P.H), + tolerance_guess=10 * P.percent, + ) + self_resonant_frequency = L.p_field( + unit=P.Hz, + 
likely_constrained=True, + soft_set=L.Range(100 * P.kHz, 1 * P.GHz), + tolerance_guess=10 * P.percent, + ) + rated_current = L.p_field( + unit=P.A, + likely_constrained=True, + soft_set=L.Range(1 * P.mA, 100 * P.A), + tolerance_guess=10 * P.percent, + ) + dc_resistance = L.p_field( + unit=P.Ω, + soft_set=L.Range(10 * P.mΩ, 100 * P.Ω), + tolerance_guess=10 * P.percent, + ) @L.rt_field def can_bridge(self): diff --git a/src/faebryk/library/LDO.py b/src/faebryk/library/LDO.py index 150ecabb..8d25c644 100644 --- a/src/faebryk/library/LDO.py +++ b/src/faebryk/library/LDO.py @@ -7,7 +7,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P from faebryk.libs.util import assert_once, join_if_non_empty @@ -24,15 +24,42 @@ class OutputPolarity(Enum): POSITIVE = auto() NEGATIVE = auto() - max_input_voltage: F.TBD[Quantity] - output_voltage: F.TBD[Quantity] - output_polarity: F.TBD[OutputPolarity] - output_type: F.TBD[OutputType] - output_current: F.TBD[Quantity] - psrr: F.TBD[Quantity] - dropout_voltage: F.TBD[Quantity] - quiescent_current: F.TBD[Quantity] - + max_input_voltage = L.p_field( + unit=P.V, + likely_constrained=True, + soft_set=L.Range(1 * P.V, 100 * P.V), + ) + output_voltage = L.p_field( + unit=P.V, + likely_constrained=True, + soft_set=L.Range(1 * P.V, 100 * P.V), + ) + quiescent_current = L.p_field( + unit=P.A, + likely_constrained=True, + soft_set=L.Range(1 * P.mA, 100 * P.mA), + ) + dropout_voltage = L.p_field( + unit=P.V, + likely_constrained=True, + soft_set=L.Range(1 * P.mV, 100 * P.mV), + ) + psrr = L.p_field( + unit=P.dB, + likely_constrained=True, + soft_set=L.Range(1 * P.dB, 100 * P.dB), + ) + output_polarity = L.p_field( + domain=L.Domains.ENUM(OutputPolarity), + ) + output_type = L.p_field( + domain=L.Domains.ENUM(OutputType), + ) + output_current = L.p_field( + unit=P.A, + likely_constrained=True, + soft_set=L.Range(1 * P.mA, 100 * P.mA), + ) enable: F.ElectricLogic power_in: F.ElectricPower power_out = L.d_field(lambda: F.ElectricPower().make_source()) diff --git a/src/faebryk/library/LED.py b/src/faebryk/library/LED.py index 12b9f60b..5243f531 100644 --- a/src/faebryk/library/LED.py +++ b/src/faebryk/library/LED.py @@ -6,7 +6,8 @@ import faebryk.library._F as F from faebryk.core.parameter import Parameter -from faebryk.libs.units import Quantity +from faebryk.libs.library import L +from faebryk.libs.units import P, Quantity class LED(F.Diode): @@ -40,9 +41,9 @@ class Color(Enum): ULTRA_VIOLET = auto() INFRA_RED = auto() - brightness: F.TBD[Quantity] - max_brightness: F.TBD[Quantity] - color: F.TBD[Color] + brightness = L.p_field(unit=P.candela) + max_brightness = L.p_field(unit=P.candela) + color = L.p_field(domain=L.Domains.ENUM(Color)) def __preinit__(self): self.current.merge(self.brightness / self.max_brightness * self.max_current) diff --git a/src/faebryk/library/Logic74xx.py b/src/faebryk/library/Logic74xx.py index 6cf3250f..c3af9a3c 100644 --- a/src/faebryk/library/Logic74xx.py +++ b/src/faebryk/library/Logic74xx.py @@ -50,7 +50,7 @@ class Family(Enum): CD4000 = auto() power: F.ElectricPower - logic_family: F.TBD[Family] + logic_family = L.p_field(domain=L.Domains.ENUM(Family)) designator = L.f_field(F.has_designator_prefix_defined)( F.has_designator_prefix.Prefix.U diff --git a/src/faebryk/library/MOSFET.py b/src/faebryk/library/MOSFET.py index 250c54b4..db4df9aa 100644 --- a/src/faebryk/library/MOSFET.py +++ 
b/src/faebryk/library/MOSFET.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P class MOSFET(Module): @@ -18,12 +18,12 @@ class SaturationType(Enum): ENHANCEMENT = auto() DEPLETION = auto() - channel_type: F.TBD[ChannelType] - saturation_type: F.TBD[SaturationType] - gate_source_threshold_voltage: F.TBD[Quantity] - max_drain_source_voltage: F.TBD[Quantity] - max_continuous_drain_current: F.TBD[Quantity] - on_resistance: F.TBD[Quantity] + channel_type = L.p_field(domain=L.Domains.ENUM(ChannelType)) + saturation_type = L.p_field(domain=L.Domains.ENUM(SaturationType)) + gate_source_threshold_voltage = L.p_field(unit=P.V) + max_drain_source_voltage = L.p_field(unit=P.V) + max_continuous_drain_current = L.p_field(unit=P.A) + on_resistance = L.p_field(unit=P.ohm) source: F.Electrical gate: F.Electrical diff --git a/src/faebryk/library/OLED_Module.py b/src/faebryk/library/OLED_Module.py index b7590a4a..f0569f62 100644 --- a/src/faebryk/library/OLED_Module.py +++ b/src/faebryk/library/OLED_Module.py @@ -38,9 +38,9 @@ class DisplayController(Enum): power: F.ElectricPower i2c: F.I2C - display_resolution: F.TBD[DisplayResolution] - display_controller: F.TBD[DisplayController] - display_size: F.TBD[DisplaySize] + display_resolution = L.p_field(domain=L.Domains.ENUM(DisplayResolution)) + display_controller = L.p_field(domain=L.Domains.ENUM(DisplayController)) + display_size = L.p_field(domain=L.Domains.ENUM(DisplaySize)) def __preinit__(self): self.power.voltage.merge(F.Range(3.0 * P.V, 5 * P.V)) diff --git a/src/faebryk/library/OpAmp.py b/src/faebryk/library/OpAmp.py index 8b9bb054..44d1dc83 100644 --- a/src/faebryk/library/OpAmp.py +++ b/src/faebryk/library/OpAmp.py @@ -4,17 +4,17 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P class OpAmp(Module): - bandwidth: F.TBD[Quantity] - common_mode_rejection_ratio: F.TBD[Quantity] - input_bias_current: F.TBD[Quantity] - input_offset_voltage: F.TBD[Quantity] - gain_bandwidth_product: F.TBD[Quantity] - output_current: F.TBD[Quantity] - slew_rate: F.TBD[Quantity] + bandwidth = L.p_field(unit=P.Hz) + common_mode_rejection_ratio = L.p_field(unit=P.dimensionless) + input_bias_current = L.p_field(unit=P.A) + input_offset_voltage = L.p_field(unit=P.V) + gain_bandwidth_product = L.p_field(unit=P.Hz) + output_current = L.p_field(unit=P.A) + slew_rate = L.p_field(unit=P.V / P.s) power: F.ElectricPower inverting_input: F.Electrical diff --git a/src/faebryk/library/PM1006.py b/src/faebryk/library/PM1006.py index df12656c..7e16ed51 100644 --- a/src/faebryk/library/PM1006.py +++ b/src/faebryk/library/PM1006.py @@ -26,7 +26,7 @@ class PM1006(Module): """ class _pm1006_esphome_config(F.has_esphome_config.impl()): - update_interval: F.TBD + update_interval = L.p_field(unit=P.s) def get_config(self) -> dict: val = self.update_interval.get_most_narrow() diff --git a/src/faebryk/library/Potentiometer.py b/src/faebryk/library/Potentiometer.py index 587859d6..63195bba 100644 --- a/src/faebryk/library/Potentiometer.py +++ b/src/faebryk/library/Potentiometer.py @@ -1,24 +1,27 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +from tkinter import W + import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units 
import Quantity +from faebryk.libs.units import P class Potentiometer(Module): resistors_ifs = L.list_field(2, F.Electrical) wiper: F.Electrical - total_resistance: F.TBD[Quantity] + total_resistance = L.p_field(unit=P.ohm) resistors = L.list_field(2, F.Resistor) def __preinit__(self): for i, resistor in enumerate(self.resistors): self.resistors_ifs[i].connect_via(resistor, self.wiper) - # TODO use range(0, total_resistance) - resistor.resistance.merge(self.total_resistance) + self.total_resistance.alias_is( + self.resistors[0].resistance + self.resistors[1].resistance + ) def connect_as_voltage_divider( self, high: F.Electrical, low: F.Electrical, out: F.Electrical diff --git a/src/faebryk/library/RJ45_Receptacle.py b/src/faebryk/library/RJ45_Receptacle.py index 84c3f74d..04dafa91 100644 --- a/src/faebryk/library/RJ45_Receptacle.py +++ b/src/faebryk/library/RJ45_Receptacle.py @@ -21,4 +21,4 @@ class Mounting(Enum): designator_prefix = L.f_field(F.has_designator_prefix_defined)( F.has_designator_prefix.Prefix.J ) - mounting: F.TBD[Mounting] + mounting = L.p_field(domain=L.Domains.ENUM(Mounting)) diff --git a/src/faebryk/library/Relay.py b/src/faebryk/library/Relay.py index d0996add..862f6bdc 100644 --- a/src/faebryk/library/Relay.py +++ b/src/faebryk/library/Relay.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -22,12 +22,12 @@ class Relay(Module): coil_p: F.Electrical coil_n: F.Electrical - coil_rated_voltage: F.TBD[Quantity] - coil_rated_current: F.TBD[Quantity] - coil_resistance: F.TBD[Quantity] - contact_max_switching_voltage: F.TBD[Quantity] - contact_rated_switching_current: F.TBD[Quantity] - contact_max_switchng_current: F.TBD[Quantity] + coil_rated_voltage = L.p_field(unit=P.V) + coil_rated_current = L.p_field(unit=P.A) + coil_resistance = L.p_field(unit=P.ohm) + contact_max_switching_voltage = L.p_field(unit=P.V) + contact_rated_switching_current = L.p_field(unit=P.A) + contact_max_switchng_current = L.p_field(unit=P.A) designator_prefix = L.f_field(F.has_designator_prefix_defined)( F.has_designator_prefix.Prefix.K diff --git a/src/faebryk/library/Resistor.py b/src/faebryk/library/Resistor.py index 31f00cf3..1df61d6d 100644 --- a/src/faebryk/library/Resistor.py +++ b/src/faebryk/library/Resistor.py @@ -8,16 +8,16 @@ from faebryk.core.parameter import Parameter from faebryk.libs.library import L from faebryk.libs.picker.picker import PickError, has_part_picked_remove -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P from faebryk.libs.util import join_if_non_empty class Resistor(Module): unnamed = L.list_field(2, F.Electrical) - resistance: F.TBD[Quantity] - rated_power: F.TBD[Quantity] - rated_voltage: F.TBD[Quantity] + resistance = L.p_field(unit=P.ohm) + rated_power = L.p_field(unit=P.W) + rated_voltage = L.p_field(unit=P.V) attach_to_footprint: F.can_attach_to_footprint_symmetrically designator_prefix = L.f_field(F.has_designator_prefix_defined)( diff --git a/src/faebryk/library/ResistorVoltageDivider.py b/src/faebryk/library/ResistorVoltageDivider.py index 8ba17374..5862ba3e 100644 --- a/src/faebryk/library/ResistorVoltageDivider.py +++ b/src/faebryk/library/ResistorVoltageDivider.py @@ -15,8 +15,8 @@ class ResistorVoltageDivider(Module): node = L.list_field(3, F.Electrical) - ratio: F.TBD[float] - max_current: F.TBD[float] + ratio = 
L.p_field(domain=L.Domains.ENUM(float)) + max_current = L.p_field(domain=L.Domains.ENUM(float)) @L.rt_field def can_bridge(self): diff --git a/src/faebryk/library/Resistor_Voltage_Divider.py b/src/faebryk/library/Resistor_Voltage_Divider.py index cc7dd64a..f4eab884 100644 --- a/src/faebryk/library/Resistor_Voltage_Divider.py +++ b/src/faebryk/library/Resistor_Voltage_Divider.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -15,8 +15,8 @@ class Resistor_Voltage_Divider(Module): resistor = L.list_field(2, F.Resistor) node = L.list_field(3, F.Electrical) - ratio: F.TBD[Quantity] - max_current: F.TBD[Quantity] + ratio = L.p_field(unit=P.dimensionless) + max_current = L.p_field(unit=P.A) def __preinit__(self): self.node[0].connect_via(self.resistor[0], self.node[1]) diff --git a/src/faebryk/library/SCD40.py b/src/faebryk/library/SCD40.py index 17f2c5c7..b9f6d4dc 100644 --- a/src/faebryk/library/SCD40.py +++ b/src/faebryk/library/SCD40.py @@ -5,7 +5,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P class SCD40(Module): @@ -14,7 +14,7 @@ class SCD40(Module): """ class _scd4x_esphome_config(F.has_esphome_config.impl()): - update_interval: F.TBD[Quantity] + update_interval = L.p_field(unit=P.s) def get_config(self) -> dict: val = self.update_interval.get_most_narrow() diff --git a/src/faebryk/library/SPIFlash.py b/src/faebryk/library/SPIFlash.py index 3b918955..cae5e633 100644 --- a/src/faebryk/library/SPIFlash.py +++ b/src/faebryk/library/SPIFlash.py @@ -4,14 +4,17 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P class SPIFlash(Module): power: F.ElectricPower spi = L.f_field(F.MultiSPI)(4) - memory_size: F.TBD[Quantity] + memory_size = L.p_field( + unit=P.byte, + domain=L.Domains.Numbers.NATURAL(), + ) designator_prefix = L.f_field(F.has_designator_prefix_defined)( F.has_designator_prefix.Prefix.U ) diff --git a/src/faebryk/library/TVS.py b/src/faebryk/library/TVS.py index 205643a5..9d3599b6 100644 --- a/src/faebryk/library/TVS.py +++ b/src/faebryk/library/TVS.py @@ -4,10 +4,11 @@ import logging import faebryk.library._F as F -from faebryk.libs.units import Quantity +from faebryk.libs.library import L +from faebryk.libs.units import P logger = logging.getLogger(__name__) class TVS(F.Diode): - reverse_breakdown_voltage: F.TBD[Quantity] + reverse_breakdown_voltage = L.p_field(unit=P.V) diff --git a/src/faebryk/library/UART_Base.py b/src/faebryk/library/UART_Base.py index 025fe017..819fb988 100644 --- a/src/faebryk/library/UART_Base.py +++ b/src/faebryk/library/UART_Base.py @@ -4,14 +4,14 @@ import faebryk.library._F as F from faebryk.core.moduleinterface import ModuleInterface from faebryk.libs.library import L -from faebryk.libs.units import Quantity +from faebryk.libs.units import P class UART_Base(ModuleInterface): rx: F.ElectricLogic tx: F.ElectricLogic - baud: F.TBD[Quantity] + baud = L.p_field(unit=P.baud) @L.rt_field def single_electric_reference(self): @@ -22,4 +22,4 @@ def single_electric_reference(self): def _on_connect(self, other: "UART_Base"): super()._on_connect(other) - self.baud.merge(other.baud) + self.baud.alias_is(other.baud) 
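The hunk above shows the substitution this refactor applies throughout the library: merge becomes alias_is where two parameters must resolve to the same value, while numeric limits move to constrain_subset / constrain_ge / constrain_le over L.Range sets. A minimal sketch of the intended call pattern against the new Parameter API declared earlier in this patch; the module, field names and values below are illustrative assumptions only and are not part of any commit:

    import faebryk.library._F as F
    from faebryk.core.module import Module
    from faebryk.libs.library import L
    from faebryk.libs.units import P


    class ExampleRegulator(Module):
        # hypothetical module, only to illustrate the new constraint API
        power_in: F.ElectricPower
        power_out: F.ElectricPower

        output_voltage = L.p_field(
            unit=P.V,
            likely_constrained=True,
            soft_set=L.Range(1 * P.V, 12 * P.V),
        )

        def __preinit__(self):
            # identity constraint: both parameters must end up with the same value
            self.power_out.voltage.alias_is(self.output_voltage)
            # set constraints: narrow the allowed range instead of overwriting it
            self.power_in.voltage.constrain_subset(L.Range(4 * P.V, 18 * P.V))
            self.output_voltage.constrain_subset(
                L.Range.from_center_rel(5 * P.V, 0.05)
            )

At this point in the series the constraint methods are still stubs, so the sketch only shows the intended surface, mirroring the library modules converted in this patch.
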
diff --git a/src/faebryk/library/UART_RS485.py b/src/faebryk/library/UART_RS485.py index bf3eeee1..0c22ac31 100644 --- a/src/faebryk/library/UART_RS485.py +++ b/src/faebryk/library/UART_RS485.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -18,11 +18,11 @@ class UART_RS485(Module): read_enable: F.ElectricLogic write_enable: F.ElectricLogic - max_data_rate: F.TBD[Quantity] - gpio_voltage: F.TBD[Quantity] + max_data_rate = L.p_field(unit=P.baud) + gpio_voltage = L.p_field(unit=P.V) def __preinit__(self): - self.max_data_rate.merge(self.uart.baud) + self.max_data_rate.alias_is(self.uart.baud) self.power.voltage.merge(F.Range(3.3 * P.V, 5.0 * P.V)) self.power.decoupled.decouple() diff --git a/src/faebryk/library/USB2_0_ESD_Protection.py b/src/faebryk/library/USB2_0_ESD_Protection.py index 6192b39d..66da1691 100644 --- a/src/faebryk/library/USB2_0_ESD_Protection.py +++ b/src/faebryk/library/USB2_0_ESD_Protection.py @@ -16,8 +16,8 @@ class USB2_0_ESD_Protection(Module): usb = L.list_field(2, F.USB2_0) - vbus_esd_protection: F.TBD[bool] - data_esd_protection: F.TBD[bool] + vbus_esd_protection = L.p_field(domain=L.Domains.ENUM(bool)) + data_esd_protection = L.p_field(domain=L.Domains.ENUM(bool)) def __preinit__(self): self.usb[0].usb_if.buspower.voltage.merge(F.Range(4.75 * P.V, 5.25 * P.V)) diff --git a/src/faebryk/library/XL_3528RGBW_WS2812B.py b/src/faebryk/library/XL_3528RGBW_WS2812B.py index bb45f3de..fef0f609 100644 --- a/src/faebryk/library/XL_3528RGBW_WS2812B.py +++ b/src/faebryk/library/XL_3528RGBW_WS2812B.py @@ -4,11 +4,12 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L +from faebryk.libs.units import P class XL_3528RGBW_WS2812B(Module): class _ws2812b_esphome_config(F.has_esphome_config.impl()): - update_interval: F.TBD + update_interval = L.p_field(unit=P.s) def get_config(self) -> dict: assert isinstance(self.update_interval, F.Constant) diff --git a/src/faebryk/library/can_switch_power_defined.py b/src/faebryk/library/can_switch_power_defined.py index 5feffed5..5d892a86 100644 --- a/src/faebryk/library/can_switch_power_defined.py +++ b/src/faebryk/library/can_switch_power_defined.py @@ -17,7 +17,7 @@ def __init__( self.out_power = out_power self.in_logic = in_logic - out_power.voltage.merge(in_power.voltage) + out_power.voltage.alias_is(in_power.voltage) def get_logic_in(self) -> F.ElectricLogic: return self.in_logic diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 5c037bcd..cb064b77 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: MIT from abc import ABC, abstractmethod -from typing import Any, Protocol +from typing import Any, Protocol, Self from faebryk.libs.units import Quantity, Unit @@ -13,6 +13,10 @@ def __lt__(self, __value) -> bool: ... def __ge__(self, __value) -> bool: ... def __gt__(self, __value) -> bool: ... + def __mul__(self, __value: float | Self) -> Self: ... + def __sub__(self, __value: Self) -> Self: ... + def __add__(self, __value: Self) -> Self: ... 
+ class Set_[T](ABC): def __init__(self): @@ -51,6 +55,14 @@ def is_compatible_with_unit(self, unit: Unit | Quantity) -> bool: return True + @classmethod + def from_center(cls, center: T, abs_tol: T) -> "Range[T]": + return cls(center - abs_tol, center + abs_tol) + + @classmethod + def from_center_rel(cls, center: T, rel_tol: float) -> "Range[T]": + return cls(center - center * rel_tol, center + center * rel_tol) + def intersection(self, other: "Range[T]") -> "Range[T]": if self.empty or other.empty: return Range(empty=True) @@ -91,6 +103,13 @@ def __init__(self, value: T): def __contains__(self, item: T): return item == self.value + def is_compatible_with_unit(self, unit: Unit | Quantity) -> bool: + if isinstance(self.value, Quantity) and not unit.is_compatible_with( + self.value.units + ): + return False + return True + class Set[T](Set_[T]): def __init__(self, *elements: T): From b091b226e6d69d11011da58382694546b223736d Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Tue, 8 Oct 2024 19:11:34 +0200 Subject: [PATCH 14/80] Core: rename unit to units give sets units logic & setic expressions --- src/faebryk/core/parameter.py | 118 ++++++++++++++++++++-------------- src/faebryk/libs/sets.py | 49 +++++++------- src/faebryk/libs/units.py | 10 +++ test/libs/test_sets.py | 22 +++++-- 4 files changed, 124 insertions(+), 75 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 3d0fe716..fb659a0b 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -2,23 +2,16 @@ # SPDX-License-Identifier: MIT import logging -import math from enum import Enum, auto -from typing import Protocol, runtime_checkable from faebryk.core.core import Namespace from faebryk.core.node import Node, f_field -from faebryk.libs.sets import Range, Set_ -from faebryk.libs.units import P, Quantity, Unit +from faebryk.libs.sets import Range, Set_, Set_ +from faebryk.libs.units import HasUnit, P, Quantity, Unit, dimensionless logger = logging.getLogger(__name__) -@runtime_checkable -class HasUnit(Protocol): - unit: Unit - - class ParameterOperatable(Protocol): type PE = ParameterOperatable | int | float | Quantity | Set_ @@ -155,7 +148,7 @@ class Expression(Node, ParameterOperatable): pass -class Arithmetic(Expression): +class Arithmetic(HasUnit, Expression): def __init__(self, *operands): types = [int, float, Quantity, Parameter, Arithmetic] if any(type(op) not in types for op in operands): @@ -175,11 +168,11 @@ class Additive(Arithmetic): def __init__(self, *operands): super().__init__(*operands) units = [ - op.unit if isinstance(op, HasUnit) else P.dimensionless for op in operands + op.units if isinstance(op, HasUnit) else dimensionless for op in operands ] # Check if all units are compatible - self.unit = units[0] - if not all(u.is_compatible_with(self.unit) for u in units): + self.units = units[0] + if not all(u.is_compatible_with(self.units) for u in units): raise ValueError("All operands must have compatible units") @@ -196,85 +189,98 @@ def __init__(self, *operands): class Multiply(Arithmetic): def __init__(self, *operands): super().__init__(*operands) - self.unit = math.prod( - [op.unit if isinstance(op, HasUnit) else P.dimensionless for op in operands] - ) + units = [ + op.units if isinstance(op, HasUnit) else dimensionless for op in operands + ] + self.units = units[0] + for u in units[1:]: + self.units *= u class Divide(Multiply): def __init__(self, numerator, denominator): super().__init__(numerator, 
denominator) - self.unit = numerator.unit / denominator.unit + self.units = numerator.units / denominator.units class Sqrt(Arithmetic): def __init__(self, operand): super().__init__(operand) - self.unit = operand.unit**0.5 + self.units = operand.units**0.5 class Power(Arithmetic): def __init__(self, base, exponent: int): super().__init__(base, exponent) - if isinstance(exponent, HasUnit) and not exponent.unit.is_compatible_with( - P.dimensionless + if isinstance(exponent, HasUnit) and not exponent.units.is_compatible_with( + dimensionless ): raise ValueError("exponent must have dimensionless unit") - self.unit = ( - base.unit**exponent if isinstance(base, HasUnit) else P.dimensionless - ) + units = base.units**exponent if isinstance(base, HasUnit) else dimensionless + assert isinstance(units, Unit) + self.units = units class Log(Arithmetic): def __init__(self, operand): super().__init__(operand) - if not operand.unit.is_compatible_with(P.dimensionless): + if not operand.unit.is_compatible_with(dimensionless): raise ValueError("operand must have dimensionless unit") - self.unit = P.dimensionless + self.units = dimensionless class Sin(Arithmetic): def __init__(self, operand): super().__init__(operand) - if not operand.unit.is_compatible_with(P.dimensionless): + if not operand.unit.is_compatible_with(dimensionless): raise ValueError("operand must have dimensionless unit") - self.unit = P.dimensionless + self.units = dimensionless class Cos(Arithmetic): def __init__(self, operand): super().__init__(operand) - if not operand.unit.is_compatible_with(P.dimensionless): + if not operand.unit.is_compatible_with(dimensionless): raise ValueError("operand must have dimensionless unit") - self.unit = P.dimensionless + self.units = dimensionless class Abs(Arithmetic): def __init__(self, operand): super().__init__(operand) - self.unit = operand.unit + self.units = operand.units class Round(Arithmetic): def __init__(self, operand): super().__init__(operand) - self.unit = operand.unit + self.units = operand.units class Floor(Arithmetic): def __init__(self, operand): super().__init__(operand) - self.unit = operand.unit + self.units = operand.units class Ceil(Arithmetic): def __init__(self, operand): super().__init__(operand) - self.unit = operand.unit + self.units = operand.units class Logic(Expression): - pass + def __init__(self, *operands): + types = [bool, Parameter, Logic, Predicate] + if any(type(op) not in types for op in operands): + raise ValueError("operands must be bool, Parameter, Logic, or Predicate") + if any( + param.domain != Boolean or not param.units.is_compatible_with(dimensionless) + for param in operands + if isinstance(param, Parameter) + ): + raise ValueError("parameters must have domain Boolean without a unit") + self.operands = operands class And(Logic): @@ -286,34 +292,48 @@ class Or(Logic): class Not(Logic): - pass + def __init__(self, operand): + super().__init__(operand) class Xor(Logic): - pass + def __init__(self, left, right): + super().__init__(left, right) class Implies(Logic): - pass + def __init__(self, left, right): + super().__init__(left, right) -class Set(Expression): - pass +class Setic(Expression): + def __init__(self, *operands): + super().__init__(*operands) + types = [Parameter, Set_] + if any(type(op) not in types for op in operands): + raise ValueError("operands must be Parameter or Set") + units = [op.units for op in operands] + self.units = units[0] + for u in units[1:]: + if not self.units.is_compatible_with(u): + raise ValueError("all operands must have 
compatible units") + # TODO domain? -class Union(Set): +class Union(Setic): pass -class Intersection(Set): +class Intersection(Setic): pass -class Difference(Set): - pass +class Difference(Setic): + def __init__(self, minuend, subtrahend): + super().__init__(minuend, subtrahend) -class SymmetricDifference(Set): +class SymmetricDifference(Setic): pass @@ -468,7 +488,7 @@ class Parameter(Node, ParameterOperatable): def __init__( self, *, - unit: Unit | Quantity | None = None, + units: Unit | Quantity | None = dimensionless, # hard constraints within: Range | None = None, domain: Domain = Numbers(negative=False), @@ -485,12 +505,12 @@ def __init__( super().__init__() if within is None: within = Range() - if not within.is_compatible_with_unit(unit): + if not within.units.is_compatible_with(units): raise ValueError("incompatible units") - if not isinstance(unit, Unit): - raise TypeError("unit must be a Unit") - self.unit = unit + if not isinstance(units, Unit): + raise TypeError("units must be a Unit") + self.units = units self.within = within self.domain = domain self.soft_set = soft_set diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index cb064b77..486289e1 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -4,7 +4,7 @@ from abc import ABC, abstractmethod from typing import Any, Protocol, Self -from faebryk.libs.units import Quantity, Unit +from faebryk.libs.units import HasUnit, P, Unit, dimensionless class _SupportsRangeOps(Protocol): @@ -18,7 +18,7 @@ def __sub__(self, __value: Self) -> Self: ... def __add__(self, __value: Self) -> Self: ... -class Set_[T](ABC): +class Set_[T](ABC, HasUnit): def __init__(self): pass @@ -26,13 +26,15 @@ def __init__(self): def __contains__(self, item: T): pass - @abstractmethod - def is_compatible_with_unit(self, unit: Unit | Quantity) -> bool: - pass - class Range[T: _SupportsRangeOps](Set_[T]): - def __init__(self, min: T | None = None, max: T | None = None, empty: bool = False): + def __init__( + self, + min: T | None = None, + max: T | None = None, + empty: bool = False, + units: Unit | None = None, + ): self.empty = empty self.min = min self.max = max @@ -40,6 +42,18 @@ def __init__(self, min: T | None = None, max: T | None = None, empty: bool = Fal raise ValueError("empty range cannot have min or max") if min is not None and max is not None and not min <= max: raise ValueError("min must be less than or equal to max") + if min is None and max is None: + if units is None: + raise ValueError("units must be provided for empyt and full ranges") + self.units = units + else: + min_unit = min.units if isinstance(min, HasUnit) else dimensionless + max_unit = max.units if isinstance(max, HasUnit) else dimensionless + if units and not min_unit.is_compatible_with(units): + raise ValueError("min incompatible with units") + if units and not max_unit.is_compatible_with(units): + raise ValueError("max incompatible with units") + self.units = units or min_unit def __contains__(self, item: T): if self.min is not None and not self.min <= item: @@ -48,13 +62,6 @@ def __contains__(self, item: T): return False return True - def is_compatible_with_unit(self, unit: Unit | Quantity) -> bool: - for m in [self.min, self.max]: - if isinstance(m, Quantity) and not unit.is_compatible_with(m.units): - return False - - return True - @classmethod def from_center(cls, center: T, abs_tol: T) -> "Range[T]": return cls(center - abs_tol, center + abs_tol) @@ -84,7 +91,7 @@ def intersection(self, other: "Range[T]") -> "Range[T]": if (_min is not None 
and (_min not in self or _min not in other)) or ( _max is not None and (_max not in self or _max not in other) ): - return Range(empty=True) + return Range(empty=True, units=self.units) return Range(_min, _max) @@ -99,21 +106,19 @@ def __eq__(self, value: Any) -> bool: class Single[T](Set_[T]): def __init__(self, value: T): self.value = value + self.units = value.units if isinstance(value, HasUnit) else dimensionless def __contains__(self, item: T): return item == self.value - def is_compatible_with_unit(self, unit: Unit | Quantity) -> bool: - if isinstance(self.value, Quantity) and not unit.is_compatible_with( - self.value.units - ): - return False - return True - class Set[T](Set_[T]): def __init__(self, *elements: T): self.elements = set(elements) + units = [e.units if isinstance(e, HasUnit) else dimensionless for e in elements] + self.units = units[0] + if not all(u.is_compatible_with(self.units) for u in units): + raise ValueError("all elements must have compatible units") def __contains__(self, item: T): return item in self.elements diff --git a/src/faebryk/libs/units.py b/src/faebryk/libs/units.py index e4bf4c51..8b1c22d0 100644 --- a/src/faebryk/libs/units.py +++ b/src/faebryk/libs/units.py @@ -2,14 +2,24 @@ # SPDX-License-Identifier: MIT # re-exporting Quantity in-case we ever want to change it +from typing import Protocol, runtime_checkable + from pint import Quantity as _Quantity # noqa: F401 from pint import UndefinedUnitError, Unit, UnitRegistry # noqa: F401 from pint.util import UnitsContainer as _UnitsContainer +from faebryk.libs.util import cast_assert + P = UnitRegistry() UnitsContainer = _UnitsContainer | str Quantity = P.Quantity +dimensionless = cast_assert(Unit, P.dimensionless) + + +@runtime_checkable +class HasUnit(Protocol): + units: Unit def to_si_str( diff --git a/test/libs/test_sets.py b/test/libs/test_sets.py index a4632e82..3f182693 100644 --- a/test/libs/test_sets.py +++ b/test/libs/test_sets.py @@ -5,7 +5,8 @@ from pint import DimensionalityError from faebryk.libs.sets import Range -from faebryk.libs.units import P +from faebryk.libs.units import P, Unit, dimensionless +from faebryk.libs.util import cast_assert def test_range_intersection_simple(): @@ -17,19 +18,32 @@ def test_range_intersection_simple(): def test_range_intersection_empty(): x = Range(0, 10) y = x.intersection(Range(15, 20)) - assert y == Range(empty=True) + assert y == Range(empty=True, units=dimensionless) def test_range_unit_none(): x = Range(0, 10) - assert x.is_compatible_with_unit(P.V) + assert not x.units.is_compatible_with(P.V) def test_range_unit_same(): y = Range(0 * P.V, 10 * P.V) - assert y.is_compatible_with_unit(P.V) + assert y.units.is_compatible_with(P.V) def test_range_unit_different(): with pytest.raises(DimensionalityError): Range(0 * P.V, 10 * P.A) + with pytest.raises(ValueError): + Range(0 * P.V, 10 * P.V, units=cast_assert(Unit, P.A)) + with pytest.raises(ValueError): + Range(max=10 * P.V, units=cast_assert(Unit, P.A)) + with pytest.raises(ValueError): + Range(min=10 * P.V, units=cast_assert(Unit, P.A)) + + +def test_range_force_unit(): + with pytest.raises(ValueError): + Range(empty=True) + with pytest.raises(ValueError): + Range() From df73c8a1ec0f2507b44c254e5aaae16c5cef15bb Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 8 Oct 2024 19:17:44 +0200 Subject: [PATCH 15/80] fix range from_cente --- src/faebryk/core/parameter.py | 2 +- src/faebryk/libs/sets.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git 
a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index fb659a0b..16cdf8ca 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -6,7 +6,7 @@ from faebryk.core.core import Namespace from faebryk.core.node import Node, f_field -from faebryk.libs.sets import Range, Set_, Set_ +from faebryk.libs.sets import Range, Set_ from faebryk.libs.units import HasUnit, P, Quantity, Unit, dimensionless logger = logging.getLogger(__name__) diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 486289e1..50f4ab1c 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -13,11 +13,14 @@ def __lt__(self, __value) -> bool: ... def __ge__(self, __value) -> bool: ... def __gt__(self, __value) -> bool: ... - def __mul__(self, __value: float | Self) -> Self: ... def __sub__(self, __value: Self) -> Self: ... def __add__(self, __value: Self) -> Self: ... +class _SupportsArithmeticOpsWithFloatMul(_SupportsRangeOps): + def __mul__(self, __value: float | Self) -> Self: ... + + class Set_[T](ABC, HasUnit): def __init__(self): pass @@ -66,9 +69,11 @@ def __contains__(self, item: T): def from_center(cls, center: T, abs_tol: T) -> "Range[T]": return cls(center - abs_tol, center + abs_tol) - @classmethod - def from_center_rel(cls, center: T, rel_tol: float) -> "Range[T]": - return cls(center - center * rel_tol, center + center * rel_tol) + @staticmethod + def from_center_rel[U: _SupportsArithmeticOpsWithFloatMul]( + center: U, rel_tol: float + ) -> "Range[U]": + return Range[U](center - center * rel_tol, center + center * rel_tol) def intersection(self, other: "Range[T]") -> "Range[T]": if self.empty or other.empty: From 2792ca3061173e78b6b93d1a68516f63ac885218 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 09:03:20 +0200 Subject: [PATCH 16/80] unit => units --- src/faebryk/library/BH1750FVI_TR.py | 2 +- src/faebryk/library/Battery.py | 4 ++-- src/faebryk/library/Button.py | 2 +- src/faebryk/library/Capacitor.py | 4 ++-- src/faebryk/library/Common_Mode_Filter.py | 8 ++++---- src/faebryk/library/Comparator.py | 10 +++++----- src/faebryk/library/Crystal.py | 14 +++++++------- src/faebryk/library/DifferentialPair.py | 2 +- src/faebryk/library/Diode.py | 8 ++++---- .../library/Diodes_Incorporated_AP2552W6_7.py | 2 +- src/faebryk/library/EEPROM.py | 2 +- src/faebryk/library/ElectricPower.py | 4 ++-- src/faebryk/library/Electrical.py | 2 +- src/faebryk/library/Filter.py | 2 +- src/faebryk/library/Fuse.py | 2 +- src/faebryk/library/GDT.py | 4 ++-- src/faebryk/library/HLK_LD2410B_P.py | 2 +- src/faebryk/library/Header.py | 2 +- src/faebryk/library/I2C.py | 2 +- src/faebryk/library/Inductor.py | 8 ++++---- src/faebryk/library/LDO.py | 12 ++++++------ src/faebryk/library/LED.py | 4 ++-- src/faebryk/library/MOSFET.py | 8 ++++---- src/faebryk/library/OpAmp.py | 14 +++++++------- src/faebryk/library/PM1006.py | 2 +- src/faebryk/library/Potentiometer.py | 2 +- src/faebryk/library/Relay.py | 12 ++++++------ src/faebryk/library/Resistor.py | 6 +++--- src/faebryk/library/Resistor_Voltage_Divider.py | 4 ++-- src/faebryk/library/SCD40.py | 2 +- src/faebryk/library/SPIFlash.py | 2 +- src/faebryk/library/TVS.py | 2 +- src/faebryk/library/UART_Base.py | 2 +- src/faebryk/library/UART_RS485.py | 4 ++-- src/faebryk/library/XL_3528RGBW_WS2812B.py | 2 +- 35 files changed, 82 insertions(+), 82 deletions(-) diff --git a/src/faebryk/library/BH1750FVI_TR.py b/src/faebryk/library/BH1750FVI_TR.py index 972f02a6..d027c835 100644 --- 
a/src/faebryk/library/BH1750FVI_TR.py +++ b/src/faebryk/library/BH1750FVI_TR.py @@ -14,7 +14,7 @@ class BH1750FVI_TR(Module): class _bh1750_esphome_config(F.has_esphome_config.impl()): update_interval = L.p_field( - unit=P.s, + units=P.s, soft_set=L.Range(100 * P.ms, 1 * P.day), guess=1 * P.s, ) diff --git a/src/faebryk/library/Battery.py b/src/faebryk/library/Battery.py index 820d1b8e..519caeb1 100644 --- a/src/faebryk/library/Battery.py +++ b/src/faebryk/library/Battery.py @@ -10,12 +10,12 @@ class Battery(Module): voltage = L.p_field( - unit=P.V, + units=P.V, soft_set=L.Range(0 * P.V, 100 * P.V), likely_constrained=True, ) capacity = L.p_field( - unit=P.Ah, + units=P.Ah, soft_set=L.Range(100 * P.mAh, 100 * P.Ah), likely_constrained=True, ) diff --git a/src/faebryk/library/Button.py b/src/faebryk/library/Button.py index 21922bf4..082b89ec 100644 --- a/src/faebryk/library/Button.py +++ b/src/faebryk/library/Button.py @@ -14,7 +14,7 @@ class Button(Module): unnamed = L.list_field(2, F.Electrical) height = L.p_field( - unit=P.mm, + units=P.mm, likely_constrained=False, soft_set=L.Range(1 * P.mm, 10 * P.mm), tolerance_guess=10 * P.percent, diff --git a/src/faebryk/library/Capacitor.py b/src/faebryk/library/Capacitor.py index c88eadbb..6058df67 100644 --- a/src/faebryk/library/Capacitor.py +++ b/src/faebryk/library/Capacitor.py @@ -27,13 +27,13 @@ class TemperatureCoefficient(IntEnum): unnamed = L.list_field(2, F.Electrical) capacitance = L.p_field( - unit=P.F, + units=P.F, likely_constrained=True, soft_set=L.Range(100 * P.pF, 1 * P.F), tolerance_guess=10 * P.percent, ) rated_voltage = L.p_field( - unit=P.V, + units=P.V, likely_constrained=True, soft_set=L.Range(10 * P.V, 100 * P.V), tolerance_guess=10 * P.percent, diff --git a/src/faebryk/library/Common_Mode_Filter.py b/src/faebryk/library/Common_Mode_Filter.py index 94d9b35a..ff9f71a0 100644 --- a/src/faebryk/library/Common_Mode_Filter.py +++ b/src/faebryk/library/Common_Mode_Filter.py @@ -16,25 +16,25 @@ class Common_Mode_Filter(Module): coil_b: F.Inductor inductance = L.p_field( - unit=P.H, + units=P.H, likely_constrained=True, soft_set=L.Range(1 * P.µH, 10 * P.mH), tolerance_guess=10 * P.percent, ) self_resonant_frequency = L.p_field( - unit=P.Hz, + units=P.Hz, likely_constrained=True, soft_set=L.Range(100 * P.Hz, 1 * P.MHz), tolerance_guess=10 * P.percent, ) rated_current = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, soft_set=L.Range(1 * P.A, 10 * P.A), tolerance_guess=10 * P.percent, ) dc_resistance = L.p_field( - unit=P.Ω, + units=P.Ω, ) designator_prefix = L.f_field(F.has_designator_prefix_defined)( diff --git a/src/faebryk/library/Comparator.py b/src/faebryk/library/Comparator.py index d7141794..c14cf8b8 100644 --- a/src/faebryk/library/Comparator.py +++ b/src/faebryk/library/Comparator.py @@ -16,30 +16,30 @@ class OutputType(Enum): OpenDrain = auto() common_mode_rejection_ratio = L.p_field( - unit=P.dB, + units=P.dB, likely_constrained=True, soft_set=L.Range(60 * P.dB, 120 * P.dB), tolerance_guess=10 * P.percent, ) input_bias_current = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, soft_set=L.Range(1 * P.pA, 1 * P.µA), tolerance_guess=20 * P.percent, ) input_hysteresis_voltage = L.p_field( - unit=P.V, + units=P.V, likely_constrained=True, soft_set=L.Range(1 * P.mV, 100 * P.mV), tolerance_guess=15 * P.percent, ) input_offset_voltage = L.p_field( - unit=P.V, + units=P.V, soft_set=L.Range(10 * P.µV, 10 * P.mV), tolerance_guess=20 * P.percent, ) propagation_delay = L.p_field( - unit=P.s, + units=P.s, 
soft_set=L.Range(10 * P.ns, 1 * P.ms), tolerance_guess=15 * P.percent, ) diff --git a/src/faebryk/library/Crystal.py b/src/faebryk/library/Crystal.py index b985d197..d1d03902 100644 --- a/src/faebryk/library/Crystal.py +++ b/src/faebryk/library/Crystal.py @@ -18,49 +18,49 @@ class Crystal(Module): # parameters # ---------------------------------------- frequency = L.p_field( - unit=P.Hz, + units=P.Hz, likely_constrained=True, soft_set=L.Range(32.768 * P.kHz, 100 * P.MHz), tolerance_guess=50 * P.ppm, ) frequency_tolerance = L.p_field( - unit=P.ppm, + units=P.ppm, likely_constrained=True, soft_set=L.Range(10 * P.ppm, 100 * P.ppm), tolerance_guess=10 * P.percent, ) frequency_temperature_tolerance = L.p_field( - unit=P.ppm, + units=P.ppm, likely_constrained=True, soft_set=L.Range(1 * P.ppm, 50 * P.ppm), tolerance_guess=10 * P.percent, ) frequency_ageing = L.p_field( - unit=P.ppm, + units=P.ppm, likely_constrained=True, soft_set=L.Range(1 * P.ppm, 10 * P.ppm), tolerance_guess=20 * P.percent, ) equivalent_series_resistance = L.p_field( - unit=P.Ω, + units=P.Ω, likely_constrained=True, soft_set=L.Range(10 * P.Ω, 200 * P.Ω), tolerance_guess=10 * P.percent, ) shunt_capacitance = L.p_field( - unit=P.F, + units=P.F, likely_constrained=True, soft_set=L.Range(1 * P.pF, 10 * P.pF), tolerance_guess=20 * P.percent, ) load_capacitance = L.p_field( - unit=P.F, + units=P.F, likely_constrained=True, soft_set=L.Range(8 * P.pF, 30 * P.pF), tolerance_guess=10 * P.percent, diff --git a/src/faebryk/library/DifferentialPair.py b/src/faebryk/library/DifferentialPair.py index 3bd374b2..d155545a 100644 --- a/src/faebryk/library/DifferentialPair.py +++ b/src/faebryk/library/DifferentialPair.py @@ -14,7 +14,7 @@ class DifferentialPair(ModuleInterface): n: F.SignalElectrical impedance = L.p_field( - unit=P.Ω, + units=P.Ω, likely_constrained=True, soft_set=L.Range(10 * P.Ω, 100 * P.Ω), tolerance_guess=10 * P.percent, diff --git a/src/faebryk/library/Diode.py b/src/faebryk/library/Diode.py index 14993350..98b2220e 100644 --- a/src/faebryk/library/Diode.py +++ b/src/faebryk/library/Diode.py @@ -10,25 +10,25 @@ class Diode(Module): forward_voltage = L.p_field( - unit=P.V, + units=P.V, likely_constrained=True, soft_set=L.Range(0.1 * P.V, 1 * P.V), tolerance_guess=10 * P.percent, ) current = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, soft_set=L.Range(0.1 * P.mA, 100 * P.A), tolerance_guess=10 * P.percent, ) reverse_working_voltage = L.p_field( - unit=P.V, + units=P.V, likely_constrained=True, soft_set=L.Range(10 * P.V, 100 * P.V), tolerance_guess=10 * P.percent, ) reverse_leakage_current = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, soft_set=L.Range(0.1 * P.nA, 1 * P.µA), tolerance_guess=10 * P.percent, diff --git a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py index 5b49960a..245e1e70 100644 --- a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py +++ b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py @@ -60,7 +60,7 @@ def set_current_limit(self, current: ParameterOperatable) -> None: ilim: F.SignalElectrical current_limit = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, soft_set=L.Range(100 * P.mA, 2.1 * P.A), tolerance_guess=10 * P.percent, diff --git a/src/faebryk/library/EEPROM.py b/src/faebryk/library/EEPROM.py index 00f7acd0..156e957d 100644 --- a/src/faebryk/library/EEPROM.py +++ b/src/faebryk/library/EEPROM.py @@ -26,7 +26,7 @@ def set_address(self, addr: int): # 
---------------------------------------- memory_size = L.p_field( - unit=P.bit, + units=P.bit, likely_constrained=True, domain=L.Domains.Numbers.NATURAL(), soft_set=L.Range(128 * P.bit, 1024 * P.kbit), diff --git a/src/faebryk/library/ElectricPower.py b/src/faebryk/library/ElectricPower.py index 68138229..9b6529ff 100644 --- a/src/faebryk/library/ElectricPower.py +++ b/src/faebryk/library/ElectricPower.py @@ -46,13 +46,13 @@ def protect(self): lv: F.Electrical voltage = L.p_field( - unit=P.V, + units=P.V, likely_constrained=True, domain=L.Domains.Numbers.REAL(), soft_set=L.Range(0 * P.V, 1000 * P.V), tolerance_guess=5 * P.percent, ) - # max_current= L.p_field(unit=P.A) + # max_current= L.p_field(units=P.A) """ Only for this particular power interface Does not propagate to connections diff --git a/src/faebryk/library/Electrical.py b/src/faebryk/library/Electrical.py index 69ac0b1e..57904184 100644 --- a/src/faebryk/library/Electrical.py +++ b/src/faebryk/library/Electrical.py @@ -5,7 +5,7 @@ class Electrical(ModuleInterface): - # potential= L.p_field(unit=P.dimensionless) + # potential= L.p_field(units=P.dimensionless) def get_net(self): from faebryk.library.Net import Net diff --git a/src/faebryk/library/Filter.py b/src/faebryk/library/Filter.py index 50d6b871..79324ff1 100644 --- a/src/faebryk/library/Filter.py +++ b/src/faebryk/library/Filter.py @@ -18,7 +18,7 @@ class Response(Enum): OTHER = auto() cutoff_frequency = L.p_field( - unit=P.Hz, + units=P.Hz, likely_constrained=True, domain=L.Domains.Numbers.REAL(), soft_set=L.Range(0 * P.Hz, 1000 * P.Hz), diff --git a/src/faebryk/library/Fuse.py b/src/faebryk/library/Fuse.py index aaafd43a..d8939f8e 100644 --- a/src/faebryk/library/Fuse.py +++ b/src/faebryk/library/Fuse.py @@ -29,7 +29,7 @@ class ResponseType(Enum): domain=L.Domains.ENUM(ResponseType), ) trip_current = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, domain=L.Domains.Numbers.REAL(), soft_set=L.Range(100 * P.mA, 100 * P.A), diff --git a/src/faebryk/library/GDT.py b/src/faebryk/library/GDT.py index 42e47172..07b135ca 100644 --- a/src/faebryk/library/GDT.py +++ b/src/faebryk/library/GDT.py @@ -17,12 +17,12 @@ class GDT(Module): tube_2: F.Electrical dc_breakdown_voltage = L.p_field( - unit=P.V, + units=P.V, likely_constrained=True, soft_set=L.Range(100 * P.V, 1000 * P.V), ) impulse_discharge_current = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, soft_set=L.Range(100 * P.mA, 100 * P.A), ) diff --git a/src/faebryk/library/HLK_LD2410B_P.py b/src/faebryk/library/HLK_LD2410B_P.py index 6512ad60..d6f00445 100644 --- a/src/faebryk/library/HLK_LD2410B_P.py +++ b/src/faebryk/library/HLK_LD2410B_P.py @@ -10,7 +10,7 @@ class HLK_LD2410B_P(Module): class _ld2410b_esphome_config(F.has_esphome_config.impl()): throttle = L.p_field( - unit=P.ms, + units=P.ms, soft_set=L.Range(10 * P.ms, 1000 * P.ms), ) diff --git a/src/faebryk/library/Header.py b/src/faebryk/library/Header.py index 0d80148a..436a2715 100644 --- a/src/faebryk/library/Header.py +++ b/src/faebryk/library/Header.py @@ -38,7 +38,7 @@ def __preinit__(self): self.pin_count_vertical.alias_is(self._vertical_pin_count) pin_pitch = L.p_field( - unit=P.mm, + units=P.mm, likely_constrained=True, domain=L.Domains.Numbers.REAL(), soft_set=L.Range(1 * P.mm, 10 * P.mm), diff --git a/src/faebryk/library/I2C.py b/src/faebryk/library/I2C.py index e47c1ae9..02759fa2 100644 --- a/src/faebryk/library/I2C.py +++ b/src/faebryk/library/I2C.py @@ -16,7 +16,7 @@ class I2C(ModuleInterface): sda: F.ElectricLogic frequency = 
L.p_field( - unit=P.Hz, + units=P.Hz, likely_constrained=True, soft_set=L.Range(10 * P.kHz, 3.4 * P.MHz), ) diff --git a/src/faebryk/library/Inductor.py b/src/faebryk/library/Inductor.py index 5de3f3fb..9c1fdd2f 100644 --- a/src/faebryk/library/Inductor.py +++ b/src/faebryk/library/Inductor.py @@ -13,25 +13,25 @@ class Inductor(Module): unnamed = L.list_field(2, F.Electrical) inductance = L.p_field( - unit=P.H, + units=P.H, likely_constrained=True, soft_set=L.Range(100 * P.nH, 1 * P.H), tolerance_guess=10 * P.percent, ) self_resonant_frequency = L.p_field( - unit=P.Hz, + units=P.Hz, likely_constrained=True, soft_set=L.Range(100 * P.kHz, 1 * P.GHz), tolerance_guess=10 * P.percent, ) rated_current = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, soft_set=L.Range(1 * P.mA, 100 * P.A), tolerance_guess=10 * P.percent, ) dc_resistance = L.p_field( - unit=P.Ω, + units=P.Ω, soft_set=L.Range(10 * P.mΩ, 100 * P.Ω), tolerance_guess=10 * P.percent, ) diff --git a/src/faebryk/library/LDO.py b/src/faebryk/library/LDO.py index 8d25c644..dd4fc02d 100644 --- a/src/faebryk/library/LDO.py +++ b/src/faebryk/library/LDO.py @@ -25,27 +25,27 @@ class OutputPolarity(Enum): NEGATIVE = auto() max_input_voltage = L.p_field( - unit=P.V, + units=P.V, likely_constrained=True, soft_set=L.Range(1 * P.V, 100 * P.V), ) output_voltage = L.p_field( - unit=P.V, + units=P.V, likely_constrained=True, soft_set=L.Range(1 * P.V, 100 * P.V), ) quiescent_current = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, soft_set=L.Range(1 * P.mA, 100 * P.mA), ) dropout_voltage = L.p_field( - unit=P.V, + units=P.V, likely_constrained=True, soft_set=L.Range(1 * P.mV, 100 * P.mV), ) psrr = L.p_field( - unit=P.dB, + units=P.dB, likely_constrained=True, soft_set=L.Range(1 * P.dB, 100 * P.dB), ) @@ -56,7 +56,7 @@ class OutputPolarity(Enum): domain=L.Domains.ENUM(OutputType), ) output_current = L.p_field( - unit=P.A, + units=P.A, likely_constrained=True, soft_set=L.Range(1 * P.mA, 100 * P.mA), ) diff --git a/src/faebryk/library/LED.py b/src/faebryk/library/LED.py index 5243f531..938a8417 100644 --- a/src/faebryk/library/LED.py +++ b/src/faebryk/library/LED.py @@ -41,8 +41,8 @@ class Color(Enum): ULTRA_VIOLET = auto() INFRA_RED = auto() - brightness = L.p_field(unit=P.candela) - max_brightness = L.p_field(unit=P.candela) + brightness = L.p_field(units=P.candela) + max_brightness = L.p_field(units=P.candela) color = L.p_field(domain=L.Domains.ENUM(Color)) def __preinit__(self): diff --git a/src/faebryk/library/MOSFET.py b/src/faebryk/library/MOSFET.py index db4df9aa..50575724 100644 --- a/src/faebryk/library/MOSFET.py +++ b/src/faebryk/library/MOSFET.py @@ -20,10 +20,10 @@ class SaturationType(Enum): channel_type = L.p_field(domain=L.Domains.ENUM(ChannelType)) saturation_type = L.p_field(domain=L.Domains.ENUM(SaturationType)) - gate_source_threshold_voltage = L.p_field(unit=P.V) - max_drain_source_voltage = L.p_field(unit=P.V) - max_continuous_drain_current = L.p_field(unit=P.A) - on_resistance = L.p_field(unit=P.ohm) + gate_source_threshold_voltage = L.p_field(units=P.V) + max_drain_source_voltage = L.p_field(units=P.V) + max_continuous_drain_current = L.p_field(units=P.A) + on_resistance = L.p_field(units=P.ohm) source: F.Electrical gate: F.Electrical diff --git a/src/faebryk/library/OpAmp.py b/src/faebryk/library/OpAmp.py index 44d1dc83..c3402c81 100644 --- a/src/faebryk/library/OpAmp.py +++ b/src/faebryk/library/OpAmp.py @@ -8,13 +8,13 @@ class OpAmp(Module): - bandwidth = L.p_field(unit=P.Hz) - 
common_mode_rejection_ratio = L.p_field(unit=P.dimensionless) - input_bias_current = L.p_field(unit=P.A) - input_offset_voltage = L.p_field(unit=P.V) - gain_bandwidth_product = L.p_field(unit=P.Hz) - output_current = L.p_field(unit=P.A) - slew_rate = L.p_field(unit=P.V / P.s) + bandwidth = L.p_field(units=P.Hz) + common_mode_rejection_ratio = L.p_field(units=P.dimensionless) + input_bias_current = L.p_field(units=P.A) + input_offset_voltage = L.p_field(units=P.V) + gain_bandwidth_product = L.p_field(units=P.Hz) + output_current = L.p_field(units=P.A) + slew_rate = L.p_field(units=P.V / P.s) power: F.ElectricPower inverting_input: F.Electrical diff --git a/src/faebryk/library/PM1006.py b/src/faebryk/library/PM1006.py index 7e16ed51..1e9b6a1a 100644 --- a/src/faebryk/library/PM1006.py +++ b/src/faebryk/library/PM1006.py @@ -26,7 +26,7 @@ class PM1006(Module): """ class _pm1006_esphome_config(F.has_esphome_config.impl()): - update_interval = L.p_field(unit=P.s) + update_interval = L.p_field(units=P.s) def get_config(self) -> dict: val = self.update_interval.get_most_narrow() diff --git a/src/faebryk/library/Potentiometer.py b/src/faebryk/library/Potentiometer.py index 63195bba..4cf32cb4 100644 --- a/src/faebryk/library/Potentiometer.py +++ b/src/faebryk/library/Potentiometer.py @@ -12,7 +12,7 @@ class Potentiometer(Module): resistors_ifs = L.list_field(2, F.Electrical) wiper: F.Electrical - total_resistance = L.p_field(unit=P.ohm) + total_resistance = L.p_field(units=P.ohm) resistors = L.list_field(2, F.Resistor) def __preinit__(self): diff --git a/src/faebryk/library/Relay.py b/src/faebryk/library/Relay.py index 862f6bdc..81dc4d1f 100644 --- a/src/faebryk/library/Relay.py +++ b/src/faebryk/library/Relay.py @@ -22,12 +22,12 @@ class Relay(Module): coil_p: F.Electrical coil_n: F.Electrical - coil_rated_voltage = L.p_field(unit=P.V) - coil_rated_current = L.p_field(unit=P.A) - coil_resistance = L.p_field(unit=P.ohm) - contact_max_switching_voltage = L.p_field(unit=P.V) - contact_rated_switching_current = L.p_field(unit=P.A) - contact_max_switchng_current = L.p_field(unit=P.A) + coil_rated_voltage = L.p_field(units=P.V) + coil_rated_current = L.p_field(units=P.A) + coil_resistance = L.p_field(units=P.ohm) + contact_max_switching_voltage = L.p_field(units=P.V) + contact_rated_switching_current = L.p_field(units=P.A) + contact_max_switchng_current = L.p_field(units=P.A) designator_prefix = L.f_field(F.has_designator_prefix_defined)( F.has_designator_prefix.Prefix.K diff --git a/src/faebryk/library/Resistor.py b/src/faebryk/library/Resistor.py index 1df61d6d..5642444c 100644 --- a/src/faebryk/library/Resistor.py +++ b/src/faebryk/library/Resistor.py @@ -15,9 +15,9 @@ class Resistor(Module): unnamed = L.list_field(2, F.Electrical) - resistance = L.p_field(unit=P.ohm) - rated_power = L.p_field(unit=P.W) - rated_voltage = L.p_field(unit=P.V) + resistance = L.p_field(units=P.ohm) + rated_power = L.p_field(units=P.W) + rated_voltage = L.p_field(units=P.V) attach_to_footprint: F.can_attach_to_footprint_symmetrically designator_prefix = L.f_field(F.has_designator_prefix_defined)( diff --git a/src/faebryk/library/Resistor_Voltage_Divider.py b/src/faebryk/library/Resistor_Voltage_Divider.py index f4eab884..405526bb 100644 --- a/src/faebryk/library/Resistor_Voltage_Divider.py +++ b/src/faebryk/library/Resistor_Voltage_Divider.py @@ -15,8 +15,8 @@ class Resistor_Voltage_Divider(Module): resistor = L.list_field(2, F.Resistor) node = L.list_field(3, F.Electrical) - ratio = L.p_field(unit=P.dimensionless) - 
max_current = L.p_field(unit=P.A) + ratio = L.p_field(units=P.dimensionless) + max_current = L.p_field(units=P.A) def __preinit__(self): self.node[0].connect_via(self.resistor[0], self.node[1]) diff --git a/src/faebryk/library/SCD40.py b/src/faebryk/library/SCD40.py index b9f6d4dc..7a09b9fa 100644 --- a/src/faebryk/library/SCD40.py +++ b/src/faebryk/library/SCD40.py @@ -14,7 +14,7 @@ class SCD40(Module): """ class _scd4x_esphome_config(F.has_esphome_config.impl()): - update_interval = L.p_field(unit=P.s) + update_interval = L.p_field(units=P.s) def get_config(self) -> dict: val = self.update_interval.get_most_narrow() diff --git a/src/faebryk/library/SPIFlash.py b/src/faebryk/library/SPIFlash.py index cae5e633..fd1fb4b4 100644 --- a/src/faebryk/library/SPIFlash.py +++ b/src/faebryk/library/SPIFlash.py @@ -12,7 +12,7 @@ class SPIFlash(Module): spi = L.f_field(F.MultiSPI)(4) memory_size = L.p_field( - unit=P.byte, + units=P.byte, domain=L.Domains.Numbers.NATURAL(), ) designator_prefix = L.f_field(F.has_designator_prefix_defined)( diff --git a/src/faebryk/library/TVS.py b/src/faebryk/library/TVS.py index 9d3599b6..c0a28e65 100644 --- a/src/faebryk/library/TVS.py +++ b/src/faebryk/library/TVS.py @@ -11,4 +11,4 @@ class TVS(F.Diode): - reverse_breakdown_voltage = L.p_field(unit=P.V) + reverse_breakdown_voltage = L.p_field(units=P.V) diff --git a/src/faebryk/library/UART_Base.py b/src/faebryk/library/UART_Base.py index 819fb988..4deaff82 100644 --- a/src/faebryk/library/UART_Base.py +++ b/src/faebryk/library/UART_Base.py @@ -11,7 +11,7 @@ class UART_Base(ModuleInterface): rx: F.ElectricLogic tx: F.ElectricLogic - baud = L.p_field(unit=P.baud) + baud = L.p_field(units=P.baud) @L.rt_field def single_electric_reference(self): diff --git a/src/faebryk/library/UART_RS485.py b/src/faebryk/library/UART_RS485.py index 0c22ac31..ec4b23e4 100644 --- a/src/faebryk/library/UART_RS485.py +++ b/src/faebryk/library/UART_RS485.py @@ -18,8 +18,8 @@ class UART_RS485(Module): read_enable: F.ElectricLogic write_enable: F.ElectricLogic - max_data_rate = L.p_field(unit=P.baud) - gpio_voltage = L.p_field(unit=P.V) + max_data_rate = L.p_field(units=P.baud) + gpio_voltage = L.p_field(units=P.V) def __preinit__(self): self.max_data_rate.alias_is(self.uart.baud) diff --git a/src/faebryk/library/XL_3528RGBW_WS2812B.py b/src/faebryk/library/XL_3528RGBW_WS2812B.py index fef0f609..6c29922b 100644 --- a/src/faebryk/library/XL_3528RGBW_WS2812B.py +++ b/src/faebryk/library/XL_3528RGBW_WS2812B.py @@ -9,7 +9,7 @@ class XL_3528RGBW_WS2812B(Module): class _ws2812b_esphome_config(F.has_esphome_config.impl()): - update_interval = L.p_field(unit=P.s) + update_interval = L.p_field(units=P.s) def get_config(self) -> dict: assert isinstance(self.update_interval, F.Constant) From 52126b2bfce640eefcc6ac7f114367847fa2c19d Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 10:20:47 +0200 Subject: [PATCH 17/80] Replace merge with {subset, superset, alias_is} half Lib --- src/faebryk/library/Diode.py | 9 +++ src/faebryk/library/ESP32_C3.py | 26 ++++--- src/faebryk/library/ESP32_C3_MINI_1.py | 4 +- .../ESP32_C3_MINI_1_ReferenceDesign.py | 17 +++-- src/faebryk/library/ElectricPower.py | 2 +- src/faebryk/library/FilterElectricalLC.py | 21 ++---- src/faebryk/library/FilterElectricalRC.py | 19 ++--- src/faebryk/library/GenericBusProtection.py | 2 +- src/faebryk/library/HLK_LD2410B_P.py | 2 +- src/faebryk/library/INA228.py | 2 +- src/faebryk/library/INA228_ReferenceDesign.py | 18 +++-- src/faebryk/library/ISO1540.py | 4 +- 
.../library/ISO1540_ReferenceDesign.py | 8 +- src/faebryk/library/LDO.py | 4 +- src/faebryk/library/LED.py | 11 +-- src/faebryk/library/Logic.py | 4 +- src/faebryk/library/M24C08_FMN6TP.py | 4 +- src/faebryk/library/ME6211C33M5G_N.py | 2 +- src/faebryk/library/MultiCapacitor.py | 4 +- src/faebryk/library/OLED_Module.py | 6 +- src/faebryk/library/PM1006.py | 4 +- src/faebryk/library/PowerSwitch.py | 2 +- src/faebryk/library/PowerSwitchMOSFET.py | 9 ++- src/faebryk/library/QWIIC_Connector.py | 4 +- src/faebryk/library/RP2040.py | 20 +++-- src/faebryk/library/RP2040_ReferenceDesign.py | 58 ++++++++------- src/faebryk/library/RS485_Bus_Protection.py | 52 +++++++------ src/faebryk/library/Resistor.py | 73 ------------------- src/faebryk/library/SCD40.py | 4 +- src/faebryk/library/SNx4LVC541A.py | 2 +- src/faebryk/library/SP3243E.py | 10 +-- .../library/SP3243E_ReferenceDesign.py | 18 +++-- src/faebryk/library/SignalElectrical.py | 2 +- src/faebryk/library/TD541S485H.py | 3 +- src/faebryk/library/TPS2116.py | 2 +- src/faebryk/library/UART_RS485.py | 2 +- src/faebryk/library/USB2_0.py | 5 +- src/faebryk/library/USB2_0_ESD_Protection.py | 8 +- src/faebryk/library/USB_C_5V_PSU.py | 4 +- src/faebryk/library/USB_C_PSU_Vertical.py | 20 +++-- src/faebryk/library/USB_RS485.py | 12 +-- src/faebryk/library/pf_74AHCT2G125.py | 2 +- 42 files changed, 229 insertions(+), 256 deletions(-) diff --git a/src/faebryk/library/Diode.py b/src/faebryk/library/Diode.py index 98b2220e..de7d0543 100644 --- a/src/faebryk/library/Diode.py +++ b/src/faebryk/library/Diode.py @@ -33,6 +33,12 @@ class Diode(Module): soft_set=L.Range(0.1 * P.nA, 1 * P.µA), tolerance_guess=10 * P.percent, ) + max_current = L.p_field( + units=P.A, + likely_constrained=True, + soft_set=L.Range(0.1 * P.mA, 100 * P.A), + tolerance_guess=10 * P.percent, + ) anode: F.Electrical cathode: F.Electrical @@ -63,6 +69,9 @@ def pin_association_heuristic(self): case_sensitive=False, ) + def __preinit__(self): + self.current.constrain_le(self.max_current) + def get_needed_series_resistance_for_current_limit( self, input_voltage_V: ParameterOperatable ): diff --git a/src/faebryk/library/ESP32_C3.py b/src/faebryk/library/ESP32_C3.py index 4355b52d..6eff224e 100644 --- a/src/faebryk/library/ESP32_C3.py +++ b/src/faebryk/library/ESP32_C3.py @@ -42,7 +42,7 @@ def __preinit__(self): # https://www.espressif.com/sites/default/files/documentation/esp32-c3_technical_reference_manual_en.pdf#uart for ser in x.uart: - ser.baud.merge(F.Range(0 * P.baud, 5000000 * P.baud)) + ser.baud.constrain_le(5 * P.mbaud) # connect all logic references # TODO: set correctly for each power domain @@ -51,12 +51,14 @@ def __preinit__(self): # set power domain constraints to recommended operating conditions for power_domain in [self.vdd3p3_rtc, self.vdd3p3, self.vdda]: - power_domain.voltage.merge(F.Range.from_center(3.3 * P.V, 0.3 * P.V)) - self.vdd3p3_cpu.voltage.merge( - F.Range(3.0 * P.V, 3.6 * P.V) + power_domain.voltage.constrain_subset( + L.Range.from_center(3.3 * P.V, 0.3 * P.V) + ) + self.vdd3p3_cpu.voltage.constrain_subset( + L.Range(3.0 * P.V, 3.6 * P.V) ) # TODO: max 3.3V when writing eFuses - self.vdd_spi.voltage.merge( - F.Range.from_center(3.3 * P.V, 0.3 * P.V) + self.vdd_spi.voltage.constrain_subset( + L.Range.from_center(3.3 * P.V, 0.3 * P.V) ) # TODO: when configured as input # connect all grounds to eachother and power @@ -211,8 +213,14 @@ def set_default_boot_mode(self, default_boot_to_spi_flash: bool = True): # set default boot mode to "SPI Boot mode" # 
https://www.espressif.com/sites/default/files/documentation/esp32-c3_datasheet_en.pdf page 26 # noqa E501 # TODO: make configurable - self.gpio[8].pulled.pull(up=True).resistance.merge(10 * P.kohm) - self.gpio[2].pulled.pull(up=True).resistance.merge(10 * P.kohm) + self.gpio[8].pulled.pull(up=True).resistance.constrain_subset( + L.Range.from_center_rel(10 * P.kohm, 0.1) + ) + self.gpio[2].pulled.pull(up=True).resistance.constrain_subset( + L.Range.from_center_rel(10 * P.kohm, 0.1) + ) # gpio[9] has an internal pull-up at boot = SPI-Boot if not default_boot_to_spi_flash: - self.gpio[9].pulled.pull(up=False).resistance.merge(10 * P.kohm) + self.gpio[9].pulled.pull(up=False).resistance.constrain_subset( + L.Range.from_center_rel(10 * P.kohm, 0.1) + ) diff --git a/src/faebryk/library/ESP32_C3_MINI_1.py b/src/faebryk/library/ESP32_C3_MINI_1.py index 0b4000bb..f19b096d 100644 --- a/src/faebryk/library/ESP32_C3_MINI_1.py +++ b/src/faebryk/library/ESP32_C3_MINI_1.py @@ -36,8 +36,8 @@ def single_electric_reference(self): def __preinit__(self): # connect power decoupling caps - self.vdd3v3.decoupled.decouple().capacitance.merge( - F.Range(100 * P.nF, 10 * P.uF) + self.vdd3v3.decoupled.decouple().capacitance.constrain_subset( + L.Range(100 * P.nF, 10 * P.uF) ) e = self.esp32_c3 diff --git a/src/faebryk/library/ESP32_C3_MINI_1_ReferenceDesign.py b/src/faebryk/library/ESP32_C3_MINI_1_ReferenceDesign.py index fcc1ff14..3542ba56 100644 --- a/src/faebryk/library/ESP32_C3_MINI_1_ReferenceDesign.py +++ b/src/faebryk/library/ESP32_C3_MINI_1_ReferenceDesign.py @@ -5,6 +5,7 @@ import faebryk.library._F as F from faebryk.core.module import Module +from faebryk.libs.library import L from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -23,8 +24,12 @@ def __preinit__(self): self.lp_filter.in_.signal.connect_via( self.button, self.logic_out.reference.lv ) - self.lp_filter.cutoff_frequency.merge(F.Range(100 * P.Hz, 200 * P.Hz)) - self.lp_filter.response.merge(F.Filter.Response.LOWPASS) + self.lp_filter.cutoff_frequency.constrain_subset( + L.Range(100 * P.Hz, 200 * P.Hz) + ) + self.lp_filter.response.constrain_subset( + L.Single(F.Filter.Response.LOWPASS) + ) esp32_c3_mini_1: F.ESP32_C3_MINI_1 boot_switch: DebouncedButton @@ -88,9 +93,9 @@ def __preinit__(self): # ------------------------------------ # parametrization # ------------------------------------ - self.low_speed_crystal_clock.crystal.frequency.merge( - F.Range.from_center_rel(32.768 * P.kHz, 0.001) + self.low_speed_crystal_clock.crystal.frequency.constrain_subset( + L.Range.from_center_rel(32.768 * P.kHz, 0.001) ) - self.low_speed_crystal_clock.crystal.frequency_tolerance.merge( - F.Range(0 * P.ppm, 20 * P.ppm) + self.low_speed_crystal_clock.crystal.frequency_tolerance.constrain_subset( + L.Range(0 * P.ppm, 20 * P.ppm) ) diff --git a/src/faebryk/library/ElectricPower.py b/src/faebryk/library/ElectricPower.py index 9b6529ff..fb29392b 100644 --- a/src/faebryk/library/ElectricPower.py +++ b/src/faebryk/library/ElectricPower.py @@ -38,7 +38,7 @@ def on_obj_set(self): def protect(self): obj = self.get_obj(ElectricPower) return [ - tvs.builder(lambda t: t.reverse_working_voltage.merge(obj.voltage)) + tvs.builder(lambda t: t.reverse_working_voltage.alias_is(obj.voltage)) for tvs in super().protect() ] diff --git a/src/faebryk/library/FilterElectricalLC.py b/src/faebryk/library/FilterElectricalLC.py index 322c275f..713653a0 100644 --- a/src/faebryk/library/FilterElectricalLC.py +++ b/src/faebryk/library/FilterElectricalLC.py @@ -14,6 +14,8 @@ 
class FilterElectricalLC(F.Filter): capacitor: F.Capacitor inductor: F.Inductor + z0 = L.p_field(units=P.ohm) + def __preinit__(self) -> None: ... @L.rt_field @@ -21,24 +23,15 @@ def construction_dependency(self): class _(F.has_construction_dependency.impl()): def _construct(_self): if F.Constant(F.Filter.Response.LOWPASS).is_subset_of(self.response): - self.response.merge(F.Filter.Response.LOWPASS) - - # TODO other orders - self.order.merge(2) + # TODO other orders & types + self.order.constrain_subset(L.Single(2)) + self.response.constrain_subset(L.Single(F.Filter.Response.LOWPASS)) - L = self.inductor.inductance + Li = self.inductor.inductance C = self.capacitor.capacitance fc = self.cutoff_frequency - # TODO requires parameter constraint solving implemented - # fc.merge(1 / (2 * math.pi * math.sqrt(C * L))) - - # instead assume fc being the driving param - realistic_C = F.Range(1 * P.pF, 1 * P.mF) - L.merge(1 / ((2 * math.pi * fc) ** 2 * realistic_C)) - C.merge(1 / ((2 * math.pi * fc) ** 2 * L)) - - # TODO consider splitting C / L in a typical way + fc.alias_is(1 / (2 * math.pi * math.sqrt(C * Li))) # low pass self.in_.signal.connect_via( diff --git a/src/faebryk/library/FilterElectricalRC.py b/src/faebryk/library/FilterElectricalRC.py index eab17126..4fa903c2 100644 --- a/src/faebryk/library/FilterElectricalRC.py +++ b/src/faebryk/library/FilterElectricalRC.py @@ -21,6 +21,8 @@ class FilterElectricalRC(F.Filter): capacitor: F.Capacitor resistor: F.Resistor + z0 = L.p_field(units=P.ohm) + def __preinit__(self): ... @L.rt_field @@ -28,24 +30,15 @@ def construction_dependency(self): class _(F.has_construction_dependency.impl()): def _construct(_self): if F.Constant(F.Filter.Response.LOWPASS).is_subset_of(self.response): - self.response.merge(F.Filter.Response.LOWPASS) - - # TODO other orders - self.order.merge(1) + # TODO other orders, types + self.order.constrain_subset(L.Single(1)) + self.response.constrain_subset(L.Single(F.Filter.Response.LOWPASS)) R = self.resistor.resistance C = self.capacitor.capacitance fc = self.cutoff_frequency - # TODO requires parameter constraint solving implemented - # fc.merge(1 / (2 * math.pi * R * C)) - - # instead assume fc being the driving param - realistic_C = F.Range(1 * P.pF, 1 * P.mF) - R.merge(1 / (2 * math.pi * realistic_C * fc)) - C.merge(1 / (2 * math.pi * R * fc)) - - # TODO consider splitting C / L in a typical way + fc.alias_is(1 / (2 * math.pi * R * C)) # low pass self.in_.signal.connect_via( diff --git a/src/faebryk/library/GenericBusProtection.py b/src/faebryk/library/GenericBusProtection.py index ba7865bb..6bee5ab0 100644 --- a/src/faebryk/library/GenericBusProtection.py +++ b/src/faebryk/library/GenericBusProtection.py @@ -59,7 +59,7 @@ def get_mifs[U: ModuleInterface](bus: T, mif_type: type[U]) -> set[U]: for (power_unprotected, power_protected), fuse in zip(power, fuse): power_unprotected.hv.connect_via(fuse, power_protected.hv) # TODO maybe shallow connect? 
- power_protected.voltage.merge(power_unprotected.voltage) + power_protected.voltage.alias_is(power_unprotected.voltage) # TVS if self.bus_protected.has_trait(F.can_be_surge_protected): diff --git a/src/faebryk/library/HLK_LD2410B_P.py b/src/faebryk/library/HLK_LD2410B_P.py index d6f00445..69b2358d 100644 --- a/src/faebryk/library/HLK_LD2410B_P.py +++ b/src/faebryk/library/HLK_LD2410B_P.py @@ -80,7 +80,7 @@ def attach_to_footprint(self): ) def __preinit__(self): - self.uart.baud.merge(F.Constant(256 * P.kbaud)) + self.uart.baud.constrain_le(L.Single(256 * P.kbaud)) # connect all logic references @L.rt_field diff --git a/src/faebryk/library/INA228.py b/src/faebryk/library/INA228.py index cfb19ac1..0cdb6367 100644 --- a/src/faebryk/library/INA228.py +++ b/src/faebryk/library/INA228.py @@ -110,4 +110,4 @@ def __preinit__(self): # ------------------------------------ # parametrization # ------------------------------------ - self.power.voltage.merge(F.Range(2.7 * P.V, 5.5 * P.V)) + self.power.voltage.constrain_subset(L.Range(2.7 * P.V, 5.5 * P.V)) diff --git a/src/faebryk/library/INA228_ReferenceDesign.py b/src/faebryk/library/INA228_ReferenceDesign.py index 8f2c1570..14ddd365 100644 --- a/src/faebryk/library/INA228_ReferenceDesign.py +++ b/src/faebryk/library/INA228_ReferenceDesign.py @@ -33,9 +33,7 @@ def __init__(self, lowside: bool = False, filtered: bool = False): self._filtered = filtered def __preinit__(self): - self.power_in.voltage.merge( - self.power_out.voltage - ) # TODO: minus voltagedrop over shunt + self.power_in.voltage.alias_is(self.power_out.voltage) self.shunt_sense.p.connect_via(self.shunt, self.shunt_sense.n) if self._lowside: self.power_in.hv.connect_via(self.shunt, self.power_out.hv) @@ -83,8 +81,12 @@ def __preinit__(self): shunted_power = self.add( self.ShuntedElectricPower(lowside=self._lowside, filtered=self._filtered) ) - shunted_power.shunt.resistance.merge(F.Range.from_center_rel(15 * P.mohm, 0.01)) - shunted_power.shunt.rated_power.merge(F.Range.from_center_rel(2 * P.W, 0.01)) + shunted_power.shunt.resistance.constrain_subset( + L.Range.from_center_rel(15 * P.mohm, 0.01) + ) + shunted_power.shunt.rated_power.constrain_subset( + L.Range.from_center_rel(2 * P.W, 0.01) + ) # TODO: calculate according to datasheet p36 # ---------------------------------------- @@ -98,6 +100,8 @@ def __preinit__(self): self.ina288.shunt_input.connect(shunted_power.shunt_sense) # decouple power rail - self.ina288.power.get_trait(F.can_be_decoupled).decouple().capacitance.merge( - F.Range.from_center_rel(0.1 * P.uF, 0.01) + self.ina288.power.get_trait( + F.can_be_decoupled + ).decouple().capacitance.constrain_subset( + L.Range.from_center_rel(0.1 * P.uF, 0.01) ) diff --git a/src/faebryk/library/ISO1540.py b/src/faebryk/library/ISO1540.py index 0541c6ce..97e16cdf 100644 --- a/src/faebryk/library/ISO1540.py +++ b/src/faebryk/library/ISO1540.py @@ -94,5 +94,5 @@ def __preinit__(self): # ------------------------------------ # parametrization # ------------------------------------ - self.non_iso.power.voltage.merge(F.Range(3.0 * P.V, 5.5 * P.V)) - self.iso.power.voltage.merge(F.Range(3.0 * P.V, 5.5 * P.V)) + self.non_iso.power.voltage.constrain_subset(L.Range(3.0 * P.V, 5.5 * P.V)) + self.iso.power.voltage.constrain_subset(L.Range(3.0 * P.V, 5.5 * P.V)) diff --git a/src/faebryk/library/ISO1540_ReferenceDesign.py b/src/faebryk/library/ISO1540_ReferenceDesign.py index 9033c095..574ca139 100644 --- a/src/faebryk/library/ISO1540_ReferenceDesign.py +++ 
b/src/faebryk/library/ISO1540_ReferenceDesign.py @@ -22,9 +22,9 @@ class ISO1540_ReferenceDesign(Module): # ---------------------------------------- def __preinit__(self): - self.isolator.non_iso.power.decoupled.decouple().capacitance.merge( - F.Range.from_center_rel(10 * P.uF, 0.01) + self.isolator.non_iso.power.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(10 * P.uF, 0.01) ) - self.isolator.iso.power.decoupled.decouple().capacitance.merge( - F.Range.from_center_rel(10 * P.uF, 0.01) + self.isolator.iso.power.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(10 * P.uF, 0.01) ) diff --git a/src/faebryk/library/LDO.py b/src/faebryk/library/LDO.py index dd4fc02d..c5b86eca 100644 --- a/src/faebryk/library/LDO.py +++ b/src/faebryk/library/LDO.py @@ -65,8 +65,8 @@ class OutputPolarity(Enum): power_out = L.d_field(lambda: F.ElectricPower().make_source()) def __preinit__(self): - self.max_input_voltage.merge(F.Range(self.power_in.voltage, math.inf * P.V)) - self.power_out.voltage.merge(self.output_voltage) + self.max_input_voltage.constrain_ge(self.power_in.voltage) + self.power_out.voltage.alias_is(self.output_voltage) self.enable.reference.connect(self.power_in) # TODO: should be implemented differently (see below) diff --git a/src/faebryk/library/LED.py b/src/faebryk/library/LED.py index 938a8417..6f98f62a 100644 --- a/src/faebryk/library/LED.py +++ b/src/faebryk/library/LED.py @@ -46,14 +46,11 @@ class Color(Enum): color = L.p_field(domain=L.Domains.ENUM(Color)) def __preinit__(self): - self.current.merge(self.brightness / self.max_brightness * self.max_current) - - # self.brightness.merge( - # F.Range(0 * P.millicandela, self.max_brightness) - # ) + self.current.alias_is(self.brightness / self.max_brightness * self.max_current) + self.brightness.constrain_le(self.max_brightness) def set_intensity(self, intensity: Parameter[Quantity]) -> None: - self.brightness.merge(intensity * self.max_brightness) + self.brightness.alias_is(intensity * self.max_brightness) def connect_via_current_limiting_resistor( self, @@ -67,7 +64,7 @@ def connect_via_current_limiting_resistor( else: self.anode.connect_via(resistor, target) - resistor.resistance.merge( + resistor.resistance.alias_is( self.get_needed_series_resistance_for_current_limit(input_voltage), ) resistor.allow_removal_if_zero() diff --git a/src/faebryk/library/Logic.py b/src/faebryk/library/Logic.py index 8edc33ba..0567a1dc 100644 --- a/src/faebryk/library/Logic.py +++ b/src/faebryk/library/Logic.py @@ -6,7 +6,7 @@ class Logic(F.Signal): - state = L.f_field(F.Range)(False, True) + state = L.p_field(domain=L.Domains.BOOL()) def set(self, on: bool): - self.state.merge(on) + self.state.constrain_subset(L.Single(on)) diff --git a/src/faebryk/library/M24C08_FMN6TP.py b/src/faebryk/library/M24C08_FMN6TP.py index 800439b6..4c2c6086 100644 --- a/src/faebryk/library/M24C08_FMN6TP.py +++ b/src/faebryk/library/M24C08_FMN6TP.py @@ -43,8 +43,8 @@ def __preinit__(self): ) self.data.terminate() - self.power.decoupled.decouple().capacitance.merge( - F.Range(10 * P.nF, 100 * P.nF) + self.power.decoupled.decouple().capacitance.constrain_subset( + L.Range(10 * P.nF, 100 * P.nF) ) self.add( diff --git a/src/faebryk/library/ME6211C33M5G_N.py b/src/faebryk/library/ME6211C33M5G_N.py index 59d4bf5b..c7bdb0da 100644 --- a/src/faebryk/library/ME6211C33M5G_N.py +++ b/src/faebryk/library/ME6211C33M5G_N.py @@ -19,7 +19,7 @@ def __init__(self, default_enabled: bool = True) -> None: def __preinit__(self): # set 
constraints - self.output_voltage.merge(F.Range(3.3 * 0.98 * P.V, 3.3 * 1.02 * P.V)) + self.output_voltage.constrain_superset(L.Range.from_center_rel(3.3 * P.V, 0.02)) if self._default_enabled: self.enable.set(True) diff --git a/src/faebryk/library/MultiCapacitor.py b/src/faebryk/library/MultiCapacitor.py index 5c5cdbf8..c71e7755 100644 --- a/src/faebryk/library/MultiCapacitor.py +++ b/src/faebryk/library/MultiCapacitor.py @@ -43,7 +43,7 @@ def __preinit__(self): # ------------------------------------ # parametrization # ------------------------------------ - self.capacitance.merge(sum(c.capacitance for c in self.capacitors)) + self.capacitance.alias_is(sum(c.capacitance for c in self.capacitors)) def set_equal_capacitance(self, capacitance: Parameter[Quantity]): op = capacitance / self._count @@ -52,4 +52,4 @@ def set_equal_capacitance(self, capacitance: Parameter[Quantity]): def set_equal_capacitance_each(self, capacitance: Parameter[Quantity]): for c in self.capacitors: - c.capacitance.merge(capacitance) + c.capacitance.constrain_subset(capacitance) diff --git a/src/faebryk/library/OLED_Module.py b/src/faebryk/library/OLED_Module.py index f0569f62..6e3d2147 100644 --- a/src/faebryk/library/OLED_Module.py +++ b/src/faebryk/library/OLED_Module.py @@ -43,9 +43,9 @@ class DisplayController(Enum): display_size = L.p_field(domain=L.Domains.ENUM(DisplaySize)) def __preinit__(self): - self.power.voltage.merge(F.Range(3.0 * P.V, 5 * P.V)) - self.power.decoupled.decouple().capacitance.merge( - F.Range(100 * P.uF, 220 * P.uF) + self.power.voltage.constrain_subset(L.Range(3.0 * P.V, 5 * P.V)) + self.power.decoupled.decouple().capacitance.constrain_subset( + L.Range(100 * P.uF, 220 * P.uF) ) designator_prefix = L.f_field(F.has_designator_prefix_defined)( diff --git a/src/faebryk/library/PM1006.py b/src/faebryk/library/PM1006.py index 1e9b6a1a..fec6dc69 100644 --- a/src/faebryk/library/PM1006.py +++ b/src/faebryk/library/PM1006.py @@ -61,5 +61,5 @@ def get_config(self) -> dict: # --------------------------------------------------------------------- def __preinit__(self): - self.power.voltage.merge(F.Range.from_center(5, 0.2)) - self.data.baud.merge(F.Constant(9600 * P.baud)) + self.power.voltage.constrain_subset(L.Range.from_center(5 * P.V, 0.2 * P.V)) + self.data.baud.constrain_subset(L.Single(9600 * P.baud)) diff --git a/src/faebryk/library/PowerSwitch.py b/src/faebryk/library/PowerSwitch.py index f264e8b2..af5b773b 100644 --- a/src/faebryk/library/PowerSwitch.py +++ b/src/faebryk/library/PowerSwitch.py @@ -30,4 +30,4 @@ def switch_power(self): ) def __preinit__(self): - self.switched_power_out.voltage.merge(self.power_in.voltage) + self.switched_power_out.voltage.alias_is(self.power_in.voltage) diff --git a/src/faebryk/library/PowerSwitchMOSFET.py b/src/faebryk/library/PowerSwitchMOSFET.py index 2dc452ce..e4fe6907 100644 --- a/src/faebryk/library/PowerSwitchMOSFET.py +++ b/src/faebryk/library/PowerSwitchMOSFET.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: MIT import faebryk.library._F as F +from faebryk.libs.library import L class PowerSwitchMOSFET(F.PowerSwitch): @@ -21,15 +22,15 @@ def __init__(self, lowside: bool, normally_closed: bool) -> None: mosfet: F.MOSFET def __preinit__(self): - self.mosfet.channel_type.merge( - F.Constant( + self.mosfet.channel_type.constrain_subset( + L.Single( F.MOSFET.ChannelType.N_CHANNEL if self._lowside else F.MOSFET.ChannelType.P_CHANNEL ) ) - self.mosfet.saturation_type.merge( - F.Constant(F.MOSFET.SaturationType.ENHANCEMENT) + 
self.mosfet.saturation_type.constrain_subset( + L.Single(F.MOSFET.SaturationType.ENHANCEMENT) ) # pull gate diff --git a/src/faebryk/library/QWIIC_Connector.py b/src/faebryk/library/QWIIC_Connector.py index ea62562e..85904157 100644 --- a/src/faebryk/library/QWIIC_Connector.py +++ b/src/faebryk/library/QWIIC_Connector.py @@ -60,5 +60,5 @@ def can_attach_to_footprint(self): ) def __preinit__(self): - self.power.voltage.merge(F.Range.from_center(3.3 * P.V, 0.3 * P.V)) - self.power.max_current.merge(F.Range.from_center_rel(226 * P.mA, 0.05)) + self.power.voltage.constrain_subset(L.Range.from_center(3.3 * P.V, 0.3 * P.V)) + # self.power.max_current.merge(F.Range.from_center_rel(226 * P.mA, 0.05)) diff --git a/src/faebryk/library/RP2040.py b/src/faebryk/library/RP2040.py index d1dd7323..b31c3b8d 100644 --- a/src/faebryk/library/RP2040.py +++ b/src/faebryk/library/RP2040.py @@ -43,8 +43,10 @@ def __preinit__(self): F.ElectricLogic.connect_all_module_references(self, gnd_only=True) # TODO get tolerance - self.power_out.voltage.merge(F.Range.from_center_rel(1.1 * P.V, 0.05)) - self.power_in.voltage.merge(F.Range(1.8 * P.V, 3.3 * P.V)) + self.power_out.voltage.constrain_subset( + L.Range.from_center_rel(1.1 * P.V, 0.05) + ) + self.power_in.voltage.constrain_subset(F.Range(1.8 * P.V, 3.3 * P.V)) @L.rt_field def bridge(self): @@ -101,10 +103,16 @@ def single_reference(self): def __preinit__(self): # TODO get tolerance - self.power_adc.voltage.merge(F.Range.from_center_rel(3.3 * P.V, 0.05)) - self.power_usb_phy.voltage.merge(F.Range.from_center_rel(3.3 * P.V, 0.05)) - self.power_core.voltage.merge(F.Range.from_center_rel(1.1 * P.V, 0.05)) - self.power_io.voltage.merge(F.Range(1.8 * P.V, 3.3 * P.V)) + self.power_adc.voltage.constrain_subset( + L.Range.from_center_rel(3.3 * P.V, 0.05) + ) + self.power_usb_phy.voltage.constrain_subset( + L.Range.from_center_rel(3.3 * P.V, 0.05) + ) + self.power_core.voltage.constrain_subset( + L.Range.from_center_rel(1.1 * P.V, 0.05) + ) + self.power_io.voltage.constrain_subset(F.Range(1.8 * P.V, 3.3 * P.V)) F.ElectricLogic.connect_all_module_references(self, gnd_only=True) F.ElectricLogic.connect_all_node_references( diff --git a/src/faebryk/library/RP2040_ReferenceDesign.py b/src/faebryk/library/RP2040_ReferenceDesign.py index 6dab0e51..d477a3db 100644 --- a/src/faebryk/library/RP2040_ReferenceDesign.py +++ b/src/faebryk/library/RP2040_ReferenceDesign.py @@ -30,8 +30,10 @@ class Jumper(Module): switch = L.f_field(F.Switch(F.Electrical))() def __preinit__(self): - self.resistor.resistance.merge(F.Range.from_center_rel(1 * P.kohm, 0.05)) - self.logic_out.set_weak(True).resistance.merge(self.resistor.resistance) + self.resistor.resistance.constrain_subset( + L.Range.from_center_rel(1 * P.kohm, 0.05) + ) + self.logic_out.set_weak(True).resistance.alias_is(self.resistor.resistance) self.logic_out.signal.connect_via( [self.resistor, self.switch], self.logic_out.reference.lv ) @@ -82,21 +84,23 @@ def __preinit__(self): # parametrization # ---------------------------------------- # LDO - self.ldo.output_current.merge(F.Range.from_center_rel(600 * P.mA, 0.05)) - self.ldo.power_in.decoupled.decouple().capacitance.merge( - F.Range.from_center_rel(10 * P.uF, 0.05) + self.ldo.output_current.constrain_subset( + L.Range.from_center_rel(600 * P.mA, 0.05) ) - self.ldo.power_out.decoupled.decouple().capacitance.merge( - F.Range.from_center_rel(10 * P.uF, 0.05) + self.ldo.power_in.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(10 * P.uF, 0.05) + ) + 
self.ldo.power_out.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(10 * P.uF, 0.05) ) # XTAL - self.clock_source.crystal.load_capacitance.merge( - F.Range.from_center_rel(10 * P.pF, 0.05) + self.clock_source.crystal.load_capacitance.constrain_subset( + L.Range.from_center_rel(10 * P.pF, 0.05) ) - self.clock_source.current_limiting_resistor.resistance.merge( - F.Range.from_center_rel(1 * P.kohm, 0.05) + self.clock_source.current_limiting_resistor.resistance.constrain_subset( + L.Range.from_center_rel(1 * P.kohm, 0.05) ) self.clock_source.crystal.add( F.has_descriptive_properties_defined( @@ -109,35 +113,37 @@ def __preinit__(self): # USB terminated_usb = self.usb.usb_if.d.terminated() - terminated_usb.impedance.merge(F.Range.from_center_rel(27.4 * P.ohm, 0.05)) + terminated_usb.impedance.constrain_subset( + L.Range.from_center_rel(27.4 * P.ohm, 0.05) + ) # Flash - self.flash.memory_size.merge(16 * P.Mbit) - self.flash.decoupled.decouple().capacitance.merge( - F.Range.from_center_rel(100 * P.nF, 0.05) + self.flash.memory_size.constrain_subset(L.Single(16 * P.Mbit)) + self.flash.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(100 * P.nF, 0.05) ) # Power rails self.rp2040.power_io.decoupled.decouple().specialize( F.MultiCapacitor(6) - ).set_equal_capacitance_each(F.Range.from_center_rel(100 * P.nF, 0.05)) - self.rp2040.core_regulator.power_in.decoupled.decouple().capacitance.merge( - F.Range.from_center_rel(1 * P.uF, 0.05) + ).set_equal_capacitance_each(L.Range.from_center_rel(100 * P.nF, 0.05)) + self.rp2040.core_regulator.power_in.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(1 * P.uF, 0.05) ) - self.rp2040.power_adc.decoupled.decouple().capacitance.merge( - F.Range.from_center_rel(100 * P.nF, 0.05) + self.rp2040.power_adc.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(100 * P.nF, 0.05) ) - self.rp2040.power_usb_phy.decoupled.decouple().capacitance.merge( - F.Range.from_center_rel(100 * P.nF, 0.05) + self.rp2040.power_usb_phy.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(100 * P.nF, 0.05) ) - power_3v3.decoupled.decouple().capacitance.merge( + power_3v3.decoupled.decouple().capacitance.constrain_subset( F.Range.from_center_rel(10 * P.uF, 0.05) ) self.rp2040.power_core.decoupled.decouple().specialize( F.MultiCapacitor(2) - ).set_equal_capacitance_each(F.Range.from_center_rel(100 * P.nF, 0.05)) - self.rp2040.core_regulator.power_out.decoupled.decouple().capacitance.merge( - F.Range.from_center_rel(1 * P.uF, 0.05) + ).set_equal_capacitance_each(L.Range.from_center_rel(100 * P.nF, 0.05)) + self.rp2040.core_regulator.power_out.decoupled.decouple().capacitance.constrain_subset( + L.Range.from_center_rel(1 * P.uF, 0.05) ) # ---------------------------------------- diff --git a/src/faebryk/library/RS485_Bus_Protection.py b/src/faebryk/library/RS485_Bus_Protection.py index 1a6fa8b9..e583a44d 100644 --- a/src/faebryk/library/RS485_Bus_Protection.py +++ b/src/faebryk/library/RS485_Bus_Protection.py @@ -46,17 +46,21 @@ def __preinit__(self): # ---------------------------------------- # parametrization # ---------------------------------------- - self.tvs.reverse_working_voltage.merge( - F.Range.from_center_rel(8.5 * P.V, 0.05) + self.tvs.reverse_working_voltage.constrain_subset( + L.Range.from_center_rel(8.5 * P.V, 0.05) ) # self.tvs.max_current.merge(F.Range.from_center_rel(41.7*P.A, 0.05)) # self.tvs.forward_voltage.merge(F.Range(9.44*P.V, 10.40*P.V)) 
for diode in self.clamping_diodes: - diode.forward_voltage.merge(F.Range.from_center_rel(1.1 * P.V, 0.05)) - diode.max_current.merge(F.Range.from_center_rel(1 * P.A, 0.05)) - diode.reverse_working_voltage.merge( - F.Range.from_center_rel(1 * P.kV, 0.05) + diode.forward_voltage.constrain_subset( + L.Range.from_center_rel(1.1 * P.V, 0.05) + ) + diode.max_current.constrain_subset( + L.Range.from_center_rel(1 * P.A, 0.05) + ) + diode.reverse_working_voltage.constrain_subset( + L.Range.from_center_rel(1 * P.kV, 0.05) ) # ---------------------------------------- @@ -155,8 +159,8 @@ def can_bridge(self): def __preinit__(self): if self._termination: termination_resistor = self.add(F.Resistor(), name="termination_resistor") - termination_resistor.resistance.merge( - F.Range.from_center_rel(120 * P.ohm, 0.05) + termination_resistor.resistance.constrain_subset( + L.Range.from_center_rel(120 * P.ohm, 0.05) ) self.rs485_ufp.diff_pair.p.connect_via( termination_resistor, self.rs485_ufp.diff_pair.n @@ -164,11 +168,11 @@ def __preinit__(self): if self._polarization: polarization_resistors = self.add_to_container(2, F.Resistor) - polarization_resistors[0].resistance.merge( - F.Range(380 * P.ohm, 420 * P.ohm) + polarization_resistors[0].resistance.constrain_subset( + L.Range(380 * P.ohm, 420 * P.ohm) ) - polarization_resistors[1].resistance.merge( - F.Range(380 * P.ohm, 420 * P.ohm) + polarization_resistors[1].resistance.constrain_subset( + L.Range(380 * P.ohm, 420 * P.ohm) ) self.rs485_dfp.diff_pair.p.signal.connect_via( polarization_resistors[0], self.power.hv @@ -180,26 +184,26 @@ def __preinit__(self): # ---------------------------------------- # parametrization # ---------------------------------------- - self.current_limmiter_resistors[0].resistance.merge( - F.Range.from_center_rel(2.7 * P.ohm, 0.05) + self.current_limmiter_resistors[0].resistance.constrain_subset( + L.Range.from_center_rel(2.7 * P.ohm, 0.05) ) - self.current_limmiter_resistors[0].rated_power.merge( + self.current_limmiter_resistors[0].rated_power.constrain_subset( F.Range.lower_bound(500 * P.mW) ) - self.current_limmiter_resistors[1].resistance.merge( - F.Range.from_center_rel(2.7 * P.ohm, 0.05) + self.current_limmiter_resistors[1].resistance.constrain_subset( + L.Range.from_center_rel(2.7 * P.ohm, 0.05) ) - self.current_limmiter_resistors[1].rated_power.merge( - F.Range.lower_bound(500 * P.mW) + self.current_limmiter_resistors[1].rated_power.constrain_ge( + L.Single(500 * P.mW) ) - self.gnd_couple_resistor.resistance.merge( - F.Range.from_center_rel(1 * P.Mohm, 0.05) + self.gnd_couple_resistor.resistance.constrain_subset( + L.Range.from_center_rel(1 * P.Mohm, 0.05) ) - self.gnd_couple_capacitor.capacitance.merge( - F.Range.from_center_rel(1 * P.uF, 0.05) + self.gnd_couple_capacitor.capacitance.constrain_subset( + L.Range.from_center_rel(1 * P.uF, 0.05) ) - self.gnd_couple_capacitor.rated_voltage.merge(F.Range.lower_bound(2 * P.kV)) + self.gnd_couple_capacitor.rated_voltage.constrain_ge(L.Single(2 * P.kV)) # ---------------------------------------- # Connections diff --git a/src/faebryk/library/Resistor.py b/src/faebryk/library/Resistor.py index 5642444c..1151cc42 100644 --- a/src/faebryk/library/Resistor.py +++ b/src/faebryk/library/Resistor.py @@ -1,11 +1,8 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from math import sqrt - import faebryk.library._F as F from faebryk.core.module import Module -from faebryk.core.parameter import Parameter from faebryk.libs.library import L from 
faebryk.libs.picker.picker import PickError, has_part_picked_remove from faebryk.libs.units import P @@ -59,73 +56,3 @@ def replace_zero(m: Module): self.add( F.has_multi_picker(-100, F.has_multi_picker.FunctionPicker(replace_zero)) ) - - def get_voltage_drop_by_current_resistance(self, current_A: Parameter) -> Parameter: - return current_A * self.resistance - - def get_voltage_drop_by_power_resistance(self, power_W: Parameter) -> Parameter: - return sqrt(power_W * self.resistance) - - @staticmethod - def set_voltage_drop_by_power_current( - power_W: Parameter, current_A: Parameter - ) -> Parameter: - return power_W / current_A - - def get_current_flow_by_voltage_resistance( - self, voltage_drop_V: Parameter - ) -> Parameter: - return voltage_drop_V / self.resistance - - def get_current_flow_by_power_resistance(self, power_W: Parameter) -> Parameter: - return sqrt(power_W / self.resistance) - - @staticmethod - def get_current_flow_by_voltage_power( - voltage_drop_V: Parameter, power_W: Parameter - ) -> Parameter: - return power_W / voltage_drop_V - - def set_resistance_by_voltage_current( - self, voltage_drop_V: Parameter, current_A: Parameter - ) -> Parameter: - self.resistance.merge(voltage_drop_V / current_A) - return self.resistance.get_most_narrow() - - def set_resistance_by_voltage_power( - self, voltage_drop_V: Parameter, power_W: Parameter - ) -> Parameter: - self.resistance.merge(pow(voltage_drop_V, 2) / power_W) - return self.resistance.get_most_narrow() - - def set_resistance_by_power_current( - self, current_A: Parameter, power_W: Parameter - ) -> Parameter: - self.resistance.merge(power_W / pow(current_A, 2)) - return self.resistance.get_most_narrow() - - def get_power_dissipation_by_voltage_resistance( - self, voltage_drop_V: Parameter - ) -> Parameter: - return pow(voltage_drop_V, 2) / self.resistance - - def get_power_dissipation_by_current_resistance( - self, current_A: Parameter - ) -> Parameter: - return pow(current_A, 2) * self.resistance - - @staticmethod - def get_power_dissipation_by_voltage_current( - voltage_drop_V: Parameter, current_A - ) -> Parameter: - return voltage_drop_V * current_A - - def set_rated_power_by_voltage_resistance(self, voltage_drop_V: Parameter): - self.rated_power.merge( - self.get_power_dissipation_by_voltage_resistance(voltage_drop_V) - ) - - def set_rated_power_by_current_resistance(self, current_A: Parameter): - self.rated_power.merge( - self.get_power_dissipation_by_current_resistance(current_A) - ) diff --git a/src/faebryk/library/SCD40.py b/src/faebryk/library/SCD40.py index 7a09b9fa..2bc00049 100644 --- a/src/faebryk/library/SCD40.py +++ b/src/faebryk/library/SCD40.py @@ -71,10 +71,10 @@ def attach_to_footprint(self): ) def __preinit__(self): - self.power.voltage.merge(F.Range.from_center_rel(3.3 * P.V, 0.05)) + self.power.voltage.constrain_subset(L.Range.from_center_rel(3.3 * P.V, 0.05)) self.i2c.terminate() self.power.decoupled.decouple() - self.i2c.frequency.merge( + self.i2c.frequency.constrain_le( F.I2C.define_max_frequency_capability(F.I2C.SpeedMode.fast_speed) ) diff --git a/src/faebryk/library/SNx4LVC541A.py b/src/faebryk/library/SNx4LVC541A.py index 31c4b921..670cd69b 100644 --- a/src/faebryk/library/SNx4LVC541A.py +++ b/src/faebryk/library/SNx4LVC541A.py @@ -38,7 +38,7 @@ def __preinit__(self): # ---------------------------------------- # parameters # ---------------------------------------- - self.power.voltage.merge(F.Range.upper_bound(3.6 * P.V)) + self.power.voltage.constrain_le(L.Single(3.6 * P.V)) # 
---------------------------------------- # aliases diff --git a/src/faebryk/library/SP3243E.py b/src/faebryk/library/SP3243E.py index df4f8c60..dda098d6 100644 --- a/src/faebryk/library/SP3243E.py +++ b/src/faebryk/library/SP3243E.py @@ -106,14 +106,14 @@ def __preinit__(self): # ------------------------------------ # parametrization # ------------------------------------ - self.power.voltage.merge(F.Range(3.0 * P.V, 5.5 * P.V)) + self.power.voltage.constrain_subset(L.Range(3.0 * P.V, 5.5 * P.V)) - self.uart.base_uart.baud.merge(F.Range.upper_bound(250 * P.kbaud)) + self.uart.base_uart.baud.constrain_le(L.Single(250 * P.kbaud)) self.rs232.get_trait( F.has_single_electric_reference - ).get_reference().voltage.merge( - F.Range.from_center(3 * P.V, 15 * P.V) + ).get_reference().voltage.constrain_subset( + L.Range.from_center(3 * P.V, 15 * P.V) ) # TODO: Support negative numbers (-15 * P.V, 15 * P.V)) # ------------------------------------ @@ -124,4 +124,4 @@ def __preinit__(self): # ------------------------------------ # parametrization # ------------------------------------ - self.power.voltage.merge(F.Range(3.0 * P.V, 5.5 * P.V)) + self.power.voltage.constrain_subset(L.Range(3.0 * P.V, 5.5 * P.V)) diff --git a/src/faebryk/library/SP3243E_ReferenceDesign.py b/src/faebryk/library/SP3243E_ReferenceDesign.py index 7aafd05f..a242ac9e 100644 --- a/src/faebryk/library/SP3243E_ReferenceDesign.py +++ b/src/faebryk/library/SP3243E_ReferenceDesign.py @@ -42,11 +42,13 @@ def __preinit__(self): # 4.5V to 5.5V > C1 = 0.047µF, C2,Cvp, Cvn = 0.33µF # 3.0V to 5.5V > C_all = 0.22μF # - cap.capacitance.merge(F.Range.from_center(0.22 * P.uF, 0.22 * 0.05 * P.uF)) - - if isinstance(pwr.voltage.get_most_narrow(), F.TBD): - pwr.voltage.merge( - F.Constant(8 * P.V) - # F.Range.lower_bound(16 * P.V) - ) # TODO: fix merge - # TODO: merge conflict + cap.capacitance.constrain_subset( + L.Range.from_center(0.22 * P.uF, 0.22 * 0.05 * P.uF) + ) + + # if isinstance(pwr.voltage.get_most_narrow(), F.TBD): + # pwr.voltage.merge( + # F.Constant(8 * P.V) + # # F.Range.lower_bound(16 * P.V) + # ) # TODO: fix merge + # # TODO: merge conflict diff --git a/src/faebryk/library/SignalElectrical.py b/src/faebryk/library/SignalElectrical.py index 780fa2bd..572b824c 100644 --- a/src/faebryk/library/SignalElectrical.py +++ b/src/faebryk/library/SignalElectrical.py @@ -83,7 +83,7 @@ class _can_be_surge_protected_defined(F.can_be_surge_protected_defined): def protect(_self): return [ tvs.builder( - lambda t: t.reverse_working_voltage.merge( + lambda t: t.reverse_working_voltage.alias_is( self.reference.voltage ) ) diff --git a/src/faebryk/library/TD541S485H.py b/src/faebryk/library/TD541S485H.py index 98461a8b..57e77c73 100644 --- a/src/faebryk/library/TD541S485H.py +++ b/src/faebryk/library/TD541S485H.py @@ -56,7 +56,8 @@ def __preinit__(self): self.power_iso_out.decoupled.decouple() self.power_iso_in.lv.connect(self.power_iso_out.lv) - self.power_iso_out.voltage.merge(5 * P.V) + # TODO tolerance + self.power_iso_out.voltage.constrain_superset(L.Single(5 * P.V)) F.ElectricLogic.connect_all_module_references( self, diff --git a/src/faebryk/library/TPS2116.py b/src/faebryk/library/TPS2116.py index 1a043944..5bda3ee5 100644 --- a/src/faebryk/library/TPS2116.py +++ b/src/faebryk/library/TPS2116.py @@ -110,4 +110,4 @@ def __preinit__(self): # parametrization # ------------------------------------ for power in [self.power_in[0], self.power_in[1], self.power_out]: - power.voltage.merge(F.Range(1.6 * P.V, 5.5 * P.V)) + 
power.voltage.constrain_subset(L.Range(1.6 * P.V, 5.5 * P.V)) diff --git a/src/faebryk/library/UART_RS485.py b/src/faebryk/library/UART_RS485.py index ec4b23e4..50b8a345 100644 --- a/src/faebryk/library/UART_RS485.py +++ b/src/faebryk/library/UART_RS485.py @@ -23,7 +23,7 @@ class UART_RS485(Module): def __preinit__(self): self.max_data_rate.alias_is(self.uart.baud) - self.power.voltage.merge(F.Range(3.3 * P.V, 5.0 * P.V)) + self.power.voltage.constrain_subset(L.Range(3.3 * P.V, 5.0 * P.V)) self.power.decoupled.decouple() designator_prefix = L.f_field(F.has_designator_prefix_defined)( diff --git a/src/faebryk/library/USB2_0.py b/src/faebryk/library/USB2_0.py index 36048ff2..3b299b8d 100644 --- a/src/faebryk/library/USB2_0.py +++ b/src/faebryk/library/USB2_0.py @@ -3,6 +3,7 @@ import faebryk.library._F as F from faebryk.core.moduleinterface import ModuleInterface +from faebryk.libs.library import L from faebryk.libs.units import P @@ -10,4 +11,6 @@ class USB2_0(ModuleInterface): usb_if: F.USB2_0_IF def __preinit__(self): - self.usb_if.buspower.voltage.merge(F.Range.from_center(5 * P.V, 0.25 * P.V)) + self.usb_if.buspower.voltage.constrain_subset( + L.Range.from_center(5 * P.V, 0.25 * P.V) + ) diff --git a/src/faebryk/library/USB2_0_ESD_Protection.py b/src/faebryk/library/USB2_0_ESD_Protection.py index 66da1691..da823b5c 100644 --- a/src/faebryk/library/USB2_0_ESD_Protection.py +++ b/src/faebryk/library/USB2_0_ESD_Protection.py @@ -16,11 +16,13 @@ class USB2_0_ESD_Protection(Module): usb = L.list_field(2, F.USB2_0) - vbus_esd_protection = L.p_field(domain=L.Domains.ENUM(bool)) - data_esd_protection = L.p_field(domain=L.Domains.ENUM(bool)) + vbus_esd_protection = L.p_field(domain=L.Domains.BOOL()) + data_esd_protection = L.p_field(domain=L.Domains.BOOL()) def __preinit__(self): - self.usb[0].usb_if.buspower.voltage.merge(F.Range(4.75 * P.V, 5.25 * P.V)) + self.usb[0].usb_if.buspower.voltage.constrain_subset( + L.Range(4.75 * P.V, 5.25 * P.V) + ) self.usb[0].connect(self.usb[1]) self.usb[0].usb_if.buspower.connect(self.usb[1].usb_if.buspower) self.usb[0].usb_if.buspower.decoupled.decouple() diff --git a/src/faebryk/library/USB_C_5V_PSU.py b/src/faebryk/library/USB_C_5V_PSU.py index 7340601a..7bf6b1e3 100644 --- a/src/faebryk/library/USB_C_5V_PSU.py +++ b/src/faebryk/library/USB_C_5V_PSU.py @@ -16,7 +16,9 @@ class USB_C_5V_PSU(Module): configuration_resistors = L.list_field( 2, lambda: F.Resistor().builder( - lambda r: r.resistance.merge(F.Range.from_center_rel(5.1 * P.kohm, 0.05)) + lambda r: r.resistance.constrain_subset( + L.Range.from_center_rel(5.1 * P.kohm, 0.05) + ) ), ) diff --git a/src/faebryk/library/USB_C_PSU_Vertical.py b/src/faebryk/library/USB_C_PSU_Vertical.py index bba595e9..59525576 100644 --- a/src/faebryk/library/USB_C_PSU_Vertical.py +++ b/src/faebryk/library/USB_C_PSU_Vertical.py @@ -23,20 +23,26 @@ class USB_C_PSU_Vertical(Module): fuse: F.Fuse def __preinit__(self): - self.gnd_capacitor.capacitance.merge(F.Range.from_center_rel(100 * P.nF, 0.05)) - self.gnd_capacitor.rated_voltage.merge(F.Range.from_center_rel(16 * P.V, 0.05)) - self.gnd_resistor.resistance.merge(F.Range.from_center_rel(1 * P.Mohm, 0.05)) + self.gnd_capacitor.capacitance.constrain_subset( + L.Range.from_center_rel(100 * P.nF, 0.05) + ) + self.gnd_capacitor.rated_voltage.constrain_subset( + L.Range.from_center_rel(16 * P.V, 0.05) + ) + self.gnd_resistor.resistance.constrain_subset( + L.Range.from_center_rel(1 * P.Mohm, 0.05) + ) for res in self.configuration_resistors: - 
res.resistance.merge(F.Range.from_center_rel(5.1 * P.kohm, 0.05)) - self.fuse.fuse_type.merge(F.Fuse.FuseType.RESETTABLE) - self.fuse.trip_current.merge(F.Range.from_center_rel(1 * P.A, 0.05)) + res.resistance.constrain_subset(L.Range.from_center_rel(5.1 * P.kohm, 0.05)) + self.fuse.fuse_type.constrain_subset(F.Fuse.FuseType.RESETTABLE) + self.fuse.trip_current.constrain_subset(L.Range.from_center_rel(1 * P.A, 0.05)) # alliases vcon = self.usb_connector.vbus vusb = self.usb.usb_if.buspower v5 = self.power_out gnd = v5.lv - v5.voltage.merge(F.Range.from_center_rel(5 * P.V, 0.05)) + v5.voltage.constrain_superset(L.Range.from_center_rel(5 * P.V, 0.05)) vcon.hv.connect_via(self.fuse, v5.hv) vcon.lv.connect(gnd) diff --git a/src/faebryk/library/USB_RS485.py b/src/faebryk/library/USB_RS485.py index 3c5cb885..08ca2935 100644 --- a/src/faebryk/library/USB_RS485.py +++ b/src/faebryk/library/USB_RS485.py @@ -45,10 +45,12 @@ def __preinit__(self): self.uart_rs485.power.lv, ) - self.termination.resistance.merge(F.Range.from_center(150 * P.ohm, 1.5 * P.ohm)) - self.polarization[0].resistance.merge( - F.Range.from_center(680 * P.ohm, 6.8 * P.ohm) + self.termination.resistance.constrain_subset( + L.Range.from_center(150 * P.ohm, 1.5 * P.ohm) ) - self.polarization[1].resistance.merge( - F.Range.from_center(680 * P.ohm, 6.8 * P.ohm) + self.polarization[0].resistance.constrain_subset( + L.Range.from_center(680 * P.ohm, 6.8 * P.ohm) + ) + self.polarization[1].resistance.constrain_subset( + L.Range.from_center(680 * P.ohm, 6.8 * P.ohm) ) diff --git a/src/faebryk/library/pf_74AHCT2G125.py b/src/faebryk/library/pf_74AHCT2G125.py index 5d426706..881a9590 100644 --- a/src/faebryk/library/pf_74AHCT2G125.py +++ b/src/faebryk/library/pf_74AHCT2G125.py @@ -37,7 +37,7 @@ def attach_to_footprint(self): ) def __preinit__(self): - self.power.voltage.merge(F.Range(4.5 * P.V, 5.5 * P.V)) + self.power.voltage.constrain_subset(L.Range(4.5 * P.V, 5.5 * P.V)) self.power.decoupled.decouple() @L.rt_field From 988bf1c4f29c3e544e5903c528e3d90d25269bf5 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 10:20:47 +0200 Subject: [PATCH 18/80] Replace merge with {subset, superset, alias_is} half Lib --- src/faebryk/library/USB2514B.py | 16 ++++++---- .../library/USB2514B_ReferenceDesign.py | 32 +++++++++++-------- src/faebryk/library/USB3.py | 5 ++- src/faebryk/library/USB3_connector.py | 2 -- .../Wuxi_I_core_Elec_AiP74LVC1T45GB236_TR.py | 4 +-- 5 files changed, 35 insertions(+), 24 deletions(-) diff --git a/src/faebryk/library/USB2514B.py b/src/faebryk/library/USB2514B.py index 91c336e5..f895c61e 100644 --- a/src/faebryk/library/USB2514B.py +++ b/src/faebryk/library/USB2514B.py @@ -339,11 +339,15 @@ def __preinit__(self): # ---------------------------------------- # parametrization # ---------------------------------------- - self.power_pll.voltage.merge( - F.Range.from_center_rel(1.8 * P.V, 0.05) + self.power_pll.voltage.constrain_subset( + L.Range.from_center_rel(1.8 * P.V, 0.05) ) # datasheet does not specify a voltage range - self.power_core.voltage.merge( - F.Range.from_center_rel(1.8 * P.V, 0.05) + self.power_core.voltage.constrain_subset( + L.Range.from_center_rel(1.8 * P.V, 0.05) ) # datasheet does not specify a voltage range - self.power_3v3.voltage.merge(F.Range.from_center(3.3 * P.V, 0.3 * P.V)) - self.power_3v3_analog.voltage.merge(F.Range.from_center(3.3 * P.V, 0.3 * P.V)) + self.power_3v3.voltage.constrain_subset( + L.Range.from_center(3.3 * P.V, 0.3 * P.V) + ) + 
self.power_3v3_analog.voltage.constrain_subset( + L.Range.from_center(3.3 * P.V, 0.3 * P.V) + ) diff --git a/src/faebryk/library/USB2514B_ReferenceDesign.py b/src/faebryk/library/USB2514B_ReferenceDesign.py index 2673995d..dfd119ed 100644 --- a/src/faebryk/library/USB2514B_ReferenceDesign.py +++ b/src/faebryk/library/USB2514B_ReferenceDesign.py @@ -84,26 +84,30 @@ def __preinit__(self): ) # TODO: load_capacitance is a property of the crystal. remove this - self.crystal_oscillator.crystal.load_capacitance.merge( - F.Range(8 * P.pF, 15 * P.pF) + self.crystal_oscillator.crystal.load_capacitance.constrain_subset( + L.Range(8 * P.pF, 15 * P.pF) ) - self.crystal_oscillator.crystal.frequency.merge( - F.Range.from_center_rel(24 * P.MHz, 0.01) + self.crystal_oscillator.crystal.frequency.constrain_subset( + L.Range.from_center_rel(24 * P.MHz, 0.01) ) - self.crystal_oscillator.crystal.frequency_tolerance.merge( - F.Range.upper_bound(50 * P.ppm) + self.crystal_oscillator.crystal.frequency_tolerance.constrain_le( + L.Single(50 * P.ppm) ) # usb transceiver bias resistor - self.bias_resistor.resistance.merge(F.Range.from_center_rel(12 * P.kohm, 0.01)) + self.bias_resistor.resistance.constrain_subset( + L.Range.from_center_rel(12 * P.kohm, 0.01) + ) for led in [self.suspend_indicator.led, self.power_3v3_indicator]: - led.led.color.merge(F.LED.Color.GREEN) - led.led.brightness.merge( + led.led.color.constrain_subset(L.Single(F.LED.Color.GREEN)) + led.led.brightness.constrain_subset( TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value ) - self.ldo_3v3.output_voltage.merge(F.Range.from_center_rel(3.3 * P.V, 0.05)) + self.ldo_3v3.output_voltage.constrain_subset( + L.Range.from_center_rel(3.3 * P.V, 0.05) + ) # ---------------------------------------- # connections @@ -131,12 +135,14 @@ def __preinit__(self): self.hub_controller.configurable_downstream_usb[i].over_current_sense ) dfp.usb_if.buspower.connect(self.usb_dfp_power_indicator[i].power) - self.usb_dfp_power_indicator[i].led.color.merge(F.LED.Color.YELLOW) - self.usb_dfp_power_indicator[i].led.brightness.merge( + self.usb_dfp_power_indicator[i].led.color.constrain_subset( + L.Single(F.LED.Color.YELLOW) + ) + self.usb_dfp_power_indicator[i].led.brightness.constrain_subset( TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value ) self.power_distribution_switch[i].set_current_limit( - F.Range.from_center_rel(520 * P.mA, 0.01) + L.Range.from_center_rel(520 * P.mA, 0.01) ) # Bias resistor diff --git a/src/faebryk/library/USB3.py b/src/faebryk/library/USB3.py index 09be9d81..1ca27a1f 100644 --- a/src/faebryk/library/USB3.py +++ b/src/faebryk/library/USB3.py @@ -3,6 +3,7 @@ import faebryk.library._F as F from faebryk.core.moduleinterface import ModuleInterface +from faebryk.libs.library import L from faebryk.libs.units import P @@ -11,4 +12,6 @@ class USB3(ModuleInterface): def __preinit__(self): self.usb3_if.gnd_drain.connect(self.usb3_if.usb_if.buspower.lv) - self.usb3_if.usb_if.buspower.voltage.merge(F.Range(4.75 * P.V, 5.5 * P.V)) + self.usb3_if.usb_if.buspower.voltage.constrain_subset( + L.Range(4.75 * P.V, 5.5 * P.V) + ) diff --git a/src/faebryk/library/USB3_connector.py b/src/faebryk/library/USB3_connector.py index 9803c1be..653f898d 100644 --- a/src/faebryk/library/USB3_connector.py +++ b/src/faebryk/library/USB3_connector.py @@ -16,8 +16,6 @@ class USB3_connector(Module): shield: F.Electrical def __preinit__(self): - self.usb3.usb3_if.usb_if.buspower.voltage.merge(F.Range(4.75 * P.V, 5.25 * P.V)) - 
self.usb3.usb3_if.usb_if.buspower.lv.connect(self.usb3.usb3_if.gnd_drain) designator_prefix = L.f_field(F.has_designator_prefix_defined)( diff --git a/src/faebryk/library/Wuxi_I_core_Elec_AiP74LVC1T45GB236_TR.py b/src/faebryk/library/Wuxi_I_core_Elec_AiP74LVC1T45GB236_TR.py index 4748edca..e28d63e1 100644 --- a/src/faebryk/library/Wuxi_I_core_Elec_AiP74LVC1T45GB236_TR.py +++ b/src/faebryk/library/Wuxi_I_core_Elec_AiP74LVC1T45GB236_TR.py @@ -75,5 +75,5 @@ def __preinit__(self): # ------------------------------------ # parametrization # ------------------------------------ - self.power_a.voltage.merge(F.Range(1.2 * P.V, 5.5 * P.V)) - self.power_b.voltage.merge(F.Range(1.2 * P.V, 5.5 * P.V)) + self.power_a.voltage.constrain_subset(L.Range(1.2 * P.V, 5.5 * P.V)) + self.power_b.voltage.constrain_subset(L.Range(1.2 * P.V, 5.5 * P.V)) From ac1b98b2bae5f8ac7d713253fbe293dcefe4a163 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 10:34:39 +0200 Subject: [PATCH 19/80] Fix F.Range -> L.Range --- src/faebryk/library/B0505S_1WR3.py | 2 +- src/faebryk/library/BH1750FVI_TR.py | 2 +- src/faebryk/library/ButtonCell.py | 14 +++++++------- .../library/CBM9002A_56ILG_ReferenceDesign.py | 8 ++++---- src/faebryk/library/CD4011.py | 5 +---- src/faebryk/library/CH344Q_ReferenceDesign.py | 2 +- src/faebryk/library/Crystal_Oscillator.py | 6 ++---- src/faebryk/library/ElectricLogicGates.py | 11 +++++------ src/faebryk/library/I2C.py | 2 +- src/faebryk/library/LogicGates.py | 11 +++++------ src/faebryk/library/RP2040.py | 4 ++-- src/faebryk/library/RP2040_ReferenceDesign.py | 2 +- src/faebryk/library/RS485_Bus_Protection.py | 2 +- src/faebryk/library/Resistor.py | 2 +- 14 files changed, 33 insertions(+), 40 deletions(-) diff --git a/src/faebryk/library/B0505S_1WR3.py b/src/faebryk/library/B0505S_1WR3.py index 9f1e49ac..72cfda5f 100644 --- a/src/faebryk/library/B0505S_1WR3.py +++ b/src/faebryk/library/B0505S_1WR3.py @@ -71,7 +71,7 @@ def __preinit__(self): self.power_out.get_trait( F.can_be_decoupled ).decouple().capacitance.constrain_subset( - F.Range.from_center_rel(10 * P.uF, 0.1) + L.Range.from_center_rel(10 * P.uF, 0.1) ) # ---------------------------------------- diff --git a/src/faebryk/library/BH1750FVI_TR.py b/src/faebryk/library/BH1750FVI_TR.py index d027c835..9dfe6bfa 100644 --- a/src/faebryk/library/BH1750FVI_TR.py +++ b/src/faebryk/library/BH1750FVI_TR.py @@ -77,7 +77,7 @@ def __preinit__(self): ) # set constraints - self.power.voltage.constrain_subset(F.Range(2.4 * P.V, 3.6 * P.V)) + self.power.voltage.constrain_subset(L.Range(2.4 * P.V, 3.6 * P.V)) self.power.decoupled.decouple().capacitance.constrain_subset( L.Range.from_center_rel(100 * P.nF, 0.1) diff --git a/src/faebryk/library/ButtonCell.py b/src/faebryk/library/ButtonCell.py index 4e183c1b..4296a896 100644 --- a/src/faebryk/library/ButtonCell.py +++ b/src/faebryk/library/ButtonCell.py @@ -23,13 +23,13 @@ class Material(StrEnum): @property def voltage(self) -> Parameter: return { - self.Alkaline: F.Constant(1.5 * P.V), - self.SilverOxide: F.Constant(1.55 * P.V), - self.ZincAir: F.Constant(1.65 * P.V), - self.Lithium: F.Constant(3.0 * P.V), - self.Mercury: F.Constant(1.35 * P.V), - self.NickelCadmium: F.Constant(1.2 * P.V), - self.NickelMetalHydride: F.Constant(1.2 * P.V), + self.Alkaline: L.Single(1.5 * P.V), + self.SilverOxide: L.Single(1.55 * P.V), + self.ZincAir: L.Single(1.65 * P.V), + self.Lithium: L.Single(3.0 * P.V), + self.Mercury: L.Single(1.35 * P.V), + self.NickelCadmium: L.Single(1.2 * P.V), + self.NickelMetalHydride: 
L.Single(1.2 * P.V), }[self] class Shape(StrEnum): diff --git a/src/faebryk/library/CBM9002A_56ILG_ReferenceDesign.py b/src/faebryk/library/CBM9002A_56ILG_ReferenceDesign.py index 8983eb7a..0fcf60aa 100644 --- a/src/faebryk/library/CBM9002A_56ILG_ReferenceDesign.py +++ b/src/faebryk/library/CBM9002A_56ILG_ReferenceDesign.py @@ -28,10 +28,10 @@ def __preinit__(self): L.Range.from_center_rel(1 * P.uF, 0.05) ) - self.diode.forward_voltage.constrain_subset(F.Range(715 * P.mV, 1.5 * P.V)) + self.diode.forward_voltage.constrain_subset(L.Range(715 * P.mV, 1.5 * P.V)) self.diode.reverse_leakage_current.constrain_le(1 * P.uA) self.diode.current.constrain_subset( - F.Range.from_center_rel(300 * P.mA, 0.05) + L.Range.from_center_rel(300 * P.mA, 0.05) ) self.diode.current.constrain_ge(1 * P.A) @@ -86,8 +86,8 @@ def __preinit__(self): # ---------------------------------------- self.oscillator.crystal.frequency.constrain_subset( - F.Range.from_center_rel(24 * P.Mhertz, 0.05) + L.Range.from_center_rel(24 * P.Mhertz, 0.05) ) self.oscillator.crystal.frequency_tolerance.constrain_subset( - F.Range(0 * P.ppm, 20 * P.ppm) + L.Range(0 * P.ppm, 20 * P.ppm) ) diff --git a/src/faebryk/library/CD4011.py b/src/faebryk/library/CD4011.py index cc2b3353..160d2e0d 100644 --- a/src/faebryk/library/CD4011.py +++ b/src/faebryk/library/CD4011.py @@ -8,10 +8,7 @@ class CD4011(F.Logic74xx): def __init__(self): super().__init__( - [ - lambda: F.ElectricLogicGates.NAND(input_cnt=F.Constant(2)) - for _ in range(4) - ] + [lambda: F.ElectricLogicGates.NAND(input_cnt=L.Single(2)) for _ in range(4)] ) simple_value_representation = L.f_field(F.has_simple_value_representation_defined)( diff --git a/src/faebryk/library/CH344Q_ReferenceDesign.py b/src/faebryk/library/CH344Q_ReferenceDesign.py index f025de8f..0b97fdc9 100644 --- a/src/faebryk/library/CH344Q_ReferenceDesign.py +++ b/src/faebryk/library/CH344Q_ReferenceDesign.py @@ -88,7 +88,7 @@ def __preinit__(self): # connections # ------------------------------------ self.usb_uart_converter.power.decoupled.decouple().capacitance.constrain_subset( - F.Range.from_center_rel(1 * P.uF, 0.05) + L.Range.from_center_rel(1 * P.uF, 0.05) ) # TODO: per pin self.vbus_fused.connect_via(self.ldo, pwr_3v3) diff --git a/src/faebryk/library/Crystal_Oscillator.py b/src/faebryk/library/Crystal_Oscillator.py index ccdd1afd..c35646b3 100644 --- a/src/faebryk/library/Crystal_Oscillator.py +++ b/src/faebryk/library/Crystal_Oscillator.py @@ -2,8 +2,6 @@ # SPDX-License-Identifier: MIT -from copy import copy - import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L @@ -26,11 +24,11 @@ class Crystal_Oscillator(Module): # ---------------------------------------- # https://blog.adafruit.com/2012/01/24/choosing-the-right-crystal-and-caps-for-your-design/ # http://www.st.com/internet/com/TECHNICAL_RESOURCES/TECHNICAL_LITERATURE/APPLICATION_NOTE/CD00221665.pdf - _STRAY_CAPACITANCE = F.Range(1 * P.pF, 5 * P.pF) + _STRAY_CAPACITANCE = L.Range(1 * P.pF, 5 * P.pF) @L.rt_field def capacitance(self): - return (self.crystal.load_capacitance - copy(self._STRAY_CAPACITANCE)) * 2 + return (self.crystal.load_capacitance - self._STRAY_CAPACITANCE) * 2 def __preinit__(self): for cap in self.capacitors: diff --git a/src/faebryk/library/ElectricLogicGates.py b/src/faebryk/library/ElectricLogicGates.py index dc3d78fe..64386f3f 100644 --- a/src/faebryk/library/ElectricLogicGates.py +++ b/src/faebryk/library/ElectricLogicGates.py @@ -3,23 +3,22 @@ import faebryk.library._F as F from 
faebryk.core.core import Namespace +from faebryk.libs.library import L class ElectricLogicGates(Namespace): class OR(F.ElectricLogicGate): def __init__(self, input_cnt: F.Constant[int]): - super().__init__(input_cnt, F.Constant(1), F.LogicGate.can_logic_or_gate()) + super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_or_gate()) class NOR(F.ElectricLogicGate): def __init__(self, input_cnt: F.Constant[int]): - super().__init__(input_cnt, F.Constant(1), F.LogicGate.can_logic_nor_gate()) + super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_nor_gate()) class NAND(F.ElectricLogicGate): def __init__(self, input_cnt: F.Constant[int]): - super().__init__( - input_cnt, F.Constant(1), F.LogicGate.can_logic_nand_gate() - ) + super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_nand_gate()) class XOR(F.ElectricLogicGate): def __init__(self, input_cnt: F.Constant[int]): - super().__init__(input_cnt, F.Constant(1), F.LogicGate.can_logic_xor_gate()) + super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_xor_gate()) diff --git a/src/faebryk/library/I2C.py b/src/faebryk/library/I2C.py index 02759fa2..9dfdf134 100644 --- a/src/faebryk/library/I2C.py +++ b/src/faebryk/library/I2C.py @@ -46,4 +46,4 @@ class SpeedMode(Enum): @staticmethod def define_max_frequency_capability(mode: SpeedMode): - return F.Range(I2C.SpeedMode.low_speed, mode) + return L.Range(I2C.SpeedMode.low_speed, mode) diff --git a/src/faebryk/library/LogicGates.py b/src/faebryk/library/LogicGates.py index 16c0ef08..41653d26 100644 --- a/src/faebryk/library/LogicGates.py +++ b/src/faebryk/library/LogicGates.py @@ -3,23 +3,22 @@ import faebryk.library._F as F from faebryk.core.core import Namespace +from faebryk.libs.library import L class LogicGates(Namespace): class OR(F.LogicGate): def __init__(self, input_cnt: F.Constant[int]): - super().__init__(input_cnt, F.Constant(1), F.LogicGate.can_logic_or_gate()) + super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_or_gate()) class NOR(F.LogicGate): def __init__(self, input_cnt: F.Constant[int]): - super().__init__(input_cnt, F.Constant(1), F.LogicGate.can_logic_nor_gate()) + super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_nor_gate()) class NAND(F.LogicGate): def __init__(self, input_cnt: F.Constant[int]): - super().__init__( - input_cnt, F.Constant(1), F.LogicGate.can_logic_nand_gate() - ) + super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_nand_gate()) class XOR(F.LogicGate): def __init__(self, input_cnt: F.Constant[int]): - super().__init__(input_cnt, F.Constant(1), F.LogicGate.can_logic_xor_gate()) + super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_xor_gate()) diff --git a/src/faebryk/library/RP2040.py b/src/faebryk/library/RP2040.py index b31c3b8d..01c17d6c 100644 --- a/src/faebryk/library/RP2040.py +++ b/src/faebryk/library/RP2040.py @@ -46,7 +46,7 @@ def __preinit__(self): self.power_out.voltage.constrain_subset( L.Range.from_center_rel(1.1 * P.V, 0.05) ) - self.power_in.voltage.constrain_subset(F.Range(1.8 * P.V, 3.3 * P.V)) + self.power_in.voltage.constrain_subset(L.Range(1.8 * P.V, 3.3 * P.V)) @L.rt_field def bridge(self): @@ -112,7 +112,7 @@ def __preinit__(self): self.power_core.voltage.constrain_subset( L.Range.from_center_rel(1.1 * P.V, 0.05) ) - self.power_io.voltage.constrain_subset(F.Range(1.8 * P.V, 3.3 * P.V)) + self.power_io.voltage.constrain_subset(L.Range(1.8 * P.V, 3.3 * P.V)) F.ElectricLogic.connect_all_module_references(self, gnd_only=True) 
F.ElectricLogic.connect_all_node_references( diff --git a/src/faebryk/library/RP2040_ReferenceDesign.py b/src/faebryk/library/RP2040_ReferenceDesign.py index d477a3db..9c14b8ad 100644 --- a/src/faebryk/library/RP2040_ReferenceDesign.py +++ b/src/faebryk/library/RP2040_ReferenceDesign.py @@ -137,7 +137,7 @@ def __preinit__(self): L.Range.from_center_rel(100 * P.nF, 0.05) ) power_3v3.decoupled.decouple().capacitance.constrain_subset( - F.Range.from_center_rel(10 * P.uF, 0.05) + L.Range.from_center_rel(10 * P.uF, 0.05) ) self.rp2040.power_core.decoupled.decouple().specialize( F.MultiCapacitor(2) diff --git a/src/faebryk/library/RS485_Bus_Protection.py b/src/faebryk/library/RS485_Bus_Protection.py index e583a44d..6a9e0c8c 100644 --- a/src/faebryk/library/RS485_Bus_Protection.py +++ b/src/faebryk/library/RS485_Bus_Protection.py @@ -188,7 +188,7 @@ def __preinit__(self): L.Range.from_center_rel(2.7 * P.ohm, 0.05) ) self.current_limmiter_resistors[0].rated_power.constrain_subset( - F.Range.lower_bound(500 * P.mW) + L.Range.lower_bound(500 * P.mW) ) self.current_limmiter_resistors[1].resistance.constrain_subset( L.Range.from_center_rel(2.7 * P.ohm, 0.05) diff --git a/src/faebryk/library/Resistor.py b/src/faebryk/library/Resistor.py index 1151cc42..affae85d 100644 --- a/src/faebryk/library/Resistor.py +++ b/src/faebryk/library/Resistor.py @@ -49,7 +49,7 @@ def replace_zero(m: Module): if not F.Constant(0.0 * P.ohm).is_subset_of(r): raise PickError("", self) - self.resistance.override(F.Constant(0.0 * P.ohm)) + self.resistance.constrain_subset(L.Single(0.0 * P.ohm)) self.unnamed[0].connect(self.unnamed[1]) self.add(has_part_picked_remove()) From 3d0fafcea9e7bc75ab12450d318b18949113caf7 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 11:16:53 +0200 Subject: [PATCH 20/80] Fix ParameterOperatable param types; Run ruff --- src/faebryk/core/parameter.py | 265 +++++------------- .../library/Diodes_Incorporated_AP2552W6_7.py | 2 +- src/faebryk/library/FilterElectricalLC.py | 2 +- src/faebryk/library/LDO.py | 1 - src/faebryk/library/Potentiometer.py | 1 - src/faebryk/library/USB3_connector.py | 1 - src/faebryk/libs/sets.py | 2 +- 7 files changed, 72 insertions(+), 202 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 16cdf8ca..d000756a 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -3,126 +3,113 @@ import logging from enum import Enum, auto +from typing import Protocol from faebryk.core.core import Namespace from faebryk.core.node import Node, f_field from faebryk.libs.sets import Range, Set_ -from faebryk.libs.units import HasUnit, P, Quantity, Unit, dimensionless +from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless logger = logging.getLogger(__name__) class ParameterOperatable(Protocol): - type PE = ParameterOperatable | int | float | Quantity | Set_ + type Number = int | float | Quantity - def alias_is(self, other: PE): - pass + type NumberLike = ParameterOperatable | Number | Set_[Number] + type BooleanLike = ParameterOperatable | bool | Set_[bool] + type EnumLike = ParameterOperatable | Enum | Set_[Enum] - def constrain_le(self, other: PE): - pass + type All = NumberLike | BooleanLike | EnumLike + type Sets = All - def constrain_ge(self, other: PE): - pass + def alias_is(self, other: All): ... - def constrain_lt(self, other: PE): - pass + def constrain_le(self, other: NumberLike): ... - def constrain_gt(self, other: PE): - pass + def constrain_ge(self, other: NumberLike): ... 
- def constrain_ne(self, other: PE): - pass + def constrain_lt(self, other: NumberLike): ... - def constrain_subset(self, other: PE): - pass + def constrain_gt(self, other: NumberLike): ... - def constrain_superset(self, other: PE): - pass + def constrain_ne(self, other: NumberLike): ... - def operation_add(self, other: PE) -> Expression: - pass + def constrain_subset(self, other: Sets): ... - def operation_subtract(self, other: PE) -> Expression: - pass + def constrain_superset(self, other: Sets): ... - def operation_multiply(self, other: PE) -> Expression: - pass + def operation_add(self, other: NumberLike) -> "Expression": ... - def operation_divide(self, other: PE) -> Expression: - pass + def operation_subtract(self, other: NumberLike) -> "Expression": ... - def operation_power(self, other: PE) -> Expression: - pass + def operation_multiply(self, other: NumberLike) -> "Expression": ... - def operation_log(self) -> Expression: - pass + def operation_divide(self: NumberLike, other: NumberLike) -> "Expression": ... - def operation_sqrt(self) -> Expression: - pass + def operation_power(self, other: NumberLike) -> "Expression": ... - def operation_abs(self) -> Expression: - pass + def operation_log(self) -> "Expression": ... - def operation_floor(self) -> Expression: - pass + def operation_sqrt(self) -> "Expression": ... - def operation_ceil(self) -> Expression: - pass + def operation_abs(self) -> "Expression": ... - def operation_round(self) -> Expression: - pass + def operation_floor(self) -> "Expression": ... - def operation_sin(self) -> Expression: - pass + def operation_ceil(self) -> "Expression": ... - def operation_cos(self) -> Expression: - pass + def operation_round(self) -> "Expression": ... - def operation_union(self, other: PE) -> Expression: - pass + def operation_sin(self) -> "Expression": ... - def operation_intersection(self, other: PE) -> Expression: - pass + def operation_cos(self) -> "Expression": ... - def operation_difference(self, other: PE) -> Expression: - pass + def operation_union(self, other: Sets) -> "Expression": ... - def operation_symmetric_difference(self, other: PE) -> Expression: - pass + def operation_intersection(self, other: Sets) -> "Expression": ... - def operation_and(self, other: PE) -> Expression: - pass + def operation_difference(self, other: Sets) -> "Expression": ... - def operation_or(self, other: PE) -> Expression: - pass + def operation_symmetric_difference(self, other: Sets) -> "Expression": ... - def operation_not(self) -> Expression: - pass + def operation_and(self, other: BooleanLike) -> "Expression": ... - def operation_xor(self, other: PE) -> Expression: - pass + def operation_or(self, other: BooleanLike) -> "Expression": ... - def operation_implies(self, other: PE) -> Expression: - pass + def operation_not(self) -> "Expression": ... + + def operation_xor(self, other: BooleanLike) -> "Expression": ... + + def operation_implies(self, other: BooleanLike) -> "Expression": ... 
# ---------------------------------------------------------------------------------- - def __add__(self, other: PE): + def __add__(self, other: NumberLike): return self.operation_add(other) - def __sub__(self, other: PE): + def __radd__(self, other: NumberLike): + return self.operation_add(other) + + def __sub__(self, other: NumberLike): # TODO could be set difference return self.operation_subtract(other) - def __mul__(self, other: PE): + def __rsub__(self, other: NumberLike): + return self.operation_subtract(other) + + def __mul__(self, other: NumberLike): return self.operation_multiply(other) - def __truediv__(self, other: PE): - return self.operation_divide(other) + def __rmul__(self, other: NumberLike): + return self.operation_multiply(other) - def __rtruediv__(self, other: PE): + def __truediv__(self, other: NumberLike): return self.operation_divide(other) - def __pow__(self, other: PE): + def __rtruediv__(self, other: NumberLike): + return type(self).operation_divide(other, self) + + def __pow__(self, other: NumberLike): return self.operation_power(other) def __abs__(self): @@ -131,15 +118,24 @@ def __abs__(self): def __round__(self): return self.operation_round() - def __and__(self, other: PE): + def __and__(self, other: BooleanLike): # TODO could be set intersection return self.operation_and(other) - def __or__(self, other: PE): + def __rand__(self, other: BooleanLike): + return self.operation_and(other) + + def __or__(self, other: BooleanLike): # TODO could be set union return self.operation_or(other) - def __xor__(self, other: PE): + def __ror__(self, other: BooleanLike): + return self.operation_or(other) + + def __xor__(self, other: BooleanLike): + return self.operation_xor(other) + + def __rxor__(self, other: BooleanLike): return self.operation_xor(other) @@ -519,131 +515,8 @@ def __init__( self.likely_constrained = likely_constrained # ---------------------------------------------------------------------------------- - type PE = ParameterOperatable.PE - - def alias_is(self, other: PE): - pass - - def constrain_le(self, other: PE): - pass - - def constrain_ge(self, other: PE): - pass - - def constrain_lt(self, other: PE): - pass - - def constrain_gt(self, other: PE): - pass - - def constrain_ne(self, other: PE): - pass - - def constrain_subset(self, other: PE): - pass - - def operation_add(self, other: PE) -> Expression: - pass - - def operation_subtract(self, other: PE) -> Expression: - pass - - def operation_multiply(self, other: PE) -> Expression: - pass - - def operation_divide(self, other: PE) -> Expression: - pass - - def operation_power(self, other: PE) -> Expression: - pass - - def operation_log(self) -> Expression: - pass - - def operation_sqrt(self) -> Expression: - pass - - def operation_abs(self) -> Expression: - pass - - def operation_floor(self) -> Expression: - pass - - def operation_ceil(self) -> Expression: - pass - - def operation_round(self) -> Expression: - pass - - def operation_sin(self) -> Expression: - pass - - def operation_cos(self) -> Expression: - pass - - def operation_union(self, other: PE) -> Expression: - pass - - def operation_intersection(self, other: PE) -> Expression: - pass - - def operation_difference(self, other: PE) -> Expression: - pass - - def operation_symmetric_difference(self, other: PE) -> Expression: - pass - - def operation_and(self, other: PE) -> Expression: - pass - - def operation_or(self, other: PE) -> Expression: - pass - - def operation_not(self) -> Expression: - pass - - def operation_xor(self, other: PE) -> Expression: - 
pass - - def operation_implies(self, other: PE) -> Expression: - pass - + # TODO implement ParameterOperatable functions # ---------------------------------------------------------------------------------- - def __add__(self, other: PE): - return self.operation_add(other) - - def __sub__(self, other: PE): - # TODO could be set difference - return self.operation_subtract(other) - - def __mul__(self, other: PE): - return self.operation_multiply(other) - - def __truediv__(self, other: PE): - return self.operation_divide(other) - - def __rtruediv__(self, other: PE): - return self.operation_divide(other) - - def __pow__(self, other: PE): - return self.operation_power(other) - - def __abs__(self): - return self.operation_abs() - - def __round__(self): - return self.operation_round() - - def __and__(self, other: PE): - # TODO could be set intersection - return self.operation_and(other) - - def __or__(self, other: PE): - # TODO could be set union - return self.operation_or(other) - - def __xor__(self, other: PE): - return self.operation_xor(other) p_field = f_field(Parameter) diff --git a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py index 245e1e70..efb3a148 100644 --- a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py +++ b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py @@ -4,7 +4,7 @@ import logging import faebryk.library._F as F # noqa: F401 -from faebryk.core.module import Module, ModuleException +from faebryk.core.module import Module from faebryk.core.parameter import ParameterOperatable from faebryk.exporters.pcb.layout.absolute import LayoutAbsolute from faebryk.exporters.pcb.layout.extrude import LayoutExtrude diff --git a/src/faebryk/library/FilterElectricalLC.py b/src/faebryk/library/FilterElectricalLC.py index 713653a0..95f8f2e5 100644 --- a/src/faebryk/library/FilterElectricalLC.py +++ b/src/faebryk/library/FilterElectricalLC.py @@ -31,7 +31,7 @@ def _construct(_self): C = self.capacitor.capacitance fc = self.cutoff_frequency - fc.alias_is(1 / (2 * math.pi * math.sqrt(C * Li))) + fc.alias_is(1 / (2 * math.pi * (C * Li).operation_sqrt())) # low pass self.in_.signal.connect_via( diff --git a/src/faebryk/library/LDO.py b/src/faebryk/library/LDO.py index c5b86eca..c6791d73 100644 --- a/src/faebryk/library/LDO.py +++ b/src/faebryk/library/LDO.py @@ -1,7 +1,6 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -import math from enum import Enum, auto import faebryk.library._F as F diff --git a/src/faebryk/library/Potentiometer.py b/src/faebryk/library/Potentiometer.py index 4cf32cb4..fc017c4d 100644 --- a/src/faebryk/library/Potentiometer.py +++ b/src/faebryk/library/Potentiometer.py @@ -1,7 +1,6 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from tkinter import W import faebryk.library._F as F from faebryk.core.module import Module diff --git a/src/faebryk/library/USB3_connector.py b/src/faebryk/library/USB3_connector.py index 653f898d..e0a730d9 100644 --- a/src/faebryk/library/USB3_connector.py +++ b/src/faebryk/library/USB3_connector.py @@ -6,7 +6,6 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P logger = logging.getLogger(__name__) diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 50f4ab1c..c3ae1cae 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -4,7 +4,7 @@ from abc import ABC, abstractmethod from typing import Any, 
Protocol, Self
 
-from faebryk.libs.units import HasUnit, P, Unit, dimensionless
+from faebryk.libs.units import HasUnit, Unit, dimensionless
 
From a74bd73187078c119d15be165f33257f1666b206 Mon Sep 17 00:00:00 2001
From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com>
Date: Wed, 9 Oct 2024 11:25:48 +0200
Subject: [PATCH 21/80] Library: Remove F.Constant in function signatures

---
 src/faebryk/library/ElectricLogicGate.py  |  4 ++--
 src/faebryk/library/ElectricLogicGates.py | 16 ++++++++--------
 src/faebryk/library/LogicGate.py          |  4 ++--
 src/faebryk/library/LogicGates.py         | 16 ++++++++--------
 4 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/src/faebryk/library/ElectricLogicGate.py b/src/faebryk/library/ElectricLogicGate.py
index 7c4b80bf..ba92063b 100644
--- a/src/faebryk/library/ElectricLogicGate.py
+++ b/src/faebryk/library/ElectricLogicGate.py
@@ -10,8 +10,8 @@ class ElectricLogicGate(F.LogicGate):
 
     def __init__(
         self,
-        input_cnt: F.Constant[int],
-        output_cnt: F.Constant[int],
+        input_cnt: int,
+        output_cnt: int,
         *functions: TraitImpl,
     ) -> None:
         self.input_cnt = input_cnt
diff --git a/src/faebryk/library/ElectricLogicGates.py b/src/faebryk/library/ElectricLogicGates.py
index 64386f3f..333f207a 100644
--- a/src/faebryk/library/ElectricLogicGates.py
+++ b/src/faebryk/library/ElectricLogicGates.py
@@ -8,17 +8,17 @@ class ElectricLogicGates(Namespace):
 
     class OR(F.ElectricLogicGate):
-        def __init__(self, input_cnt: F.Constant[int]):
-            super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_or_gate())
+        def __init__(self, input_cnt: int):
+            super().__init__(input_cnt, 1, F.LogicGate.can_logic_or_gate())
 
     class NOR(F.ElectricLogicGate):
-        def __init__(self, input_cnt: F.Constant[int]):
-            super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_nor_gate())
+        def __init__(self, input_cnt: int):
+            super().__init__(input_cnt, 1, F.LogicGate.can_logic_nor_gate())
 
     class NAND(F.ElectricLogicGate):
-        def __init__(self, input_cnt: F.Constant[int]):
-            super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_nand_gate())
+        def __init__(self, input_cnt: int):
+            super().__init__(input_cnt, 1, F.LogicGate.can_logic_nand_gate())
 
     class XOR(F.ElectricLogicGate):
-        def __init__(self, input_cnt: F.Constant[int]):
-            super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_xor_gate())
+        def __init__(self, input_cnt: int):
+            super().__init__(input_cnt, 1, F.LogicGate.can_logic_xor_gate())
diff --git a/src/faebryk/library/LogicGate.py b/src/faebryk/library/LogicGate.py
index d9917341..5d7c4432 100644
--- a/src/faebryk/library/LogicGate.py
+++ b/src/faebryk/library/LogicGate.py
@@ -49,8 +49,8 @@ def xor(self, *ins: F.Logic):
 
     def __init__(
         self,
-        input_cnt: F.Constant[int],
-        output_cnt: F.Constant[int],
+        input_cnt: int,
+        output_cnt: int,
         *functions: TraitImpl,
     ) -> None:
         super().__init__()
diff --git a/src/faebryk/library/LogicGates.py b/src/faebryk/library/LogicGates.py
index 16c0ef08..41653d26 100644
--- a/src/faebryk/library/LogicGates.py
+++ b/src/faebryk/library/LogicGates.py
@@ -8,17 +8,17 @@ class LogicGates(Namespace):
 
     class OR(F.LogicGate):
-        def __init__(self, input_cnt: F.Constant[int]):
-            super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_or_gate())
+        def __init__(self, input_cnt: int):
+            super().__init__(input_cnt, 1, F.LogicGate.can_logic_or_gate())
 
     class NOR(F.LogicGate):
-        def 
__init__(self, input_cnt: int): + super().__init__(input_cnt, 1, F.LogicGate.can_logic_nor_gate()) class NAND(F.LogicGate): - def __init__(self, input_cnt: F.Constant[int]): - super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_nand_gate()) + def __init__(self, input_cnt: int): + super().__init__(input_cnt, 1, F.LogicGate.can_logic_nand_gate()) class XOR(F.LogicGate): - def __init__(self, input_cnt: F.Constant[int]): - super().__init__(input_cnt, L.Single(1), F.LogicGate.can_logic_xor_gate()) + def __init__(self, input_cnt: int): + super().__init__(input_cnt, 1, F.LogicGate.can_logic_xor_gate()) From 6a6975d5486e24fcd9d80316ecb64823be276472 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Wed, 9 Oct 2024 11:57:53 +0200 Subject: [PATCH 22/80] Library: Take ParameterOperatable in function signatures --- src/faebryk/library/LED.py | 6 +++--- src/faebryk/library/MultiCapacitor.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/faebryk/library/LED.py b/src/faebryk/library/LED.py index 6f98f62a..7cb81b79 100644 --- a/src/faebryk/library/LED.py +++ b/src/faebryk/library/LED.py @@ -5,7 +5,7 @@ from enum import Enum, auto import faebryk.library._F as F -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import Parameter, ParameterOperatable from faebryk.libs.library import L from faebryk.libs.units import P, Quantity @@ -49,12 +49,12 @@ def __preinit__(self): self.current.alias_is(self.brightness / self.max_brightness * self.max_current) self.brightness.constrain_le(self.max_brightness) - def set_intensity(self, intensity: Parameter[Quantity]) -> None: + def set_intensity(self, intensity: ParameterOperatable) -> None: self.brightness.alias_is(intensity * self.max_brightness) def connect_via_current_limiting_resistor( self, - input_voltage: Parameter[Quantity], + input_voltage: ParameterOperatable, resistor: F.Resistor, target: F.Electrical, low_side: bool, diff --git a/src/faebryk/library/MultiCapacitor.py b/src/faebryk/library/MultiCapacitor.py index c71e7755..fda8699a 100644 --- a/src/faebryk/library/MultiCapacitor.py +++ b/src/faebryk/library/MultiCapacitor.py @@ -4,7 +4,7 @@ import logging import faebryk.library._F as F # noqa: F401 -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import Parameter, ParameterOperatable from faebryk.libs.library import L # noqa: F401 from faebryk.libs.units import Quantity from faebryk.libs.util import times # noqa: F401 @@ -45,11 +45,11 @@ def __preinit__(self): # ------------------------------------ self.capacitance.alias_is(sum(c.capacitance for c in self.capacitors)) - def set_equal_capacitance(self, capacitance: Parameter[Quantity]): + def set_equal_capacitance(self, capacitance: ParameterOperatable): op = capacitance / self._count self.set_equal_capacitance_each(op) - def set_equal_capacitance_each(self, capacitance: Parameter[Quantity]): + def set_equal_capacitance_each(self, capacitance: ParameterOperatable): for c in self.capacitors: c.capacitance.constrain_subset(capacitance) From 76efaab2e3aff2ba9239551502cedb5224755cc5 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 13:17:40 +0200 Subject: [PATCH 23/80] remove singles --- src/faebryk/core/parameter.py | 4 +++- src/faebryk/library/ButtonCell.py | 18 +++++++++--------- src/faebryk/library/CD4011.py | 2 +- src/faebryk/library/CH344Q_ReferenceDesign.py | 4 +--- .../library/ESP32_C3_MINI_1_ReferenceDesign.py | 4 +--- 
src/faebryk/library/FilterElectricalLC.py | 4 ++-- src/faebryk/library/FilterElectricalRC.py | 4 ++-- src/faebryk/library/HLK_LD2410B_P.py | 2 +- src/faebryk/library/Logic.py | 2 +- src/faebryk/library/PM1006.py | 2 +- src/faebryk/library/PowerSwitchMOSFET.py | 11 ++++------- src/faebryk/library/RP2040_ReferenceDesign.py | 2 +- src/faebryk/library/RS485_Bus_Protection.py | 10 +++------- src/faebryk/library/Resistor.py | 2 +- src/faebryk/library/SNx4LVC541A.py | 2 +- src/faebryk/library/SP3243E.py | 2 +- src/faebryk/library/TD541S485H.py | 2 +- .../library/USB2514B_ReferenceDesign.py | 8 +++----- 18 files changed, 37 insertions(+), 48 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index d000756a..74015280 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -3,6 +3,7 @@ import logging from enum import Enum, auto +from types import NotImplementedType from typing import Protocol from faebryk.core.core import Namespace @@ -14,7 +15,8 @@ class ParameterOperatable(Protocol): - type Number = int | float | Quantity + type QuantityLike = Quantity | NotImplementedType + type Number = int | float | QuantityLike type NumberLike = ParameterOperatable | Number | Set_[Number] type BooleanLike = ParameterOperatable | bool | Set_[bool] diff --git a/src/faebryk/library/ButtonCell.py b/src/faebryk/library/ButtonCell.py index 4296a896..d27aa71b 100644 --- a/src/faebryk/library/ButtonCell.py +++ b/src/faebryk/library/ButtonCell.py @@ -5,7 +5,7 @@ from enum import IntEnum, StrEnum import faebryk.library._F as F -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import ParameterOperatable from faebryk.libs.library import L from faebryk.libs.units import P @@ -21,15 +21,15 @@ class Material(StrEnum): NickelMetalHydride = "H" @property - def voltage(self) -> Parameter: + def voltage(self) -> ParameterOperatable.NumberLike: return { - self.Alkaline: L.Single(1.5 * P.V), - self.SilverOxide: L.Single(1.55 * P.V), - self.ZincAir: L.Single(1.65 * P.V), - self.Lithium: L.Single(3.0 * P.V), - self.Mercury: L.Single(1.35 * P.V), - self.NickelCadmium: L.Single(1.2 * P.V), - self.NickelMetalHydride: L.Single(1.2 * P.V), + self.Alkaline: 1.5 * P.V, + self.SilverOxide: 1.55 * P.V, + self.ZincAir: 1.65 * P.V, + self.Lithium: 3.0 * P.V, + self.Mercury: 1.35 * P.V, + self.NickelCadmium: 1.2 * P.V, + self.NickelMetalHydride: 1.2 * P.V, }[self] class Shape(StrEnum): diff --git a/src/faebryk/library/CD4011.py b/src/faebryk/library/CD4011.py index 160d2e0d..35506068 100644 --- a/src/faebryk/library/CD4011.py +++ b/src/faebryk/library/CD4011.py @@ -8,7 +8,7 @@ class CD4011(F.Logic74xx): def __init__(self): super().__init__( - [lambda: F.ElectricLogicGates.NAND(input_cnt=L.Single(2)) for _ in range(4)] + [lambda: F.ElectricLogicGates.NAND(input_cnt=2) for _ in range(4)] ) simple_value_representation = L.f_field(F.has_simple_value_representation_defined)( diff --git a/src/faebryk/library/CH344Q_ReferenceDesign.py b/src/faebryk/library/CH344Q_ReferenceDesign.py index 0b97fdc9..7d996c9e 100644 --- a/src/faebryk/library/CH344Q_ReferenceDesign.py +++ b/src/faebryk/library/CH344Q_ReferenceDesign.py @@ -134,9 +134,7 @@ def __preinit__(self): ) # reset lowpass - self.reset_lowpass.response.constrain_subset( - L.Single(F.Filter.Response.LOWPASS) - ) + self.reset_lowpass.response.constrain_subset(F.Filter.Response.LOWPASS) self.reset_lowpass.cutoff_frequency.constrain_subset( L.Range.from_center_rel(100 * P.Hz, 0.1) ) diff --git 
a/src/faebryk/library/ESP32_C3_MINI_1_ReferenceDesign.py b/src/faebryk/library/ESP32_C3_MINI_1_ReferenceDesign.py index 3542ba56..646d7af8 100644 --- a/src/faebryk/library/ESP32_C3_MINI_1_ReferenceDesign.py +++ b/src/faebryk/library/ESP32_C3_MINI_1_ReferenceDesign.py @@ -27,9 +27,7 @@ def __preinit__(self): self.lp_filter.cutoff_frequency.constrain_subset( L.Range(100 * P.Hz, 200 * P.Hz) ) - self.lp_filter.response.constrain_subset( - L.Single(F.Filter.Response.LOWPASS) - ) + self.lp_filter.response.constrain_subset(F.Filter.Response.LOWPASS) esp32_c3_mini_1: F.ESP32_C3_MINI_1 boot_switch: DebouncedButton diff --git a/src/faebryk/library/FilterElectricalLC.py b/src/faebryk/library/FilterElectricalLC.py index 95f8f2e5..7fe87c18 100644 --- a/src/faebryk/library/FilterElectricalLC.py +++ b/src/faebryk/library/FilterElectricalLC.py @@ -24,8 +24,8 @@ class _(F.has_construction_dependency.impl()): def _construct(_self): if F.Constant(F.Filter.Response.LOWPASS).is_subset_of(self.response): # TODO other orders & types - self.order.constrain_subset(L.Single(2)) - self.response.constrain_subset(L.Single(F.Filter.Response.LOWPASS)) + self.order.constrain_subset(2) + self.response.constrain_subset(F.Filter.Response.LOWPASS) Li = self.inductor.inductance C = self.capacitor.capacitance diff --git a/src/faebryk/library/FilterElectricalRC.py b/src/faebryk/library/FilterElectricalRC.py index 4fa903c2..14dfee79 100644 --- a/src/faebryk/library/FilterElectricalRC.py +++ b/src/faebryk/library/FilterElectricalRC.py @@ -31,8 +31,8 @@ class _(F.has_construction_dependency.impl()): def _construct(_self): if F.Constant(F.Filter.Response.LOWPASS).is_subset_of(self.response): # TODO other orders, types - self.order.constrain_subset(L.Single(1)) - self.response.constrain_subset(L.Single(F.Filter.Response.LOWPASS)) + self.order.constrain_subset(1) + self.response.constrain_subset(F.Filter.Response.LOWPASS) R = self.resistor.resistance C = self.capacitor.capacitance diff --git a/src/faebryk/library/HLK_LD2410B_P.py b/src/faebryk/library/HLK_LD2410B_P.py index 69b2358d..063c1fc7 100644 --- a/src/faebryk/library/HLK_LD2410B_P.py +++ b/src/faebryk/library/HLK_LD2410B_P.py @@ -80,7 +80,7 @@ def attach_to_footprint(self): ) def __preinit__(self): - self.uart.baud.constrain_le(L.Single(256 * P.kbaud)) + self.uart.baud.constrain_le(256 * P.kbaud) # connect all logic references @L.rt_field diff --git a/src/faebryk/library/Logic.py b/src/faebryk/library/Logic.py index 0567a1dc..abff791e 100644 --- a/src/faebryk/library/Logic.py +++ b/src/faebryk/library/Logic.py @@ -9,4 +9,4 @@ class Logic(F.Signal): state = L.p_field(domain=L.Domains.BOOL()) def set(self, on: bool): - self.state.constrain_subset(L.Single(on)) + self.state.constrain_subset(on) diff --git a/src/faebryk/library/PM1006.py b/src/faebryk/library/PM1006.py index fec6dc69..bb4f37e9 100644 --- a/src/faebryk/library/PM1006.py +++ b/src/faebryk/library/PM1006.py @@ -62,4 +62,4 @@ def get_config(self) -> dict: def __preinit__(self): self.power.voltage.constrain_subset(L.Range.from_center(5 * P.V, 0.2 * P.V)) - self.data.baud.constrain_subset(L.Single(9600 * P.baud)) + self.data.baud.constrain_subset(9600 * P.baud) diff --git a/src/faebryk/library/PowerSwitchMOSFET.py b/src/faebryk/library/PowerSwitchMOSFET.py index e4fe6907..cc25e598 100644 --- a/src/faebryk/library/PowerSwitchMOSFET.py +++ b/src/faebryk/library/PowerSwitchMOSFET.py @@ -2,7 +2,6 @@ # SPDX-License-Identifier: MIT import faebryk.library._F as F -from faebryk.libs.library import L class 
PowerSwitchMOSFET(F.PowerSwitch): @@ -23,14 +22,12 @@ def __init__(self, lowside: bool, normally_closed: bool) -> None: def __preinit__(self): self.mosfet.channel_type.constrain_subset( - L.Single( - F.MOSFET.ChannelType.N_CHANNEL - if self._lowside - else F.MOSFET.ChannelType.P_CHANNEL - ) + F.MOSFET.ChannelType.N_CHANNEL + if self._lowside + else F.MOSFET.ChannelType.P_CHANNEL ) self.mosfet.saturation_type.constrain_subset( - L.Single(F.MOSFET.SaturationType.ENHANCEMENT) + F.MOSFET.SaturationType.ENHANCEMENT ) # pull gate diff --git a/src/faebryk/library/RP2040_ReferenceDesign.py b/src/faebryk/library/RP2040_ReferenceDesign.py index 9c14b8ad..973ba16a 100644 --- a/src/faebryk/library/RP2040_ReferenceDesign.py +++ b/src/faebryk/library/RP2040_ReferenceDesign.py @@ -118,7 +118,7 @@ def __preinit__(self): ) # Flash - self.flash.memory_size.constrain_subset(L.Single(16 * P.Mbit)) + self.flash.memory_size.constrain_subset(16 * P.Mbit) self.flash.decoupled.decouple().capacitance.constrain_subset( L.Range.from_center_rel(100 * P.nF, 0.05) ) diff --git a/src/faebryk/library/RS485_Bus_Protection.py b/src/faebryk/library/RS485_Bus_Protection.py index 6a9e0c8c..2ca053f0 100644 --- a/src/faebryk/library/RS485_Bus_Protection.py +++ b/src/faebryk/library/RS485_Bus_Protection.py @@ -187,15 +187,11 @@ def __preinit__(self): self.current_limmiter_resistors[0].resistance.constrain_subset( L.Range.from_center_rel(2.7 * P.ohm, 0.05) ) - self.current_limmiter_resistors[0].rated_power.constrain_subset( - L.Range.lower_bound(500 * P.mW) - ) + self.current_limmiter_resistors[0].rated_power.constrain_ge(500 * P.mW) self.current_limmiter_resistors[1].resistance.constrain_subset( L.Range.from_center_rel(2.7 * P.ohm, 0.05) ) - self.current_limmiter_resistors[1].rated_power.constrain_ge( - L.Single(500 * P.mW) - ) + self.current_limmiter_resistors[1].rated_power.constrain_ge(500 * P.mW) self.gnd_couple_resistor.resistance.constrain_subset( L.Range.from_center_rel(1 * P.Mohm, 0.05) @@ -203,7 +199,7 @@ def __preinit__(self): self.gnd_couple_capacitor.capacitance.constrain_subset( L.Range.from_center_rel(1 * P.uF, 0.05) ) - self.gnd_couple_capacitor.rated_voltage.constrain_ge(L.Single(2 * P.kV)) + self.gnd_couple_capacitor.rated_voltage.constrain_ge(2 * P.kV) # ---------------------------------------- # Connections diff --git a/src/faebryk/library/Resistor.py b/src/faebryk/library/Resistor.py index affae85d..aab3b698 100644 --- a/src/faebryk/library/Resistor.py +++ b/src/faebryk/library/Resistor.py @@ -49,7 +49,7 @@ def replace_zero(m: Module): if not F.Constant(0.0 * P.ohm).is_subset_of(r): raise PickError("", self) - self.resistance.constrain_subset(L.Single(0.0 * P.ohm)) + self.resistance.constrain_subset(0.0 * P.ohm) self.unnamed[0].connect(self.unnamed[1]) self.add(has_part_picked_remove()) diff --git a/src/faebryk/library/SNx4LVC541A.py b/src/faebryk/library/SNx4LVC541A.py index 670cd69b..bb635017 100644 --- a/src/faebryk/library/SNx4LVC541A.py +++ b/src/faebryk/library/SNx4LVC541A.py @@ -38,7 +38,7 @@ def __preinit__(self): # ---------------------------------------- # parameters # ---------------------------------------- - self.power.voltage.constrain_le(L.Single(3.6 * P.V)) + self.power.voltage.constrain_le(3.6 * P.V) # ---------------------------------------- # aliases diff --git a/src/faebryk/library/SP3243E.py b/src/faebryk/library/SP3243E.py index dda098d6..11f9018b 100644 --- a/src/faebryk/library/SP3243E.py +++ b/src/faebryk/library/SP3243E.py @@ -108,7 +108,7 @@ def __preinit__(self): # 
------------------------------------ self.power.voltage.constrain_subset(L.Range(3.0 * P.V, 5.5 * P.V)) - self.uart.base_uart.baud.constrain_le(L.Single(250 * P.kbaud)) + self.uart.base_uart.baud.constrain_le(250 * P.kbaud) self.rs232.get_trait( F.has_single_electric_reference diff --git a/src/faebryk/library/TD541S485H.py b/src/faebryk/library/TD541S485H.py index 57e77c73..2d0e08b1 100644 --- a/src/faebryk/library/TD541S485H.py +++ b/src/faebryk/library/TD541S485H.py @@ -57,7 +57,7 @@ def __preinit__(self): self.power_iso_in.lv.connect(self.power_iso_out.lv) # TODO tolerance - self.power_iso_out.voltage.constrain_superset(L.Single(5 * P.V)) + self.power_iso_out.voltage.constrain_superset(5 * P.V) F.ElectricLogic.connect_all_module_references( self, diff --git a/src/faebryk/library/USB2514B_ReferenceDesign.py b/src/faebryk/library/USB2514B_ReferenceDesign.py index dfd119ed..80e7634c 100644 --- a/src/faebryk/library/USB2514B_ReferenceDesign.py +++ b/src/faebryk/library/USB2514B_ReferenceDesign.py @@ -90,9 +90,7 @@ def __preinit__(self): self.crystal_oscillator.crystal.frequency.constrain_subset( L.Range.from_center_rel(24 * P.MHz, 0.01) ) - self.crystal_oscillator.crystal.frequency_tolerance.constrain_le( - L.Single(50 * P.ppm) - ) + self.crystal_oscillator.crystal.frequency_tolerance.constrain_le(50 * P.ppm) # usb transceiver bias resistor self.bias_resistor.resistance.constrain_subset( @@ -100,7 +98,7 @@ def __preinit__(self): ) for led in [self.suspend_indicator.led, self.power_3v3_indicator]: - led.led.color.constrain_subset(L.Single(F.LED.Color.GREEN)) + led.led.color.constrain_subset(F.LED.Color.GREEN) led.led.brightness.constrain_subset( TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value ) @@ -136,7 +134,7 @@ def __preinit__(self): ) dfp.usb_if.buspower.connect(self.usb_dfp_power_indicator[i].power) self.usb_dfp_power_indicator[i].led.color.constrain_subset( - L.Single(F.LED.Color.YELLOW) + F.LED.Color.YELLOW ) self.usb_dfp_power_indicator[i].led.brightness.constrain_subset( TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value From 66b5a7108850206d509b40d40e429592937ac922 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 13:22:12 +0200 Subject: [PATCH 24/80] remove last param operands --- src/faebryk/library/BJT.py | 4 ++-- src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py | 2 +- src/faebryk/library/LED.py | 4 ++-- src/faebryk/library/MultiCapacitor.py | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/faebryk/library/BJT.py b/src/faebryk/library/BJT.py index 03c7dde3..149cf896 100644 --- a/src/faebryk/library/BJT.py +++ b/src/faebryk/library/BJT.py @@ -22,8 +22,8 @@ class OperationRegion(Enum): SATURATION = auto() CUT_OFF = auto() - doping_type: Parameter[DopingType] - operation_region: Parameter[OperationRegion] + doping_type = L.p_field(domain=L.Domains.ENUM(DopingType)) + operation_region = L.p_field(domain=L.Domains.ENUM(OperationRegion)) emitter: F.Electrical base: F.Electrical diff --git a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py index efb3a148..590d51c3 100644 --- a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py +++ b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py @@ -25,7 +25,7 @@ class Diodes_Incorporated_AP2552W6_7(Module): """ @assert_once - def set_current_limit(self, current: ParameterOperatable) -> None: + def set_current_limit(self, current: ParameterOperatable.NumberLike) -> None: 
self.current_limit.alias_is(current) current_limit_setting_resistor = self.add(F.Resistor()) diff --git a/src/faebryk/library/LED.py b/src/faebryk/library/LED.py index 7cb81b79..94507c29 100644 --- a/src/faebryk/library/LED.py +++ b/src/faebryk/library/LED.py @@ -49,12 +49,12 @@ def __preinit__(self): self.current.alias_is(self.brightness / self.max_brightness * self.max_current) self.brightness.constrain_le(self.max_brightness) - def set_intensity(self, intensity: ParameterOperatable) -> None: + def set_intensity(self, intensity: ParameterOperatable.NumberLike) -> None: self.brightness.alias_is(intensity * self.max_brightness) def connect_via_current_limiting_resistor( self, - input_voltage: ParameterOperatable, + input_voltage: ParameterOperatable.NumberLike, resistor: F.Resistor, target: F.Electrical, low_side: bool, diff --git a/src/faebryk/library/MultiCapacitor.py b/src/faebryk/library/MultiCapacitor.py index fda8699a..9a0aa1d8 100644 --- a/src/faebryk/library/MultiCapacitor.py +++ b/src/faebryk/library/MultiCapacitor.py @@ -50,6 +50,6 @@ def set_equal_capacitance(self, capacitance: ParameterOperatable): self.set_equal_capacitance_each(op) - def set_equal_capacitance_each(self, capacitance: ParameterOperatable): + def set_equal_capacitance_each(self, capacitance: ParameterOperatable.NumberLike): for c in self.capacitors: c.capacitance.constrain_subset(capacitance) From d247aa49c45f48dcdd42ceec6ffdace0b02cfc83 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 13:22:26 +0200 Subject: [PATCH 25/80] ruff --- src/faebryk/library/BJT.py | 1 - src/faebryk/library/ElectricLogicGates.py | 1 - src/faebryk/library/LED.py | 4 ++-- src/faebryk/library/LogicGates.py | 1 - src/faebryk/library/MultiCapacitor.py | 3 +-- 5 files changed, 3 insertions(+), 7 deletions(-) diff --git a/src/faebryk/library/BJT.py b/src/faebryk/library/BJT.py index 149cf896..a07d24b5 100644 --- a/src/faebryk/library/BJT.py +++ b/src/faebryk/library/BJT.py @@ -6,7 +6,6 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.core.node import rt_field -from faebryk.core.parameter import Parameter from faebryk.libs.library import L diff --git a/src/faebryk/library/ElectricLogicGates.py b/src/faebryk/library/ElectricLogicGates.py index 333f207a..d4117605 100644 --- a/src/faebryk/library/ElectricLogicGates.py +++ b/src/faebryk/library/ElectricLogicGates.py @@ -3,7 +3,6 @@ import faebryk.library._F as F from faebryk.core.core import Namespace -from faebryk.libs.library import L class ElectricLogicGates(Namespace): diff --git a/src/faebryk/library/LED.py b/src/faebryk/library/LED.py index 94507c29..b7dee584 100644 --- a/src/faebryk/library/LED.py +++ b/src/faebryk/library/LED.py @@ -5,9 +5,9 @@ from enum import Enum, auto import faebryk.library._F as F -from faebryk.core.parameter import Parameter, ParameterOperatable +from faebryk.core.parameter import ParameterOperatable from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P class LED(F.Diode): diff --git a/src/faebryk/library/LogicGates.py b/src/faebryk/library/LogicGates.py index 56c36bee..cbcfb7f1 100644 --- a/src/faebryk/library/LogicGates.py +++ b/src/faebryk/library/LogicGates.py @@ -3,7 +3,6 @@ import faebryk.library._F as F from faebryk.core.core import Namespace -from faebryk.libs.library import L class LogicGates(Namespace): diff --git a/src/faebryk/library/MultiCapacitor.py b/src/faebryk/library/MultiCapacitor.py index 9a0aa1d8..28e71a36 100644 --- 
a/src/faebryk/library/MultiCapacitor.py +++ b/src/faebryk/library/MultiCapacitor.py @@ -4,9 +4,8 @@ import logging import faebryk.library._F as F # noqa: F401 -from faebryk.core.parameter import Parameter, ParameterOperatable +from faebryk.core.parameter import ParameterOperatable from faebryk.libs.library import L # noqa: F401 -from faebryk.libs.units import Quantity from faebryk.libs.util import times # noqa: F401 logger = logging.getLogger(__name__) From 4a3629a80c31fba911448eaa3a08d98fee4b534b Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Wed, 9 Oct 2024 13:41:42 +0200 Subject: [PATCH 26/80] Core: Parameter: Add some checks to predicates --- src/faebryk/core/parameter.py | 48 +++++++++++++++++++++++++++-------- 1 file changed, 37 insertions(+), 11 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 74015280..39a68c32 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -168,7 +168,6 @@ def __init__(self, *operands): units = [ op.units if isinstance(op, HasUnit) else dimensionless for op in operands ] - # Check if all units are compatible self.units = units[0] if not all(u.is_compatible_with(self.units) for u in units): raise ValueError("All operands must have compatible units") @@ -307,7 +306,7 @@ def __init__(self, left, right): class Setic(Expression): def __init__(self, *operands): super().__init__(*operands) - types = [Parameter, Set_] + types = [Parameter, ParameterOperatable.Sets] if any(type(op) not in types for op in operands): raise ValueError("operands must be Parameter or Set") units = [op.units for op in operands] @@ -372,23 +371,37 @@ def __init__(self, enum_t: type[Enum]): self.enum_t = enum_t -class Predicate(Node): - pass +class Predicate(Expression): + def __init__(self, left, right): + l_units = left.units if isinstance(left, HasUnit) else dimensionless + r_units = right.units if isinstance(right, HasUnit) else dimensionless + if not l_units.is_compatible_with(r_units): + raise ValueError("operands must have compatible units") + self.operands = [left, right] -class LessThan(Predicate): +class NumericPredicate(Predicate): + def __init__(self, left, right): + super().__init__(left, right) + if isinstance(left, Parameter) and left.domain not in [Numbers, ESeries]: + raise ValueError("left operand must have domain Numbers or ESeries") + if isinstance(right, Parameter) and right.domain not in [Numbers, ESeries]: + raise ValueError("right operand must have domain Numbers or ESeries") + + +class LessThan(NumericPredicate): pass -class GreaterThan(Predicate): +class GreaterThan(NumericPredicate): pass -class LessOrEqual(Predicate): +class LessOrEqual(NumericPredicate): pass -class GreaterOrEqual(Predicate): +class GreaterOrEqual(NumericPredicate): pass @@ -396,15 +409,28 @@ class NotEqual(Predicate): pass -class IsSubset(Predicate): +class SeticPredicate(Predicate): + def __init__(self, left, right): + super().__init__(left, right) + types = [Parameter, ParameterOperatable.Sets] + if any(type(op) not in types for op in self.operands): + raise ValueError("operands must be Parameter or Set") + units = [op.units for op in self.operands] + for u in units[1:]: + if not units[0].is_compatible_with(u): + raise ValueError("all operands must have compatible units") + # TODO domain? 
+ + +class IsSubset(SeticPredicate): pass -class IsSuperset(Predicate): +class IsSuperset(SeticPredicate): pass -class Alias(Node): +class Alias(Expression): pass From 0707414250875d2da7d0332d13471923d35b3c50 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Wed, 9 Oct 2024 15:54:43 +0200 Subject: [PATCH 27/80] Params: Rework rated_* slightly mostly renaming --- src/faebryk/library/Capacitor.py | 6 +++--- src/faebryk/library/Common_Mode_Filter.py | 5 ++--- src/faebryk/library/Diode.py | 8 +++++--- src/faebryk/library/ElectricPower.py | 2 +- src/faebryk/library/INA228_ReferenceDesign.py | 4 ++-- src/faebryk/library/Inductor.py | 9 ++++----- src/faebryk/library/RS485_Bus_Protection.py | 6 +++--- src/faebryk/library/Relay.py | 8 ++++---- src/faebryk/library/Resistor.py | 10 +++++----- src/faebryk/library/USB_C_PSU_Vertical.py | 2 +- src/faebryk/libs/examples/pickers.py | 4 ++-- src/faebryk/libs/picker/jlcpcb/picker_lib.py | 8 ++++---- test/libs/picker/test_jlcpcb.py | 20 +++++++++---------- 13 files changed, 46 insertions(+), 46 deletions(-) diff --git a/src/faebryk/library/Capacitor.py b/src/faebryk/library/Capacitor.py index 6058df67..435dee67 100644 --- a/src/faebryk/library/Capacitor.py +++ b/src/faebryk/library/Capacitor.py @@ -32,11 +32,11 @@ class TemperatureCoefficient(IntEnum): soft_set=L.Range(100 * P.pF, 1 * P.F), tolerance_guess=10 * P.percent, ) - rated_voltage = L.p_field( + # Voltage at which the design may be damaged + max_voltage = L.p_field( units=P.V, likely_constrained=True, soft_set=L.Range(10 * P.V, 100 * P.V), - tolerance_guess=10 * P.percent, ) temperature_coefficient = L.p_field( domain=L.Domains.ENUM(TemperatureCoefficient), @@ -56,7 +56,7 @@ def simple_value_representation(self): return F.has_simple_value_representation_based_on_params( ( self.capacitance, - self.rated_voltage, + self.max_voltage, self.temperature_coefficient, ), lambda c, v, t: join_if_non_empty( diff --git a/src/faebryk/library/Common_Mode_Filter.py b/src/faebryk/library/Common_Mode_Filter.py index ff9f71a0..ba1ba4d4 100644 --- a/src/faebryk/library/Common_Mode_Filter.py +++ b/src/faebryk/library/Common_Mode_Filter.py @@ -27,11 +27,10 @@ class Common_Mode_Filter(Module): soft_set=L.Range(100 * P.Hz, 1 * P.MHz), tolerance_guess=10 * P.percent, ) - rated_current = L.p_field( + max_current = L.p_field( units=P.A, likely_constrained=True, soft_set=L.Range(1 * P.A, 10 * P.A), - tolerance_guess=10 * P.percent, ) dc_resistance = L.p_field( units=P.Ω, @@ -48,5 +47,5 @@ def __preinit__(self): for coil in [self.coil_a, self.coil_b]: coil.inductance.alias_is(self.inductance) coil.self_resonant_frequency.alias_is(self.self_resonant_frequency) - coil.rated_current.alias_is(self.rated_current) + coil.max_current.alias_is(self.max_current) coil.dc_resistance.alias_is(self.dc_resistance) diff --git a/src/faebryk/library/Diode.py b/src/faebryk/library/Diode.py index de7d0543..2b9d0ef4 100644 --- a/src/faebryk/library/Diode.py +++ b/src/faebryk/library/Diode.py @@ -15,10 +15,11 @@ class Diode(Module): soft_set=L.Range(0.1 * P.V, 1 * P.V), tolerance_guess=10 * P.percent, ) + # Current at which the design is functional current = L.p_field( units=P.A, likely_constrained=True, - soft_set=L.Range(0.1 * P.mA, 100 * P.A), + soft_set=L.Range(0.1 * P.mA, 10 * P.A), tolerance_guess=10 * P.percent, ) reverse_working_voltage = L.p_field( @@ -33,11 +34,12 @@ class Diode(Module): soft_set=L.Range(0.1 * P.nA, 1 * P.µA), tolerance_guess=10 * P.percent, ) + # Current at which the design 
may be damaged + # In some cases, this is useful to know, e.g. to calculate the brightness of an LED max_current = L.p_field( units=P.A, likely_constrained=True, - soft_set=L.Range(0.1 * P.mA, 100 * P.A), - tolerance_guess=10 * P.percent, + soft_set=L.Range(0.1 * P.mA, 10 * P.A), ) anode: F.Electrical diff --git a/src/faebryk/library/ElectricPower.py b/src/faebryk/library/ElectricPower.py index fb29392b..87d1e239 100644 --- a/src/faebryk/library/ElectricPower.py +++ b/src/faebryk/library/ElectricPower.py @@ -25,7 +25,7 @@ def decouple(self): return ( super() .decouple() - .builder(lambda c: c.rated_voltage.constrain_ge(obj.voltage * 2.0)) + .builder(lambda c: c.max_voltage.constrain_ge(obj.voltage * 2.0)) ) class can_be_surge_protected_power(F.can_be_surge_protected_defined): diff --git a/src/faebryk/library/INA228_ReferenceDesign.py b/src/faebryk/library/INA228_ReferenceDesign.py index 14ddd365..84794368 100644 --- a/src/faebryk/library/INA228_ReferenceDesign.py +++ b/src/faebryk/library/INA228_ReferenceDesign.py @@ -49,7 +49,7 @@ def __preinit__(self): # filter_resistors = L.list_field(2, F.Resistor) # # filter_cap.capacitance.merge(F.Range.from_center_rel(0.1 * P.uF, 0.01)) - # filter_cap.rated_voltage.merge(F.Range.from_center_rel(170 * P.V, 0.01) + # filter_cap.max_voltage.merge(F.Range.from_center_rel(170 * P.V, 0.01) # for res in filter_resistors: # res.resistance.merge(10 * P.kohm) # TODO: auto calculate, see: https://www.ti.com/lit/ug/tidu473/tidu473.pdf @@ -84,7 +84,7 @@ def __preinit__(self): shunted_power.shunt.resistance.constrain_subset( L.Range.from_center_rel(15 * P.mohm, 0.01) ) - shunted_power.shunt.rated_power.constrain_subset( + shunted_power.shunt.max_power.constrain_subset( L.Range.from_center_rel(2 * P.W, 0.01) ) # TODO: calculate according to datasheet p36 diff --git a/src/faebryk/library/Inductor.py b/src/faebryk/library/Inductor.py index 9c1fdd2f..153bf834 100644 --- a/src/faebryk/library/Inductor.py +++ b/src/faebryk/library/Inductor.py @@ -24,11 +24,10 @@ class Inductor(Module): soft_set=L.Range(100 * P.kHz, 1 * P.GHz), tolerance_guess=10 * P.percent, ) - rated_current = L.p_field( + max_current = L.p_field( units=P.A, likely_constrained=True, soft_set=L.Range(1 * P.mA, 100 * P.A), - tolerance_guess=10 * P.percent, ) dc_resistance = L.p_field( units=P.Ω, @@ -48,17 +47,17 @@ def simple_value_representation(self): ( self.inductance, self.self_resonant_frequency, - self.rated_current, + self.max_current, self.dc_resistance, ), lambda inductance, self_resonant_frequency, - rated_current, + max_current, dc_resistance: join_if_non_empty( " ", inductance.as_unit_with_tolerance("H"), self_resonant_frequency.as_unit("Hz"), - rated_current.as_unit("A"), + max_current.as_unit("A"), dc_resistance.as_unit("Ω"), ), ) diff --git a/src/faebryk/library/RS485_Bus_Protection.py b/src/faebryk/library/RS485_Bus_Protection.py index 2ca053f0..bf998d04 100644 --- a/src/faebryk/library/RS485_Bus_Protection.py +++ b/src/faebryk/library/RS485_Bus_Protection.py @@ -187,11 +187,11 @@ def __preinit__(self): self.current_limmiter_resistors[0].resistance.constrain_subset( L.Range.from_center_rel(2.7 * P.ohm, 0.05) ) - self.current_limmiter_resistors[0].rated_power.constrain_ge(500 * P.mW) + self.current_limmiter_resistors[0].max_power.constrain_ge(500 * P.mW) self.current_limmiter_resistors[1].resistance.constrain_subset( L.Range.from_center_rel(2.7 * P.ohm, 0.05) ) - self.current_limmiter_resistors[1].rated_power.constrain_ge(500 * P.mW) + 
self.current_limmiter_resistors[1].max_power.constrain_ge(500 * P.mW) self.gnd_couple_resistor.resistance.constrain_subset( L.Range.from_center_rel(1 * P.Mohm, 0.05) @@ -199,7 +199,7 @@ def __preinit__(self): self.gnd_couple_capacitor.capacitance.constrain_subset( L.Range.from_center_rel(1 * P.uF, 0.05) ) - self.gnd_couple_capacitor.rated_voltage.constrain_ge(2 * P.kV) + self.gnd_couple_capacitor.max_voltage.constrain_ge(2 * P.kV) # ---------------------------------------- # Connections diff --git a/src/faebryk/library/Relay.py b/src/faebryk/library/Relay.py index 81dc4d1f..fa454612 100644 --- a/src/faebryk/library/Relay.py +++ b/src/faebryk/library/Relay.py @@ -22,12 +22,12 @@ class Relay(Module): coil_p: F.Electrical coil_n: F.Electrical - coil_rated_voltage = L.p_field(units=P.V) - coil_rated_current = L.p_field(units=P.A) + coil_max_voltage = L.p_field(units=P.V) + coil_max_current = L.p_field(units=P.A) coil_resistance = L.p_field(units=P.ohm) contact_max_switching_voltage = L.p_field(units=P.V) - contact_rated_switching_current = L.p_field(units=P.A) - contact_max_switchng_current = L.p_field(units=P.A) + contact_max_switching_current = L.p_field(units=P.A) + contact_max_current = L.p_field(units=P.A) designator_prefix = L.f_field(F.has_designator_prefix_defined)( F.has_designator_prefix.Prefix.K diff --git a/src/faebryk/library/Resistor.py b/src/faebryk/library/Resistor.py index aab3b698..118384b0 100644 --- a/src/faebryk/library/Resistor.py +++ b/src/faebryk/library/Resistor.py @@ -13,8 +13,8 @@ class Resistor(Module): unnamed = L.list_field(2, F.Electrical) resistance = L.p_field(units=P.ohm) - rated_power = L.p_field(units=P.W) - rated_voltage = L.p_field(units=P.V) + max_power = L.p_field(units=P.W) + max_voltage = L.p_field(units=P.V) attach_to_footprint: F.can_attach_to_footprint_symmetrically designator_prefix = L.f_field(F.has_designator_prefix_defined)( @@ -30,12 +30,12 @@ def simple_value_representation(self): return F.has_simple_value_representation_based_on_params( ( self.resistance, - self.rated_power, + self.max_power, ), - lambda resistance, rated_power: join_if_non_empty( + lambda resistance, max_power: join_if_non_empty( " ", resistance.as_unit_with_tolerance("Ω"), - rated_power.as_unit("W"), + max_power.as_unit("W"), ), ) diff --git a/src/faebryk/library/USB_C_PSU_Vertical.py b/src/faebryk/library/USB_C_PSU_Vertical.py index 59525576..1e75f53b 100644 --- a/src/faebryk/library/USB_C_PSU_Vertical.py +++ b/src/faebryk/library/USB_C_PSU_Vertical.py @@ -26,7 +26,7 @@ def __preinit__(self): self.gnd_capacitor.capacitance.constrain_subset( L.Range.from_center_rel(100 * P.nF, 0.05) ) - self.gnd_capacitor.rated_voltage.constrain_subset( + self.gnd_capacitor.max_voltage.constrain_subset( L.Range.from_center_rel(16 * P.V, 0.05) ) self.gnd_resistor.resistance.constrain_subset( diff --git a/src/faebryk/libs/examples/pickers.py b/src/faebryk/libs/examples/pickers.py index e48cb68b..bbd33027 100644 --- a/src/faebryk/libs/examples/pickers.py +++ b/src/faebryk/libs/examples/pickers.py @@ -90,7 +90,7 @@ def pick_capacitor(module: F.Capacitor): F.Capacitor.TemperatureCoefficient.X7R, ), "capacitance": F.Constant(100 * P.nF), - "rated_voltage": F.Range(0 * P.V, 16 * P.V), + "max_voltage": F.Range(0 * P.V, 16 * P.V), }, ), PickerOption( @@ -101,7 +101,7 @@ def pick_capacitor(module: F.Capacitor): F.Capacitor.TemperatureCoefficient.X7R, ), "capacitance": F.Constant(10 * P.uF), - "rated_voltage": F.Range(0 * P.V, 10 * P.V), + "max_voltage": F.Range(0 * P.V, 10 * P.V), }, ), ], diff --git 
a/src/faebryk/libs/picker/jlcpcb/picker_lib.py b/src/faebryk/libs/picker/jlcpcb/picker_lib.py index 49a421b4..9a3468dd 100644 --- a/src/faebryk/libs/picker/jlcpcb/picker_lib.py +++ b/src/faebryk/libs/picker/jlcpcb/picker_lib.py @@ -176,11 +176,11 @@ def find_resistor(cmp: Module): "Tolerance", ), MappingParameterDB( - "rated_power", + "max_power", ["Power(Watts)"], ), MappingParameterDB( - "rated_voltage", + "max_voltage", ["Overload Voltage (Max)"], ), ] @@ -207,7 +207,7 @@ def find_capacitor(cmp: Module): mapping = [ MappingParameterDB("capacitance", ["Capacitance"], "Tolerance"), MappingParameterDB( - "rated_voltage", + "max_voltage", ["Voltage Rated"], ), MappingParameterDB( @@ -251,7 +251,7 @@ def find_inductor(cmp: Module): "Tolerance", ), MappingParameterDB( - "rated_current", + "max_current", ["Rated Current"], ), MappingParameterDB( diff --git a/test/libs/picker/test_jlcpcb.py b/test/libs/picker/test_jlcpcb.py index 7997221b..5a484f37 100644 --- a/test/libs/picker/test_jlcpcb.py +++ b/test/libs/picker/test_jlcpcb.py @@ -218,8 +218,8 @@ def test_find_resistor(self): requirement=F.Resistor().builder( lambda r: ( r.resistance.merge(F.Range.from_center(10 * P.kohm, 1 * P.kohm)), - r.rated_power.merge(F.Range.lower_bound(0.05 * P.W)), - r.rated_voltage.merge(F.Range.lower_bound(25 * P.V)), + r.max_power.merge(F.Range.lower_bound(0.05 * P.W)), + r.max_voltage.merge(F.Range.lower_bound(25 * P.V)), ) ), footprint=[("0402", 2)], @@ -230,8 +230,8 @@ def test_find_resistor(self): requirement=F.Resistor().builder( lambda r: ( r.resistance.merge(F.Range.from_center(69 * P.kohm, 2 * P.kohm)), - r.rated_power.merge(F.Range.lower_bound(0.1 * P.W)), - r.rated_voltage.merge(F.Range.lower_bound(50 * P.V)), + r.max_power.merge(F.Range.lower_bound(0.1 * P.W)), + r.max_voltage.merge(F.Range.lower_bound(50 * P.V)), ) ), footprint=[("0603", 2)], @@ -243,7 +243,7 @@ def test_find_capacitor(self): requirement=F.Capacitor().builder( lambda c: ( c.capacitance.merge(F.Range.from_center(100 * P.nF, 10 * P.nF)), - c.rated_voltage.merge(F.Range.lower_bound(25 * P.V)), + c.max_voltage.merge(F.Range.lower_bound(25 * P.V)), c.temperature_coefficient.merge( F.Range.lower_bound(F.Capacitor.TemperatureCoefficient.X7R) ), @@ -257,7 +257,7 @@ def test_find_capacitor(self): requirement=F.Capacitor().builder( lambda c: ( c.capacitance.merge(F.Range.from_center(47 * P.pF, 4.7 * P.pF)), - c.rated_voltage.merge(F.Range.lower_bound(50 * P.V)), + c.max_voltage.merge(F.Range.lower_bound(50 * P.V)), c.temperature_coefficient.merge( F.Range.lower_bound(F.Capacitor.TemperatureCoefficient.C0G) ), @@ -272,7 +272,7 @@ def test_find_inductor(self): requirement=F.Inductor().builder( lambda i: ( i.inductance.merge(F.Range.from_center(4.7 * P.nH, 0.47 * P.nH)), - i.rated_current.merge(F.Range.lower_bound(0.01 * P.A)), + i.max_current.merge(F.Range.lower_bound(0.01 * P.A)), i.dc_resistance.merge(F.Range.upper_bound(1 * P.ohm)), i.self_resonant_frequency.merge( F.Range.lower_bound(100 * P.Mhertz) @@ -388,8 +388,8 @@ def r_builder(resistance_kohm: float): r.resistance.merge( F.Range.from_center_rel(resistance_kohm * P.kohm, 0.1) ), - r.rated_power.merge(F.ANY()), - r.rated_voltage.merge(F.ANY()), + r.max_power.merge(F.ANY()), + r.max_voltage.merge(F.ANY()), ) ) @@ -399,7 +399,7 @@ def c_builder(capacitance_pf: float): c.capacitance.merge( F.Range.from_center_rel(capacitance_pf * P.pF, 0.1) ), - c.rated_voltage.merge(F.ANY()), + c.max_voltage.merge(F.ANY()), c.temperature_coefficient.merge(F.ANY()), ) ) From 
414d3052b48298f746299e715077a7f236af871b Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 16:30:07 +0200 Subject: [PATCH 28/80] Remove merge from non lib --- examples/iterative_design_nand.py | 14 ++--- examples/mcu.py | 4 +- examples/minimal_led.py | 4 +- examples/minimal_led_orderable.py | 7 ++- examples/pcb_layout.py | 19 ++++--- examples/route.py | 4 +- examples/signal_processing.py | 15 ++++-- src/faebryk/core/module.py | 4 +- src/faebryk/core/node.py | 2 +- src/faebryk/core/parameter.py | 39 +++++++++++++- src/faebryk/library/BH1750FVI_TR.py | 12 +++-- src/faebryk/library/CH344Q.py | 2 +- src/faebryk/library/ElectricPower.py | 8 +-- src/faebryk/library/FilterElectricalLC.py | 52 +++++++++---------- src/faebryk/library/FilterElectricalRC.py | 51 +++++++++--------- src/faebryk/library/HLK_LD2410B_P.py | 9 ++-- src/faebryk/library/M24C08_FMN6TP.py | 2 + src/faebryk/library/PM1006.py | 8 +-- src/faebryk/library/RP2040_ReferenceDesign.py | 2 +- src/faebryk/library/SCD40.py | 14 ++--- src/faebryk/library/SP3243E.py | 2 +- src/faebryk/library/TI_CD4011BE.py | 2 +- .../USB_Type_C_Receptacle_14_pin_Vertical.py | 2 +- src/faebryk/library/XL_3528RGBW_WS2812B.py | 16 ++---- .../has_descriptive_properties_defined.py | 7 ++- src/faebryk/library/is_esphome_bus.py | 9 ++-- src/faebryk/libs/app/parameters.py | 29 ----------- src/faebryk/libs/examples/buildutil.py | 5 -- src/faebryk/libs/examples/pickers.py | 2 - src/faebryk/libs/sets.py | 2 +- .../netlist/kicad/test_netlist_kicad.py | 8 ++- 31 files changed, 187 insertions(+), 169 deletions(-) diff --git a/examples/iterative_design_nand.py b/examples/iterative_design_nand.py index ca69b0cd..5989cca5 100644 --- a/examples/iterative_design_nand.py +++ b/examples/iterative_design_nand.py @@ -32,10 +32,10 @@ class PowerSource(Module): class XOR_with_NANDS(F.LogicGates.XOR): - nands = L.list_field(4, lambda: F.LogicGates.NAND(F.Constant(2))) + nands = L.list_field(4, lambda: F.LogicGates.NAND(2)) def __init__(self): - super().__init__(F.Constant(2)) + super().__init__(2) def __preinit__(self): A = self.inputs[0] @@ -71,7 +71,7 @@ def App(): logic_in = F.Logic() logic_out = F.Logic() - xor = F.LogicGates.XOR(F.Constant(2)) + xor = F.LogicGates.XOR(2) logic_out.connect(xor.get_trait(F.LogicOps.can_logic_xor).xor(logic_in, on)) # led @@ -121,9 +121,11 @@ def App(): # parametrizing for _, t in app.get_graph().nodes_with_trait(F.ElectricLogic.has_pulls): for pull_resistor in (r for r in t.get_pulls() if r): - pull_resistor.resistance.merge(F.Range.from_center_rel(100 * P.kohm, 0.05)) - power_source.power.voltage.merge(3 * P.V) - led.led.led.brightness.merge( + pull_resistor.resistance.constrain_subset( + F.Range.from_center_rel(100 * P.kohm, 0.05) + ) + power_source.power.voltage.constrain_subset(L.Range.from_center_rel(3 * P.V, 0.05)) + led.led.led.brightness.constrain_subset( TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value ) diff --git a/examples/mcu.py b/examples/mcu.py index bbba714b..7a0d9edc 100644 --- a/examples/mcu.py +++ b/examples/mcu.py @@ -29,8 +29,8 @@ class App(Module): def __preinit__(self) -> None: # Parametrize - self.led.led.led.color.merge(F.LED.Color.YELLOW) - self.led.led.led.brightness.merge( + self.led.led.led.color.constrain_subset(F.LED.Color.YELLOW) + self.led.led.led.brightness.constrain_subset( TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value ) diff --git a/examples/minimal_led.py b/examples/minimal_led.py index a48349a7..9858521f 100644 --- a/examples/minimal_led.py +++ 
b/examples/minimal_led.py @@ -26,8 +26,8 @@ def __preinit__(self) -> None: self.led.power.connect(self.battery.power) # Parametrize - self.led.led.color.merge(F.LED.Color.YELLOW) - self.led.led.brightness.merge( + self.led.led.color.constrain_subset(F.LED.Color.YELLOW) + self.led.led.brightness.constrain_subset( TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value ) diff --git a/examples/minimal_led_orderable.py b/examples/minimal_led_orderable.py index 5764ff97..09f8ada5 100644 --- a/examples/minimal_led_orderable.py +++ b/examples/minimal_led_orderable.py @@ -7,6 +7,7 @@ import logging from pathlib import Path +from tkinter import W import typer @@ -18,7 +19,6 @@ from faebryk.exporters.pcb.layout.typehierarchy import LayoutTypeHierarchy from faebryk.libs.app.checks import run_checks from faebryk.libs.app.manufacturing import export_pcba_artifacts -from faebryk.libs.app.parameters import replace_tbd_with_any from faebryk.libs.brightness import TypicalLuminousIntensity from faebryk.libs.examples.buildutil import BUILD_DIR, PCB_FILE, apply_design_to_pcb from faebryk.libs.examples.pickers import add_example_pickers @@ -56,8 +56,8 @@ def __preinit__(self) -> None: self.led.power.connect_via(self.power_button, self.battery.power) # Parametrize - self.led.led.color.merge(F.LED.Color.YELLOW) - self.led.led.brightness.merge( + self.led.led.color.constrain_subset(F.LED.Color.YELLOW) + self.led.led.brightness.constrain_subset( TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value ) @@ -130,7 +130,6 @@ def main(): G = app.get_graph() # picking ---------------------------------------------------------------- - replace_tbd_with_any(app, recursive=True) modules = app.get_children_modules(types=Module) try: JLCPCB_DB() diff --git a/examples/pcb_layout.py b/examples/pcb_layout.py index b75ce9cb..b56ba4da 100644 --- a/examples/pcb_layout.py +++ b/examples/pcb_layout.py @@ -22,6 +22,7 @@ from faebryk.exporters.pcb.layout.typehierarchy import LayoutTypeHierarchy from faebryk.libs.brightness import TypicalLuminousIntensity from faebryk.libs.examples.buildutil import apply_design_to_pcb +from faebryk.libs.library import L from faebryk.libs.logging import setup_basic_logging from faebryk.libs.units import P @@ -37,23 +38,25 @@ def __preinit__(self) -> None: self.leds.power.connect(self.battery.power) # Parametrize - self.leds.led.color.merge(F.LED.Color.YELLOW) - self.leds.led.brightness.merge( + self.leds.led.color.constrain_subset(F.LED.Color.YELLOW) + self.leds.led.brightness.constrain_subset( TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value ) - self.eeprom.power.voltage.merge(3.3 * P.V) + self.eeprom.power.voltage.constrain_subset( + L.Range.from_center_rel(3.3 * P.V, 0.05) + ) self.eeprom.set_address(0x0) # Layout Point = F.has_pcb_position.Point - L = F.has_pcb_position.layer_type + Ly = F.has_pcb_position.layer_type layout = LayoutTypeHierarchy( layouts=[ LayoutTypeHierarchy.Level( mod_type=F.PoweredLED, - layout=LayoutAbsolute(Point((0, 0, 0, L.TOP_LAYER))), + layout=LayoutAbsolute(Point((0, 0, 0, Ly.TOP_LAYER))), children_layout=LayoutTypeHierarchy( layouts=[ LayoutTypeHierarchy.Level( @@ -65,16 +68,16 @@ def __preinit__(self) -> None: ), LayoutTypeHierarchy.Level( mod_type=F.Battery, - layout=LayoutAbsolute(Point((0, 20, 0, L.BOTTOM_LAYER))), + layout=LayoutAbsolute(Point((0, 20, 0, Ly.BOTTOM_LAYER))), ), LayoutTypeHierarchy.Level( mod_type=F.M24C08_FMN6TP, - layout=LayoutAbsolute(Point((15, 10, 0, L.TOP_LAYER))), + 
layout=LayoutAbsolute(Point((15, 10, 0, Ly.TOP_LAYER))), ), ] ) self.add(F.has_pcb_layout_defined(layout)) - self.add(F.has_pcb_position_defined(Point((50, 50, 0, L.NONE)))) + self.add(F.has_pcb_position_defined(Point((50, 50, 0, Ly.NONE)))) LayoutHeuristicElectricalClosenessDecouplingCaps.add_to_all_suitable_modules( self diff --git a/examples/route.py b/examples/route.py index b53b3ab9..47c67516 100644 --- a/examples/route.py +++ b/examples/route.py @@ -33,7 +33,9 @@ def __init__(self, extrude_y: float): def __preinit__(self): for resistor in self.resistors: - resistor.resistance.merge(F.Range.from_center_rel(1000 * P.ohm, 0.05)) + resistor.resistance.constrain_subset( + F.Range.from_center_rel(1000 * P.ohm, 0.05) + ) resistor.unnamed[0].connect(self.unnamed[0]) resistor.unnamed[1].connect(self.unnamed[1]) diff --git a/examples/signal_processing.py b/examples/signal_processing.py index 5ebab92a..be6bf899 100644 --- a/examples/signal_processing.py +++ b/examples/signal_processing.py @@ -12,6 +12,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.examples.buildutil import apply_design_to_pcb +from faebryk.libs.library import L from faebryk.libs.logging import setup_basic_logging from faebryk.libs.units import P @@ -25,8 +26,10 @@ def __preinit__(self) -> None: # TODO actually do something with the filter # Parametrize - self.lowpass.cutoff_frequency.merge(200 * P.Hz) - self.lowpass.response.merge(F.Filter.Response.LOWPASS) + self.lowpass.cutoff_frequency.constrain_subset( + L.Range.from_center_rel(200 * P.Hz, 0.05) + ) + self.lowpass.response.constrain_subset(F.Filter.Response.LOWPASS) # Specialize special = self.lowpass.specialize(F.FilterElectricalLC()) @@ -34,8 +37,12 @@ def __preinit__(self) -> None: # set reference voltage # TODO: this will be automatically set by the power supply # once this example is more complete - special.in_.reference.voltage.merge(3 * P.V) - special.out.reference.voltage.merge(3 * P.V) + special.in_.reference.voltage.constrain_subset( + L.Range.from_center_rel(3 * P.V, 0.05) + ) + special.out.reference.voltage.constrain_subset( + L.Range.from_center_rel(3 * P.V, 0.05) + ) # Construct special.get_trait(F.has_construction_dependency).construct() diff --git a/src/faebryk/core/module.py b/src/faebryk/core/module.py index a23bf857..2bfd2d68 100644 --- a/src/faebryk/core/module.py +++ b/src/faebryk/core/module.py @@ -102,7 +102,7 @@ def get_node_prop_matrix[N: Node](sub_type: type[N]): continue if dst is None: raise Exception(f"Special module misses parameter: {src.get_name()}") - dst.merge(src) + dst.alias_is(src) # TODO this cant work # for t in self.traits: @@ -118,7 +118,7 @@ def get_node_prop_matrix[N: Node](sub_type: type[N]): assert not has_parent or attach_to is None if not has_parent: if attach_to: - attach_to.add(special, container=attach_to.specialized) + attach_to.add(special, container=attach_to.specialized_nodes) else: gen_parent = self.get_parent() if gen_parent: diff --git a/src/faebryk/core/node.py b/src/faebryk/core/node.py index 415d03b3..743974ba 100644 --- a/src/faebryk/core/node.py +++ b/src/faebryk/core/node.py @@ -183,7 +183,7 @@ class InitVar(dataclass_InitVar): class Node(FaebrykLibObject, metaclass=PostInitCaller): runtime_anon: list["Node"] runtime: dict[str, "Node"] - specialized: list["Node"] + specialized_nodes: list["Node"] self_gif: GraphInterfaceSelf children: GraphInterfaceHierarchical = f_field(GraphInterfaceHierarchical)( diff --git a/src/faebryk/core/parameter.py 
b/src/faebryk/core/parameter.py index 39a68c32..a3414428 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -4,7 +4,7 @@ import logging from enum import Enum, auto from types import NotImplementedType -from typing import Protocol +from typing import Any, Callable, Protocol from faebryk.core.core import Namespace from faebryk.core.node import Node, f_field @@ -41,6 +41,8 @@ def constrain_subset(self, other: Sets): ... def constrain_superset(self, other: Sets): ... + def constrain_cardinality(self, other: int): ... + def operation_add(self, other: NumberLike) -> "Expression": ... def operation_subtract(self, other: NumberLike) -> "Expression": ... @@ -85,6 +87,22 @@ def operation_xor(self, other: BooleanLike) -> "Expression": ... def operation_implies(self, other: BooleanLike) -> "Expression": ... + def operation_is_le(self, other: NumberLike) -> "Expression": ... + + def operation_is_ge(self, other: NumberLike) -> "Expression": ... + + def operation_is_lt(self, other: NumberLike) -> "Expression": ... + + def operation_is_gt(self, other: NumberLike) -> "Expression": ... + + def operation_is_ne(self, other: NumberLike) -> "Expression": ... + + def operation_is_subset(self, other: Sets) -> "Expression": ... + + def operation_is_superset(self, other: Sets) -> "Expression": ... + + def get_any_single(self) -> Number | Enum: ... + # ---------------------------------------------------------------------------------- def __add__(self, other: NumberLike): return self.operation_add(other) @@ -120,6 +138,7 @@ def __abs__(self): def __round__(self): return self.operation_round() + # bitwise and def __and__(self, other: BooleanLike): # TODO could be set intersection return self.operation_and(other) @@ -140,6 +159,22 @@ def __xor__(self, other: BooleanLike): def __rxor__(self, other: BooleanLike): return self.operation_xor(other) + # ---------------------------------------------------------------------------------- + + # TODO: move + + def if_then_else( + self, + if_true: Callable[[], Any], + if_false: Callable[[], Any], + ) -> None: ... + + # TODO + # def switch_case( + # self, + # cases: list[tuple[?, Callable[[], Any]]], + # ) -> None: ... + # TODO: prohibit instantiation class Expression(Node, ParameterOperatable): @@ -525,6 +560,7 @@ def __init__( tolerance_guess: Quantity | None = None, # hints likely_constrained: bool = False, + cardinality: int | None = None, ): super().__init__() if within is None: @@ -541,6 +577,7 @@ def __init__( self.guess = guess self.tolerance_guess = tolerance_guess self.likely_constrained = likely_constrained + self.cardinality = cardinality # ---------------------------------------------------------------------------------- # TODO implement ParameterOperatable functions diff --git a/src/faebryk/library/BH1750FVI_TR.py b/src/faebryk/library/BH1750FVI_TR.py index 9dfe6bfa..9886f95e 100644 --- a/src/faebryk/library/BH1750FVI_TR.py +++ b/src/faebryk/library/BH1750FVI_TR.py @@ -6,7 +6,8 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P +from faebryk.libs.units import P, Quantity +from faebryk.libs.util import cast_assert logger = logging.getLogger(__name__) @@ -19,12 +20,13 @@ class _bh1750_esphome_config(F.has_esphome_config.impl()): guess=1 * P.s, ) - def get_config(self) -> dict: - val = self.update_interval.get_most_narrow() - assert isinstance(val, F.Constant), "No update interval set!" 
+ def __preinit__(self): + self.update_interval.constrain_cardinality(1) + def get_config(self) -> dict: obj = self.obj assert isinstance(obj, BH1750FVI_TR) + val = cast_assert(Quantity, self.update_interval.get_any_single()) i2c = F.is_esphome_bus.find_connected_bus(obj.i2c) @@ -35,7 +37,7 @@ def get_config(self) -> dict: "name": "BH1750 Illuminance", "address": "0x23", "i2c_id": i2c.get_trait(F.is_esphome_bus).get_bus_id(), - "update_interval": f"{val.value.to('s')}", + "update_interval": f"{val.to('s')}", } ] } diff --git a/src/faebryk/library/CH344Q.py b/src/faebryk/library/CH344Q.py index 3521a8a7..d6465eb8 100644 --- a/src/faebryk/library/CH344Q.py +++ b/src/faebryk/library/CH344Q.py @@ -72,7 +72,7 @@ def enable_hardware_flow_conrol(self): def descriptive_properties(self): return F.has_descriptive_properties_defined( { - DescriptiveProperties.manufacturer.value: "WCH", + DescriptiveProperties.manufacturer: "WCH", DescriptiveProperties.partno: "CH344Q", }, ) diff --git a/src/faebryk/library/ElectricPower.py b/src/faebryk/library/ElectricPower.py index 87d1e239..e3a836af 100644 --- a/src/faebryk/library/ElectricPower.py +++ b/src/faebryk/library/ElectricPower.py @@ -52,7 +52,7 @@ def protect(self): soft_set=L.Range(0 * P.V, 1000 * P.V), tolerance_guess=5 * P.percent, ) - # max_current= L.p_field(units=P.A) + max_current = L.p_field(units=P.A) """ Only for this particular power interface Does not propagate to connections @@ -74,8 +74,10 @@ def fused(self, attach_to: Node | None = None): self.connect_shallow(fused_power) - # fuse.trip_current.merge(F.Constant(self.max_current)) - # fused_power.max_current.merge(F.Range(0 * P.A, fuse.trip_current)) + fuse.trip_current.constrain_subset( + self.max_current * L.Range.from_center_rel(1.0, 0.1) + ) + fused_power.max_current.constrain_le(fuse.trip_current) if attach_to is not None: attach_to.add(fused_power) diff --git a/src/faebryk/library/FilterElectricalLC.py b/src/faebryk/library/FilterElectricalLC.py index 7fe87c18..7eea7ff0 100644 --- a/src/faebryk/library/FilterElectricalLC.py +++ b/src/faebryk/library/FilterElectricalLC.py @@ -3,6 +3,8 @@ import math +from more_itertools import raise_ + import faebryk.library._F as F from faebryk.libs.library import L from faebryk.libs.units import P @@ -16,35 +18,33 @@ class FilterElectricalLC(F.Filter): z0 = L.p_field(units=P.ohm) - def __preinit__(self) -> None: ... 
- - @L.rt_field - def construction_dependency(self): - class _(F.has_construction_dependency.impl()): - def _construct(_self): - if F.Constant(F.Filter.Response.LOWPASS).is_subset_of(self.response): - # TODO other orders & types - self.order.constrain_subset(2) - self.response.constrain_subset(F.Filter.Response.LOWPASS) - - Li = self.inductor.inductance - C = self.capacitor.capacitance - fc = self.cutoff_frequency + def __preinit__(self) -> None: + Li = self.inductor.inductance + C = self.capacitor.capacitance + fc = self.cutoff_frequency - fc.alias_is(1 / (2 * math.pi * (C * Li).operation_sqrt())) + def build_lowpass(): + # TODO other orders & types + self.order.constrain_subset(2) + self.response.constrain_subset(F.Filter.Response.LOWPASS) - # low pass - self.in_.signal.connect_via( - (self.inductor, self.capacitor), - self.in_.reference.lv, - ) + fc.alias_is(1 / (2 * math.pi * (C * Li).operation_sqrt())) - self.in_.signal.connect_via(self.inductor, self.out.signal) - return + # low pass + self.in_.signal.connect_via( + (self.inductor, self.capacitor), + self.in_.reference.lv, + ) - if isinstance(self.response, F.Constant): - raise F.has_construction_dependency.NotConstructableEver() + self.in_.signal.connect_via(self.inductor, self.out.signal) + return - raise F.has_construction_dependency.NotConstructableYet() + ( + self.response.operation_is_subset(F.Filter.Response.LOWPASS) + & self.order.operation_is_subset(2) + ).if_then_else( + build_lowpass, + lambda: raise_(NotImplementedError()), + ) - return _() + # TODO add construction dependency trait diff --git a/src/faebryk/library/FilterElectricalRC.py b/src/faebryk/library/FilterElectricalRC.py index 14dfee79..ed45250f 100644 --- a/src/faebryk/library/FilterElectricalRC.py +++ b/src/faebryk/library/FilterElectricalRC.py @@ -4,6 +4,8 @@ import logging import math +from more_itertools import raise_ + import faebryk.library._F as F # noqa: F401 from faebryk.libs.library import L # noqa: F401 from faebryk.libs.units import P # noqa: F401 @@ -23,35 +25,32 @@ class FilterElectricalRC(F.Filter): z0 = L.p_field(units=P.ohm) - def __preinit__(self): ... 
- - @L.rt_field - def construction_dependency(self): - class _(F.has_construction_dependency.impl()): - def _construct(_self): - if F.Constant(F.Filter.Response.LOWPASS).is_subset_of(self.response): - # TODO other orders, types - self.order.constrain_subset(1) - self.response.constrain_subset(F.Filter.Response.LOWPASS) - - R = self.resistor.resistance - C = self.capacitor.capacitance - fc = self.cutoff_frequency + def __preinit__(self): + R = self.resistor.resistance + C = self.capacitor.capacitance + fc = self.cutoff_frequency - fc.alias_is(1 / (2 * math.pi * R * C)) + def build_lowpass(): + # TODO other orders, types + self.order.constrain_subset(1) + self.response.constrain_subset(F.Filter.Response.LOWPASS) - # low pass - self.in_.signal.connect_via( - (self.resistor, self.capacitor), - self.in_.reference.lv, - ) + fc.alias_is(1 / (2 * math.pi * R * C)) - self.in_.signal.connect_via(self.resistor, self.out.signal) - return + # low pass + self.in_.signal.connect_via( + (self.resistor, self.capacitor), + self.in_.reference.lv, + ) - if isinstance(self.response, F.Constant): - raise F.has_construction_dependency.NotConstructableEver() + self.in_.signal.connect_via(self.resistor, self.out.signal) - raise F.has_construction_dependency.NotConstructableYet() + ( + self.response.operation_is_subset(F.Filter.Response.LOWPASS) + & self.order.operation_is_subset(1) + ).if_then_else( + build_lowpass, + lambda: raise_(NotImplementedError()), + ) - return _() + # TODO add construction dependency trait diff --git a/src/faebryk/library/HLK_LD2410B_P.py b/src/faebryk/library/HLK_LD2410B_P.py index 063c1fc7..f9032f20 100644 --- a/src/faebryk/library/HLK_LD2410B_P.py +++ b/src/faebryk/library/HLK_LD2410B_P.py @@ -4,7 +4,8 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P +from faebryk.libs.units import P, Quantity +from faebryk.libs.util import cast_assert class HLK_LD2410B_P(Module): @@ -12,11 +13,11 @@ class _ld2410b_esphome_config(F.has_esphome_config.impl()): throttle = L.p_field( units=P.ms, soft_set=L.Range(10 * P.ms, 1000 * P.ms), + cardinality=1, ) def get_config(self) -> dict: - val = self.throttle.get_most_narrow() - assert isinstance(val, F.Constant), "No update interval set!" + val = cast_assert(Quantity, self.throttle.get_any_single()) obj = self.obj assert isinstance(obj, HLK_LD2410B_P), "This is not an HLK_LD2410B_P!" 
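
The esphome configs drop the F.Constant type checks: the parameter field is declared with cardinality=1, so at export time it must hold exactly one value, which get_any_single() returns and cast_assert narrows to a Quantity. A rough sketch of what cast_assert (from faebryk.libs.util) does, in plain Python:

    # Rough sketch of cast_assert: assert the runtime type, return the value
    # with the narrowed static type. The real helper is generic over the type.
    def cast_assert(t: type, obj):
        assert isinstance(obj, t), f"expected {t}, got {type(obj)}"
        return obj

    throttle_s = cast_assert(float, 0.25)   # ok, narrowed to float
    # cast_assert(int, 0.25)                # would raise AssertionError
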
@@ -37,7 +38,7 @@ def get_config(self) -> dict: return { "ld2410": { - "throttle": f"{val.value.to('ms')}", + "throttle": f"{val.to('ms')}", "uart_id": uart_cfg["id"], }, "binary_sensor": [ diff --git a/src/faebryk/library/M24C08_FMN6TP.py b/src/faebryk/library/M24C08_FMN6TP.py index 4c2c6086..dce1ff6e 100644 --- a/src/faebryk/library/M24C08_FMN6TP.py +++ b/src/faebryk/library/M24C08_FMN6TP.py @@ -47,6 +47,8 @@ def __preinit__(self): L.Range(10 * P.nF, 100 * P.nF) ) + self.power.voltage.constrain_subset(L.Range(1.7 * P.V, 5.5 * P.V)) + self.add( F.has_descriptive_properties_defined( { diff --git a/src/faebryk/library/PM1006.py b/src/faebryk/library/PM1006.py index bb4f37e9..7bb4dd60 100644 --- a/src/faebryk/library/PM1006.py +++ b/src/faebryk/library/PM1006.py @@ -5,7 +5,8 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P +from faebryk.libs.units import P, Quantity +from faebryk.libs.util import cast_assert class PM1006(Module): @@ -26,11 +27,10 @@ class PM1006(Module): """ class _pm1006_esphome_config(F.has_esphome_config.impl()): - update_interval = L.p_field(units=P.s) + update_interval = L.p_field(units=P.s, cardinality=1) def get_config(self) -> dict: - val = self.update_interval.get_most_narrow() - assert isinstance(val, F.Constant), "No update interval set!" + val = cast_assert(Quantity, self.update_interval.get_any_single()) obj = self.obj assert isinstance(obj, PM1006), "This is not an PM1006!" diff --git a/src/faebryk/library/RP2040_ReferenceDesign.py b/src/faebryk/library/RP2040_ReferenceDesign.py index 973ba16a..b953454e 100644 --- a/src/faebryk/library/RP2040_ReferenceDesign.py +++ b/src/faebryk/library/RP2040_ReferenceDesign.py @@ -105,7 +105,7 @@ def __preinit__(self): self.clock_source.crystal.add( F.has_descriptive_properties_defined( { - DescriptiveProperties.manufacturer.value: "Abracon LLC", + DescriptiveProperties.manufacturer: "Abracon LLC", DescriptiveProperties.partno: "ABM8-272-T3", } ) diff --git a/src/faebryk/library/SCD40.py b/src/faebryk/library/SCD40.py index 2bc00049..3c1dabc5 100644 --- a/src/faebryk/library/SCD40.py +++ b/src/faebryk/library/SCD40.py @@ -5,7 +5,8 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P +from faebryk.libs.units import P, Quantity +from faebryk.libs.util import cast_assert class SCD40(Module): @@ -14,11 +15,10 @@ class SCD40(Module): """ class _scd4x_esphome_config(F.has_esphome_config.impl()): - update_interval = L.p_field(units=P.s) + update_interval = L.p_field(units=P.s, cardinality=1) def get_config(self) -> dict: - val = self.update_interval.get_most_narrow() - assert isinstance(val, F.Constant) + val = cast_assert(Quantity, self.update_interval.get_any_single()) obj = self.get_obj(SCD40) @@ -44,12 +44,6 @@ def get_config(self) -> dict: ] } - def is_implemented(self): - return ( - isinstance(self.update_interval.get_most_narrow(), F.Constant) - and super().is_implemented() - ) - esphome_config: _scd4x_esphome_config # interfaces diff --git a/src/faebryk/library/SP3243E.py b/src/faebryk/library/SP3243E.py index 11f9018b..6ff81f9b 100644 --- a/src/faebryk/library/SP3243E.py +++ b/src/faebryk/library/SP3243E.py @@ -60,7 +60,7 @@ def enable_auto_online(self): def descriptive_properties(self): return F.has_descriptive_properties_defined( { - DescriptiveProperties.manufacturer.value: "MaxLinear", + DescriptiveProperties.manufacturer: "MaxLinear", 
DescriptiveProperties.partno: "SP3243EBEA-L/TR", }, ) diff --git a/src/faebryk/library/TI_CD4011BE.py b/src/faebryk/library/TI_CD4011BE.py index 587a436c..f9f10823 100644 --- a/src/faebryk/library/TI_CD4011BE.py +++ b/src/faebryk/library/TI_CD4011BE.py @@ -36,7 +36,7 @@ def __preinit__(self): self.add( F.has_descriptive_properties_defined( { - DescriptiveProperties.manufacturer.value: "Texas Instruments", + DescriptiveProperties.manufacturer: "Texas Instruments", DescriptiveProperties.partno: "CD4011BE", }, ) diff --git a/src/faebryk/library/USB_Type_C_Receptacle_14_pin_Vertical.py b/src/faebryk/library/USB_Type_C_Receptacle_14_pin_Vertical.py index 99a5e135..adf0849f 100644 --- a/src/faebryk/library/USB_Type_C_Receptacle_14_pin_Vertical.py +++ b/src/faebryk/library/USB_Type_C_Receptacle_14_pin_Vertical.py @@ -26,7 +26,7 @@ class USB_Type_C_Receptacle_14_pin_Vertical(Module): descriptive_properties = L.f_field(F.has_descriptive_properties_defined)( { - DescriptiveProperties.manufacturer.value: "Jing Extension of the Electronic Co.", # noqa: E501 + DescriptiveProperties.manufacturer: "Jing Extension of the Electronic Co.", # noqa: E501 DescriptiveProperties.partno: "918-418K2022Y40000", } ) diff --git a/src/faebryk/library/XL_3528RGBW_WS2812B.py b/src/faebryk/library/XL_3528RGBW_WS2812B.py index 6c29922b..dadc4d78 100644 --- a/src/faebryk/library/XL_3528RGBW_WS2812B.py +++ b/src/faebryk/library/XL_3528RGBW_WS2812B.py @@ -4,25 +4,25 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P +from faebryk.libs.units import P, Quantity +from faebryk.libs.util import cast_assert class XL_3528RGBW_WS2812B(Module): class _ws2812b_esphome_config(F.has_esphome_config.impl()): - update_interval = L.p_field(units=P.s) + update_interval = L.p_field(units=P.s, cardinality=1) def get_config(self) -> dict: - assert isinstance(self.update_interval, F.Constant) - obj = self.get_obj(XL_3528RGBW_WS2812B) data_pin = F.is_esphome_bus.find_connected_bus(obj.di.signal) + val = cast_assert(Quantity, self.update_interval.get_any_single()) return { "light": [ { "platform": "esp32_rmt_led_strip", - "update_interval": f"{self.update_interval.value.to('s')}", + "update_interval": f"{val.to('s')}", "num_leds": 1, # TODO: make dynamic "rmt_channel": 0, # TODO: make dynamic "chipset": "WS2812", @@ -33,12 +33,6 @@ def get_config(self) -> dict: ] } - def is_implemented(self): - return ( - isinstance(self.update_interval.get_most_narrow(), F.Constant) - and super().is_implemented() - ) - # interfaces power: F.ElectricPower diff --git a/src/faebryk/library/has_descriptive_properties_defined.py b/src/faebryk/library/has_descriptive_properties_defined.py index 794c0fc2..4b86e047 100644 --- a/src/faebryk/library/has_descriptive_properties_defined.py +++ b/src/faebryk/library/has_descriptive_properties_defined.py @@ -7,12 +7,15 @@ import faebryk.library._F as F from faebryk.core.node import Node from faebryk.core.trait import TraitImpl +from faebryk.libs.picker.picker import DescriptiveProperties class has_descriptive_properties_defined(F.has_descriptive_properties.impl()): - def __init__(self, properties: Mapping[str, str]) -> None: + def __init__( + self, properties: Mapping[str, str] | Mapping[DescriptiveProperties, str] + ) -> None: super().__init__() - self.properties = dict(properties) + self.properties = dict(properties.items()) def get_properties(self) -> dict[str, str]: return self.properties diff --git 
a/src/faebryk/library/is_esphome_bus.py b/src/faebryk/library/is_esphome_bus.py index d42104a2..32e78d6d 100644 --- a/src/faebryk/library/is_esphome_bus.py +++ b/src/faebryk/library/is_esphome_bus.py @@ -4,7 +4,7 @@ from abc import abstractmethod from faebryk.core.moduleinterface import ModuleInterface -from faebryk.libs.util import find +from faebryk.libs.util import cast_assert, find class is_esphome_bus(ModuleInterface.TraitT): @@ -14,9 +14,12 @@ class is_esphome_bus(ModuleInterface.TraitT): def get_bus_id(self) -> str: ... @staticmethod - def find_connected_bus(bus: ModuleInterface): + def find_connected_bus[T: ModuleInterface](bus: T) -> T: connected_mifs = bus.get_direct_connections() try: - return find(connected_mifs, lambda mif: mif.has_trait(is_esphome_bus)) + return cast_assert( + type(bus), + find(connected_mifs, lambda mif: mif.has_trait(is_esphome_bus)), + ) except ValueError: raise Exception(f"No esphome bus connected to {bus}: {connected_mifs}") diff --git a/src/faebryk/libs/app/parameters.py b/src/faebryk/libs/app/parameters.py index 1e19e0b8..db4b9a47 100644 --- a/src/faebryk/libs/app/parameters.py +++ b/src/faebryk/libs/app/parameters.py @@ -3,33 +3,4 @@ import logging -import faebryk.library._F as F -from faebryk.core.module import Module -from faebryk.core.parameter import Parameter - logger = logging.getLogger(__name__) - - -def replace_tbd_with_any(module: Module, recursive: bool, loglvl: int | None = None): - """ - Replace all F.TBD instances with F.ANY instances in the given module. - - :param module: The module to replace F.TBD instances in. - :param recursive: If True, replace F.TBD instances in submodules as well. - """ - lvl = logger.getEffectiveLevel() - if loglvl is not None: - logger.setLevel(loglvl) - - module = module.get_most_special() - - for param in module.get_children(direct_only=True, types=Parameter): - if isinstance(param.get_most_narrow(), F.TBD): - logger.debug(f"Replacing in {module}: {param} with F.ANY") - param.merge(F.ANY()) - - logger.setLevel(lvl) - - if recursive: - for m in module.get_children_modules(types=Module): - replace_tbd_with_any(m, recursive=False, loglvl=loglvl) diff --git a/src/faebryk/libs/examples/buildutil.py b/src/faebryk/libs/examples/buildutil.py index c9fd6ab2..7103f221 100644 --- a/src/faebryk/libs/examples/buildutil.py +++ b/src/faebryk/libs/examples/buildutil.py @@ -10,7 +10,6 @@ from faebryk.core.module import Module from faebryk.exporters.pcb.kicad.transformer import PCB_Transformer from faebryk.libs.app.checks import run_checks -from faebryk.libs.app.parameters import replace_tbd_with_any from faebryk.libs.app.pcb import apply_design from faebryk.libs.examples.pickers import add_example_pickers from faebryk.libs.picker.jlcpcb.jlcpcb import JLCPCB_DB @@ -49,10 +48,6 @@ def apply_design_to_pcb( logger.info("Filling unspecified parameters") - replace_tbd_with_any( - m, recursive=True, loglvl=logging.DEBUG if DEV_MODE else logging.INFO - ) - G = m.get_graph() run_checks(m, G) diff --git a/src/faebryk/libs/examples/pickers.py b/src/faebryk/libs/examples/pickers.py index bbd33027..8e84ad3a 100644 --- a/src/faebryk/libs/examples/pickers.py +++ b/src/faebryk/libs/examples/pickers.py @@ -10,7 +10,6 @@ import faebryk.library._F as F from faebryk.core.module import Module -from faebryk.libs.app.parameters import replace_tbd_with_any from faebryk.libs.picker.lcsc import LCSC_Part from faebryk.libs.picker.picker import PickerOption, pick_module_by_params from faebryk.libs.units import P @@ -237,7 +236,6 @@ def pick_tvs(module: 
F.TVS): def pick_battery(module: F.Battery): if not isinstance(module, F.ButtonCell): bcell = F.ButtonCell() - replace_tbd_with_any(bcell, recursive=False) module.specialize(bcell) bcell.add( F.has_multi_picker(0, F.has_multi_picker.FunctionPicker(pick_battery)) diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index c3ae1cae..88c9fd1c 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -17,7 +17,7 @@ def __sub__(self, __value: Self) -> Self: ... def __add__(self, __value: Self) -> Self: ... -class _SupportsArithmeticOpsWithFloatMul(_SupportsRangeOps): +class _SupportsArithmeticOpsWithFloatMul(_SupportsRangeOps, Protocol): def __mul__(self, __value: float | Self) -> Self: ... diff --git a/test/exporters/netlist/kicad/test_netlist_kicad.py b/test/exporters/netlist/kicad/test_netlist_kicad.py index a3bf37e5..259b30f8 100644 --- a/test/exporters/netlist/kicad/test_netlist_kicad.py +++ b/test/exporters/netlist/kicad/test_netlist_kicad.py @@ -19,8 +19,12 @@ # Netlists -------------------------------------------------------------------- def _test_netlist_graph(): - resistor1 = F.Resistor().builder(lambda r: r.resistance.merge(100 * P.ohm)) - resistor2 = F.Resistor().builder(lambda r: r.resistance.merge(200 * P.ohm)) + resistor1 = F.Resistor().builder( + lambda r: r.resistance.constrain_subset(100 * P.ohm) + ) + resistor2 = F.Resistor().builder( + lambda r: r.resistance.constrain_subset(200 * P.ohm) + ) power = F.ElectricPower() # net labels From 59965dda591540599d34730a8e862d3fabf70935 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 16:38:21 +0200 Subject: [PATCH 29/80] Remove esphome Param stuff --- src/faebryk/exporters/esphome/esphome.py | 57 +----------------------- src/faebryk/library/HLK_LD2410B_P.py | 2 +- src/faebryk/libs/util.py | 38 ++++++++++++++++ 3 files changed, 41 insertions(+), 56 deletions(-) diff --git a/src/faebryk/exporters/esphome/esphome.py b/src/faebryk/exporters/esphome/esphome.py index e33cacfe..6832d632 100644 --- a/src/faebryk/exporters/esphome/esphome.py +++ b/src/faebryk/exporters/esphome/esphome.py @@ -2,75 +2,22 @@ # SPDX-License-Identifier: MIT import logging -from typing import Any, Callable import yaml import faebryk.library._F as F from faebryk.core.graphinterface import Graph -from faebryk.core.parameter import Parameter +from faebryk.libs.util import merge_dicts logger = logging.getLogger(__name__) -# TODO move to util -def dict_map_values(d: dict, function: Callable[[Any], Any]) -> dict: - """recursively map all values in a dict""" - - result = {} - for key, value in d.items(): - if isinstance(value, dict): - result[key] = dict_map_values(value, function) - elif isinstance(value, list): - result[key] = [dict_map_values(v, function) for v in value] - else: - result[key] = function(value) - return result - - -def merge_dicts(*dicts: dict) -> dict: - """merge a list of dicts into a single dict, - if same key is present and value is list, lists are merged - if same key is dict, dicts are merged recursively - """ - result = {} - for d in dicts: - for k, v in d.items(): - if k in result: - if isinstance(v, list): - assert isinstance( - result[k], list - ), f"Trying to merge list into key '{k}' of type {type(result[k])}" - result[k] += v - elif isinstance(v, dict): - assert isinstance(result[k], dict) - result[k] = merge_dicts(result[k], v) - else: - result[k] = v - else: - result[k] = v - return result - - def make_esphome_config(G: Graph) -> dict: esphome_components = G.nodes_with_trait(F.has_esphome_config) 
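
make_esphome_config now simply merges the per-component dictionaries; the dict_map_values/merge_dicts helpers move unchanged into faebryk.libs.util in this commit. A usage illustration of the documented merge semantics (lists concatenated, nested dicts merged recursively, other values overwritten); the platform names and keys are illustrative only:

    # Usage illustration for merge_dicts as moved to faebryk.libs.util.
    from faebryk.libs.util import merge_dicts

    a = {"sensor": [{"platform": "bh1750"}], "esphome": {"name": "node"}}
    b = {"sensor": [{"platform": "scd4x"}], "esphome": {"friendly_name": "Node"}}
    print(merge_dicts(a, b))
    # {'sensor': [{'platform': 'bh1750'}, {'platform': 'scd4x'}],
    #  'esphome': {'name': 'node', 'friendly_name': 'Node'}}
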
esphome_config = merge_dicts(*[t.get_config() for _, t in esphome_components]) - def instantiate_param(param: Parameter | Any): - if not isinstance(param, Parameter): - return param - - if not isinstance(param, F.Constant): - raise Exception( - f"Parameter {param} is not a F.Constant, but {type(param)}" - f"Config: {esphome_config}" - ) - return param.value - - instantiated = dict_map_values(esphome_config, instantiate_param) - - return instantiated + return esphome_config def dump_esphome_config(config: dict) -> str: diff --git a/src/faebryk/library/HLK_LD2410B_P.py b/src/faebryk/library/HLK_LD2410B_P.py index f9032f20..4480176b 100644 --- a/src/faebryk/library/HLK_LD2410B_P.py +++ b/src/faebryk/library/HLK_LD2410B_P.py @@ -20,7 +20,7 @@ def get_config(self) -> dict: val = cast_assert(Quantity, self.throttle.get_any_single()) obj = self.obj - assert isinstance(obj, HLK_LD2410B_P), "This is not an HLK_LD2410B_P!" + assert isinstance(obj, HLK_LD2410B_P) uart_candidates = { mif diff --git a/src/faebryk/libs/util.py b/src/faebryk/libs/util.py index 6ceba3be..ecc5c7b8 100644 --- a/src/faebryk/libs/util.py +++ b/src/faebryk/libs/util.py @@ -1076,3 +1076,41 @@ def setdefault(self, key: T, default: U) -> U: except KeyError: self[key] = default return default + + +def dict_map_values(d: dict, function: Callable[[Any], Any]) -> dict: + """recursively map all values in a dict""" + + result = {} + for key, value in d.items(): + if isinstance(value, dict): + result[key] = dict_map_values(value, function) + elif isinstance(value, list): + result[key] = [dict_map_values(v, function) for v in value] + else: + result[key] = function(value) + return result + + +def merge_dicts(*dicts: dict) -> dict: + """merge a list of dicts into a single dict, + if same key is present and value is list, lists are merged + if same key is dict, dicts are merged recursively + """ + result = {} + for d in dicts: + for k, v in d.items(): + if k in result: + if isinstance(v, list): + assert isinstance( + result[k], list + ), f"Trying to merge list into key '{k}' of type {type(result[k])}" + result[k] += v + elif isinstance(v, dict): + assert isinstance(result[k], dict) + result[k] = merge_dicts(result[k], v) + else: + result[k] = v + else: + result[k] = v + return result From 41afca7cbcd816ee9dea8b6e4402e11ceabbc563 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 16:52:30 +0200 Subject: [PATCH 30/80] add abstract decorator --- src/faebryk/core/parameter.py | 3 ++- src/faebryk/libs/util.py | 16 ++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index a3414428..285bee97 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -10,6 +10,7 @@ from faebryk.core.node import Node, f_field from faebryk.libs.sets import Range, Set_ from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless +from faebryk.libs.util import abstract logger = logging.getLogger(__name__) @@ -176,7 +177,7 @@ def if_then_else( # ) -> None: ... -# TODO: prohibit instantiation +@abstract class Expression(Node, ParameterOperatable): pass diff --git a/src/faebryk/libs/util.py b/src/faebryk/libs/util.py index ecc5c7b8..9a1f696f 100644 --- a/src/faebryk/libs/util.py +++ b/src/faebryk/libs/util.py @@ -1114,3 +1114,19 @@ def merge_dicts(*dicts: dict) -> dict: else: result[k] = v return result + + +def abstract[T: type](cls: T) -> T: + """ + Mark a class as abstract. 
+ """ + + old_new = cls.__new__ + + def _new(cls_, *args, **kwargs): + if cls_ is cls: + raise TypeError(f"{cls.__name__} is abstract and cannot be instantiated") + return old_new(cls_, *args, **kwargs) + + cls.__new__ = _new + return cls From 3a98300efc6e337b70e8dcec94c242fdcbe9b8a9 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Wed, 9 Oct 2024 16:55:23 +0200 Subject: [PATCH 31/80] Now that we have units, get rid of all the useless stuff to do with luminousity --- src/faebryk/libs/brightness.py | 165 +++++++++++++-------------------- 1 file changed, 63 insertions(+), 102 deletions(-) diff --git a/src/faebryk/libs/brightness.py b/src/faebryk/libs/brightness.py index 678e31b4..7bc71040 100644 --- a/src/faebryk/libs/brightness.py +++ b/src/faebryk/libs/brightness.py @@ -1,11 +1,9 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from copy import copy from enum import Enum -import faebryk.library._F as F -from faebryk.core.parameter import Parameter +from faebryk.libs.library import L from faebryk.libs.units import P, Quantity """ @@ -35,43 +33,20 @@ """ -class _Unit: - def __init__(self, value: Parameter[Quantity]): - self._value = value +def luminous_flux_to_intensity(flux: Quantity, solid_angle: Quantity) -> Quantity: + return flux / solid_angle - def __repr__(self): - return f"{self._value!r}" - @property - def value(self): - return copy(self._value) +def luminous_intensity_to_flux(intensity: Quantity, solid_angle: Quantity) -> Quantity: + return intensity * solid_angle -# Temporary unit classes until faebryk supports units -class LuminousIntensity(_Unit): - pass +def luminous_flux_to_illuminance(flux: Quantity, area: Quantity) -> Quantity: + return flux / area -class LuminousFlux(_Unit): - @classmethod - def from_intensity( - cls, - intensity: LuminousIntensity, - solid_angle: Parameter[Quantity], - ) -> "LuminousFlux": - return LuminousFlux(intensity.value * solid_angle) - - def to_intensity(self, solid_angle: Parameter[Quantity]) -> LuminousIntensity: - return LuminousIntensity(self.value / solid_angle) - - -class Illuminance(_Unit): - @classmethod - def from_flux(cls, flux: LuminousFlux, area: Parameter[Quantity]) -> "Illuminance": - return Illuminance(flux.value / area) - - def to_luminous_flux(self, area: Parameter[Quantity]) -> LuminousFlux: - return LuminousFlux(self.value * area) +def illuminance_to_flux(illuminance: Quantity, area: Quantity) -> Quantity: + return illuminance * area class TypicalLuminousIntensity(Enum): @@ -79,50 +54,36 @@ class TypicalLuminousIntensity(Enum): Well known luminous intensities in candela. 
""" - CANDLE = LuminousFlux(F.Constant(1 * P.candela)) - - CREE_SMD_LED_EXTREMELY_DIM = LuminousFlux(F.Constant(10 * P.millicandela)) - CREE_SMD_LED_VERY_DIM = LuminousFlux(F.Constant(25 * P.millicandela)) - CREE_SMD_LED_DIM = LuminousFlux(F.Constant(50 * P.millicandela)) - CREE_SMD_LED_NORMAL = LuminousFlux(F.Constant(100 * P.millicandela)) - CREE_SMD_LED_BRIGHT = LuminousFlux(F.Constant(250 * P.millicandela)) - CREE_SMD_LED_VERY_BRIGHT = LuminousFlux(F.Constant(2 * P.candela)) - CREE_SMD_LED_EXTREMELY_BRIGHT = LuminousFlux(F.Constant(14 * P.candela)) - - TYPICAL_SMD_LED_MAX_BRIGHTNESS = LuminousFlux( - F.Range(60 * P.millicandela, 800 * P.mcandela) - ) - - WS2812B_LED_RED = LuminousFlux(F.Constant(420 * P.millicandela)) - WS2812B_LED_GREEN = LuminousFlux(F.Constant(720 * P.millicandela)) - WS2812B_LED_BLUE = LuminousFlux(F.Constant(200 * P.millicandela)) - - APPLICATION_CAR_HEADLIGHTS_HALOGEN_LOW_BEAM_MEDIUM = LuminousFlux( - F.Constant(20 * P.kcandela) - ) - APPLICATION_CAR_HEADLIGHTS_HALOGEN_HIGH_BEAM_MEDIUM = LuminousFlux( - F.Constant(40 * P.kcandela) - ) - APPLICATION_CAR_TURN_INDICATOR_DIM = LuminousFlux(F.Constant(1 * P.kcandela)) - APPLICATION_CAR_TURN_INDICATOR_BRIGHT = LuminousFlux(F.Constant(10 * P.kcandela)) - APPLICATION_CAR_BREAK_LIGHT_DIM = LuminousFlux(F.Constant(5 * P.kcandela)) - APPLICATION_CAR_BREAK_LIGHT_BRIGHT = LuminousFlux(F.Constant(50 * P.kcandela)) + CANDLE = 1 * P.candela + + CREE_SMD_LED_EXTREMELY_DIM = 10 * P.millicandela + CREE_SMD_LED_VERY_DIM = 25 * P.millicandela + CREE_SMD_LED_DIM = 50 * P.millicandela + CREE_SMD_LED_NORMAL = 100 * P.millicandela + CREE_SMD_LED_BRIGHT = 250 * P.millicandela + CREE_SMD_LED_VERY_BRIGHT = 2 * P.candela + CREE_SMD_LED_EXTREMELY_BRIGHT = 14 * P.candela + + TYPICAL_SMD_LED_MAX_BRIGHTNESS = L.Range(60 * P.millicandela, 800 * P.millicandela) + + WS2812B_LED_RED = 420 * P.millicandela + WS2812B_LED_GREEN = 720 * P.millicandela + WS2812B_LED_BLUE = 200 * P.millicandela + + APPLICATION_CAR_HEADLIGHTS_HALOGEN_LOW_BEAM_MEDIUM = 20 * P.kcandela + APPLICATION_CAR_HEADLIGHTS_HALOGEN_HIGH_BEAM_MEDIUM = 40 * P.kcandela + APPLICATION_CAR_TURN_INDICATOR_DIM = 1 * P.kcandela + APPLICATION_CAR_TURN_INDICATOR_BRIGHT = 10 * P.kcandela + APPLICATION_CAR_BREAK_LIGHT_DIM = 5 * P.kcandela + APPLICATION_CAR_BREAK_LIGHT_BRIGHT = 50 * P.kcandela # not sure about these values - APPLICATION_LED_STANDBY = LuminousFlux(F.Range(1 * P.millicandela, 10 * P.mcandela)) - APPLICATION_LED_INDICATOR_INSIDE = LuminousFlux( - F.Range(10 * P.millicandela, 100 * P.mcandela) - ) - APPLICATION_LED_KEYBOARD_BACKLIGHT = LuminousFlux( - F.Range(50 * P.millicandela, 500 * P.mcandela) - ) - APPLICATION_LED_INDICATOR_OUTSIDE = LuminousFlux( - F.Range(100 * P.millicandela, 1 * P.candela) - ) - APPLICATION_LED_DECORATIVE_LIGHTING = LuminousFlux( - F.Range(100 * P.millicandela, 1 * P.candela) - ) - APPLICATION_LED_FLASHLIGHT = LuminousFlux(F.Range(10 * P.candela, 1 * P.kcandela)) + APPLICATION_LED_STANDBY = L.Range(1 * P.millicandela, 10 * P.mcandela) + APPLICATION_LED_INDICATOR_INSIDE = L.Range(10 * P.millicandela, 100 * P.mcandela) + APPLICATION_LED_KEYBOARD_BACKLIGHT = L.Range(50 * P.millicandela, 500 * P.mcandela) + APPLICATION_LED_INDICATOR_OUTSIDE = L.Range(100 * P.millicandela, 1 * P.candela) + APPLICATION_LED_DECORATIVE_LIGHTING = L.Range(100 * P.millicandela, 1 * P.candela) + APPLICATION_LED_FLASHLIGHT = L.Range(10 * P.candela, 1 * P.kcandela) class TypicalLuminousFlux(Enum): @@ -130,21 +91,21 @@ class TypicalLuminousFlux(Enum): Well known luminous flux in lumen. 
""" - IKEA_E14_BULB_LED_DIM = LuminousFlux(F.Constant(100 * P.lm)) - IKEA_E14_BULB_LED_MEDIUM = LuminousFlux(F.Constant(250 * P.lm)) - IKEA_E14_BULB_LED_BRIGHT = LuminousFlux(F.Constant(470 * P.lm)) - IKEA_GU10_BULB_LED_DIM = LuminousFlux(F.Constant(230 * P.lm)) - IKEA_GU10_BULB_LED_MEDIUM = LuminousFlux(F.Constant(345 * P.lm)) - IKEA_E27_BULB_LED_DIM = LuminousFlux(F.Constant(470 * P.lm)) - IKEA_E27_BULB_LED_MEDIUM = LuminousFlux(F.Constant(806 * P.lm)) - IKEA_E27_BULB_LED_BRIGHT = LuminousFlux(F.Constant(1500 * P.lm)) + IKEA_E14_BULB_LED_DIM = 100 * P.lm + IKEA_E14_BULB_LED_MEDIUM = 250 * P.lm + IKEA_E14_BULB_LED_BRIGHT = 470 * P.lm + IKEA_GU10_BULB_LED_DIM = 230 * P.lm + IKEA_GU10_BULB_LED_MEDIUM = 345 * P.lm + IKEA_E27_BULB_LED_DIM = 470 * P.lm + IKEA_E27_BULB_LED_MEDIUM = 806 * P.lm + IKEA_E27_BULB_LED_BRIGHT = 1500 * P.lm - CREE_SMD_LED_VERY_BRIGHT = LuminousFlux(F.Constant(6000 * P.lm)) + CREE_SMD_LED_VERY_BRIGHT = 6000 * P.lm - LASER_POINTER_GREEN_5MW = LuminousFlux(F.Constant(3.4 * P.lm)) + LASER_POINTER_GREEN_5MW = 3.4 * P.lm - CAR_HEADLIGHTS_HALOGEN_LOW_BEAM_MEDIUM = LuminousFlux(F.Constant(1000 * P.lm)) - CAR_HEADLIGHTS_HALOGEN_HIGH_BEAM_MEDIUM = LuminousFlux(F.Constant(1300 * P.lm)) + CAR_HEADLIGHTS_HALOGEN_LOW_BEAM_MEDIUM = 1000 * P.lm + CAR_HEADLIGHTS_HALOGEN_HIGH_BEAM_MEDIUM = 1300 * P.lm class TypicalIlluminance(Enum): @@ -153,17 +114,17 @@ class TypicalIlluminance(Enum): """ # https://en.wikipedia.org/wiki/Lux - MOONLESS_OVERCAST_NIGHT_SKY_STARLIGHT = Illuminance(F.Constant(0.0001 * P.lx)) - MOONLESS_CLEAR_NIGHT_SKY_WITH_AIRGLOW = Illuminance(F.Constant(0.002 * P.lx)) - FULL_MOON_ON_A_CLEAR_NIGHT = Illuminance(F.Constant(0.05 * P.lx)) - DARK_LIMIT_OF_CIVIL_TWILIGHT_UNDER_A_CLEAR_SKY = Illuminance(F.Constant(3.4 * P.lx)) - PUBLIC_AREAS_WITH_DARK_SURROUNDINGS = Illuminance(F.Constant(20 * P.lx)) - FAMILY_LIVING_ROOM_LIGHTS = Illuminance(F.Constant(50 * P.lx)) - OFFICE_BUILDING_HALLWAY_TOILET_LIGHTING = Illuminance(F.Constant(80 * P.lx)) - VERY_DARK_OVERCAST_DAY = Illuminance(F.Constant(100 * P.lx)) - TRAIN_STATION_PLATFORMS = Illuminance(F.Constant(150 * P.lx)) - OFFICE_LIGHTING = Illuminance(F.Constant(320 * P.lx)) - SUNRISE_OR_SUNSET_ON_A_CLEAR_DAY = Illuminance(F.Constant(400 * P.lx)) - OVERCAST_DAY = Illuminance(F.Constant(1000 * P.lx)) - FULL_DAYLIGHT = Illuminance(F.Constant(25000 * P.lx)) - DIRECT_SUNLIGHT = Illuminance(F.Constant(100000 * P.lx)) + MOONLESS_OVERCAST_NIGHT_SKY_STARLIGHT = 0.0001 * P.lx + MOONLESS_CLEAR_NIGHT_SKY_WITH_AIRGLOW = 0.002 * P.lx + FULL_MOON_ON_A_CLEAR_NIGHT = 0.05 * P.lx + DARK_LIMIT_OF_CIVIL_TWILIGHT_UNDER_A_CLEAR_SKY = 3.4 * P.lx + PUBLIC_AREAS_WITH_DARK_SURROUNDINGS = 20 * P.lx + FAMILY_LIVING_ROOM_LIGHTS = 50 * P.lx + OFFICE_BUILDING_HALLWAY_TOILET_LIGHTING = 80 * P.lx + VERY_DARK_OVERCAST_DAY = 100 * P.lx + TRAIN_STATION_PLATFORMS = 150 * P.lx + OFFICE_LIGHTING = 320 * P.lx + SUNRISE_OR_SUNSET_ON_A_CLEAR_DAY = 400 * P.lx + OVERCAST_DAY = 1000 * P.lx + FULL_DAYLIGHT = 25000 * P.lx + DIRECT_SUNLIGHT = 100000 * P.lx From 6e8d033065fbf3540bfaa15e12d8062d3b069ab4 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 17:38:47 +0200 Subject: [PATCH 32/80] assert_true --- src/faebryk/core/parameter.py | 5 ++++ src/faebryk/library/ElectricPower.py | 2 +- test/library/nodes/test_electricpower.py | 34 +++++++++++------------- 3 files changed, 22 insertions(+), 19 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 285bee97..0a788e60 100644 --- a/src/faebryk/core/parameter.py +++ 
b/src/faebryk/core/parameter.py @@ -6,6 +6,8 @@ from types import NotImplementedType from typing import Any, Callable, Protocol +from more_itertools import raise_ + from faebryk.core.core import Namespace from faebryk.core.node import Node, f_field from faebryk.libs.sets import Range, Set_ @@ -170,6 +172,9 @@ def if_then_else( if_false: Callable[[], Any], ) -> None: ... + def assert_true(self) -> None: + self.if_then_else(lambda: None, lambda: raise_(ValueError())) + # TODO # def switch_case( # self, diff --git a/src/faebryk/library/ElectricPower.py b/src/faebryk/library/ElectricPower.py index e3a836af..aef97303 100644 --- a/src/faebryk/library/ElectricPower.py +++ b/src/faebryk/library/ElectricPower.py @@ -86,7 +86,7 @@ def fused(self, attach_to: Node | None = None): def __preinit__(self) -> None: ... - # self.voltage.merge( + # self.voltage.alias_is( # self.hv.potential - self.lv.potential # ) diff --git a/test/library/nodes/test_electricpower.py b/test/library/nodes/test_electricpower.py index d41f19ee..882b9998 100644 --- a/test/library/nodes/test_electricpower.py +++ b/test/library/nodes/test_electricpower.py @@ -1,29 +1,27 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -import unittest +from faebryk.libs.library import L -class TestFusedPower(unittest.TestCase): - def test_fused_power(self): - import faebryk.library._F as F - from faebryk.libs.units import P - power_in = F.ElectricPower() - power_out = F.ElectricPower() +def test_fused_power(): + import faebryk.library._F as F + from faebryk.libs.units import P - power_in.voltage.merge(10 * P.V) - power_in.max_current.merge(500 * P.mA) + power_in = F.ElectricPower() + power_out = F.ElectricPower() - power_in_fused = power_in.fused() + power_in.voltage.constrain_subset(10 * P.V) + power_in.max_current.constrain_subset(500 * P.mA) - power_in_fused.connect(power_out) + power_in_fused = power_in.fused() + power_in_fused.connect(power_out) - fuse = next(iter(power_in_fused.get_children(direct_only=False, types=F.Fuse))) + fuse = next(iter(power_in_fused.get_children(direct_only=False, types=F.Fuse))) - self.assertEqual(fuse.trip_current.get_most_narrow(), F.Constant(500 * P.mA)) - self.assertEqual(power_out.voltage.get_most_narrow(), 10 * P.V) - # self.assertEqual( - # power_in_fused.max_current.get_most_narrow(), F.Range(0 * P.A, 500 * P.mA) - # ) - self.assertEqual(power_out.max_current.get_most_narrow(), F.TBD()) + fuse.trip_current.operation_is_subset( + L.Range.from_center_rel(500 * P.mA, 0.1) + ).assert_true() + power_out.voltage.operation_is_subset(10 * P.V).assert_true() + power_out.max_current.operation_is_le(500 * P.mA * 0.9).assert_true() From 1930f729b0503fa325ddec5d3489f0e7a0115a2d Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Wed, 9 Oct 2024 17:57:54 +0200 Subject: [PATCH 33/80] Get rid of some F.Constant --- src/faebryk/core/parameter.py | 10 ++- src/faebryk/library/Resistor.py | 14 ++-- src/faebryk/library/SK9822_EC20.py | 2 +- src/faebryk/libs/examples/pickers.py | 97 ++++++++++++++-------------- src/faebryk/libs/picker/picker.py | 8 ++- src/faebryk/libs/sets.py | 4 +- 6 files changed, 73 insertions(+), 62 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 0a788e60..b29c7915 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -21,11 +21,15 @@ class ParameterOperatable(Protocol): type QuantityLike = Quantity | NotImplementedType type Number = int | float | QuantityLike - 
type NumberLike = ParameterOperatable | Number | Set_[Number] - type BooleanLike = ParameterOperatable | bool | Set_[bool] - type EnumLike = ParameterOperatable | Enum | Set_[Enum] + type NonParamNumber = Number | Set_[Number] + type NumberLike = ParameterOperatable | NonParamNumber + type NonParamBoolean = bool | Set_[bool] + type BooleanLike = ParameterOperatable | NonParamBoolean + type NonParamEnum = Enum | Set_[Enum] + type EnumLike = ParameterOperatable | NonParamEnum type All = NumberLike | BooleanLike | EnumLike + type NonParamSet = NonParamNumber | NonParamBoolean | NonParamEnum type Sets = All def alias_is(self, other: All): ... diff --git a/src/faebryk/library/Resistor.py b/src/faebryk/library/Resistor.py index 118384b0..54593f6f 100644 --- a/src/faebryk/library/Resistor.py +++ b/src/faebryk/library/Resistor.py @@ -1,6 +1,7 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +from more_itertools import raise_ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L @@ -45,13 +46,14 @@ def allow_removal_if_zero(self): def replace_zero(m: Module): assert m is self - r = self.resistance.get_most_narrow() - if not F.Constant(0.0 * P.ohm).is_subset_of(r): - raise PickError("", self) + def do_replace(): + self.resistance.constrain_subset(0.0 * P.ohm) + self.unnamed[0].connect(self.unnamed[1]) + self.add(has_part_picked_remove()) - self.resistance.constrain_subset(0.0 * P.ohm) - self.unnamed[0].connect(self.unnamed[1]) - self.add(has_part_picked_remove()) + self.resistance.operation_is_superset(0.0 * P.ohm).if_then_else( + lambda: do_replace(), lambda: raise_(PickError("", self)) + ) self.add( F.has_multi_picker(-100, F.has_multi_picker.FunctionPicker(replace_zero)) diff --git a/src/faebryk/library/SK9822_EC20.py b/src/faebryk/library/SK9822_EC20.py index 3ef39d61..430dd836 100644 --- a/src/faebryk/library/SK9822_EC20.py +++ b/src/faebryk/library/SK9822_EC20.py @@ -12,7 +12,7 @@ class SK9822_EC20(Module): (RGB) driving intelligent control circuit and the light emitting circuit in one of the LED light source control. 
Products containing a signal - decoding module, data buffer, a built-in F.Constant + decoding module, data buffer, a built-in Constant current circuit and RC oscillator; CMOS, low voltage, low power consumption; 256 level grayscale PWM adjustment and 32 brightness adjustment; diff --git a/src/faebryk/libs/examples/pickers.py b/src/faebryk/libs/examples/pickers.py index 8e84ad3a..236d3847 100644 --- a/src/faebryk/libs/examples/pickers.py +++ b/src/faebryk/libs/examples/pickers.py @@ -10,6 +10,7 @@ import faebryk.library._F as F from faebryk.core.module import Module +from faebryk.libs.library import L from faebryk.libs.picker.lcsc import LCSC_Part from faebryk.libs.picker.picker import PickerOption, pick_module_by_params from faebryk.libs.units import P @@ -27,17 +28,17 @@ def pick_fuse(module: F.Fuse): PickerOption( part=LCSC_Part(partno="C914087"), params={ - "fuse_type": F.Constant(F.Fuse.FuseType.RESETTABLE), - "response_type": F.Constant(F.Fuse.ResponseType.SLOW), - "trip_current": F.Constant(1 * P.A), + "fuse_type": L.Single(F.Fuse.FuseType.RESETTABLE), + "response_type": L.Single(F.Fuse.ResponseType.SLOW), + "trip_current": 1 * P.A, }, ), PickerOption( part=LCSC_Part(partno="C914085"), params={ - "fuse_type": F.Constant(F.Fuse.FuseType.RESETTABLE), - "response_type": F.Constant(F.Fuse.ResponseType.SLOW), - "trip_current": F.Constant(0.5 * P.A), + "fuse_type": L.Single(F.Fuse.FuseType.RESETTABLE), + "response_type": L.Single(F.Fuse.ResponseType.SLOW), + "trip_current": 0.5 * P.A, }, ), ], @@ -56,14 +57,14 @@ def pick_mosfet(module: F.MOSFET): PickerOption( part=LCSC_Part(partno="C20917"), params={ - "channel_type": F.Constant(F.MOSFET.ChannelType.N_CHANNEL), + "channel_type": L.Single(F.MOSFET.ChannelType.N_CHANNEL), }, pinmap=standard_pinmap, ), PickerOption( part=LCSC_Part(partno="C15127"), params={ - "channel_type": F.Constant(F.MOSFET.ChannelType.P_CHANNEL), + "channel_type": L.Single(F.MOSFET.ChannelType.P_CHANNEL), }, pinmap=standard_pinmap, ), @@ -84,23 +85,23 @@ def pick_capacitor(module: F.Capacitor): PickerOption( part=LCSC_Part(partno="C1525"), params={ - "temperature_coefficient": F.Range( + "temperature_coefficient": L.Range( F.Capacitor.TemperatureCoefficient.Y5V, F.Capacitor.TemperatureCoefficient.X7R, ), - "capacitance": F.Constant(100 * P.nF), - "max_voltage": F.Range(0 * P.V, 16 * P.V), + "capacitance": 100 * P.nF, + "max_voltage": L.Range(0 * P.V, 16 * P.V), }, ), PickerOption( part=LCSC_Part(partno="C19702"), params={ - "temperature_coefficient": F.Range( + "temperature_coefficient": L.Range( F.Capacitor.TemperatureCoefficient.Y5V, F.Capacitor.TemperatureCoefficient.X7R, ), - "capacitance": F.Constant(10 * P.uF), - "max_voltage": F.Range(0 * P.V, 10 * P.V), + "capacitance": 10 * P.uF, + "max_voltage": L.Range(0 * P.V, 10 * P.V), }, ), ], @@ -119,59 +120,59 @@ def pick_resistor(resistor: F.Resistor): [ PickerOption( part=LCSC_Part(partno="C25111"), - params={"resistance": F.Constant(40.2 * P.kohm)}, + params={"resistance": 40.2 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25076"), - params={"resistance": F.Constant(100 * P.kohm)}, + params={"resistance": 100 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25087"), - params={"resistance": F.Constant(200 * P.kohm)}, + params={"resistance": 200 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C11702"), - params={"resistance": F.Constant(1 * P.kohm)}, + params={"resistance": 1 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25879"), - params={"resistance": F.Constant(2.2 * P.kohm)}, + 
params={"resistance": 2.2 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25900"), - params={"resistance": F.Constant(4.7 * P.kohm)}, + params={"resistance": 4.7 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25905"), - params={"resistance": F.Constant(5.1 * P.kohm)}, + params={"resistance": 5.1 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25917"), - params={"resistance": F.Constant(6.8 * P.kohm)}, + params={"resistance": 6.8 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25744"), - params={"resistance": F.Constant(10 * P.kohm)}, + params={"resistance": 10 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25752"), - params={"resistance": F.Constant(12 * P.kohm)}, + params={"resistance": 12 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25771"), - params={"resistance": F.Constant(27 * P.kohm)}, + params={"resistance": 27 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25741"), - params={"resistance": F.Constant(100 * P.kohm)}, + params={"resistance": 100 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25782"), - params={"resistance": F.Constant(390 * P.kohm)}, + params={"resistance": 390 * P.kohm}, ), PickerOption( part=LCSC_Part(partno="C25790"), - params={"resistance": F.Constant(470 * P.kohm)}, + params={"resistance": 470 * P.kohm}, ), ], ) @@ -184,30 +185,30 @@ def pick_led(module: F.LED): PickerOption( part=LCSC_Part(partno="C72043"), params={ - "color": F.Constant(F.LED.Color.EMERALD), - "max_brightness": F.Constant(285 * P.mcandela), - "forward_voltage": F.Constant(3.7 * P.volt), - "max_current": F.Constant(100 * P.mA), + "color": L.Single(F.LED.Color.EMERALD), + "max_brightness": 285 * P.mcandela, + "forward_voltage": 3.7 * P.volt, + "max_current": 100 * P.mA, }, pinmap={"1": module.cathode, "2": module.anode}, ), PickerOption( part=LCSC_Part(partno="C72041"), params={ - "color": F.Constant(F.LED.Color.BLUE), - "max_brightness": F.Constant(28.5 * P.mcandela), - "forward_voltage": F.Constant(3.1 * P.volt), - "max_current": F.Constant(100 * P.mA), + "color": L.Single(F.LED.Color.BLUE), + "max_brightness": 28.5 * P.mcandela, + "forward_voltage": 3.1 * P.volt, + "max_current": 100 * P.mA, }, pinmap={"1": module.cathode, "2": module.anode}, ), PickerOption( part=LCSC_Part(partno="C72038"), params={ - "color": F.Constant(F.LED.Color.YELLOW), - "max_brightness": F.Constant(180 * P.mcandela), - "forward_voltage": F.Constant(2.3 * P.volt), - "max_current": F.Constant(60 * P.mA), + "color": L.Single(F.LED.Color.YELLOW), + "max_brightness": 180 * P.mcandela, + "forward_voltage": 2.3 * P.volt, + "max_current": 60 * P.mA, }, pinmap={"1": module.cathode, "2": module.anode}, ), @@ -222,7 +223,7 @@ def pick_tvs(module: F.TVS): PickerOption( part=LCSC_Part(partno="C85402"), params={ - "reverse_working_voltage": F.Constant(5 * P.V), + "reverse_working_voltage": 5 * P.V, }, pinmap={ "1": module.cathode, @@ -233,7 +234,9 @@ def pick_tvs(module: F.TVS): ) -def pick_battery(module: F.Battery): +def pick_battery(module): + if not isinstance(module, F.Battery): + raise ValueError("Module is not a Battery") if not isinstance(module, F.ButtonCell): bcell = F.ButtonCell() module.specialize(bcell) @@ -248,11 +251,11 @@ def pick_battery(module: F.Battery): PickerOption( part=LCSC_Part(partno="C5239862"), params={ - "voltage": F.Constant(3 * P.V), - "capacity": F.Range.from_center(225 * P.mAh, 50 * P.mAh), - "material": F.Constant(F.ButtonCell.Material.Lithium), - "size": F.Constant(F.ButtonCell.Size.N_2032), - "shape": F.Constant(F.ButtonCell.Shape.Round), + "voltage": 3 * 
P.V, + "capacity": L.Range.from_center(225 * P.mAh, 50 * P.mAh), + "material": L.Single(F.ButtonCell.Material.Lithium), + "size": L.Single(F.ButtonCell.Size.N_2032), + "shape": L.Single(F.ButtonCell.Shape.Round), }, pinmap={ "1": module.power.lv, diff --git a/src/faebryk/libs/picker/picker.py b/src/faebryk/libs/picker/picker.py index 212956d7..313108f3 100644 --- a/src/faebryk/libs/picker/picker.py +++ b/src/faebryk/libs/picker/picker.py @@ -15,7 +15,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.core.moduleinterface import ModuleInterface -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import Parameter, ParameterOperatable from faebryk.libs.util import flatten, not_none logger = logging.getLogger(__name__) @@ -41,7 +41,7 @@ class DescriptiveProperties(StrEnum): @dataclass class PickerOption: part: Part - params: dict[str, Parameter] | None = None + params: dict[str, ParameterOperatable.NonParamSet] | None = None filter: Callable[[Module], bool] | None = None pinmap: dict[str, F.Electrical] | None = None info: dict[str | DescriptiveProperties, str] | None = None @@ -148,12 +148,14 @@ def pick_module_by_params(module: Module, options: Iterable[PickerOption]): options = list(options) + # TODO this doesn't work + raise NotImplementedError("This doesn't work") try: option = next( filter( lambda o: (not o.filter or o.filter(module)) and all( - v.is_subset_of(params.get(k, F.ANY())) + params[k].operation_is_superset(v) for k, v in (o.params or {}).items() if not k.startswith("_") ), diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 88c9fd1c..0f560be1 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -13,8 +13,8 @@ def __lt__(self, __value) -> bool: ... def __ge__(self, __value) -> bool: ... def __gt__(self, __value) -> bool: ... - def __sub__(self, __value: Self) -> Self: ... - def __add__(self, __value: Self) -> Self: ... + def __sub__(self, __value: Self) -> "_SupportsRangeOps": ... + def __add__(self, __value: Self) -> "_SupportsRangeOps": ... 
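
A note on the protocol classes in sets.py: earlier in this series _SupportsArithmeticOpsWithFloatMul gained an explicit Protocol base because subclassing a protocol without re-listing Protocol produces an ordinary class rather than a new structural protocol. A small standalone example of that rule (names here are illustrative, not the faebryk ones):

    # Why the Protocol base matters when extending a protocol.
    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class SupportsLen(Protocol):
        def __len__(self) -> int: ...

    @runtime_checkable
    class SupportsLenAndAppend(SupportsLen, Protocol):  # still structural
        def append(self, item) -> None: ...

    print(isinstance([], SupportsLenAndAppend))   # True: list matches by shape
    print(isinstance((), SupportsLenAndAppend))   # False: tuple has no append
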
class _SupportsArithmeticOpsWithFloatMul(_SupportsRangeOps, Protocol): From f3f3818b2a56738f02478444a787e4695e98c5fb Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 9 Oct 2024 19:17:48 +0200 Subject: [PATCH 34/80] minor mini stuff --- examples/signal_processing.py | 3 ++- src/faebryk/core/node.py | 2 +- .../has_simple_value_representation_based_on_params.py | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/examples/signal_processing.py b/examples/signal_processing.py index be6bf899..4dc603db 100644 --- a/examples/signal_processing.py +++ b/examples/signal_processing.py @@ -44,8 +44,9 @@ def __preinit__(self) -> None: L.Range.from_center_rel(3 * P.V, 0.05) ) + # TODO # Construct - special.get_trait(F.has_construction_dependency).construct() + # special.get_trait(F.has_construction_dependency).construct() def main(): diff --git a/src/faebryk/core/node.py b/src/faebryk/core/node.py index 743974ba..fc9d93ef 100644 --- a/src/faebryk/core/node.py +++ b/src/faebryk/core/node.py @@ -572,7 +572,7 @@ def pretty_params(self) -> str: from faebryk.core.parameter import Parameter params = { - not_none(p.get_parent())[1]: p.get_most_narrow() + not_none(p.get_parent())[1]: p for p in self.get_children(direct_only=True, types=Parameter) } params_str = "\n".join(f"{k}: {v}" for k, v in params.items()) diff --git a/src/faebryk/library/has_simple_value_representation_based_on_params.py b/src/faebryk/library/has_simple_value_representation_based_on_params.py index 31a2c255..9c576aef 100644 --- a/src/faebryk/library/has_simple_value_representation_based_on_params.py +++ b/src/faebryk/library/has_simple_value_representation_based_on_params.py @@ -20,6 +20,6 @@ def __init__[*P]( assert all(isinstance(p, Parameter) for p in params) self.params = params + # TODO make this more useful def get_value(self) -> str: - params_const = tuple(param.get_most_narrow() for param in self.params) - return self.transformer(*params_const) + return self.transformer(*self.params) From 4076a684962ca09e04a15dadc38b289810bed562 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Thu, 10 Oct 2024 19:45:23 +0200 Subject: [PATCH 35/80] Reworks sets.py, adding union, add and subtract --- src/faebryk/libs/picker/jlcpcb/picker_lib.py | 9 +- src/faebryk/libs/sets.py | 295 +++++++++++++++---- test/libs/test_sets.py | 92 +++++- 3 files changed, 336 insertions(+), 60 deletions(-) diff --git a/src/faebryk/libs/picker/jlcpcb/picker_lib.py b/src/faebryk/libs/picker/jlcpcb/picker_lib.py index 9a3468dd..affd106d 100644 --- a/src/faebryk/libs/picker/jlcpcb/picker_lib.py +++ b/src/faebryk/libs/picker/jlcpcb/picker_lib.py @@ -5,6 +5,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.e_series import E_SERIES_VALUES +from faebryk.libs.library import L from faebryk.libs.picker.jlcpcb.jlcpcb import ( Component, ComponentQuery, @@ -32,15 +33,15 @@ # Generic pickers ---------------------------------------------------------------------- -def str_to_enum[T: Enum](enum: type[T], x: str) -> F.Constant[T]: +def str_to_enum[T: Enum](enum: type[T], x: str) -> L.Single[T]: name = x.replace(" ", "_").replace("-", "_").upper() if name not in [e.name for e in enum]: raise ValueError(f"Enum translation error: {x}[={name}] not in {enum}") - return F.Constant(enum[name]) + return L.Single(enum[name]) -def str_to_enum_func[T: Enum](enum: type[T]) -> Callable[[str], F.Constant[T]]: - def f(x: str) -> F.Constant[T]: +def str_to_enum_func[T: Enum](enum: type[T]) 
-> Callable[[str], L.Single[T]]: + def f(x: str) -> L.Single[T]: return str_to_enum(enum, x) return f diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 0f560be1..73c0cdd5 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -2,9 +2,11 @@ # SPDX-License-Identifier: MIT from abc import ABC, abstractmethod +from collections import OrderedDict from typing import Any, Protocol, Self -from faebryk.libs.units import HasUnit, Unit, dimensionless +from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless +from faebryk.libs.util import groupby class _SupportsRangeOps(Protocol): @@ -13,8 +15,8 @@ def __lt__(self, __value) -> bool: ... def __ge__(self, __value) -> bool: ... def __gt__(self, __value) -> bool: ... - def __sub__(self, __value: Self) -> "_SupportsRangeOps": ... - def __add__(self, __value: Self) -> "_SupportsRangeOps": ... + def __sub__(self, __value: Self) -> Self: ... + def __add__(self, __value: Self) -> Self: ... class _SupportsArithmeticOpsWithFloatMul(_SupportsRangeOps, Protocol): @@ -22,13 +24,29 @@ def __mul__(self, __value: float | Self) -> Self: ... class Set_[T](ABC, HasUnit): - def __init__(self): - pass + def __init__(self, empty: bool, units: Unit): + self.empty = empty + self.units = units @abstractmethod def __contains__(self, item: T): pass + @abstractmethod + def min_elem(self) -> T | None: + pass + + +class Empty[T](Set_[T]): + def __init__(self, units: Unit): + super().__init__(True, units) + + def __contains__(self, item: T): + return False + + def min_elem(self) -> T | None: + return None + class Range[T: _SupportsRangeOps](Set_[T]): def __init__( @@ -38,68 +56,107 @@ def __init__( empty: bool = False, units: Unit | None = None, ): - self.empty = empty - self.min = min - self.max = max if empty and (min is not None or max is not None): raise ValueError("empty range cannot have min or max") - if min is not None and max is not None and not min <= max: - raise ValueError("min must be less than or equal to max") if min is None and max is None: + if not empty: + raise ValueError("must provide at least one of min or max") if units is None: raise ValueError("units must be provided for empyt and full ranges") - self.units = units else: - min_unit = min.units if isinstance(min, HasUnit) else dimensionless - max_unit = max.units if isinstance(max, HasUnit) else dimensionless - if units and not min_unit.is_compatible_with(units): + min_unit = ( + None + if min is None + else min.units + if isinstance(min, HasUnit) + else dimensionless + ) + max_unit = ( + None + if max is None + else max.units + if isinstance(max, HasUnit) + else dimensionless + ) + if units and min_unit and not min_unit.is_compatible_with(units): raise ValueError("min incompatible with units") - if units and not max_unit.is_compatible_with(units): + if units and max_unit and not max_unit.is_compatible_with(units): raise ValueError("max incompatible with units") - self.units = units or min_unit + if min_unit and max_unit and not min_unit.is_compatible_with(max_unit): + raise ValueError("min and max must be compatible") + units = units or min_unit or max_unit + assert units is not None # stop typer check from being annoying + if not empty: + is_float = isinstance(min, float) or isinstance(max, float) + is_quantity = isinstance(min, Quantity) or isinstance(max, Quantity) + if isinstance(min, Quantity): + is_float = isinstance(min.magnitude, float) + if isinstance(max, Quantity): + is_float = isinstance(max.magnitude, float) + if is_quantity and is_float: + 
self.min = min if min else Quantity(float("-inf"), units=units) + self.max = max if max else Quantity(float("inf"), units=units) + elif is_float: + self.min = min or float("-inf") + self.max = max or float("inf") + else: + if min is None or max is None: + raise ValueError( + "must provide both min and max for types other than float and float quantity" # noqa: E501 + ) + self.min = min + self.max = max + if not self.min <= self.max: + raise ValueError("min must be less than or equal to max") + super().__init__(empty, units) + + def min_elem(self) -> T | None: + if self.empty: + return None + return self.min def __contains__(self, item: T): - if self.min is not None and not self.min <= item: - return False - if self.max is not None and not item <= self.max: + if self.empty: return False - return True + return self.min <= item <= self.max - @classmethod - def from_center(cls, center: T, abs_tol: T) -> "Range[T]": - return cls(center - abs_tol, center + abs_tol) + @staticmethod + def from_center(center: T, abs_tol: T) -> "Range[_SupportsRangeOps]": + return Range[_SupportsRangeOps](center - abs_tol, center + abs_tol) @staticmethod - def from_center_rel[U: _SupportsArithmeticOpsWithFloatMul]( - center: U, rel_tol: float - ) -> "Range[U]": - return Range[U](center - center * rel_tol, center + center * rel_tol) + def from_center_rel( + center: _SupportsArithmeticOpsWithFloatMul, rel_tol: float + ) -> "Range[_SupportsRangeOps]": + return Range[_SupportsRangeOps]( + center - center * rel_tol, center + center * rel_tol + ) - def intersection(self, other: "Range[T]") -> "Range[T]": + def range_intersection(self, other: "Range[T]") -> "Range[T]": if self.empty or other.empty: - return Range(empty=True) - - if self.min is None: - _min = other.min - elif other.min is None: - _min = self.min - else: - _min = max(self.min, other.min) + return Range(empty=True, units=self.units) - if self.max is None: - _max = other.max - elif other.max is None: - _max = self.max - else: - _max = min(self.max, other.max) + _min = max(self.min, other.min) + _max = min(self.max, other.max) - if (_min is not None and (_min not in self or _min not in other)) or ( - _max is not None and (_max not in self or _max not in other) + if ( + _min not in self + or _min not in other + or _max not in self + or _max not in other ): return Range(empty=True, units=self.units) return Range(_min, _max) + # def __copy__(self) -> Self: + # r = Range.__new__(Range) + # r.min = self.min + # r.max = self.max + # r.empty = self.empty + # r.units = self.units + # return r + def __eq__(self, value: Any) -> bool: if not isinstance(value, Range): return False @@ -107,23 +164,157 @@ def __eq__(self, value: Any) -> bool: return self.empty and value.empty return self.min == value.min and self.max == value.max + def __hash__(self) -> int: + return hash((self.min, self.max, self.units, self.empty)) + + def __repr__(self) -> str: + return f"Range({self.min}, {self.max})" + class Single[T](Set_[T]): def __init__(self, value: T): self.value = value - self.units = value.units if isinstance(value, HasUnit) else dimensionless + units = value.units if isinstance(value, HasUnit) else dimensionless + super().__init__(False, units) def __contains__(self, item: T): return item == self.value + def min_elem(self) -> T | None: + return self.value + + def __eq__(self, value: Any) -> bool: + if not isinstance(value, Single): + return False + return self.value == value.value + + def __hash__(self) -> int: + return hash(self.value) -class Set[T](Set_[T]): - def 
__init__(self, *elements: T): - self.elements = set(elements) - units = [e.units if isinstance(e, HasUnit) else dimensionless for e in elements] - self.units = units[0] - if not all(u.is_compatible_with(self.units) for u in units): + def __repr__(self) -> str: + return f"Single({self.value})" + + +class Union[T](Set_[T]): + def __init__(self, *elements: Set_[T], units: Unit | None = None): + def flat(): + for element in elements: + if element.empty: + continue + if isinstance(element, Union): + yield from element.elements + else: + yield element + + self.elements = OrderedDict( + (element, None) for element in sorted(flat(), key=lambda e: e.min_elem()) + ) + elem_units = [ + e.units if isinstance(e, HasUnit) else dimensionless for e in elements + ] + if len(elem_units) == 0 and units is None: + raise ValueError("units must be provided for empty union") + units = units or elem_units[0] + if not all(units.is_compatible_with(u) for u in elem_units): raise ValueError("all elements must have compatible units") + super().__init__(len(self.elements) == 0, units) def __contains__(self, item: T): - return item in self.elements + for element in self.elements: + if item in element: + return True + if item < element.min_elem(): + return False + return False + + def min_elem(self) -> T | None: + if not self.elements: + return None + return next(iter(self.elements)).min_elem() + + def __eq__(self, value: Any) -> bool: + if not isinstance(value, Union): + return False + # TODO: need to simplify, {1} u [0, 2] == [0, 2] + return self.elements == value.elements + + def __repr__(self) -> str: + return f"Set({', '.join(repr(e) for e in self.elements)})" + + +class Set[T](Union[T]): + def __init__(self, *elements: T, units: Unit | None = None): + super().__init__(*(Single(e) for e in elements), units=units) + + def __contains__(self, item: T): + return Single(item) in self.elements + + +def operation_add[T: _SupportsRangeOps, U: _SupportsRangeOps]( + *sets: Set_[T], +) -> Set_[_SupportsRangeOps]: + def add_singles(*singles: Single[T]) -> T: + if len(singles) == 0: + return 0 + return sum(s.value for s in singles) + + def add_ranges(*ranges: Range[T], offset: T) -> list[Range[T]]: + if len(ranges) == 0: + return [] + return [ + Range( + min=sum(r.min for r in ranges) + offset, + max=sum(r.max for r in ranges) + offset, + ) + ] + + if any(s.empty for s in sets): + return Empty(units=sets[0].units) + + def group(set: Set_[T]) -> str: + if isinstance(set, Single): + return "single" + if isinstance(set, Range): + return "range" + return "union" + + grouped_sets = groupby(sets, key=group) + singles = grouped_sets["single"] + ranges = grouped_sets["range"] + unions = grouped_sets["union"] + single_offset = add_singles(*singles) + range_sum = add_ranges(*ranges, offset=single_offset) + + if len(range_sum) > 0: + recursion_set = range_sum + elif len(singles) > 0: + recursion_set = [Single(single_offset)] + else: + recursion_set = [] + + if len(unions) == 0: + assert len(recursion_set) == 1 + return recursion_set[0] + return Union( # TODO this is exponential, we'll want to defer the computation + *(operation_add(e, *unions[1:], *recursion_set) for e in unions[0].elements) + ) + + +def operation_negate[T: _SupportsRangeOps]( + *sets: Set_[T], +) -> list[Set_[_SupportsRangeOps]]: + def negate(set: Set_[T]) -> Set_[T]: + if isinstance(set, Single): + return Single(-set.value) + if isinstance(set, Range): + return Range(-set.max, -set.min) + return Union(*(negate(e) for e in set.elements)) + + return [negate(e) for e in 
sets] + + +def operation_subtract[T: _SupportsRangeOps]( + first: Set_[T], + *sets: Set_[T], +) -> Set_[_SupportsRangeOps]: + return operation_add(first, *operation_negate(*sets)) diff --git a/test/libs/test_sets.py b/test/libs/test_sets.py index 3f182693..672e59d4 100644 --- a/test/libs/test_sets.py +++ b/test/libs/test_sets.py @@ -4,20 +4,27 @@ import pytest from pint import DimensionalityError -from faebryk.libs.sets import Range +from faebryk.libs.sets import ( + Range, + Set, + Single, + Union, + operation_add, + operation_subtract, +) from faebryk.libs.units import P, Unit, dimensionless from faebryk.libs.util import cast_assert def test_range_intersection_simple(): x = Range(0, 10) - y = x.intersection(Range(5, 15)) + y = x.range_intersection(Range(5, 15)) assert y == Range(5, 10) def test_range_intersection_empty(): x = Range(0, 10) - y = x.intersection(Range(15, 20)) + y = x.range_intersection(Range(15, 20)) assert y == Range(empty=True, units=dimensionless) @@ -32,7 +39,7 @@ def test_range_unit_same(): def test_range_unit_different(): - with pytest.raises(DimensionalityError): + with pytest.raises(ValueError): Range(0 * P.V, 10 * P.A) with pytest.raises(ValueError): Range(0 * P.V, 10 * P.V, units=cast_assert(Unit, P.A)) @@ -47,3 +54,80 @@ def test_range_force_unit(): Range(empty=True) with pytest.raises(ValueError): Range() + + +def test_set_min_elem(): + x = Set(5, 3, 2, 4, 1) + assert x.min_elem() == 1 + + +def test_set_contains(): + x = Set(5, 3, 2, 4, 1) + assert 3 in x + assert 6 not in x + + +def test_union_min_elem(): + x = Union( + Range(4, 5), Range(3, 7), Single(9), Union(Range(1, 2), Union(Range(0, 1))) + ) + assert x.min_elem() == 0 + + +def test_union_contains(): + x = Union( + Range(4, 5), Range(3, 7), Single(9), Union(Range(1, 2), Union(Range(0, 1))) + ) + assert 0 in x + assert 1 in x + assert 2 in x + assert 3 in x + assert 4 in x + assert 5 in x + assert 6 in x + assert 7 in x + assert 8 not in x + assert 9 in x + assert 10 not in x + + x = Union(Range(max=1.5 * P.V), Range(2.5 * P.V, 3.5 * P.V)) + assert float("-inf") * P.V in x + assert 1 * P.V in x + assert 1.5 * P.V in x + assert 2 * P.V not in x + assert 2.5 * P.V in x + assert 3 * P.V in x + assert 3.5 * P.V in x + assert 4 * P.V not in x + assert float("inf") * P.V not in x + with pytest.raises(ValueError): # units + assert 1 not in x + + +def test_union_empty(): + x = Union( + Range(empty=True, units=dimensionless), + Union(Range(empty=True, units=dimensionless), Set(units=dimensionless)), + ) + assert x.empty + + +def test_addition(): + assert operation_add(Range(0, 1), Range(2, 3)) == Range(2, 4) + assert operation_add(Range(0, 1), Single(2), Single(3)) == Range(5, 6) + assert operation_add(Set(0, 1), Set(2, 3)) == Set(2, 3, 4) + assert operation_add(Set(0, 1), Set(2, 3), Range(-1, 0)) == Union( + Range(1, 2), Range(2, 3), Range(3, 4) + ) + assert operation_add( + Single(3), Set(0, 1), Set(2, 3), Range(-1, 0), Single(7) + ) == Union(Range(11, 12), Range(12, 13), Range(13, 14)) + assert operation_add( + Union(Range(0, 1), Range(2, 3)), + Union(Range(4, 5), Range(6, 7)), + ) == Union(Range(4, 6), Range(6, 8), Range(6, 8), Range(8, 10)) + + +def test_subtraction(): + assert operation_subtract(Range(0, 1), Range(2, 3)) == Range(-3, -1) + assert operation_subtract(Range(0, 1), Single(2)) == Range(-2, -1) From ff8d312b9e94982e9ca57314236ea99a30922ed9 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Fri, 11 Oct 2024 19:08:49 +0200 Subject: [PATCH 36/80] Params: 
delete old data structures (Range, Constant, Any, TBD) --- src/faebryk/library/ANY.py | 25 ----- src/faebryk/library/Operation.py | 72 -------------- src/faebryk/library/Range.py | 160 ------------------------------- src/faebryk/library/Set.py | 112 ---------------------- src/faebryk/library/TBD.py | 35 ------- 5 files changed, 404 deletions(-) delete mode 100644 src/faebryk/library/ANY.py delete mode 100644 src/faebryk/library/Operation.py delete mode 100644 src/faebryk/library/Range.py delete mode 100644 src/faebryk/library/Set.py delete mode 100644 src/faebryk/library/TBD.py diff --git a/src/faebryk/library/ANY.py b/src/faebryk/library/ANY.py deleted file mode 100644 index fd81098c..00000000 --- a/src/faebryk/library/ANY.py +++ /dev/null @@ -1,25 +0,0 @@ -# This file is part of the faebryk project -# SPDX-License-Identifier: MIT - -from faebryk.core.parameter import Parameter -from faebryk.libs.units import UnitsContainer - - -class ANY[PV](Parameter[PV]): - """ - Allow parameter to take any value. - Operations with this parameter automatically resolve to ANY too. - Don't mistake with F.TBD. - """ - - def __eq__(self, __value: object) -> bool: - if isinstance(__value, ANY): - return True - - return False - - def __hash__(self) -> int: - return super().__hash__() - - def _as_unit(self, unit: UnitsContainer, base: int, required: bool) -> str: - return "ANY" if required else "" diff --git a/src/faebryk/library/Operation.py b/src/faebryk/library/Operation.py deleted file mode 100644 index 8bb9829e..00000000 --- a/src/faebryk/library/Operation.py +++ /dev/null @@ -1,72 +0,0 @@ -# This file is part of the faebryk project -# SPDX-License-Identifier: MIT - -import logging -import typing -from textwrap import indent - -from faebryk.core.parameter import Parameter -from faebryk.libs.util import TwistArgs, find, try_avoid_endless_recursion - -logger = logging.getLogger(__name__) - - -class Operation[PV](Parameter[PV]): - class OperationNotExecutable(Exception): ... - - type LIT_OR_PARAM = Parameter[PV].LIT_OR_PARAM - - def __init__( - self, - operands: typing.Iterable[LIT_OR_PARAM], - operation: typing.Callable[..., Parameter[PV]], - ) -> None: - super().__init__() - self.operands = tuple(self.from_literal(o) for o in operands) - self.operation = operation - - @try_avoid_endless_recursion - def __repr__(self): - opsnames = { - "Parameter.__truediv__": "/", - "Parameter.__add__": "+", - "Parameter.__sub__": "-", - "Parameter.__mul__": "*", - } - - op = self.operation - operands = self.operands - - # little hack to make it look better - if isinstance(op, TwistArgs): - op = op.op - operands = list(reversed(operands)) - - fname = op.__qualname__ - - try: - fname = find(opsnames.items(), lambda x: fname.startswith(x[0]))[1] - except KeyError: - ... 
- - n = self.get_most_narrow() - rep = repr(n) if n is not self else super().__repr__() - return ( - rep - + f"[{fname}]" - + f"(\n{'\n'.join(indent(repr(o), ' ') for o in operands)}\n)" - ) - - def _execute(self): - operands = [o.get_most_narrow() for o in self.operands] - out = self.operation(*operands) - if isinstance(out, Operation): - raise Operation.OperationNotExecutable() - logger.debug(f"{operands=} resolved to {out}") - return out - - def try_compress(self) -> Parameter[PV]: - try: - return self._execute() - except Operation.OperationNotExecutable: - return self diff --git a/src/faebryk/library/Range.py b/src/faebryk/library/Range.py deleted file mode 100644 index c217873b..00000000 --- a/src/faebryk/library/Range.py +++ /dev/null @@ -1,160 +0,0 @@ -# This file is part of the faebryk project -# SPDX-License-Identifier: MIT - -from math import inf -from typing import Any, Protocol, Self - -from faebryk.core.parameter import Parameter -from faebryk.libs.units import UnitsContainer, to_si_str - - -class _SupportsRangeOps(Protocol): - def __add__(self, __value) -> "_SupportsRangeOps": ... - def __sub__(self, __value) -> "_SupportsRangeOps": ... - def __mul__(self, __value) -> "_SupportsRangeOps": ... - - def __le__(self, __value) -> bool: ... - def __lt__(self, __value) -> bool: ... - def __ge__(self, __value) -> bool: ... - - -class Range[PV: _SupportsRangeOps](Parameter[PV], Parameter[PV].SupportsSetOps): - type LIT_OR_PARAM = Parameter[PV].LIT_OR_PARAM - - class MinMaxError(Exception): ... - - def __init__(self, *bounds: PV | Parameter[PV]) -> None: - super().__init__() - - self._bounds: list[Parameter[PV]] = [ - Parameter[PV].from_literal(b) for b in bounds - ] - - def _get_narrowed_bounds(self) -> list[Parameter[PV]]: - return list({b.get_most_narrow() for b in self._bounds}) - - @property - def min(self) -> Parameter[PV]: - try: - return min(self._get_narrowed_bounds()) - except (TypeError, ValueError): - raise self.MinMaxError() - - @property - def max(self) -> Parameter[PV]: - try: - return max(self._get_narrowed_bounds()) - except (TypeError, ValueError): - raise self.MinMaxError() - - @property - def bounds(self) -> list[Parameter[PV]]: - try: - return [self.min, self.max] - except self.MinMaxError: - return self._get_narrowed_bounds() - - def as_tuple(self) -> tuple[Parameter[PV], Parameter[PV]]: - return (self.min, self.max) - - def as_center_tuple(self, relative=False) -> tuple[Parameter[PV], Parameter[PV]]: - center = (self.min + self.max) / 2 - delta = (self.max - self.min) / 2 - if relative: - delta /= center - return center, delta - - @classmethod - def from_center(cls, center: LIT_OR_PARAM, delta: LIT_OR_PARAM) -> "Range[PV]": - return cls(center - delta, center + delta) - - @classmethod - def from_center_rel(cls, center: PV, factor: PV) -> "Range[PV]": - return cls.from_center(center, center * factor) - - @classmethod - def _with_bound(cls, bound: LIT_OR_PARAM, other: float) -> "Range[PV]": - try: - other_with_unit = Parameter.with_same_unit(bound, other) - except NotImplementedError: - raise NotImplementedError("Specify zero/inf manually in params") - - return cls(bound, other_with_unit) - - @classmethod - def lower_bound(cls, lower: LIT_OR_PARAM) -> "Range[PV]": - return cls._with_bound(lower, inf) - - @classmethod - def upper_bound(cls, upper: LIT_OR_PARAM) -> "Range[PV]": - return cls._with_bound(upper, 0) - - def __str__(self) -> str: - bounds = map(str, self.bounds) - return super().__str__() + f"({', '.join(bounds)})" - - def __repr__(self): - bounds = 
map(repr, self.bounds) - return super().__repr__() + f"({', '.join(bounds)})" - - def __eq__(self, other: Any) -> bool: - if not isinstance(other, Range): - return False - return self.bounds == other.bounds - - def __hash__(self) -> int: - return sum(hash(b) for b in self._bounds) - - # comparison operators - def __le__(self, other) -> bool: - return self.max <= other - - def __lt__(self, other) -> bool: - return self.max < other - - def __ge__(self, other) -> bool: - return self.min >= other - - def __gt__(self, other) -> bool: - return self.min > other - - def __format__(self, format_spec): - bounds = [format(b, format_spec) for b in self._get_narrowed_bounds()] - return f"{super().__str__()}({', '.join(bounds)})" - - def __copy__(self) -> Self: - return type(self)(*self._bounds) - - def try_compress(self) -> Parameter[PV]: - # compress into constant if possible - if len(set(map(id, self.bounds))) == 1: - return Parameter.from_literal(self.bounds[0]) - return super().try_compress() - - def __contains__(self, other: LIT_OR_PARAM) -> bool: - return self.min <= other and self.max >= other - - def _max(self): - return max(p.get_max() for p in self._get_narrowed_bounds()) - - def _as_unit(self, unit: UnitsContainer, base: int, required: bool) -> str: - return ( - self.min.as_unit(unit, base=base) - + " - " - + self.max.as_unit(unit, base=base, required=True) - ) - - def _as_unit_with_tolerance( - self, unit: UnitsContainer, base: int, required: bool - ) -> str: - center, delta = self.as_center_tuple(relative=True) - delta_percent_str = f"±{to_si_str(delta.value, "%", 0)}" - return ( - f"{center.as_unit(unit, base=base, required=required)} {delta_percent_str}" - ) - - def _enum_parameter_representation(self, required: bool) -> str: - return ( - f"{self.min.enum_parameter_representation(required)} - " - f"{self.max.enum_parameter_representation(required)}" - ) diff --git a/src/faebryk/library/Set.py b/src/faebryk/library/Set.py deleted file mode 100644 index 112b5ece..00000000 --- a/src/faebryk/library/Set.py +++ /dev/null @@ -1,112 +0,0 @@ -# This file is part of the faebryk project -# SPDX-License-Identifier: MIT - -from typing import Iterable, Self - -import faebryk.library._F as F -from faebryk.core.parameter import Parameter, _resolved -from faebryk.libs.units import UnitsContainer - - -class Set[PV](Parameter[PV], Parameter[PV].SupportsSetOps): - type LIT_OR_PARAM = Parameter[PV].LIT_OR_PARAM - - def __init__(self, params: Iterable[Parameter[LIT_OR_PARAM]]) -> None: - super().__init__() - - # make primitves to constants - self._params = set( - p if isinstance(p, Parameter) else F.Constant(p) for p in params - ) - - @staticmethod - def _flatten(params: set[Parameter[PV]]) -> set[Parameter[PV]]: - param_set = set( - p for p in params if not isinstance(p, Set) and isinstance(p, Parameter) - ) - set_set = set(x for p in params if isinstance(p, Set) for x in p.params) - - return param_set | set_set - - def flat(self) -> set[Parameter[PV]]: - return Set._flatten(self._params) - - @property - def params(self) -> set[Parameter[PV]]: - return self.flat() - - def __str__(self) -> str: - return super().__str__() + f"({self.params})" - - def __repr__(self): - return super().__repr__() + f"({self.params!r})" - - def __eq__(self, other) -> bool: - if not isinstance(other, Set): - return False - - return self.params == other.params - - def __hash__(self) -> int: - return sum(hash(p) for p in self.params) - - # comparison operators - def __le__(self, other) -> bool: - return all(p <= other for p in 
self.params) - - def __lt__(self, other) -> bool: - return all(p < other for p in self.params) - - def __ge__(self, other) -> bool: - return all(p >= other for p in self.params) - - def __gt__(self, other) -> bool: - return all(p > other for p in self.params) - - def copy(self) -> Self: - return type(self)(self.params) - - @_resolved - def __contains__(self, other: Parameter[PV]) -> bool: - def nested_in(p): - if other == p: - return True - if isinstance(p, F.Range): - return other in p - return False - - return any(nested_in(p) for p in self.params) - - def try_compress(self) -> Parameter[PV]: - # compress into constant if possible - if len(set(map(id, self.params))) == 1: - return Parameter.from_literal(next(iter(self.params))) - return super().try_compress() - - def _max(self): - return max(p.get_max() for p in self.params) - - def _as_unit(self, unit: UnitsContainer, base: int, required: bool) -> str: - return ( - "Set(" - + ", ".join(x.as_unit(unit, required=True) for x in self.params) - + ")" - ) - - def _as_unit_with_tolerance( - self, unit: UnitsContainer, base: int, required: bool - ) -> str: - return ( - "Set(" - + ", ".join( - x.as_unit_with_tolerance(unit, base, required) for x in self.params - ) - + ")" - ) - - def _enum_parameter_representation(self, required: bool) -> str: - return ( - "Set(" - + ", ".join(p.enum_parameter_representation(required) for p in self.params) - + ")" - ) diff --git a/src/faebryk/library/TBD.py b/src/faebryk/library/TBD.py deleted file mode 100644 index 0f52ec82..00000000 --- a/src/faebryk/library/TBD.py +++ /dev/null @@ -1,35 +0,0 @@ -# This file is part of the faebryk project -# SPDX-License-Identifier: MIT - -from textwrap import indent - -from faebryk.core.parameter import Parameter -from faebryk.libs.units import UnitsContainer - - -class TBD[PV](Parameter[PV]): - def __eq__(self, __value: object) -> bool: - if isinstance(__value, TBD): - return True - - return False - - def __hash__(self) -> int: - return super().__hash__() - - def __repr__(self) -> str: - o = self.get_most_narrow() - if o is self: - return super().__repr__() - else: - out = f"{super().__repr__():<80} ===> " - or_ = repr(o) - if "\n" in or_: - out += indent(or_, len(out) * " ") - else: - out += or_ - - return out - - def _as_unit(self, unit: UnitsContainer, base: int, required: bool) -> str: - return "TBD" if required else "" From c8067801ef9c5f629aaf3df8931dc3729de08ca4 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Fri, 11 Oct 2024 19:16:58 +0200 Subject: [PATCH 37/80] Params: Solver API proposal --- src/faebryk/core/parameter.py | 64 ++++++++++++++++++++--------- src/faebryk/core/solver.py | 76 +++++++++++++++++++++++++++++++++++ 2 files changed, 121 insertions(+), 19 deletions(-) create mode 100644 src/faebryk/core/solver.py diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index b29c7915..1bbda11e 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -4,7 +4,7 @@ import logging from enum import Enum, auto from types import NotImplementedType -from typing import Any, Callable, Protocol +from typing import Any, Callable, Protocol, Self from more_itertools import raise_ @@ -108,7 +108,15 @@ def operation_is_subset(self, other: Sets) -> "Expression": ... def operation_is_superset(self, other: Sets) -> "Expression": ... - def get_any_single(self) -> Number | Enum: ... + def inspect_known_min(self) -> Number: ... + + def inspect_known_max(self) -> Number: ... 
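    # Assumed reading of the inspection API (illustration only, not defined by
    # this patch): for a parameter whose remaining solution set is the range
    # [10 kohm, 12 kohm], the solver would report inspect_known_min() == 10 kohm
    # and inspect_known_max() == 12 kohm as plain numbers, not as new expressions.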
+ + # Run by the solver on finalization + # inspect_final: Callable[[Self], None] + + # def inspect_num_known_supersets(self) -> int: ... + # def inspect_get_known_superset(self) -> Iterable[Set_]: ... # ---------------------------------------------------------------------------------- def __add__(self, other: NumberLike): @@ -170,14 +178,29 @@ def __rxor__(self, other: BooleanLike): # TODO: move + # should be eager, in the sense that, if the outcome is known, the callable is + # called immediately, without storing an expression + # we must force a value (at the end of solving at the least) def if_then_else( self, if_true: Callable[[], Any], if_false: Callable[[], Any], + preference: bool | None = None, ) -> None: ... - def assert_true(self) -> None: - self.if_then_else(lambda: None, lambda: raise_(ValueError())) + # the way this is used right now (for testing) is problematic + # we don't want to add a constraint, because that would force it to hold + # instead we want to make an inspection at the "final" stage during solving + # could still be useful if we want to abort early with an error + def assert_true( + self, error: Callable[[], None] = lambda: raise_(ValueError()) + ) -> None: + self.if_then_else(lambda: None, error, True) + + # def assert_false( + # self, error: Callable[[], None] = lambda: raise_(ValueError()) + # ) -> None: + # self.if_then_else(error, lambda: None, False) # TODO # def switch_case( @@ -417,17 +440,27 @@ def __init__(self, enum_t: type[Enum]): class Predicate(Expression): - def __init__(self, left, right): + def __init__(self, constraint: bool, left, right): + self._constraint = constraint l_units = left.units if isinstance(left, HasUnit) else dimensionless r_units = right.units if isinstance(right, HasUnit) else dimensionless if not l_units.is_compatible_with(r_units): raise ValueError("operands must have compatible units") self.operands = [left, right] + def constrain(self): + self._constraint = True + + def is_constraint(self): + return self._constraint + + # def run_when_known(self, f: Callable[[bool], None]): + # getattr(self, "run_when_known_funcs", []).append(f) + class NumericPredicate(Predicate): - def __init__(self, left, right): - super().__init__(left, right) + def __init__(self, constraint: bool, left, right): + super().__init__(constraint, left, right) if isinstance(left, Parameter) and left.domain not in [Numbers, ESeries]: raise ValueError("left operand must have domain Numbers or ESeries") if isinstance(right, Parameter) and right.domain not in [Numbers, ESeries]: @@ -455,8 +488,8 @@ class NotEqual(Predicate): class SeticPredicate(Predicate): - def __init__(self, left, right): - super().__init__(left, right) + def __init__(self, constraint: bool, left, right): + super().__init__(constraint, left, right) types = [Parameter, ParameterOperatable.Sets] if any(type(op) not in types for op in self.operands): raise ValueError("operands must be Parameter or Set") @@ -475,16 +508,9 @@ class IsSuperset(SeticPredicate): pass -class Alias(Expression): - pass - - -class Is(Alias): - pass - - -class Aliases(Namespace): - IS = Is +class Is(Predicate): + def __init__(self, constraint: bool, left, right): + super().__init__(constraint, left, right) # TODO rename? 
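Taken together, the predicate classes above are inert expression nodes until constrain() is called on them, and if_then_else() is the eager hook that fires one of its callbacks as soon as the solver knows the truth value. A rough usage sketch, assuming supply_ok is an already-built Predicate (e.g. an Is or IsSubset over two parameters) and that predicates expose the if_then_else/assert_true helpers defined above:

    supply_ok.constrain()            # from now on the solver must make this hold
    assert supply_ok.is_constraint()

    # eager conditional: one branch runs as soon as the outcome is known,
    # with a preference hint for which outcome to aim for
    supply_ok.if_then_else(
        lambda: print("requirement satisfiable"),
        lambda: print("requirement violated"),
        preference=True,
    )
    # assert_true() is the shorthand that prefers True and raises ValueError otherwise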
diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py
new file mode 100644
index 00000000..0f3e36f4
--- /dev/null
+++ b/src/faebryk/core/solver.py
@@ -0,0 +1,76 @@
+from typing import Any, Protocol
+
+from faebryk.core.graph import Graph
+from faebryk.core.parameter import Expression, Parameter, Predicate
+
+
+class Solver(Protocol):
+    # timeout per solve call in milliseconds
+    timeout: int
+    # threads: int
+    # in megabytes
+    # memory: int
+
+    # solve for a single value for the given expression
+    # while trying to minimize the value of the optional minimize expression
+    # suppose_constraint can be added, which, by constraining the solution space further, can make solving easier
+    # it is only in effect for the duration of the solve call
+    # constrain_result will make sure the result is actually part of the solution set of the expression
+    # returns a tuple of the value chosen and a list of parameters that have an empty solution set
+    def get_any_single(
+        self,
+        G: Graph,
+        expression: Expression,
+        suppose_constraint: Predicate | None = None,
+        minimize: Expression | None = None,
+        constrain_result: bool = True,
+    ) -> tuple[Any, list[Parameter]]: ...  # TODO Any -> NumberLike?
+
+    # make at least one of the passed predicates true
+    # while trying to minimize the value of the optional minimize expression
+    # there is no specific order in which the predicates are solved
+    # suppose_constraint can be added, which, by constraining the solution space further, can make solving easier
+    # it is only in effect for the duration of the solve call
+    # constrain_solved will add the solutions as constraints
+    # returns a tuple of three lists:
+    # - the first list contains the predicates that were actually solved, i.e. they are known to be true/false
+    # - the second list contains the expressions that remain unknown
+    # - the third list contains the parameters that have an empty solution set
+    def assert_any_predicate(
+        self,
+        G: Graph,
+        predicates: list[Predicate],
+        suppose_constraint: Predicate | None = None,
+        minimize: Expression | None = None,
+        constrain_solved: bool = True,
+    ) -> tuple[list[Expression], list[Expression], list[Parameter]]: ...
+
+    # run deferred work
+    def finalize(self, G: Graph) -> None: ...
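# Sketch of the intended call pattern; the graph, expression and predicate
# objects are placeholders supplied by the caller, nothing here is prescribed
# beyond the protocol above.
def _example_solver_usage(
    solver: Solver,
    G: Graph,
    expr: Expression,
    candidates: list[Predicate],
) -> None:
    # pick one concrete value for expr; constrain_result keeps the chosen value
    # inside expr's solution set
    value, no_solution = solver.get_any_single(G, expr, constrain_result=True)
    # try to make at least one of the candidate predicates hold
    solved, unknown, no_solution = solver.assert_any_predicate(G, candidates)
    # run any deferred work before reading out results
    solver.finalize(G)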
+ + +class DefaultSolver(Solver): + timeout: int = 1000 + + def get_any_single( + self, + G: Graph, + expression: Expression, + suppose_constraint: Predicate | None = None, + minimize: Expression | None = None, + constrain_result: bool = True, + ): + raise NotImplementedError() + + def assert_any_predicate( + self, + G: Graph, + predicates: list[Predicate], + suppose_constraint: Predicate | None = None, + minimize: Expression | None = None, + constrain_solved: bool = True, + ) -> tuple[list[Expression], list[Expression], list[Parameter]]: + raise NotImplementedError() + + def finalize(self, G: Graph) -> None: + raise NotImplementedError() From 9acc1dd525c4fbd775ae1a0d218c20c7c00b5c6e Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Fri, 18 Oct 2024 15:58:31 +0200 Subject: [PATCH 38/80] improve ranges/sets/unions two layers, one without units, one with constrain types to numeric TODO cleanup better user interface (want to use RangeUnion everywhere) non numeric sets --- src/faebryk/libs/sets.py | 863 +++++++++++++++++++++++++++------------ test/libs/test_sets.py | 132 +++--- 2 files changed, 681 insertions(+), 314 deletions(-) diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 73c0cdd5..20bb5260 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -1,320 +1,667 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from abc import ABC, abstractmethod -from collections import OrderedDict -from typing import Any, Protocol, Self +from collections.abc import Generator +from typing import Any, Protocol, TypeVar from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless -from faebryk.libs.util import groupby +# class _SupportsRangeOps(Protocol): +# def __le__(self, __value) -> bool: ... +# def __lt__(self, __value) -> bool: ... +# def __ge__(self, __value) -> bool: ... +# def __gt__(self, __value) -> bool: ... +# +# def __sub__(self, __value: Self) -> Self: ... +# def __add__(self, __value: Self) -> Self: ... +# +# +# class _SupportsArithmeticOpsWithFloatMul(_SupportsRangeOps, Protocol): +# def __mul__(self, __value: float | Self) -> Self: ... -class _SupportsRangeOps(Protocol): - def __le__(self, __value) -> bool: ... - def __lt__(self, __value) -> bool: ... - def __ge__(self, __value) -> bool: ... - def __gt__(self, __value) -> bool: ... - def __sub__(self, __value: Self) -> Self: ... - def __add__(self, __value: Self) -> Self: ... +class _Set[T](Protocol): + def is_empty(self) -> bool: ... + def __contains__(self, item: T) -> bool: ... -class _SupportsArithmeticOpsWithFloatMul(_SupportsRangeOps, Protocol): - def __mul__(self, __value: float | Self) -> Self: ... 
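# The numeric layer below is deliberately unit-free: instead of the
# _SupportsRangeOps protocols kept commented out above, the generic parameter
# is a TypeVar constrained to int/float, and units are reintroduced by the
# wrapper classes further down in this file.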
+T = TypeVar("T", int, float, contravariant=False, covariant=False) -class Set_[T](ABC, HasUnit): - def __init__(self, empty: bool, units: Unit): - self.empty = empty - self.units = units - @abstractmethod - def __contains__(self, item: T): - pass +class _Range(_Set[T]): + def __init__(self, min: T, max: T): + if not min <= max: + raise ValueError("min must be less than or equal to max") + self.min = min + self.max = max - @abstractmethod - def min_elem(self) -> T | None: - pass - - -class Empty[T](Set_[T]): - def __init__(self, units: Unit): - super().__init__(True, units) - - def __contains__(self, item: T): + def is_empty(self) -> bool: return False - def min_elem(self) -> T | None: - return None + def min_elem(self) -> T: + return self.min + def op_add_range(self, other: "_Range[T]") -> "_Range[T]": + return _Range(self.min + other.min, self.max + other.max) + + def op_negate(self) -> "_Range[T]": + return _Range(-self.max, -self.min) + + def op_subtract_range(self, other: "_Range[T]") -> "_Range[T]": + return self.op_add_range(other.op_negate()) + + def op_mul_range(self, other: "_Range[T]") -> "_Range[T]": + return _Range( + min( + self.min * other.min, + self.min * other.max, + self.max * other.min, + self.max * other.max, + ), + max( + self.min * other.min, + self.min * other.max, + self.max * other.min, + self.max * other.max, + ), + ) -class Range[T: _SupportsRangeOps](Set_[T]): - def __init__( - self, - min: T | None = None, - max: T | None = None, - empty: bool = False, - units: Unit | None = None, - ): - if empty and (min is not None or max is not None): - raise ValueError("empty range cannot have min or max") - if min is None and max is None: - if not empty: - raise ValueError("must provide at least one of min or max") - if units is None: - raise ValueError("units must be provided for empyt and full ranges") - else: - min_unit = ( - None - if min is None - else min.units - if isinstance(min, HasUnit) - else dimensionless - ) - max_unit = ( - None - if max is None - else max.units - if isinstance(max, HasUnit) - else dimensionless + def op_invert(self) -> "_RangeUnion[float]": + if self.min == 0 == self.max: + return _NumericEmpty() + if self.min < 0 < self.max: + return _RangeUnion( + _Range(float("-inf"), 1 / self.min), + _Range(1 / self.max, float("inf")), ) - if units and min_unit and not min_unit.is_compatible_with(units): - raise ValueError("min incompatible with units") - if units and max_unit and not max_unit.is_compatible_with(units): - raise ValueError("max incompatible with units") - if min_unit and max_unit and not min_unit.is_compatible_with(max_unit): - raise ValueError("min and max must be compatible") - units = units or min_unit or max_unit - assert units is not None # stop typer check from being annoying - if not empty: - is_float = isinstance(min, float) or isinstance(max, float) - is_quantity = isinstance(min, Quantity) or isinstance(max, Quantity) - if isinstance(min, Quantity): - is_float = isinstance(min.magnitude, float) - if isinstance(max, Quantity): - is_float = isinstance(max.magnitude, float) - if is_quantity and is_float: - self.min = min if min else Quantity(float("-inf"), units=units) - self.max = max if max else Quantity(float("inf"), units=units) - elif is_float: - self.min = min or float("-inf") - self.max = max or float("inf") - else: - if min is None or max is None: - raise ValueError( - "must provide both min and max for types other than float and float quantity" # noqa: E501 - ) - self.min = min - self.max = max - if not self.min <= 
self.max: - raise ValueError("min must be less than or equal to max") - super().__init__(empty, units) - - def min_elem(self) -> T | None: - if self.empty: - return None - return self.min - - def __contains__(self, item: T): - if self.empty: + elif self.min < 0 == self.max: + return _RangeUnion(_Range(float("-inf"), 1 / self.min)) + elif self.min == 0 < self.max: + return _RangeUnion(_Range(1 / self.max, float("inf"))) + else: + return _RangeUnion(_Range(1 / self.max, 1 / self.min)) + + def op_div_range( + self: "_Range[float]", other: "_Range[float]" + ) -> "_RangeUnion[float]": + return _RangeUnion(*(self.op_mul_range(o) for o in other.op_invert().ranges)) + + def op_intersect_range(self, other: "_Range[T]") -> "_RangeUnion[T]": + min_ = max(self.min, other.min) + max_ = min(self.max, other.max) + if min_ <= max_: + return _RangeUnion(_Range(min_, max_)) + return _NumericEmpty() + + def maybe_merge_range(self, other: "_Range[T]") -> list["_Range[T]"]: + is_left = self.min <= other.min + left = self if is_left else other + right = other if is_left else self + if right.min in self: + return [_Range(left.min, max(left.max, right.max))] + return [left, right] + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, _Range): return False + return self.min == other.min and self.max == other.max + + def __contains__(self, item: T) -> bool: return self.min <= item <= self.max - @staticmethod - def from_center(center: T, abs_tol: T) -> "Range[_SupportsRangeOps]": - return Range[_SupportsRangeOps](center - abs_tol, center + abs_tol) + def __hash__(self) -> int: + return hash((self.min, self.max)) - @staticmethod - def from_center_rel( - center: _SupportsArithmeticOpsWithFloatMul, rel_tol: float - ) -> "Range[_SupportsRangeOps]": - return Range[_SupportsRangeOps]( - center - center * rel_tol, center + center * rel_tol + def __repr__(self) -> str: + return f"_Range({self.min}, {self.max})" + + +def _Single(value: T) -> _Range[T]: + return _Range(value, value) + + +class _RangeUnion(_Set[T]): + def __init__(self, *ranges: _Range[T] | "_RangeUnion[T]"): + def gen_flat_non_empty() -> Generator[_Range[T]]: + for r in ranges: + if r.is_empty(): + continue + if isinstance(r, _RangeUnion): + yield from r.ranges + else: + assert isinstance(r, _Range) + yield r + + non_empty_ranges = list(gen_flat_non_empty()) + sorted_ranges = sorted(non_empty_ranges, key=lambda e: e.min_elem()) + + def gen_merge(): + last = None + for range in sorted_ranges: + if last is None: + last = range + else: + *prefix, last = last.maybe_merge_range(range) + yield from prefix + if last is not None: + yield last + + self.ranges = list(gen_merge()) + + def is_empty(self) -> bool: + return len(self.ranges) == 0 + + def min_elem(self) -> T: + if self.is_empty(): + raise ValueError("empty range cannot have min element") + return self.ranges[0].min_elem() + + def op_add_range_union(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": + return _RangeUnion( + *(r.op_add_range(o) for r in self.ranges for o in other.ranges) ) - def range_intersection(self, other: "Range[T]") -> "Range[T]": - if self.empty or other.empty: - return Range(empty=True, units=self.units) + def op_negate(self) -> "_RangeUnion[T]": + return _RangeUnion(*(r.op_negate() for r in self.ranges)) - _min = max(self.min, other.min) - _max = min(self.max, other.max) + def op_subtract_range_union(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": + return self.op_add_range_union(other.op_negate()) - if ( - _min not in self - or _min not in other - or _max not in 
self - or _max not in other - ): - return Range(empty=True, units=self.units) + def op_mul_range_union(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": + return _RangeUnion( + *(r.op_mul_range(o) for r in self.ranges for o in other.ranges) + ) - return Range(_min, _max) + def op_invert(self) -> "_RangeUnion[float]": + return _RangeUnion(*(r.op_invert() for r in self.ranges)) - # def __copy__(self) -> Self: - # r = Range.__new__(Range) - # r.min = self.min - # r.max = self.max - # r.empty = self.empty - # r.units = self.units - # return r + def op_div_range_union( + self: "_RangeUnion[float]", other: "_RangeUnion[float]" + ) -> "_RangeUnion[float]": + return self.op_mul_range_union(other.op_invert()) + + def __contains__(self, item: T) -> bool: + from bisect import bisect + + index = bisect(self.ranges, item, key=lambda r: r.min_elem()) + + if index == 0: + return False + return item in self.ranges[index - 1] def __eq__(self, value: Any) -> bool: - if not isinstance(value, Range): + if not isinstance(value, _RangeUnion): + return False + if len(self.ranges) != len(value.ranges): return False - if self.empty or value.empty: - return self.empty and value.empty - return self.min == value.min and self.max == value.max + for r1, r2 in zip(self.ranges, value.ranges): + if r1 != r2: + return False + return True def __hash__(self) -> int: - return hash((self.min, self.max, self.units, self.empty)) + return hash(tuple(hash(r) for r in self.ranges)) def __repr__(self) -> str: - return f"Range({self.min}, {self.max})" + return f"_RangeUnion({', '.join(f"[{r.min}, {r.max}]" for r in self.ranges)})" -class Single[T](Set_[T]): - def __init__(self, value: T): - self.value = value - units = value.units if isinstance(value, HasUnit) else dimensionless - super().__init__(False, units) +def _Singles(*values: T) -> _RangeUnion[T]: + return _RangeUnion(*(_Single(v) for v in values)) - def __contains__(self, item: T): - return item == self.value - def min_elem(self) -> T | None: - return self.value +__numeric_empty = _RangeUnion() - def __eq__(self, value: Any) -> bool: - if not isinstance(value, Single): - return False - return self.value == value.value - def __hash__(self) -> int: - return hash(self.value) +def _NumericEmpty() -> _RangeUnion: + return __numeric_empty - def __repr__(self) -> str: - return f"Single({self.value})" +class __NonNumericSet[U](_Set[U]): + def __init__(self, *elements: U): + self.elements = set(elements) + + def is_empty(self) -> bool: + return len(self.elements) == 0 + + def __contains__(self, item: U) -> bool: + return item in self.elements + + +# class Empty[T](Set_[T]): +# def __init__(self, units: Unit): +# super().__init__(True, units) +# +# def __contains__(self, item: T): +# return False +# +# def min_elem(self) -> T | None: +# return None + + +class UnitSet[T](_Set[T], HasUnit, Protocol): ... 
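# From here on the unit-aware layer wraps the plain-number classes above:
# Range/Ranges convert their bounds to base units (see base_units below) and
# delegate the actual interval arithmetic to _Range/_RangeUnion.
# For example (mirroring the accompanying tests):
#   Range(0, 2).op_mul_range(Range(2, 3)) == Range(0, 6)
#   1.5 * P.V in Range(1 * P.V, 2 * P.V)   # the Quantity is converted to base units before comparison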
-class Union[T](Set_[T]): - def __init__(self, *elements: Set_[T], units: Unit | None = None): - def flat(): - for element in elements: - if element.empty: - continue - if isinstance(element, Union): - yield from element.elements - else: - yield element - self.elements = OrderedDict( - (element, None) for element in sorted(flat(), key=lambda e: e.min_elem()) +TQuant = TypeVar("TQuant", int, float, Quantity, contravariant=False, covariant=False) + + +def base_units(units: Unit) -> Unit: + return Quantity(1, units).to_base_units().units + + +class Range(UnitSet[TQuant]): + def __init__( + self, + min: TQuant | None = None, + max: TQuant | None = None, + units: Unit | None = None, + ): + if min is None and max is None: + raise ValueError("must provide at least one of min or max") + + min_unit = ( + None + if min is None + else min.units + if isinstance(min, Quantity) + else dimensionless ) - elem_units = [ - e.units if isinstance(e, HasUnit) else dimensionless for e in elements - ] - if len(elem_units) == 0 and units is None: - raise ValueError("units must be provided for empty union") - units = units or elem_units[0] - if not all(units.is_compatible_with(u) for u in elem_units): - raise ValueError("all elements must have compatible units") - super().__init__(len(self.elements) == 0, units) + max_unit = ( + None + if max is None + else max.units + if isinstance(max, Quantity) + else dimensionless + ) + if units and min_unit and not min_unit.is_compatible_with(units): + raise ValueError("min incompatible with units") + if units and max_unit and not max_unit.is_compatible_with(units): + raise ValueError("max incompatible with units") + if min_unit and max_unit and not min_unit.is_compatible_with(max_unit): + raise ValueError("min and max must be compatible") + self.units = units or min_unit or max_unit + self.range_units = base_units(self.units) + + if isinstance(min, Quantity): + num_min = min.to_base_units().magnitude + if not (isinstance(num_min, float) or isinstance(num_min, int)): + raise ValueError("min must be a float or int quantity") + else: + num_min = min + + if isinstance(max, Quantity): + num_max = max.to_base_units().magnitude + if not (isinstance(num_max, float) or isinstance(num_max, int)): + raise ValueError("max must be a float or int quantity") + else: + num_max = max + + is_float = isinstance(num_min, float) or isinstance(num_max, float) + if is_float: + num_min = float(num_min) if num_min is not None else float("-inf") + num_max = float(num_max) if num_max is not None else float("inf") + else: + assert isinstance(num_min, int) or isinstance(num_max, int) + if num_min is None or num_max is None: + raise ValueError("min and max must be provided for ints") + + self._range = _Range(num_min, num_max) + + @staticmethod + def from_center(center: TQuant, abs_tol: TQuant) -> "Range[TQuant]": + left = center - abs_tol + right = center + abs_tol + return Range(left, right) + + @staticmethod + def from_center_rel(center: TQuant, rel_tol: float) -> "Range[TQuant]": + return Range(center - center * rel_tol, center + center * rel_tol) - def __contains__(self, item: T): - for element in self.elements: - if item in element: - return True - if item < element.min_elem(): + @staticmethod + def _from_range(range: _Range[T], units: Unit) -> "Range[TQuant]": + return Range( + min=Quantity(range.min, base_units(units)), + max=Quantity(range.max, base_units(units)), + units=units, + ) + + def base_to_units(self, value: T) -> Quantity: + return Quantity(value, self.range_units).to(self.units) + + 
def min_elem(self) -> Quantity: + return self.base_to_units(self._range.min) + + def is_empty(self) -> bool: + return self._range.is_empty() + + def op_intersect_range(self, other: "Range[TQuant]") -> "RangeUnion[TQuant]": + if not self.units.is_compatible_with(other.units): + return RangeUnion(units=self.units) + _range = self._range.op_intersect_range(other._range) + return RangeUnion._from_range_union(_range, self.units) + + def op_add_range(self, other: "Range[TQuant]") -> "Range[TQuant]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._range.op_add_range(other._range) + return Range._from_range(_range, self.units) + + def op_negate(self) -> "Range[TQuant]": + _range = self._range.op_negate() + return Range._from_range(_range, self.units) + + def op_subtract_range(self, other: "Range[TQuant]") -> "Range[TQuant]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._range.op_subtract_range(other._range) + return Range._from_range(_range, self.units) + + def op_mul_range(self, other: "Range[TQuant]") -> "Range[TQuant]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._range.op_mul_range(other._range) + return Range._from_range(_range, self.units * other.units) + + def op_invert(self) -> "RangeUnion[TQuant]": + _range = self._range.op_invert() + return RangeUnion._from_range_union(_range, 1 / self.units) + + def op_div_range(self, other: "Range[TQuant]") -> "RangeUnion[TQuant]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._range.op_div_range(other._range) + return RangeUnion._from_range_union(_range, self.units / other.units) + + # def __copy__(self) -> Self: + # r = Range.__new__(Range) + # r.min = self.min + # r.max = self.max + # r.empty = self.empty + # r.units = self.units + # return r + + def __contains__(self, item: Any) -> bool: + if isinstance(item, Quantity): + if not item.units.is_compatible_with(self.units): + return False + item = item.to(self.range_units).magnitude + if not isinstance(item, float) and not isinstance(item, int): return False + return self._range.__contains__(item) return False - def min_elem(self) -> T | None: - if not self.elements: - return None - return next(iter(self.elements)).min_elem() - + # yucky with floats def __eq__(self, value: Any) -> bool: - if not isinstance(value, Union): + if not isinstance(value, HasUnit): + return False + if not self.units.is_compatible_with(value.units): return False - # TODO: need to simplify, {1} u [0, 2] == [0, 2] - return self.elements == value.elements + if isinstance(value, Range): + return self._range == value._range + if isinstance(value, RangeUnion) and len(value._ranges.ranges) == 1: + return self._range == value._ranges.ranges[0] + return False + + # TODO, convert to base unit first + # def __hash__(self) -> int: + # return hash((self._range, self.units)) def __repr__(self) -> str: - return f"Set({', '.join(repr(e) for e in self.elements)})" + if self.units.is_compatible_with(dimensionless): + return f"Range({self._range.min}, {self._range.max})" + return f"Range({self.base_to_units(self._range.min)}, {self.base_to_units(self._range.max)} | {self.units})" -class Set[T](Union[T]): - def __init__(self, *elements: T, units: Unit | None = None): - super().__init__(*(Single(e) for e in elements), units=units) +def Single(value: TQuant) -> Range[TQuant]: + return 
Range(value, value) - def __contains__(self, item: T): - return Single(item) in self.elements +class RangeUnion(UnitSet[TQuant]): + def __init__( + self, *ranges: Range[TQuant] | "RangeUnion[TQuant]", units: Unit | None = None + ): + range_units = [ + r.units if isinstance(r, HasUnit) else dimensionless for r in ranges + ] + if len(range_units) == 0 and units is None: + raise ValueError("units must be provided for empty union") + self.units = units or range_units[0] + self.range_units = base_units(self.units) + if not all(self.units.is_compatible_with(u) for u in range_units): + raise ValueError("all elements must have compatible units") -def operation_add[T: _SupportsRangeOps, U: _SupportsRangeOps]( - *sets: Set_[T], -) -> Set_[_SupportsRangeOps]: - def add_singles(*singles: Single[T]) -> T: - if len(singles) == 0: - return 0 - return sum(s.value for s in singles) + def get_backing(r: Range[TQuant] | "RangeUnion[TQuant]"): + if isinstance(r, Range): + return r._range + else: + return r._ranges - def add_ranges(*ranges: Range[T], offset: T) -> list[Range[T]]: - if len(ranges) == 0: - return [] - return [ - Range( - min=sum(r.min for r in ranges) + offset, - max=sum(r.max for r in ranges) + offset, - ) - ] + self._ranges = _RangeUnion(*(get_backing(r) for r in ranges)) + + @staticmethod + def _from_range_union( + range_union: _RangeUnion[T], units: Unit + ) -> "RangeUnion[TQuant]": + r = RangeUnion.__new__(RangeUnion) + r._ranges = range_union + r.units = units + r.range_units = base_units(units) + return r + + def is_empty(self) -> bool: + return self._ranges.is_empty() + + def base_to_units(self, value: T) -> Quantity: + return Quantity(value, self.range_units).to(self.units) + + def min_elem(self) -> TQuant: + if self.is_empty(): + raise ValueError("empty range cannot have min element") + return self.base_to_units(self._ranges.min_elem()) + + def op_add_range_union(self, other: "RangeUnion[TQuant]") -> "RangeUnion[TQuant]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._ranges.op_add_range_union(other._ranges) + return RangeUnion._from_range_union(_range, self.units) + + def op_negate(self) -> "RangeUnion[TQuant]": + _range = self._ranges.op_negate() + return RangeUnion._from_range_union(_range, self.units) + + def op_subtract_range_union( + self, other: "RangeUnion[TQuant]" + ) -> "RangeUnion[TQuant]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._ranges.op_subtract_range_union(other._ranges) + return RangeUnion._from_range_union(_range, self.units) + + def op_mul_range_union(self, other: "RangeUnion[TQuant]") -> "RangeUnion[TQuant]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._ranges.op_mul_range_union(other._ranges) + return RangeUnion._from_range_union(_range, self.units * other.units) + + def op_invert(self) -> "RangeUnion[TQuant]": + _range = self._ranges.op_invert() + return RangeUnion._from_range_union(_range, 1 / self.units) + + def op_div_range_union(self, other: "RangeUnion[TQuant]") -> "RangeUnion[TQuant]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._ranges.op_div_range_union(other._ranges) + return RangeUnion._from_range_union(_range, self.units / other.units) + + def __contains__(self, item: Any) -> bool: + if isinstance(item, Quantity): + if not item.units.is_compatible_with(self.units): + return False + item = 
item.to(self.range_units).magnitude + if not isinstance(item, float) and not isinstance(item, int): + return False + return self._ranges.__contains__(item) + return False - if any(s.empty for s in sets): - return Empty(units=sets[0].units) - - def group(set: Set_[T]) -> str: - if isinstance(set, Single): - return "single" - if isinstance(set, Range): - return "range" - return "union" - - grouped_sets = groupby(sets, key=group) - singles = grouped_sets["single"] - ranges = grouped_sets["range"] - unions = grouped_sets["union"] - single_offset = add_singles(*singles) - range_sum = add_ranges(*ranges, offset=single_offset) - - if len(range_sum) > 0: - recursion_set = range_sum - elif len(singles) > 0: - recursion_set = [Single(single_offset)] - else: - recursion_set = [] - - if len(unions) == 0: - assert len(recursion_set) == 1 - return recursion_set[0] - return Union( # TODO this is exponential, we'll want to defer the computation - *(operation_add(e, *unions[1:], *recursion_set) for e in unions[0].elements) - ) - - -def operation_negate[T: _SupportsRangeOps]( - *sets: Set_[T], -) -> list[Set_[_SupportsRangeOps]]: - def negate(set: Set_[T]) -> Set_[T]: - if isinstance(set, Single): - return Single(-set.value) - if isinstance(set, Range): - return Range(-set.max, -set.min) - return Union(*(negate(e) for e in set.elements)) - - return [negate(e) for e in sets] - - -def operation_subtract[T: _SupportsRangeOps]( - first: Set_[T], - *sets: Set_[T], -) -> Set_[_SupportsRangeOps]: - return operation_add(first, *operation_negate(*sets)) + def __eq__(self, value: Any) -> bool: + if not isinstance(value, HasUnit): + return False + if not self.units.is_compatible_with(value.units): + return False + if isinstance(value, RangeUnion): + return self._ranges == value._ranges + if isinstance(value, Range) and len(self._ranges.ranges) == 1: + return self._ranges.ranges[0] == value._range + return False + + def __repr__(self) -> str: + if self.units.is_compatible_with(dimensionless): + return f"_RangeUnion({', '.join(f"[{r.min}, {r.max}]" for r in self._ranges.ranges)})" + return f"_RangeUnion({', '.join(f"[{self.base_to_units(r.min)}, {self.base_to_units(r.max)}]" for r in self._ranges.ranges)} | {self.units})" + + +def UnitEmpty(units: Unit) -> RangeUnion[TQuant]: + return RangeUnion(units=units) + + +def Singles(*values: TQuant, units: Unit | None = None) -> RangeUnion[TQuant]: + return RangeUnion(*(Single(v) for v in values), units=units) + + +# class Set[T](Union[T]): +# def __init__(self, *elements: T, units: Unit | None = None): +# super().__init__(*(Single(e) for e in elements), units=units) +# +# def __contains__(self, item: T): +# return Single(item) in self.elements +# +# +# def operation_add[T: _SupportsRangeOps]( +# *sets: Set_[T], +# ) -> Set_[_SupportsRangeOps]: +# def add_singles(*singles: Single[T]) -> T: +# if len(singles) == 0: +# return 0 +# return sum(s.value for s in singles) +# +# def add_ranges(*ranges: Range[T], offset: T) -> list[Range[T]]: +# if len(ranges) == 0: +# return [] +# return [ +# Range( +# min=sum(r.min for r in ranges) + offset, +# max=sum(r.max for r in ranges) + offset, +# ) +# ] +# +# if any(s.empty for s in sets): +# return Empty(units=sets[0].units) +# +# def group(set: Set_[T]) -> str: +# if isinstance(set, Single): +# return "single" +# if isinstance(set, Range): +# return "range" +# return "union" +# +# grouped_sets = groupby(sets, key=group) +# singles = grouped_sets["single"] +# ranges = grouped_sets["range"] +# unions = grouped_sets["union"] +# single_offset = 
add_singles(*singles) +# range_sum = add_ranges(*ranges, offset=single_offset) +# +# if len(range_sum) > 0: +# recursion_set = range_sum +# elif len(singles) > 0: +# recursion_set = [Single(single_offset)] +# else: +# recursion_set = [] +# +# if len(unions) == 0: +# assert len(recursion_set) == 1 +# return recursion_set[0] +# return Union( # TODO this is exponential, we'll want to defer the computation +# *(operation_add(e, *unions[1:], *recursion_set) for e in unions[0].elements) +# ) +# +# +# def operation_negate[T: _SupportsRangeOps]( +# *sets: Set_[T], +# ) -> list[Set_[_SupportsRangeOps]]: +# def negate(set: Set_[T]) -> Set_[T]: +# if isinstance(set, Single): +# return Single(-set.value) +# if isinstance(set, Range): +# return Range(-set.max, -set.min) +# return Union(*(negate(e) for e in set.elements)) +# +# return [negate(e) for e in sets] +# +# +# def operation_subtract[T: _SupportsRangeOps]( +# first: Set_[T], +# *sets: Set_[T], +# ) -> Set_[_SupportsRangeOps]: +# return operation_add(first, *operation_negate(*sets)) +# +# +# def operation_mul[T: _SupportsRangeOps]( +# *sets: Set_[T], +# ) -> Set_[_SupportsRangeOps]: +# def mul_singles(*singles: Single[T]) -> Single[T]: +# return Single(math.prod((s.value for s in singles), start=1)) +# +# def mul_ranges(r1: Range[T], r2: Range[T]) -> Range[T]: +# return Range( +# min=min(r1.min * r2.min, r1.min * r2.max, r1.max * r2.min, r1.max * r2.max), +# max=max(r1.min * r2.min, r1.min * r2.max, r1.max * r2.min, r1.max * r2.max), +# ) +# +# def mul_single_range(single: Single[T], range: Range[T]) -> Range[T]: +# if single.value < 0: +# return Range(min=single.value * range.max, max=single.value * range.min) +# return Range(min=single.value * range.min, max=single.value * range.max) +# +# def mul_range_list( +# *ranges: Range[T], factor: Single[T] = Single(1) +# ) -> list[Range[T]]: +# if len(ranges) == 0: +# return [] +# first, *rest = ranges +# first = mul_single_range(factor, first) +# for r in rest: +# first = mul_ranges(first, r) +# return [first] +# +# if any(s.empty for s in sets): +# return Empty(units=sets[0].units) +# +# def group(set: Set_[T]) -> str: +# if isinstance(set, Single): +# return "single" +# if isinstance(set, Range): +# return "range" +# return "union" +# +# grouped_sets = groupby(sets, key=group) +# singles = grouped_sets["single"] +# ranges = grouped_sets["range"] +# unions = grouped_sets["union"] +# single_product = mul_singles(*singles) +# range_product = mul_range_list(*ranges, factor=single_product) +# +# if len(range_product) > 0: +# recursion_set = range_product +# elif len(singles) > 0: +# recursion_set = [single_product] +# else: +# recursion_set = [] +# +# if len(unions) == 0: +# assert len(recursion_set) == 1 +# return recursion_set[0] +# return Union( # TODO this is exponential, we'll want to defer the computation +# *(operation_mul(e, *unions[1:], *recursion_set) for e in unions[0].elements) +# ) diff --git a/test/libs/test_sets.py b/test/libs/test_sets.py index 672e59d4..49054472 100644 --- a/test/libs/test_sets.py +++ b/test/libs/test_sets.py @@ -6,11 +6,10 @@ from faebryk.libs.sets import ( Range, - Set, + RangeUnion, Single, - Union, - operation_add, - operation_subtract, + Singles, + UnitEmpty, ) from faebryk.libs.units import P, Unit, dimensionless from faebryk.libs.util import cast_assert @@ -18,14 +17,14 @@ def test_range_intersection_simple(): x = Range(0, 10) - y = x.range_intersection(Range(5, 15)) + y = x.op_intersect_range(Range(5, 15)) assert y == Range(5, 10) def 
test_range_intersection_empty(): x = Range(0, 10) - y = x.range_intersection(Range(15, 20)) - assert y == Range(empty=True, units=dimensionless) + y = x.op_intersect_range(Range(15, 20)) + assert y == UnitEmpty(dimensionless) def test_range_unit_none(): @@ -49,48 +48,47 @@ def test_range_unit_different(): Range(min=10 * P.V, units=cast_assert(Unit, P.A)) -def test_range_force_unit(): - with pytest.raises(ValueError): - Range(empty=True) - with pytest.raises(ValueError): - Range() - - def test_set_min_elem(): - x = Set(5, 3, 2, 4, 1) + x = Singles(5, 3, 2, 4, 1) assert x.min_elem() == 1 def test_set_contains(): - x = Set(5, 3, 2, 4, 1) - assert 3 in x - assert 6 not in x + x = Singles(5, 3, 2, 4, 1) + assert 3 * dimensionless in x + assert 6 * dimensionless not in x def test_union_min_elem(): - x = Union( - Range(4, 5), Range(3, 7), Single(9), Union(Range(1, 2), Union(Range(0, 1))) + x = RangeUnion( + Range(4, 5), + Range(3, 7), + Single(9), + RangeUnion(Range(1, 2), RangeUnion(Range(0, 1))), ) assert x.min_elem() == 0 def test_union_contains(): - x = Union( - Range(4, 5), Range(3, 7), Single(9), Union(Range(1, 2), Union(Range(0, 1))) + x = RangeUnion( + Range(4, 5), + Range(3, 7), + Single(9), + RangeUnion(Range(1, 2), RangeUnion(Range(0, 1))), ) - assert 0 in x - assert 1 in x - assert 2 in x - assert 3 in x - assert 4 in x - assert 5 in x - assert 6 in x - assert 7 in x - assert 8 not in x - assert 9 in x - assert 10 not in x - - x = Union(Range(max=1.5 * P.V), Range(2.5 * P.V, 3.5 * P.V)) + assert 0 * dimensionless in x + assert 1 * dimensionless in x + assert 2 * dimensionless in x + assert 3 * dimensionless in x + assert 4 * dimensionless in x + assert 5 * dimensionless in x + assert 6 * dimensionless in x + assert 7 * dimensionless in x + assert 8 * dimensionless not in x + assert 9 * dimensionless in x + assert 10 * dimensionless not in x + + x = RangeUnion(Range(max=1.5 * P.V), Range(2.5 * P.V, 3.5 * P.V)) assert float("-inf") * P.V in x assert 1 * P.V in x assert 1.5 * P.V in x @@ -100,34 +98,56 @@ def test_union_contains(): assert 3.5 * P.V in x assert 4 * P.V not in x assert float("inf") * P.V not in x - with pytest.raises(ValueError): # units - assert 1 not in x + assert 1 not in x + assert 1 * dimensionless not in x def test_union_empty(): - x = Union( - Range(empty=True, units=dimensionless), - Union(Range(empty=True, units=dimensionless), Set(units=dimensionless)), + x = RangeUnion( + UnitEmpty(dimensionless), + RangeUnion(UnitEmpty(dimensionless), Singles(units=dimensionless)), ) - assert x.empty + assert x.is_empty() def test_addition(): - assert operation_add(Range(0, 1), Range(2, 3)) == Range(2, 4) - assert operation_add(Range(0, 1), Single(2), Single(3)) == Range(5, 6) - assert operation_add(Set(0, 1), Set(2, 3)) == Set(2, 3, 4) - assert operation_add(Set(0, 1), Set(2, 3), Range(-1, 0)) == Union( - Range(1, 2), Range(2, 3), Range(3, 4) - ) - assert operation_add( - Single(3), Set(0, 1), Set(2, 3), Range(-1, 0), Single(7) - ) == Union(Range(11, 12), Range(12, 13), Range(13, 14)) - assert operation_add( - Union(Range(0, 1), Range(2, 3)), - Union(Range(4, 5), Range(6, 7)), - ) == Union(Range(4, 6), Range(6, 8), Range(6, 8), Range(8, 10)) + assert Range(0, 1).op_add_range(Range(2, 3)) == Range(2, 4) + assert Range(0, 1).op_add_range(Single(2)) == Range(2, 3) + assert RangeUnion(Single(2), Single(3)).op_add_range_union( + RangeUnion(Range(0, 1)) + ) == Range(2, 4) + assert RangeUnion(Single(10), Range(20, 21)).op_add_range_union( + RangeUnion(Range(0, 1), Range(100, 101)) + 
) == RangeUnion(Range(10, 11), Range(110, 111), Range(20, 22), Range(120, 122)) def test_subtraction(): - assert operation_subtract(Range(0, 1), Range(2, 3)) == Range(-3, -1) - assert operation_subtract(Range(0, 1), Single(2)) == Range(-2, -1) + assert Range(0, 1).op_subtract_range(Range(2, 3)) == Range(-3, -1) + assert Range(0, 1).op_subtract_range(Single(2)) == Range(-2, -1) + + +def test_multiplication(): + assert Range(0, 2).op_mul_range(Range(2, 3)) == Range(0, 6) + assert Range(0, 1).op_mul_range(Single(2)) == Range(0, 2) + assert Range(0, 1).op_mul_range(Single(-2)) == Range(-2, 0) + assert Range(-1, 1).op_mul_range(Range(2, 4)) == Range(-4, 4) + assert Singles(0, 1).op_mul_range_union(Singles(2, 3)) == Singles(0, 2, 3) + assert Singles(0, 1).op_mul_range_union(Singles(2, 3)).op_mul_range_union( + RangeUnion(Range(-1, 0)) + ) == RangeUnion(Range(0, 0), Range(-2, 0), Range(-3, 0)) + + +def test_invert(): + assert Range(1, 2).op_invert() == Range(0.5, 1) + assert Range(-2, -1).op_invert() == Range(-1, -0.5) + assert Range(-1, 1).op_invert() == RangeUnion( + Range(float("-inf"), -1), Range(1, float("inf")) + ) + assert RangeUnion(Range(-4, 2), Range(-1, 3)).op_invert() == RangeUnion( + Range(max=-0.25), Range(min=1 / 3) + ) + + +def test_division(): + assert Range(0, 1).op_div_range(Range(2, 3)) == Range(0, 0.5) + assert Range(0, 1).op_div_range(Range(0, 3)) == Range(min=0.0) From 2313191c862baa03bcd70f89ed7ab7c9de3b8a70 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Fri, 18 Oct 2024 17:33:24 +0200 Subject: [PATCH 39/80] rename (unit) RangeUnion to Ranges --- src/faebryk/libs/sets.py | 88 +++++++++++++++++++--------------------- test/libs/test_sets.py | 40 +++++++++--------- 2 files changed, 62 insertions(+), 66 deletions(-) diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 20bb5260..113617a1 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -157,7 +157,7 @@ def min_elem(self) -> T: raise ValueError("empty range cannot have min element") return self.ranges[0].min_elem() - def op_add_range_union(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": + def op_add_ranges(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": return _RangeUnion( *(r.op_add_range(o) for r in self.ranges for o in other.ranges) ) @@ -165,10 +165,10 @@ def op_add_range_union(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": def op_negate(self) -> "_RangeUnion[T]": return _RangeUnion(*(r.op_negate() for r in self.ranges)) - def op_subtract_range_union(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": - return self.op_add_range_union(other.op_negate()) + def op_subtract_ranges(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": + return self.op_add_ranges(other.op_negate()) - def op_mul_range_union(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": + def op_mul_ranges(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": return _RangeUnion( *(r.op_mul_range(o) for r in self.ranges for o in other.ranges) ) @@ -176,10 +176,10 @@ def op_mul_range_union(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": def op_invert(self) -> "_RangeUnion[float]": return _RangeUnion(*(r.op_invert() for r in self.ranges)) - def op_div_range_union( + def op_div_ranges( self: "_RangeUnion[float]", other: "_RangeUnion[float]" ) -> "_RangeUnion[float]": - return self.op_mul_range_union(other.op_invert()) + return self.op_mul_ranges(other.op_invert()) def __contains__(self, item: T) -> bool: from bisect import bisect @@ -218,7 +218,7 @@ def 
_NumericEmpty() -> _RangeUnion: return __numeric_empty -class __NonNumericSet[U](_Set[U]): +class _NonNumericSet[U](_Set[U]): def __init__(self, *elements: U): self.elements = set(elements) @@ -335,11 +335,11 @@ def min_elem(self) -> Quantity: def is_empty(self) -> bool: return self._range.is_empty() - def op_intersect_range(self, other: "Range[TQuant]") -> "RangeUnion[TQuant]": + def op_intersect_range(self, other: "Range[TQuant]") -> "Ranges[TQuant]": if not self.units.is_compatible_with(other.units): - return RangeUnion(units=self.units) + return Ranges(units=self.units) _range = self._range.op_intersect_range(other._range) - return RangeUnion._from_range_union(_range, self.units) + return Ranges._from_ranges(_range, self.units) def op_add_range(self, other: "Range[TQuant]") -> "Range[TQuant]": if not self.units.is_compatible_with(other.units): @@ -363,15 +363,15 @@ def op_mul_range(self, other: "Range[TQuant]") -> "Range[TQuant]": _range = self._range.op_mul_range(other._range) return Range._from_range(_range, self.units * other.units) - def op_invert(self) -> "RangeUnion[TQuant]": + def op_invert(self) -> "Ranges[TQuant]": _range = self._range.op_invert() - return RangeUnion._from_range_union(_range, 1 / self.units) + return Ranges._from_ranges(_range, 1 / self.units) - def op_div_range(self, other: "Range[TQuant]") -> "RangeUnion[TQuant]": + def op_div_range(self, other: "Range[TQuant]") -> "Ranges[TQuant]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._range.op_div_range(other._range) - return RangeUnion._from_range_union(_range, self.units / other.units) + return Ranges._from_ranges(_range, self.units / other.units) # def __copy__(self) -> Self: # r = Range.__new__(Range) @@ -399,7 +399,7 @@ def __eq__(self, value: Any) -> bool: return False if isinstance(value, Range): return self._range == value._range - if isinstance(value, RangeUnion) and len(value._ranges.ranges) == 1: + if isinstance(value, Ranges) and len(value._ranges.ranges) == 1: return self._range == value._ranges.ranges[0] return False @@ -417,9 +417,9 @@ def Single(value: TQuant) -> Range[TQuant]: return Range(value, value) -class RangeUnion(UnitSet[TQuant]): +class Ranges(UnitSet[TQuant]): def __init__( - self, *ranges: Range[TQuant] | "RangeUnion[TQuant]", units: Unit | None = None + self, *ranges: Range[TQuant] | "Ranges[TQuant]", units: Unit | None = None ): range_units = [ r.units if isinstance(r, HasUnit) else dimensionless for r in ranges @@ -431,7 +431,7 @@ def __init__( if not all(self.units.is_compatible_with(u) for u in range_units): raise ValueError("all elements must have compatible units") - def get_backing(r: Range[TQuant] | "RangeUnion[TQuant]"): + def get_backing(r: Range[TQuant] | "Ranges[TQuant]"): if isinstance(r, Range): return r._range else: @@ -440,11 +440,9 @@ def get_backing(r: Range[TQuant] | "RangeUnion[TQuant]"): self._ranges = _RangeUnion(*(get_backing(r) for r in ranges)) @staticmethod - def _from_range_union( - range_union: _RangeUnion[T], units: Unit - ) -> "RangeUnion[TQuant]": - r = RangeUnion.__new__(RangeUnion) - r._ranges = range_union + def _from_ranges(ranges: _RangeUnion[T], units: Unit) -> "Ranges[TQuant]": + r = Ranges.__new__(Ranges) + r._ranges = ranges r.units = units r.range_units = base_units(units) return r @@ -460,39 +458,37 @@ def min_elem(self) -> TQuant: raise ValueError("empty range cannot have min element") return self.base_to_units(self._ranges.min_elem()) - def op_add_range_union(self, other: 
"RangeUnion[TQuant]") -> "RangeUnion[TQuant]": + def op_add_ranges(self, other: "Ranges[TQuant]") -> "Ranges[TQuant]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") - _range = self._ranges.op_add_range_union(other._ranges) - return RangeUnion._from_range_union(_range, self.units) + _range = self._ranges.op_add_ranges(other._ranges) + return Ranges._from_ranges(_range, self.units) - def op_negate(self) -> "RangeUnion[TQuant]": + def op_negate(self) -> "Ranges[TQuant]": _range = self._ranges.op_negate() - return RangeUnion._from_range_union(_range, self.units) + return Ranges._from_ranges(_range, self.units) - def op_subtract_range_union( - self, other: "RangeUnion[TQuant]" - ) -> "RangeUnion[TQuant]": + def op_subtract_ranges(self, other: "Ranges[TQuant]") -> "Ranges[TQuant]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") - _range = self._ranges.op_subtract_range_union(other._ranges) - return RangeUnion._from_range_union(_range, self.units) + _range = self._ranges.op_subtract_ranges(other._ranges) + return Ranges._from_ranges(_range, self.units) - def op_mul_range_union(self, other: "RangeUnion[TQuant]") -> "RangeUnion[TQuant]": + def op_mul_ranges(self, other: "Ranges[TQuant]") -> "Ranges[TQuant]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") - _range = self._ranges.op_mul_range_union(other._ranges) - return RangeUnion._from_range_union(_range, self.units * other.units) + _range = self._ranges.op_mul_ranges(other._ranges) + return Ranges._from_ranges(_range, self.units * other.units) - def op_invert(self) -> "RangeUnion[TQuant]": + def op_invert(self) -> "Ranges[TQuant]": _range = self._ranges.op_invert() - return RangeUnion._from_range_union(_range, 1 / self.units) + return Ranges._from_ranges(_range, 1 / self.units) - def op_div_range_union(self, other: "RangeUnion[TQuant]") -> "RangeUnion[TQuant]": + def op_div_ranges(self, other: "Ranges[TQuant]") -> "Ranges[TQuant]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") - _range = self._ranges.op_div_range_union(other._ranges) - return RangeUnion._from_range_union(_range, self.units / other.units) + _range = self._ranges.op_div_ranges(other._ranges) + return Ranges._from_ranges(_range, self.units / other.units) def __contains__(self, item: Any) -> bool: if isinstance(item, Quantity): @@ -509,7 +505,7 @@ def __eq__(self, value: Any) -> bool: return False if not self.units.is_compatible_with(value.units): return False - if isinstance(value, RangeUnion): + if isinstance(value, Ranges): return self._ranges == value._ranges if isinstance(value, Range) and len(self._ranges.ranges) == 1: return self._ranges.ranges[0] == value._range @@ -521,12 +517,12 @@ def __repr__(self) -> str: return f"_RangeUnion({', '.join(f"[{self.base_to_units(r.min)}, {self.base_to_units(r.max)}]" for r in self._ranges.ranges)} | {self.units})" -def UnitEmpty(units: Unit) -> RangeUnion[TQuant]: - return RangeUnion(units=units) +def UnitEmpty(units: Unit) -> Ranges[TQuant]: + return Ranges(units=units) -def Singles(*values: TQuant, units: Unit | None = None) -> RangeUnion[TQuant]: - return RangeUnion(*(Single(v) for v in values), units=units) +def Singles(*values: TQuant, units: Unit | None = None) -> Ranges[TQuant]: + return Ranges(*(Single(v) for v in values), units=units) # class Set[T](Union[T]): diff --git a/test/libs/test_sets.py b/test/libs/test_sets.py index 49054472..4405620a 100644 --- 
a/test/libs/test_sets.py +++ b/test/libs/test_sets.py @@ -6,7 +6,7 @@ from faebryk.libs.sets import ( Range, - RangeUnion, + Ranges, Single, Singles, UnitEmpty, @@ -60,21 +60,21 @@ def test_set_contains(): def test_union_min_elem(): - x = RangeUnion( + x = Ranges( Range(4, 5), Range(3, 7), Single(9), - RangeUnion(Range(1, 2), RangeUnion(Range(0, 1))), + Ranges(Range(1, 2), Ranges(Range(0, 1))), ) assert x.min_elem() == 0 def test_union_contains(): - x = RangeUnion( + x = Ranges( Range(4, 5), Range(3, 7), Single(9), - RangeUnion(Range(1, 2), RangeUnion(Range(0, 1))), + Ranges(Range(1, 2), Ranges(Range(0, 1))), ) assert 0 * dimensionless in x assert 1 * dimensionless in x @@ -88,7 +88,7 @@ def test_union_contains(): assert 9 * dimensionless in x assert 10 * dimensionless not in x - x = RangeUnion(Range(max=1.5 * P.V), Range(2.5 * P.V, 3.5 * P.V)) + x = Ranges(Range(max=1.5 * P.V), Range(2.5 * P.V, 3.5 * P.V)) assert float("-inf") * P.V in x assert 1 * P.V in x assert 1.5 * P.V in x @@ -103,9 +103,9 @@ def test_union_contains(): def test_union_empty(): - x = RangeUnion( + x = Ranges( UnitEmpty(dimensionless), - RangeUnion(UnitEmpty(dimensionless), Singles(units=dimensionless)), + Ranges(UnitEmpty(dimensionless), Singles(units=dimensionless)), ) assert x.is_empty() @@ -113,12 +113,12 @@ def test_union_empty(): def test_addition(): assert Range(0, 1).op_add_range(Range(2, 3)) == Range(2, 4) assert Range(0, 1).op_add_range(Single(2)) == Range(2, 3) - assert RangeUnion(Single(2), Single(3)).op_add_range_union( - RangeUnion(Range(0, 1)) - ) == Range(2, 4) - assert RangeUnion(Single(10), Range(20, 21)).op_add_range_union( - RangeUnion(Range(0, 1), Range(100, 101)) - ) == RangeUnion(Range(10, 11), Range(110, 111), Range(20, 22), Range(120, 122)) + assert Ranges(Single(2), Single(3)).op_add_ranges(Ranges(Range(0, 1))) == Range( + 2, 4 + ) + assert Ranges(Single(10), Range(20, 21)).op_add_ranges( + Ranges(Range(0, 1), Range(100, 101)) + ) == Ranges(Range(10, 11), Range(110, 111), Range(20, 22), Range(120, 122)) def test_subtraction(): @@ -131,19 +131,19 @@ def test_multiplication(): assert Range(0, 1).op_mul_range(Single(2)) == Range(0, 2) assert Range(0, 1).op_mul_range(Single(-2)) == Range(-2, 0) assert Range(-1, 1).op_mul_range(Range(2, 4)) == Range(-4, 4) - assert Singles(0, 1).op_mul_range_union(Singles(2, 3)) == Singles(0, 2, 3) - assert Singles(0, 1).op_mul_range_union(Singles(2, 3)).op_mul_range_union( - RangeUnion(Range(-1, 0)) - ) == RangeUnion(Range(0, 0), Range(-2, 0), Range(-3, 0)) + assert Singles(0, 1).op_mul_ranges(Singles(2, 3)) == Singles(0, 2, 3) + assert Singles(0, 1).op_mul_ranges(Singles(2, 3)).op_mul_ranges( + Ranges(Range(-1, 0)) + ) == Ranges(Range(0, 0), Range(-2, 0), Range(-3, 0)) def test_invert(): assert Range(1, 2).op_invert() == Range(0.5, 1) assert Range(-2, -1).op_invert() == Range(-1, -0.5) - assert Range(-1, 1).op_invert() == RangeUnion( + assert Range(-1, 1).op_invert() == Ranges( Range(float("-inf"), -1), Range(1, float("inf")) ) - assert RangeUnion(Range(-4, 2), Range(-1, 3)).op_invert() == RangeUnion( + assert Ranges(Range(-4, 2), Range(-1, 3)).op_invert() == Ranges( Range(max=-0.25), Range(min=1 / 3) ) From 4d77e857088931b1fe4e8035c70f4fccc4166802 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Fri, 18 Oct 2024 20:17:49 +0200 Subject: [PATCH 40/80] solver api, picker stuff --- src/faebryk/core/parameter.py | 23 +++++++-- src/faebryk/core/solver.py | 10 ++-- src/faebryk/library/has_multi_picker.py | 6 ++- 
src/faebryk/libs/app/erc.py | 27 +++++------ src/faebryk/libs/examples/pickers.py | 4 +- src/faebryk/libs/picker/jlcpcb/picker_lib.py | 21 ++++---- src/faebryk/libs/picker/jlcpcb/pickers.py | 13 +++-- src/faebryk/libs/picker/picker.py | 51 ++++++++++++-------- src/faebryk/libs/sets.py | 34 +++++++++++-- test/library/nodes/test_electricpower.py | 16 ++++-- 10 files changed, 140 insertions(+), 65 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 1bbda11e..42ebaf75 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -17,6 +17,10 @@ logger = logging.getLogger(__name__) +# When we make this generic, two types, type T of elements, and type S of known subsets +# boolean: T == S == bool +# enum: T == S == Enum +# number: T == Number type, S == Range[Number] class ParameterOperatable(Protocol): type QuantityLike = Quantity | NotImplementedType type Number = int | float | QuantityLike @@ -108,15 +112,26 @@ def operation_is_subset(self, other: Sets) -> "Expression": ... def operation_is_superset(self, other: Sets) -> "Expression": ... - def inspect_known_min(self) -> Number: ... + def inspect_known_min(self: NumberLike) -> Number: ... - def inspect_known_max(self) -> Number: ... + def inspect_known_max(self: NumberLike) -> Number: ... + + def inspect_known_values(self: BooleanLike) -> Set_[bool]: ... # Run by the solver on finalization - # inspect_final: Callable[[Self], None] + inspect_final: Callable[[Self], None] = lambda _: None + + def inspect_add_on_final(self, fun: Callable[[Self], None]) -> None: + current = self.inspect_final + + def new(self2): + current(self2) + fun(self2) + + self.inspect_final = new # def inspect_num_known_supersets(self) -> int: ... - # def inspect_get_known_superset(self) -> Iterable[Set_]: ... + # def inspect_get_known_supersets(self) -> Iterable[Set_]: ... # ---------------------------------------------------------------------------------- def __add__(self, other: NumberLike): diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index 0f3e36f4..752646f7 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -39,11 +39,13 @@ def get_any_single( def assert_any_predicate( self, G: Graph, - predicates: list[Predicate], + predicates: list[tuple[Predicate, Any]], suppose_constraint: Predicate | None = None, minimize: Expression | None = None, constrain_solved: bool = True, - ) -> tuple[list[Expression], list[Expression], list[Parameter]]: ... + ) -> tuple[ + list[tuple[Predicate, Any]], list[tuple[Predicate, Any]], list[Parameter] + ]: ... # run deferred work def finalize(self, G: Graph) -> None: ... 
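The three parts of the returned tuple are: the predicates the solver managed to decide, the ones it had to leave unknown, and the parameters whose solution set came back empty. A minimal caller-side sketch of the new (predicate, info) pairing — the solver, graph and candidates mapping are assumed to exist and the names are illustrative, not part of this patch:

    pairs = [(predicate, option) for option, predicate in candidates.items()]
    decided, unknown, empty_params = solver.assert_any_predicate(graph, pairs)
    if decided:
        _, chosen = decided[0]  # first satisfiable candidate wins
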
@@ -69,7 +71,9 @@ def assert_any_predicate( suppose_constraint: Predicate | None = None, minimize: Expression | None = None, constrain_solved: bool = True, - ) -> tuple[list[Expression], list[Expression], list[Parameter]]: + ) -> tuple[ + list[tuple[Predicate, Any]], list[tuple[Predicate, Any]], list[Parameter] + ]: raise NotImplementedError() def finalize(self, G: Graph) -> None: diff --git a/src/faebryk/library/has_multi_picker.py b/src/faebryk/library/has_multi_picker.py index 106c9e4a..066a5746 100644 --- a/src/faebryk/library/has_multi_picker.py +++ b/src/faebryk/library/has_multi_picker.py @@ -6,6 +6,7 @@ from abc import abstractmethod from typing import Callable, Mapping +from faebryk.core.solver import Solver import faebryk.library._F as F from faebryk.core.module import Module from faebryk.core.node import Node @@ -41,11 +42,12 @@ def __init__(self, prio: int, picker: Picker): def __preinit__(self): ... class FunctionPicker(Picker): - def __init__(self, picker: Callable[[Module], None]): + def __init__(self, picker: Callable[[Module, Solver], None], solver: Solver): self.picker = picker + self.solver = solver def pick(self, module: Module) -> None: - self.picker(module) + self.picker(module, self.solver) def __repr__(self) -> str: return f"{type(self).__name__}({self.picker.__name__})" diff --git a/src/faebryk/libs/app/erc.py b/src/faebryk/libs/app/erc.py index 78b65aed..5b84fa37 100644 --- a/src/faebryk/libs/app/erc.py +++ b/src/faebryk/libs/app/erc.py @@ -9,8 +9,8 @@ from faebryk.core.graphinterface import Graph from faebryk.core.module import Module from faebryk.core.moduleinterface import ModuleInterface -from faebryk.library.Operation import Operation from faebryk.libs.picker.picker import has_part_picked +from faebryk.libs.units import P from faebryk.libs.util import groupby, print_stack logger = logging.getLogger(__name__) @@ -37,15 +37,12 @@ def __init__(self, faulting_ifs: Sequence[ModuleInterface], *args: object) -> No class ERCFaultElectricPowerUndefinedVoltage(ERCFault): - def __init__(self, faulting_EP: list[F.ElectricPower], *args: object) -> None: - faulting_EP = list(sorted(faulting_EP, key=lambda ep: ep.get_name())) - msg = "ElectricPower(s) with undefined or unsolved voltage: " + ",\n ".join( - f"{ep}: {ep.voltage}" for ep in faulting_EP - ) - super().__init__(faulting_EP, msg, *args) + def __init__(self, faulting_EP: F.ElectricPower, *args: object) -> None: + msg = f"ElectricPower with undefined or unsolved voltage: {faulting_EP}: {faulting_EP.voltage}" + super().__init__([faulting_EP], msg, *args) -def simple_erc(G: Graph): +def simple_erc(G: Graph, voltage_limit=1e5 * P.V): """Simple ERC check. 
This function will check for the following ERC violations: @@ -71,14 +68,14 @@ def simple_erc(G: Graph): if ep.lv.is_connected_to(ep.hv): raise ERCFaultShort([ep], "shorted power") - unresolved_voltage = [ - ep - for ep in electricpower - if isinstance(ep.voltage.get_most_narrow(), (F.TBD, Operation)) - ] + for ep in electricpower: + if ep.voltage.inspect_known_max() > voltage_limit: + + def raise_on_limit(x): + if x.inspect_known_max() > voltage_limit: + raise ERCFaultElectricPowerUndefinedVoltage(ep) - if unresolved_voltage: - raise ERCFaultElectricPowerUndefinedVoltage(unresolved_voltage) + ep.voltage.inspect_add_on_final(raise_on_limit) # shorted nets nets = G.nodes_of_type(F.Net) diff --git a/src/faebryk/libs/examples/pickers.py b/src/faebryk/libs/examples/pickers.py index 236d3847..d68bf3d1 100644 --- a/src/faebryk/libs/examples/pickers.py +++ b/src/faebryk/libs/examples/pickers.py @@ -8,6 +8,7 @@ import logging from typing import TYPE_CHECKING +from faebryk.core.solver import DefaultSolver import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L @@ -285,6 +286,7 @@ def pick_switch(module: "_TSwitch[F.Electrical]"): def add_example_pickers(module: Module): + solver = DefaultSolver() lookup = { F.Resistor: pick_resistor, F.LED: pick_led, @@ -298,5 +300,5 @@ def add_example_pickers(module: Module): F.has_multi_picker.add_pickers_by_type( module, lookup, - F.has_multi_picker.FunctionPicker, + lambda pick_fn: F.has_multi_picker.FunctionPicker(pick_fn, solver), ) diff --git a/src/faebryk/libs/picker/jlcpcb/picker_lib.py b/src/faebryk/libs/picker/jlcpcb/picker_lib.py index affd106d..bb614747 100644 --- a/src/faebryk/libs/picker/jlcpcb/picker_lib.py +++ b/src/faebryk/libs/picker/jlcpcb/picker_lib.py @@ -2,6 +2,7 @@ from enum import Enum from typing import Callable +from faebryk.core.solver import Solver import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.e_series import E_SERIES_VALUES @@ -61,7 +62,7 @@ def find_component_by_lcsc_id(lcsc_id: str) -> Component: return next(iter(parts)) -def find_and_attach_by_lcsc_id(module: Module): +def find_and_attach_by_lcsc_id(module: Module, solver: Solver): """ Find a part in the JLCPCB database by its LCSC part number """ @@ -113,7 +114,7 @@ def find_component_by_mfr(mfr: str, mfr_pn: str) -> Component: return next(iter(parts)) -def find_and_attach_by_mfr(module: Module): +def find_and_attach_by_mfr(module: Module, solver: Solver): """ Find a part in the JLCPCB database by its manufacturer part number """ @@ -163,7 +164,7 @@ def find_and_attach_by_mfr(module: Module): # Type specific pickers ---------------------------------------------------------------- -def find_resistor(cmp: Module): +def find_resistor(cmp: Module, solver: Solver): """ Find a resistor part in the JLCPCB database that matches the parameters of the provided resistor @@ -197,7 +198,7 @@ def find_resistor(cmp: Module): ) -def find_capacitor(cmp: Module): +def find_capacitor(cmp: Module, solver: Solver): """ Find a capacitor part in the JLCPCB database that matches the parameters of the provided capacitor @@ -234,7 +235,7 @@ def find_capacitor(cmp: Module): ) -def find_inductor(cmp: Module): +def find_inductor(cmp: Module, solver: Solver): """ Find an inductor part in the JLCPCB database that matches the parameters of the provided inductor. 
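All of these find_* pickers now take the solver they should query, so one solver instance can be shared by every picker attached to a module. A rough sketch of the wiring, mirroring the pickers.py change further down (imports and the surrounding module setup are assumed):

    solver = DefaultSolver()
    module.add(F.has_multi_picker(0, JLCPCBPicker(picker_lib.find_inductor, solver)))
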
@@ -278,7 +279,7 @@ def find_inductor(cmp: Module): ) -def find_tvs(cmp: Module): +def find_tvs(cmp: Module, solver: Solver): """ Find a TVS diode part in the JLCPCB database that matches the parameters of the provided diode @@ -324,7 +325,7 @@ def find_tvs(cmp: Module): ) -def find_diode(cmp: Module): +def find_diode(cmp: Module, solver: Solver): """ Find a diode part in the JLCPCB database that matches the parameters of the provided diode @@ -363,7 +364,7 @@ def find_diode(cmp: Module): ) -def find_led(cmp: Module): +def find_led(cmp: Module, solver: Solver): """ Find a LED part in the JLCPCB database that matches the parameters of the provided LED @@ -401,7 +402,7 @@ def find_led(cmp: Module): ) -def find_mosfet(cmp: Module): +def find_mosfet(cmp: Module, solver: Solver): """ Find a MOSFET part in the JLCPCB database that matches the parameters of the provided MOSFET @@ -443,7 +444,7 @@ def find_mosfet(cmp: Module): ) -def find_ldo(cmp: Module): +def find_ldo(cmp: Module, solver: Solver): """ Find a LDO part in the JLCPCB database that matches the parameters of the provided LDO diff --git a/src/faebryk/libs/picker/jlcpcb/pickers.py b/src/faebryk/libs/picker/jlcpcb/pickers.py index 376f4d3c..63b57a25 100644 --- a/src/faebryk/libs/picker/jlcpcb/pickers.py +++ b/src/faebryk/libs/picker/jlcpcb/pickers.py @@ -1,5 +1,6 @@ import logging +from faebryk.core.solver import DefaultSolver import faebryk.library._F as F import faebryk.libs.picker.jlcpcb.picker_lib as P from faebryk.core.module import Module @@ -86,12 +87,18 @@ def add_jlcpcb_pickers(module: Module, base_prio: int = 0) -> None: # Generic pickers prio = base_prio - module.add(F.has_multi_picker(prio, JLCPCBPicker(P.find_and_attach_by_lcsc_id))) - module.add(F.has_multi_picker(prio, JLCPCBPicker(P.find_and_attach_by_mfr))) + solver = DefaultSolver() + module.add( + F.has_multi_picker(prio, JLCPCBPicker(P.find_and_attach_by_lcsc_id, solver)) + ) + module.add(F.has_multi_picker(prio, JLCPCBPicker(P.find_and_attach_by_mfr, solver))) # Type specific pickers prio = base_prio + 1 F.has_multi_picker.add_pickers_by_type( - module, P.TYPE_SPECIFIC_LOOKUP, JLCPCBPicker, prio + module, + P.TYPE_SPECIFIC_LOOKUP, + lambda pick_fn: JLCPCBPicker(pick_fn, solver), + prio, ) diff --git a/src/faebryk/libs/picker/picker.py b/src/faebryk/libs/picker/picker.py index 313108f3..2f00c558 100644 --- a/src/faebryk/libs/picker/picker.py +++ b/src/faebryk/libs/picker/picker.py @@ -142,28 +142,41 @@ def pick_module_by_params(module: Module, options: Iterable[PickerOption]): return params = { - not_none(p.get_parent())[1]: p.get_most_narrow() + not_none(p.get_parent())[1]: p for p in module.get_children(direct_only=True, types=Parameter) } - options = list(options) + filtered_options = [o for o in options if not o.filter or o.filter(module)] + predicates = {} + for o in filtered_options: + predicate_list = [] - # TODO this doesn't work - raise NotImplementedError("This doesn't work") - try: - option = next( - filter( - lambda o: (not o.filter or o.filter(module)) - and all( - params[k].operation_is_superset(v) - for k, v in (o.params or {}).items() - if not k.startswith("_") - ), - options, - ) - ) - except StopIteration: - raise PickErrorParams(module, options) + for k, v in (o.params or {}).items(): + if not k.startswith("_"): + param = params[k] + predicate_list.append(param.operation_is_superset(v)) + + if len(predicate_list) == 0: + continue + + anded = predicate_list[0] + for p in predicate_list[1:]: + anded = anded.operation_and(p) + + predicates[o] = anded 
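+    # Each eligible option is now represented by one AND-ed is_superset
+    # predicate; the solver call below reports which of them can be satisfied.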
+ + if len(predicates) == 0: + raise PickErrorParams(module, list(options)) + + true_predicates, unknown_predicates, empty_params = solver.assert_any_predicate( + module.get_graph(), [(p, k) for k, p in predicates.items()] + ) + + # TODO handle failure parameters + + # do we expect more than one? + # if so we can add a heuristic here + option = true_predicates[0][1] if option.pinmap: module.add(F.can_attach_to_footprint_via_pinmap(option.pinmap)) @@ -175,7 +188,7 @@ def pick_module_by_params(module: Module, options: Iterable[PickerOption]): for k, v in (option.params or {}).items(): if k not in params: continue - params[k].override(v) + params[k].alias_is(v) logger.debug(f"Attached {option.part.partno} to {module}") return option diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 113617a1..770173c3 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -1,7 +1,7 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from collections.abc import Generator +from collections.abc import Generator, Iterable from typing import Any, Protocol, TypeVar from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless @@ -206,9 +206,22 @@ def __hash__(self) -> int: def __repr__(self) -> str: return f"_RangeUnion({', '.join(f"[{r.min}, {r.max}]" for r in self.ranges)})" + def __iter__(self) -> Generator[_Range[T]]: + yield from self.ranges -def _Singles(*values: T) -> _RangeUnion[T]: - return _RangeUnion(*(_Single(v) for v in values)) + +class _RangeUnionIter(_RangeUnion[T], Iterable[_Range[T]]): + def __iter__(self) -> Generator[_Range[T]]: + yield from self.ranges + + +class _Singles(_RangeUnion[T], Iterable[T]): + def __init__(self, *values: T): + super().__init__(*(_Single(v) for v in values)) + + def __iter__(self) -> Generator[T]: + for r in self.ranges: + yield r.min __numeric_empty = _RangeUnion() @@ -218,7 +231,7 @@ def _NumericEmpty() -> _RangeUnion: return __numeric_empty -class _NonNumericSet[U](_Set[U]): +class PlainSet[U](_Set[U]): def __init__(self, *elements: U): self.elements = set(elements) @@ -228,6 +241,17 @@ def is_empty(self) -> bool: def __contains__(self, item: U) -> bool: return item in self.elements + def __eq__(self, value: Any) -> bool: + if not isinstance(value, PlainSet): + return False + return self.elements == value.elements + + def __hash__(self) -> int: + return sum(hash(e) for e in self.elements) + + def __repr__(self) -> str: + return f"PlainSet({', '.join(repr(e) for e in self.elements)})" + # class Empty[T](Set_[T]): # def __init__(self, units: Unit): @@ -440,7 +464,7 @@ def get_backing(r: Range[TQuant] | "Ranges[TQuant]"): self._ranges = _RangeUnion(*(get_backing(r) for r in ranges)) @staticmethod - def _from_ranges(ranges: _RangeUnion[T], units: Unit) -> "Ranges[TQuant]": + def _from_ranges(ranges: "_RangeUnion[T]", units: Unit) -> "Ranges[TQuant]": r = Ranges.__new__(Ranges) r._ranges = ranges r.units = units diff --git a/test/library/nodes/test_electricpower.py b/test/library/nodes/test_electricpower.py index 882b9998..49448f24 100644 --- a/test/library/nodes/test_electricpower.py +++ b/test/library/nodes/test_electricpower.py @@ -2,7 +2,9 @@ # SPDX-License-Identifier: MIT +from faebryk.core.solver import DefaultSolver from faebryk.libs.library import L +from faebryk.libs.sets import PlainSet def test_fused_power(): @@ -22,6 +24,14 @@ def test_fused_power(): fuse.trip_current.operation_is_subset( L.Range.from_center_rel(500 * P.mA, 0.1) - ).assert_true() - power_out.voltage.operation_is_subset(10 
* P.V).assert_true() - power_out.max_current.operation_is_le(500 * P.mA * 0.9).assert_true() + ).inspect_add_on_final(lambda x: x.inspect_known_values() == PlainSet(True)) + power_out.voltage.operation_is_subset(10 * P.V).inspect_add_on_final( + lambda x: x.inspect_known_values() == PlainSet(True) + ) + power_out.max_current.operation_is_le(500 * P.mA * 0.9).inspect_add_on_final( + lambda x: x.inspect_known_values() == PlainSet(True) + ) + + graph = power_in.get_graph() + solver = DefaultSolver() + solver.finalize(graph) From e5f1e16b526d001d615716666c959160e4f94f64 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Mon, 21 Oct 2024 11:43:52 +0200 Subject: [PATCH 41/80] Add tests with units; Fix mul & div; Move things around --- src/faebryk/libs/sets.py | 441 +++++++++++++-------------------------- test/libs/test_sets.py | 43 +++- 2 files changed, 185 insertions(+), 299 deletions(-) diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 770173c3..d23abba9 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -2,35 +2,72 @@ # SPDX-License-Identifier: MIT from collections.abc import Generator, Iterable -from typing import Any, Protocol, TypeVar +from typing import Any, Protocol, TypeVar, cast from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless -# class _SupportsRangeOps(Protocol): -# def __le__(self, __value) -> bool: ... -# def __lt__(self, __value) -> bool: ... -# def __ge__(self, __value) -> bool: ... -# def __gt__(self, __value) -> bool: ... -# -# def __sub__(self, __value: Self) -> Self: ... -# def __add__(self, __value: Self) -> Self: ... -# -# -# class _SupportsArithmeticOpsWithFloatMul(_SupportsRangeOps, Protocol): -# def __mul__(self, __value: float | Self) -> Self: ... - - -class _Set[T](Protocol): +# Protocols ---------------------------------------------------------------------------- + + +class P_Set[T](Protocol): def is_empty(self) -> bool: ... def __contains__(self, item: T) -> bool: ... -T = TypeVar("T", int, float, contravariant=False, covariant=False) +class P_UnitSet[T](P_Set[T], HasUnit, Protocol): ... 
+ + +# -------------------------------------------------------------------------------------- + +# Types -------------------------------------------------------------------------------- + +NumericT = TypeVar("NumericT", int, float, contravariant=False, covariant=False) +QuantityT = TypeVar( + "QuantityT", int, float, Quantity, contravariant=False, covariant=False +) + + +# -------------------------------------------------------------------------------------- + +# Helpers ------------------------------------------------------------------------------ + +def base_units(units: Unit) -> Unit: + return cast(Unit, Quantity(1, units).to_base_units().units) + + +# -------------------------------------------------------------------------------------- +# Generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class PlainSet[U](P_Set[U]): + def __init__(self, *elements: U): + self.elements = set(elements) + + def is_empty(self) -> bool: + return len(self.elements) == 0 -class _Range(_Set[T]): - def __init__(self, min: T, max: T): + def __contains__(self, item: U) -> bool: + return item in self.elements + + def __eq__(self, value: Any) -> bool: + if not isinstance(value, PlainSet): + return False + return self.elements == value.elements + + def __hash__(self) -> int: + return sum(hash(e) for e in self.elements) + + def __repr__(self) -> str: + return f"PlainSet({', '.join(repr(e) for e in self.elements)})" + + +# Numeric ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class _N_Range(P_Set[NumericT]): + def __init__(self, min: NumericT, max: NumericT): if not min <= max: raise ValueError("min must be less than or equal to max") self.min = min @@ -39,20 +76,20 @@ def __init__(self, min: T, max: T): def is_empty(self) -> bool: return False - def min_elem(self) -> T: + def min_elem(self) -> NumericT: return self.min - def op_add_range(self, other: "_Range[T]") -> "_Range[T]": - return _Range(self.min + other.min, self.max + other.max) + def op_add_range(self, other: "_N_Range[NumericT]") -> "_N_Range[NumericT]": + return _N_Range(self.min + other.min, self.max + other.max) - def op_negate(self) -> "_Range[T]": - return _Range(-self.max, -self.min) + def op_negate(self) -> "_N_Range[NumericT]": + return _N_Range(-self.max, -self.min) - def op_subtract_range(self, other: "_Range[T]") -> "_Range[T]": + def op_subtract_range(self, other: "_N_Range[NumericT]") -> "_N_Range[NumericT]": return self.op_add_range(other.op_negate()) - def op_mul_range(self, other: "_Range[T]") -> "_Range[T]": - return _Range( + def op_mul_range(self, other: "_N_Range[NumericT]") -> "_N_Range[NumericT]": + return _N_Range( min( self.min * other.min, self.min * other.max, @@ -67,47 +104,49 @@ def op_mul_range(self, other: "_Range[T]") -> "_Range[T]": ), ) - def op_invert(self) -> "_RangeUnion[float]": + def op_invert(self) -> "_N_Ranges[float]": if self.min == 0 == self.max: - return _NumericEmpty() + return _N_Empty() if self.min < 0 < self.max: - return _RangeUnion( - _Range(float("-inf"), 1 / self.min), - _Range(1 / self.max, float("inf")), + return _N_Ranges( + _N_Range(float("-inf"), 1 / self.min), + _N_Range(1 / self.max, float("inf")), ) elif self.min < 0 == self.max: - return _RangeUnion(_Range(float("-inf"), 1 / self.min)) + return _N_Ranges(_N_Range(float("-inf"), 1 / self.min)) elif self.min == 0 < self.max: - return _RangeUnion(_Range(1 / self.max, float("inf"))) + return _N_Ranges(_N_Range(1 / self.max, float("inf"))) else: - return 
_RangeUnion(_Range(1 / self.max, 1 / self.min)) + return _N_Ranges(_N_Range(1 / self.max, 1 / self.min)) def op_div_range( - self: "_Range[float]", other: "_Range[float]" - ) -> "_RangeUnion[float]": - return _RangeUnion(*(self.op_mul_range(o) for o in other.op_invert().ranges)) + self: "_N_Range[float]", other: "_N_Range[float]" + ) -> "_N_Ranges[float]": + return _N_Ranges(*(self.op_mul_range(o) for o in other.op_invert().ranges)) - def op_intersect_range(self, other: "_Range[T]") -> "_RangeUnion[T]": + def op_intersect_range(self, other: "_N_Range[NumericT]") -> "_N_Ranges[NumericT]": min_ = max(self.min, other.min) max_ = min(self.max, other.max) if min_ <= max_: - return _RangeUnion(_Range(min_, max_)) - return _NumericEmpty() + return _N_Ranges(_N_Range(min_, max_)) + return _N_Empty() - def maybe_merge_range(self, other: "_Range[T]") -> list["_Range[T]"]: + def maybe_merge_range( + self, other: "_N_Range[NumericT]" + ) -> list["_N_Range[NumericT]"]: is_left = self.min <= other.min left = self if is_left else other right = other if is_left else self if right.min in self: - return [_Range(left.min, max(left.max, right.max))] + return [_N_Range(left.min, max(left.max, right.max))] return [left, right] def __eq__(self, other: Any) -> bool: - if not isinstance(other, _Range): + if not isinstance(other, _N_Range): return False return self.min == other.min and self.max == other.max - def __contains__(self, item: T) -> bool: + def __contains__(self, item: NumericT) -> bool: return self.min <= item <= self.max def __hash__(self) -> int: @@ -117,20 +156,20 @@ def __repr__(self) -> str: return f"_Range({self.min}, {self.max})" -def _Single(value: T) -> _Range[T]: - return _Range(value, value) +def _N_Single(value: NumericT) -> _N_Range[NumericT]: + return _N_Range(value, value) -class _RangeUnion(_Set[T]): - def __init__(self, *ranges: _Range[T] | "_RangeUnion[T]"): - def gen_flat_non_empty() -> Generator[_Range[T]]: +class _N_Ranges(P_Set[NumericT]): + def __init__(self, *ranges: _N_Range[NumericT] | "_N_Ranges[NumericT]"): + def gen_flat_non_empty() -> Generator[_N_Range[NumericT]]: for r in ranges: if r.is_empty(): continue - if isinstance(r, _RangeUnion): + if isinstance(r, _N_Ranges): yield from r.ranges else: - assert isinstance(r, _Range) + assert isinstance(r, _N_Range) yield r non_empty_ranges = list(gen_flat_non_empty()) @@ -152,36 +191,36 @@ def gen_merge(): def is_empty(self) -> bool: return len(self.ranges) == 0 - def min_elem(self) -> T: + def min_elem(self) -> NumericT: if self.is_empty(): raise ValueError("empty range cannot have min element") return self.ranges[0].min_elem() - def op_add_ranges(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": - return _RangeUnion( + def op_add_ranges(self, other: "_N_Ranges[NumericT]") -> "_N_Ranges[NumericT]": + return _N_Ranges( *(r.op_add_range(o) for r in self.ranges for o in other.ranges) ) - def op_negate(self) -> "_RangeUnion[T]": - return _RangeUnion(*(r.op_negate() for r in self.ranges)) + def op_negate(self) -> "_N_Ranges[NumericT]": + return _N_Ranges(*(r.op_negate() for r in self.ranges)) - def op_subtract_ranges(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": + def op_subtract_ranges(self, other: "_N_Ranges[NumericT]") -> "_N_Ranges[NumericT]": return self.op_add_ranges(other.op_negate()) - def op_mul_ranges(self, other: "_RangeUnion[T]") -> "_RangeUnion[T]": - return _RangeUnion( + def op_mul_ranges(self, other: "_N_Ranges[NumericT]") -> "_N_Ranges[NumericT]": + return _N_Ranges( *(r.op_mul_range(o) for r in self.ranges 
for o in other.ranges) ) - def op_invert(self) -> "_RangeUnion[float]": - return _RangeUnion(*(r.op_invert() for r in self.ranges)) + def op_invert(self) -> "_N_Ranges[float]": + return _N_Ranges(*(r.op_invert() for r in self.ranges)) def op_div_ranges( - self: "_RangeUnion[float]", other: "_RangeUnion[float]" - ) -> "_RangeUnion[float]": + self: "_N_Ranges[float]", other: "_N_Ranges[float]" + ) -> "_N_Ranges[float]": return self.op_mul_ranges(other.op_invert()) - def __contains__(self, item: T) -> bool: + def __contains__(self, item: NumericT) -> bool: from bisect import bisect index = bisect(self.ranges, item, key=lambda r: r.min_elem()) @@ -191,7 +230,7 @@ def __contains__(self, item: T) -> bool: return item in self.ranges[index - 1] def __eq__(self, value: Any) -> bool: - if not isinstance(value, _RangeUnion): + if not isinstance(value, _N_Ranges): return False if len(self.ranges) != len(value.ranges): return False @@ -206,79 +245,33 @@ def __hash__(self) -> int: def __repr__(self) -> str: return f"_RangeUnion({', '.join(f"[{r.min}, {r.max}]" for r in self.ranges)})" - def __iter__(self) -> Generator[_Range[T]]: - yield from self.ranges - -class _RangeUnionIter(_RangeUnion[T], Iterable[_Range[T]]): - def __iter__(self) -> Generator[_Range[T]]: +class _N_RangesIterable(_N_Ranges[NumericT], Iterable[_N_Range[NumericT]]): + def __iter__(self) -> Generator[_N_Range[NumericT]]: yield from self.ranges -class _Singles(_RangeUnion[T], Iterable[T]): - def __init__(self, *values: T): - super().__init__(*(_Single(v) for v in values)) +class _N_Singles(_N_Ranges[NumericT], Iterable[NumericT]): + def __init__(self, *values: NumericT): + super().__init__(*(_N_Single(v) for v in values)) - def __iter__(self) -> Generator[T]: + def __iter__(self) -> Generator[NumericT]: for r in self.ranges: yield r.min -__numeric_empty = _RangeUnion() +def _N_Empty() -> _N_Ranges: + return _N_Ranges() -def _NumericEmpty() -> _RangeUnion: - return __numeric_empty +# Units ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -class PlainSet[U](_Set[U]): - def __init__(self, *elements: U): - self.elements = set(elements) - - def is_empty(self) -> bool: - return len(self.elements) == 0 - - def __contains__(self, item: U) -> bool: - return item in self.elements - - def __eq__(self, value: Any) -> bool: - if not isinstance(value, PlainSet): - return False - return self.elements == value.elements - - def __hash__(self) -> int: - return sum(hash(e) for e in self.elements) - - def __repr__(self) -> str: - return f"PlainSet({', '.join(repr(e) for e in self.elements)})" - - -# class Empty[T](Set_[T]): -# def __init__(self, units: Unit): -# super().__init__(True, units) -# -# def __contains__(self, item: T): -# return False -# -# def min_elem(self) -> T | None: -# return None - - -class UnitSet[T](_Set[T], HasUnit, Protocol): ... 
- - -TQuant = TypeVar("TQuant", int, float, Quantity, contravariant=False, covariant=False) - - -def base_units(units: Unit) -> Unit: - return Quantity(1, units).to_base_units().units - - -class Range(UnitSet[TQuant]): +class Range(P_UnitSet[QuantityT]): def __init__( self, - min: TQuant | None = None, - max: TQuant | None = None, + min: QuantityT | None = None, + max: QuantityT | None = None, units: Unit | None = None, ): if min is None and max is None: @@ -330,27 +323,27 @@ def __init__( if num_min is None or num_max is None: raise ValueError("min and max must be provided for ints") - self._range = _Range(num_min, num_max) + self._range = _N_Range(num_min, num_max) @staticmethod - def from_center(center: TQuant, abs_tol: TQuant) -> "Range[TQuant]": + def from_center(center: QuantityT, abs_tol: QuantityT) -> "Range[QuantityT]": left = center - abs_tol right = center + abs_tol return Range(left, right) @staticmethod - def from_center_rel(center: TQuant, rel_tol: float) -> "Range[TQuant]": + def from_center_rel(center: QuantityT, rel_tol: float) -> "Range[QuantityT]": return Range(center - center * rel_tol, center + center * rel_tol) @staticmethod - def _from_range(range: _Range[T], units: Unit) -> "Range[TQuant]": + def _from_range(range: _N_Range[NumericT], units: Unit) -> "Range[QuantityT]": return Range( min=Quantity(range.min, base_units(units)), max=Quantity(range.max, base_units(units)), units=units, ) - def base_to_units(self, value: T) -> Quantity: + def base_to_units(self, value: NumericT) -> Quantity: return Quantity(value, self.range_units).to(self.units) def min_elem(self) -> Quantity: @@ -359,41 +352,37 @@ def min_elem(self) -> Quantity: def is_empty(self) -> bool: return self._range.is_empty() - def op_intersect_range(self, other: "Range[TQuant]") -> "Ranges[TQuant]": + def op_intersect_range(self, other: "Range[QuantityT]") -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): return Ranges(units=self.units) _range = self._range.op_intersect_range(other._range) return Ranges._from_ranges(_range, self.units) - def op_add_range(self, other: "Range[TQuant]") -> "Range[TQuant]": + def op_add_range(self, other: "Range[QuantityT]") -> "Range[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._range.op_add_range(other._range) return Range._from_range(_range, self.units) - def op_negate(self) -> "Range[TQuant]": + def op_negate(self) -> "Range[QuantityT]": _range = self._range.op_negate() return Range._from_range(_range, self.units) - def op_subtract_range(self, other: "Range[TQuant]") -> "Range[TQuant]": + def op_subtract_range(self, other: "Range[QuantityT]") -> "Range[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._range.op_subtract_range(other._range) return Range._from_range(_range, self.units) - def op_mul_range(self, other: "Range[TQuant]") -> "Range[TQuant]": - if not self.units.is_compatible_with(other.units): - raise ValueError("incompatible units") + def op_mul_range(self, other: "Range[QuantityT]") -> "Range[QuantityT]": _range = self._range.op_mul_range(other._range) return Range._from_range(_range, self.units * other.units) - def op_invert(self) -> "Ranges[TQuant]": + def op_invert(self) -> "Ranges[QuantityT]": _range = self._range.op_invert() return Ranges._from_ranges(_range, 1 / self.units) - def op_div_range(self, other: "Range[TQuant]") -> "Ranges[TQuant]": - if not self.units.is_compatible_with(other.units): 
- raise ValueError("incompatible units") + def op_div_range(self, other: "Range[QuantityT]") -> "Ranges[QuantityT]": _range = self._range.op_div_range(other._range) return Ranges._from_ranges(_range, self.units / other.units) @@ -437,13 +426,15 @@ def __repr__(self) -> str: return f"Range({self.base_to_units(self._range.min)}, {self.base_to_units(self._range.max)} | {self.units})" -def Single(value: TQuant) -> Range[TQuant]: +def Single(value: QuantityT) -> Range[QuantityT]: return Range(value, value) -class Ranges(UnitSet[TQuant]): +class Ranges(P_UnitSet[QuantityT]): def __init__( - self, *ranges: Range[TQuant] | "Ranges[TQuant]", units: Unit | None = None + self, + *ranges: Range[QuantityT] | "Ranges[QuantityT]", + units: Unit | None = None, ): range_units = [ r.units if isinstance(r, HasUnit) else dimensionless for r in ranges @@ -455,16 +446,16 @@ def __init__( if not all(self.units.is_compatible_with(u) for u in range_units): raise ValueError("all elements must have compatible units") - def get_backing(r: Range[TQuant] | "Ranges[TQuant]"): + def get_backing(r: Range[QuantityT] | "Ranges[QuantityT]"): if isinstance(r, Range): return r._range else: return r._ranges - self._ranges = _RangeUnion(*(get_backing(r) for r in ranges)) + self._ranges = _N_Ranges(*(get_backing(r) for r in ranges)) @staticmethod - def _from_ranges(ranges: "_RangeUnion[T]", units: Unit) -> "Ranges[TQuant]": + def _from_ranges(ranges: "_N_Ranges[NumericT]", units: Unit) -> "Ranges[QuantityT]": r = Ranges.__new__(Ranges) r._ranges = ranges r.units = units @@ -474,41 +465,41 @@ def _from_ranges(ranges: "_RangeUnion[T]", units: Unit) -> "Ranges[TQuant]": def is_empty(self) -> bool: return self._ranges.is_empty() - def base_to_units(self, value: T) -> Quantity: + def base_to_units(self, value: NumericT) -> Quantity: return Quantity(value, self.range_units).to(self.units) - def min_elem(self) -> TQuant: + def min_elem(self) -> QuantityT: if self.is_empty(): raise ValueError("empty range cannot have min element") return self.base_to_units(self._ranges.min_elem()) - def op_add_ranges(self, other: "Ranges[TQuant]") -> "Ranges[TQuant]": + def op_add_ranges(self, other: "Ranges[QuantityT]") -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._ranges.op_add_ranges(other._ranges) return Ranges._from_ranges(_range, self.units) - def op_negate(self) -> "Ranges[TQuant]": + def op_negate(self) -> "Ranges[QuantityT]": _range = self._ranges.op_negate() return Ranges._from_ranges(_range, self.units) - def op_subtract_ranges(self, other: "Ranges[TQuant]") -> "Ranges[TQuant]": + def op_subtract_ranges(self, other: "Ranges[QuantityT]") -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._ranges.op_subtract_ranges(other._ranges) return Ranges._from_ranges(_range, self.units) - def op_mul_ranges(self, other: "Ranges[TQuant]") -> "Ranges[TQuant]": + def op_mul_ranges(self, other: "Ranges[QuantityT]") -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._ranges.op_mul_ranges(other._ranges) return Ranges._from_ranges(_range, self.units * other.units) - def op_invert(self) -> "Ranges[TQuant]": + def op_invert(self) -> "Ranges[QuantityT]": _range = self._ranges.op_invert() return Ranges._from_ranges(_range, 1 / self.units) - def op_div_ranges(self, other: "Ranges[TQuant]") -> "Ranges[TQuant]": + def op_div_ranges(self, 
other: "Ranges[QuantityT]") -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._ranges.op_div_ranges(other._ranges) @@ -541,147 +532,9 @@ def __repr__(self) -> str: return f"_RangeUnion({', '.join(f"[{self.base_to_units(r.min)}, {self.base_to_units(r.max)}]" for r in self._ranges.ranges)} | {self.units})" -def UnitEmpty(units: Unit) -> Ranges[TQuant]: +def Empty(units: Unit) -> Ranges[QuantityT]: return Ranges(units=units) -def Singles(*values: TQuant, units: Unit | None = None) -> Ranges[TQuant]: +def Singles(*values: QuantityT, units: Unit | None = None) -> Ranges[QuantityT]: return Ranges(*(Single(v) for v in values), units=units) - - -# class Set[T](Union[T]): -# def __init__(self, *elements: T, units: Unit | None = None): -# super().__init__(*(Single(e) for e in elements), units=units) -# -# def __contains__(self, item: T): -# return Single(item) in self.elements -# -# -# def operation_add[T: _SupportsRangeOps]( -# *sets: Set_[T], -# ) -> Set_[_SupportsRangeOps]: -# def add_singles(*singles: Single[T]) -> T: -# if len(singles) == 0: -# return 0 -# return sum(s.value for s in singles) -# -# def add_ranges(*ranges: Range[T], offset: T) -> list[Range[T]]: -# if len(ranges) == 0: -# return [] -# return [ -# Range( -# min=sum(r.min for r in ranges) + offset, -# max=sum(r.max for r in ranges) + offset, -# ) -# ] -# -# if any(s.empty for s in sets): -# return Empty(units=sets[0].units) -# -# def group(set: Set_[T]) -> str: -# if isinstance(set, Single): -# return "single" -# if isinstance(set, Range): -# return "range" -# return "union" -# -# grouped_sets = groupby(sets, key=group) -# singles = grouped_sets["single"] -# ranges = grouped_sets["range"] -# unions = grouped_sets["union"] -# single_offset = add_singles(*singles) -# range_sum = add_ranges(*ranges, offset=single_offset) -# -# if len(range_sum) > 0: -# recursion_set = range_sum -# elif len(singles) > 0: -# recursion_set = [Single(single_offset)] -# else: -# recursion_set = [] -# -# if len(unions) == 0: -# assert len(recursion_set) == 1 -# return recursion_set[0] -# return Union( # TODO this is exponential, we'll want to defer the computation -# *(operation_add(e, *unions[1:], *recursion_set) for e in unions[0].elements) -# ) -# -# -# def operation_negate[T: _SupportsRangeOps]( -# *sets: Set_[T], -# ) -> list[Set_[_SupportsRangeOps]]: -# def negate(set: Set_[T]) -> Set_[T]: -# if isinstance(set, Single): -# return Single(-set.value) -# if isinstance(set, Range): -# return Range(-set.max, -set.min) -# return Union(*(negate(e) for e in set.elements)) -# -# return [negate(e) for e in sets] -# -# -# def operation_subtract[T: _SupportsRangeOps]( -# first: Set_[T], -# *sets: Set_[T], -# ) -> Set_[_SupportsRangeOps]: -# return operation_add(first, *operation_negate(*sets)) -# -# -# def operation_mul[T: _SupportsRangeOps]( -# *sets: Set_[T], -# ) -> Set_[_SupportsRangeOps]: -# def mul_singles(*singles: Single[T]) -> Single[T]: -# return Single(math.prod((s.value for s in singles), start=1)) -# -# def mul_ranges(r1: Range[T], r2: Range[T]) -> Range[T]: -# return Range( -# min=min(r1.min * r2.min, r1.min * r2.max, r1.max * r2.min, r1.max * r2.max), -# max=max(r1.min * r2.min, r1.min * r2.max, r1.max * r2.min, r1.max * r2.max), -# ) -# -# def mul_single_range(single: Single[T], range: Range[T]) -> Range[T]: -# if single.value < 0: -# return Range(min=single.value * range.max, max=single.value * range.min) -# return Range(min=single.value * range.min, max=single.value * 
range.max) -# -# def mul_range_list( -# *ranges: Range[T], factor: Single[T] = Single(1) -# ) -> list[Range[T]]: -# if len(ranges) == 0: -# return [] -# first, *rest = ranges -# first = mul_single_range(factor, first) -# for r in rest: -# first = mul_ranges(first, r) -# return [first] -# -# if any(s.empty for s in sets): -# return Empty(units=sets[0].units) -# -# def group(set: Set_[T]) -> str: -# if isinstance(set, Single): -# return "single" -# if isinstance(set, Range): -# return "range" -# return "union" -# -# grouped_sets = groupby(sets, key=group) -# singles = grouped_sets["single"] -# ranges = grouped_sets["range"] -# unions = grouped_sets["union"] -# single_product = mul_singles(*singles) -# range_product = mul_range_list(*ranges, factor=single_product) -# -# if len(range_product) > 0: -# recursion_set = range_product -# elif len(singles) > 0: -# recursion_set = [single_product] -# else: -# recursion_set = [] -# -# if len(unions) == 0: -# assert len(recursion_set) == 1 -# return recursion_set[0] -# return Union( # TODO this is exponential, we'll want to defer the computation -# *(operation_mul(e, *unions[1:], *recursion_set) for e in unions[0].elements) -# ) diff --git a/test/libs/test_sets.py b/test/libs/test_sets.py index 4405620a..9138a7d0 100644 --- a/test/libs/test_sets.py +++ b/test/libs/test_sets.py @@ -2,14 +2,13 @@ # SPDX-License-Identifier: MIT import pytest -from pint import DimensionalityError from faebryk.libs.sets import ( + Empty, Range, Ranges, Single, Singles, - UnitEmpty, ) from faebryk.libs.units import P, Unit, dimensionless from faebryk.libs.util import cast_assert @@ -24,7 +23,7 @@ def test_range_intersection_simple(): def test_range_intersection_empty(): x = Range(0, 10) y = x.op_intersect_range(Range(15, 20)) - assert y == UnitEmpty(dimensionless) + assert y == Empty(dimensionless) def test_range_unit_none(): @@ -104,12 +103,18 @@ def test_union_contains(): def test_union_empty(): x = Ranges( - UnitEmpty(dimensionless), - Ranges(UnitEmpty(dimensionless), Singles(units=dimensionless)), + Empty(dimensionless), + Ranges(Empty(dimensionless), Singles(units=dimensionless)), ) assert x.is_empty() +def test_add_empty(): + assert (Empty(dimensionless).op_add_ranges(Ranges(Range(0, 1)))) == Empty( + dimensionless + ) + + def test_addition(): assert Range(0, 1).op_add_range(Range(2, 3)) == Range(2, 4) assert Range(0, 1).op_add_range(Single(2)) == Range(2, 3) @@ -121,11 +126,23 @@ def test_addition(): ) == Ranges(Range(10, 11), Range(110, 111), Range(20, 22), Range(120, 122)) +def test_addition_unit(): + assert Range(0 * P.V, 1 * P.V).op_add_range(Range(2 * P.V, 3 * P.V)) == Range( + 2 * P.V, 4 * P.V + ) + + def test_subtraction(): assert Range(0, 1).op_subtract_range(Range(2, 3)) == Range(-3, -1) assert Range(0, 1).op_subtract_range(Single(2)) == Range(-2, -1) +def test_subtraction_unit(): + assert Range(0 * P.V, 1 * P.V).op_subtract_range(Range(2 * P.V, 3 * P.V)) == Range( + -3 * P.V, -1 * P.V + ) + + def test_multiplication(): assert Range(0, 2).op_mul_range(Range(2, 3)) == Range(0, 6) assert Range(0, 1).op_mul_range(Single(2)) == Range(0, 2) @@ -137,6 +154,12 @@ def test_multiplication(): ) == Ranges(Range(0, 0), Range(-2, 0), Range(-3, 0)) +def test_multiplication_unit(): + assert Range(0 * P.V, 2 * P.V).op_mul_range(Range(2 * P.A, 3 * P.A)) == Range( + 0 * P.W, 6 * P.W + ) + + def test_invert(): assert Range(1, 2).op_invert() == Range(0.5, 1) assert Range(-2, -1).op_invert() == Range(-1, -0.5) @@ -148,6 +171,16 @@ def test_invert(): ) +def test_invert_unit(): + 
assert Range(1 * P.V, 2 * P.V).op_invert() == Range(1 / (2 * P.V), 1 / (1 * P.V)) + + def test_division(): assert Range(0, 1).op_div_range(Range(2, 3)) == Range(0, 0.5) assert Range(0, 1).op_div_range(Range(0, 3)) == Range(min=0.0) + + +def test_division_unit(): + assert Range(0 * P.V, 1 * P.V).op_div_range(Range(2 * P.A, 3 * P.A)) == Range( + 0 * P.ohm, 1 / 2 * P.ohm + ) From f12da6802176b14a64f9f727f8266dcd3d729db7 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Mon, 21 Oct 2024 13:24:51 +0200 Subject: [PATCH 42/80] Minor solve API fixes Return predicates on ops Solver named return Fix example pickers Add test lib for easy solve query --- src/faebryk/core/parameter.py | 24 ++--- src/faebryk/core/solver.py | 28 +++--- src/faebryk/libs/examples/pickers.py | 109 +++++++++++++---------- src/faebryk/libs/picker/picker.py | 30 ++++--- src/faebryk/libs/test/solver.py | 21 +++++ test/library/nodes/test_electricpower.py | 20 ++--- 6 files changed, 137 insertions(+), 95 deletions(-) create mode 100644 src/faebryk/libs/test/solver.py diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 42ebaf75..ef24ff07 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -88,29 +88,29 @@ def operation_difference(self, other: Sets) -> "Expression": ... def operation_symmetric_difference(self, other: Sets) -> "Expression": ... - def operation_and(self, other: BooleanLike) -> "Expression": ... + def operation_and(self, other: BooleanLike) -> "Predicate": ... - def operation_or(self, other: BooleanLike) -> "Expression": ... + def operation_or(self, other: BooleanLike) -> "Predicate": ... - def operation_not(self) -> "Expression": ... + def operation_not(self) -> "Predicate": ... - def operation_xor(self, other: BooleanLike) -> "Expression": ... + def operation_xor(self, other: BooleanLike) -> "Predicate": ... - def operation_implies(self, other: BooleanLike) -> "Expression": ... + def operation_implies(self, other: BooleanLike) -> "Predicate": ... - def operation_is_le(self, other: NumberLike) -> "Expression": ... + def operation_is_le(self, other: NumberLike) -> "Predicate": ... - def operation_is_ge(self, other: NumberLike) -> "Expression": ... + def operation_is_ge(self, other: NumberLike) -> "Predicate": ... - def operation_is_lt(self, other: NumberLike) -> "Expression": ... + def operation_is_lt(self, other: NumberLike) -> "Predicate": ... - def operation_is_gt(self, other: NumberLike) -> "Expression": ... + def operation_is_gt(self, other: NumberLike) -> "Predicate": ... - def operation_is_ne(self, other: NumberLike) -> "Expression": ... + def operation_is_ne(self, other: NumberLike) -> "Predicate": ... - def operation_is_subset(self, other: Sets) -> "Expression": ... + def operation_is_subset(self, other: Sets) -> "Predicate": ... - def operation_is_superset(self, other: Sets) -> "Expression": ... + def operation_is_superset(self, other: Sets) -> "Predicate": ... def inspect_known_min(self: NumberLike) -> Number: ... 
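Taken together with the inspect hooks introduced above, these Predicate-returning operations are meant to be chained and then checked only once the solver finalizes. A small sketch, assuming `voltage` is a Parameter carrying volt units (only APIs shown in these diffs are used):

    ok = voltage.operation_is_subset(L.Range(3.0 * P.V, 3.6 * P.V))
    ok.inspect_add_on_final(
        lambda expr: print(expr.inspect_known_values())  # runs on solver.finalize()
    )
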
diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index 752646f7..28916984 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -1,10 +1,20 @@ +from dataclasses import dataclass from typing import Any, Protocol from faebryk.core.graph import Graph -from faebryk.core.parameter import Expression, Parameter, Predicate +from faebryk.core.parameter import Expression, Parameter, ParameterOperatable, Predicate class Solver(Protocol): + # TODO booleanlike is very permissive + type PredicateWithInfo[ArgType] = tuple[ParameterOperatable.BooleanLike, ArgType] + + @dataclass + class SolveResult[ArgType]: + true_predicates: list["Solver.PredicateWithInfo[ArgType]"] + false_predicates: list["Solver.PredicateWithInfo[ArgType]"] + unknown_predicates: list["Solver.PredicateWithInfo[ArgType]"] + # timeout per solve call in milliseconds timeout: int # threads: int @@ -36,16 +46,14 @@ def get_any_single( # - the first list contains the predicates that were actually solved, i.e. they are true/false # - the second list contains the expressions that remain unknown # - the third list contains the parameters that have an empty solution set - def assert_any_predicate( + def assert_any_predicate[ArgType]( self, G: Graph, - predicates: list[tuple[Predicate, Any]], + predicates: list["Solver.PredicateWithInfo[ArgType]"], suppose_constraint: Predicate | None = None, minimize: Expression | None = None, constrain_solved: bool = True, - ) -> tuple[ - list[tuple[Predicate, Any]], list[tuple[Predicate, Any]], list[Parameter] - ]: ... + ) -> SolveResult[ArgType]: ... # run deferred work def finalize(self, G: Graph) -> None: ... @@ -64,16 +72,14 @@ def get_any_single( ): raise NotImplementedError() - def assert_any_predicate( + def assert_any_predicate[ArgType]( self, G: Graph, - predicates: list[Predicate], + predicates: list["Solver.PredicateWithInfo[ArgType]"], suppose_constraint: Predicate | None = None, minimize: Expression | None = None, constrain_solved: bool = True, - ) -> tuple[ - list[tuple[Predicate, Any]], list[tuple[Predicate, Any]], list[Parameter] - ]: + ) -> Solver.SolveResult[ArgType]: raise NotImplementedError() def finalize(self, G: Graph) -> None: diff --git a/src/faebryk/libs/examples/pickers.py b/src/faebryk/libs/examples/pickers.py index d68bf3d1..178da375 100644 --- a/src/faebryk/libs/examples/pickers.py +++ b/src/faebryk/libs/examples/pickers.py @@ -8,9 +8,9 @@ import logging from typing import TYPE_CHECKING -from faebryk.core.solver import DefaultSolver import faebryk.library._F as F from faebryk.core.module import Module +from faebryk.core.solver import DefaultSolver, Solver from faebryk.libs.library import L from faebryk.libs.picker.lcsc import LCSC_Part from faebryk.libs.picker.picker import PickerOption, pick_module_by_params @@ -22,23 +22,27 @@ logger = logging.getLogger(__name__) -def pick_fuse(module: F.Fuse): +# TODO replace Single with actual Range.from_center_rel + + +def pick_fuse(module: F.Fuse, solver: Solver): pick_module_by_params( module, + solver, [ PickerOption( part=LCSC_Part(partno="C914087"), params={ - "fuse_type": L.Single(F.Fuse.FuseType.RESETTABLE), - "response_type": L.Single(F.Fuse.ResponseType.SLOW), + "fuse_type": L.PlainSet(F.Fuse.FuseType.RESETTABLE), + "response_type": L.PlainSet(F.Fuse.ResponseType.SLOW), "trip_current": 1 * P.A, }, ), PickerOption( part=LCSC_Part(partno="C914085"), params={ - "fuse_type": L.Single(F.Fuse.FuseType.RESETTABLE), - "response_type": L.Single(F.Fuse.ResponseType.SLOW), + "fuse_type": 
L.PlainSet(F.Fuse.FuseType.RESETTABLE), + "response_type": L.PlainSet(F.Fuse.ResponseType.SLOW), "trip_current": 0.5 * P.A, }, ), @@ -46,7 +50,7 @@ def pick_fuse(module: F.Fuse): ) -def pick_mosfet(module: F.MOSFET): +def pick_mosfet(module: F.MOSFET, solver: Solver): standard_pinmap = { "1": module.gate, "2": module.source, @@ -54,18 +58,19 @@ def pick_mosfet(module: F.MOSFET): } pick_module_by_params( module, + solver, [ PickerOption( part=LCSC_Part(partno="C20917"), params={ - "channel_type": L.Single(F.MOSFET.ChannelType.N_CHANNEL), + "channel_type": L.PlainSet(F.MOSFET.ChannelType.N_CHANNEL), }, pinmap=standard_pinmap, ), PickerOption( part=LCSC_Part(partno="C15127"), params={ - "channel_type": L.Single(F.MOSFET.ChannelType.P_CHANNEL), + "channel_type": L.PlainSet(F.MOSFET.ChannelType.P_CHANNEL), }, pinmap=standard_pinmap, ), @@ -73,7 +78,7 @@ def pick_mosfet(module: F.MOSFET): ) -def pick_capacitor(module: F.Capacitor): +def pick_capacitor(module: F.Capacitor, solver: Solver): """ Link a partnumber/footprint to a Capacitor @@ -82,6 +87,7 @@ def pick_capacitor(module: F.Capacitor): pick_module_by_params( module, + solver, [ PickerOption( part=LCSC_Part(partno="C1525"), @@ -90,8 +96,8 @@ def pick_capacitor(module: F.Capacitor): F.Capacitor.TemperatureCoefficient.Y5V, F.Capacitor.TemperatureCoefficient.X7R, ), - "capacitance": 100 * P.nF, - "max_voltage": L.Range(0 * P.V, 16 * P.V), + "capacitance": L.Single(100 * P.nF), + "max_voltage": 16 * P.V, }, ), PickerOption( @@ -101,15 +107,15 @@ def pick_capacitor(module: F.Capacitor): F.Capacitor.TemperatureCoefficient.Y5V, F.Capacitor.TemperatureCoefficient.X7R, ), - "capacitance": 10 * P.uF, - "max_voltage": L.Range(0 * P.V, 10 * P.V), + "capacitance": L.Single(10 * P.uF), + "max_voltage": 10 * P.V, }, ), ], ) -def pick_resistor(resistor: F.Resistor): +def pick_resistor(resistor: F.Resistor, solver: Solver): """ Link a partnumber/footprint to a Resistor @@ -118,77 +124,79 @@ def pick_resistor(resistor: F.Resistor): pick_module_by_params( resistor, + solver, [ PickerOption( part=LCSC_Part(partno="C25111"), - params={"resistance": 40.2 * P.kohm}, + params={"resistance": L.Single(40.2 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25076"), - params={"resistance": 100 * P.kohm}, + params={"resistance": L.Single(100 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25087"), - params={"resistance": 200 * P.kohm}, + params={"resistance": L.Single(200 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C11702"), - params={"resistance": 1 * P.kohm}, + params={"resistance": L.Single(1 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25879"), - params={"resistance": 2.2 * P.kohm}, + params={"resistance": L.Single(2.2 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25900"), - params={"resistance": 4.7 * P.kohm}, + params={"resistance": L.Single(4.7 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25905"), - params={"resistance": 5.1 * P.kohm}, + params={"resistance": L.Single(5.1 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25917"), - params={"resistance": 6.8 * P.kohm}, + params={"resistance": L.Single(6.8 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25744"), - params={"resistance": 10 * P.kohm}, + params={"resistance": L.Single(10 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25752"), - params={"resistance": 12 * P.kohm}, + params={"resistance": L.Single(12 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25771"), - params={"resistance": 27 * P.kohm}, + params={"resistance": L.Single(27 * P.kohm)}, ), 
PickerOption( part=LCSC_Part(partno="C25741"), - params={"resistance": 100 * P.kohm}, + params={"resistance": L.Single(100 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25782"), - params={"resistance": 390 * P.kohm}, + params={"resistance": L.Single(390 * P.kohm)}, ), PickerOption( part=LCSC_Part(partno="C25790"), - params={"resistance": 470 * P.kohm}, + params={"resistance": L.Single(470 * P.kohm)}, ), ], ) -def pick_led(module: F.LED): +def pick_led(module: F.LED, solver: Solver): pick_module_by_params( module, + solver, [ PickerOption( part=LCSC_Part(partno="C72043"), params={ - "color": L.Single(F.LED.Color.EMERALD), + "color": L.PlainSet(F.LED.Color.EMERALD), "max_brightness": 285 * P.mcandela, - "forward_voltage": 3.7 * P.volt, + "forward_voltage": L.Single(3.7 * P.volt), "max_current": 100 * P.mA, }, pinmap={"1": module.cathode, "2": module.anode}, @@ -196,9 +204,9 @@ def pick_led(module: F.LED): PickerOption( part=LCSC_Part(partno="C72041"), params={ - "color": L.Single(F.LED.Color.BLUE), + "color": L.PlainSet(F.LED.Color.BLUE), "max_brightness": 28.5 * P.mcandela, - "forward_voltage": 3.1 * P.volt, + "forward_voltage": L.Single(3.1 * P.volt), "max_current": 100 * P.mA, }, pinmap={"1": module.cathode, "2": module.anode}, @@ -206,9 +214,9 @@ def pick_led(module: F.LED): PickerOption( part=LCSC_Part(partno="C72038"), params={ - "color": L.Single(F.LED.Color.YELLOW), + "color": L.PlainSet(F.LED.Color.YELLOW), "max_brightness": 180 * P.mcandela, - "forward_voltage": 2.3 * P.volt, + "forward_voltage": L.Single(2.3 * P.volt), "max_current": 60 * P.mA, }, pinmap={"1": module.cathode, "2": module.anode}, @@ -217,14 +225,15 @@ def pick_led(module: F.LED): ) -def pick_tvs(module: F.TVS): +def pick_tvs(module: F.TVS, solver: Solver): pick_module_by_params( module, + solver, [ PickerOption( part=LCSC_Part(partno="C85402"), params={ - "reverse_working_voltage": 5 * P.V, + "reverse_working_voltage": L.Single(5 * P.V), }, pinmap={ "1": module.cathode, @@ -235,28 +244,31 @@ def pick_tvs(module: F.TVS): ) -def pick_battery(module): +def pick_battery(module: F.Battery | Module, solver: Solver): if not isinstance(module, F.Battery): raise ValueError("Module is not a Battery") if not isinstance(module, F.ButtonCell): bcell = F.ButtonCell() module.specialize(bcell) bcell.add( - F.has_multi_picker(0, F.has_multi_picker.FunctionPicker(pick_battery)) + F.has_multi_picker( + 0, F.has_multi_picker.FunctionPicker(pick_battery, solver) + ) ) return pick_module_by_params( module, + solver, [ PickerOption( part=LCSC_Part(partno="C5239862"), params={ - "voltage": 3 * P.V, + "voltage": L.Single(3 * P.V), "capacity": L.Range.from_center(225 * P.mAh, 50 * P.mAh), - "material": L.Single(F.ButtonCell.Material.Lithium), + "material": L.PlainSet(F.ButtonCell.Material.Lithium), "size": L.Single(F.ButtonCell.Size.N_2032), - "shape": L.Single(F.ButtonCell.Shape.Round), + "shape": L.PlainSet(F.ButtonCell.Shape.Round), }, pinmap={ "1": module.power.lv, @@ -267,18 +279,19 @@ def pick_battery(module): ) -def pick_switch(module: "_TSwitch[F.Electrical]"): +def pick_switch(module: "_TSwitch[F.Electrical]", solver: Solver): module.add(F.can_attach_to_footprint_symmetrically()) pick_module_by_params( module, + solver, [ PickerOption( part=LCSC_Part(partno="C318884"), pinmap={ - "1": module.unnamed[0], - "2": module.unnamed[0], - "3": module.unnamed[1], - "4": module.unnamed[1], + "1": module.unnamed[0], # type: ignore + "2": module.unnamed[0], # type: ignore + "3": module.unnamed[1], # type: ignore + "4": 
module.unnamed[1], # type: ignore }, ) ], diff --git a/src/faebryk/libs/picker/picker.py b/src/faebryk/libs/picker/picker.py index 2f00c558..e194289c 100644 --- a/src/faebryk/libs/picker/picker.py +++ b/src/faebryk/libs/picker/picker.py @@ -12,11 +12,12 @@ from rich.progress import Progress +from faebryk.core.solver import Solver import faebryk.library._F as F from faebryk.core.module import Module from faebryk.core.moduleinterface import ModuleInterface -from faebryk.core.parameter import Parameter, ParameterOperatable -from faebryk.libs.util import flatten, not_none +from faebryk.core.parameter import Parameter, ParameterOperatable, Predicate +from faebryk.libs.util import cast_assert, flatten, not_none logger = logging.getLogger(__name__) @@ -42,6 +43,11 @@ class DescriptiveProperties(StrEnum): class PickerOption: part: Part params: dict[str, ParameterOperatable.NonParamSet] | None = None + """ + Parameters that need to be matched for this option to be valid. + + Assumes specified params are narrowest possible value for this part + """ filter: Callable[[Module], bool] | None = None pinmap: dict[str, F.Electrical] | None = None info: dict[str | DescriptiveProperties, str] | None = None @@ -136,7 +142,9 @@ def get_part(self) -> Part: return self.part -def pick_module_by_params(module: Module, options: Iterable[PickerOption]): +def pick_module_by_params( + module: Module, solver: Solver, options: Iterable[PickerOption] +): if module.has_trait(has_part_picked): logger.debug(f"Ignoring already picked module: {module}") return @@ -147,16 +155,18 @@ def pick_module_by_params(module: Module, options: Iterable[PickerOption]): } filtered_options = [o for o in options if not o.filter or o.filter(module)] - predicates = {} + predicates: dict[PickerOption, ParameterOperatable.BooleanLike] = {} for o in filtered_options: - predicate_list = [] + predicate_list: list[Predicate] = [] for k, v in (o.params or {}).items(): if not k.startswith("_"): param = params[k] predicate_list.append(param.operation_is_superset(v)) + # No predicates, thus always valid option if len(predicate_list) == 0: + predicates[o] = True continue anded = predicate_list[0] @@ -168,15 +178,14 @@ def pick_module_by_params(module: Module, options: Iterable[PickerOption]): if len(predicates) == 0: raise PickErrorParams(module, list(options)) - true_predicates, unknown_predicates, empty_params = solver.assert_any_predicate( + solve_result = solver.assert_any_predicate( module.get_graph(), [(p, k) for k, p in predicates.items()] ) # TODO handle failure parameters - # do we expect more than one? 
- # if so we can add a heuristic here - option = true_predicates[0][1] + # pick first valid option + _, option = next(iter(solve_result.true_predicates)) if option.pinmap: module.add(F.can_attach_to_footprint_via_pinmap(option.pinmap)) @@ -184,7 +193,8 @@ def pick_module_by_params(module: Module, options: Iterable[PickerOption]): option.part.supplier.attach(module, option) module.add(has_part_picked_defined(option.part)) - # Merge params from footprint option + # Shrink solution space that we need to search for + # by hinting that option params are biggest possible set we might want to support for k, v in (option.params or {}).items(): if k not in params: continue diff --git a/src/faebryk/libs/test/solver.py b/src/faebryk/libs/test/solver.py new file mode 100644 index 00000000..70f2aeb8 --- /dev/null +++ b/src/faebryk/libs/test/solver.py @@ -0,0 +1,21 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT +from faebryk.core.graphinterface import Graph, Node +from faebryk.core.parameter import ParameterOperatable +from faebryk.core.solver import DefaultSolver +from faebryk.libs.sets import PlainSet + + +def solves_to(stmt: ParameterOperatable, result: bool): + stmt.inspect_add_on_final(lambda x: x.inspect_known_values() == PlainSet(result)) + + +def solve_and_test(G: Graph | Node, *stmts: ParameterOperatable): + if isinstance(G, Node): + G = G.get_graph() + + for stmt in stmts: + solves_to(stmt, True) + + solver = DefaultSolver() + solver.finalize(G) diff --git a/test/library/nodes/test_electricpower.py b/test/library/nodes/test_electricpower.py index 49448f24..3ae70f96 100644 --- a/test/library/nodes/test_electricpower.py +++ b/test/library/nodes/test_electricpower.py @@ -2,9 +2,8 @@ # SPDX-License-Identifier: MIT -from faebryk.core.solver import DefaultSolver from faebryk.libs.library import L -from faebryk.libs.sets import PlainSet +from faebryk.libs.test.solver import solve_and_test def test_fused_power(): @@ -22,16 +21,9 @@ def test_fused_power(): fuse = next(iter(power_in_fused.get_children(direct_only=False, types=F.Fuse))) - fuse.trip_current.operation_is_subset( - L.Range.from_center_rel(500 * P.mA, 0.1) - ).inspect_add_on_final(lambda x: x.inspect_known_values() == PlainSet(True)) - power_out.voltage.operation_is_subset(10 * P.V).inspect_add_on_final( - lambda x: x.inspect_known_values() == PlainSet(True) + solve_and_test( + power_in, + fuse.trip_current.operation_is_subset(L.Range.from_center_rel(500 * P.mA, 0.1)), + power_out.voltage.operation_is_subset(10 * P.V), + power_out.max_current.operation_is_le(500 * P.mA * 0.9), ) - power_out.max_current.operation_is_le(500 * P.mA * 0.9).inspect_add_on_final( - lambda x: x.inspect_known_values() == PlainSet(True) - ) - - graph = power_in.get_graph() - solver = DefaultSolver() - solver.finalize(graph) From 8a05bbac6062b2213b2b71ad33283228334806e0 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Mon, 21 Oct 2024 13:26:20 +0200 Subject: [PATCH 43/80] fix L Set --- src/faebryk/libs/library/L.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/faebryk/libs/library/L.py b/src/faebryk/libs/library/L.py index 725834f9..7593f69f 100644 --- a/src/faebryk/libs/library/L.py +++ b/src/faebryk/libs/library/L.py @@ -15,7 +15,7 @@ ) from faebryk.core.parameter import R, p_field # noqa: F401 from faebryk.core.reference import reference # noqa: F401 -from faebryk.libs.sets import Range, Set, Single # noqa: F401 +from faebryk.libs.sets import PlainSet, Range, Single # noqa: F401 class 
AbstractclassError(Exception): ... From 64e6080f19a62aa111e6b06afa0ad5098720ff0e Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Mon, 21 Oct 2024 17:27:46 +0200 Subject: [PATCH 44/80] Model param deps in graph --- src/faebryk/core/parameter.py | 279 ++++++++++++++++++++---------- src/faebryk/library/Resistor.py | 5 +- src/faebryk/libs/picker/picker.py | 4 +- src/faebryk/libs/sets.py | 15 +- src/faebryk/libs/units.py | 13 +- test/core/test_parameters.py | 7 +- 6 files changed, 212 insertions(+), 111 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index ef24ff07..c14cfcc1 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -4,13 +4,13 @@ import logging from enum import Enum, auto from types import NotImplementedType -from typing import Any, Callable, Protocol, Self - -from more_itertools import raise_ +from typing import Any, Callable, Self from faebryk.core.core import Namespace +from faebryk.core.graphinterface import GraphInterface +from faebryk.core.link import LinkParent from faebryk.core.node import Node, f_field -from faebryk.libs.sets import Range, Set_ +from faebryk.libs.sets import Empty, P_Set, Range, Ranges from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless from faebryk.libs.util import abstract @@ -21,102 +21,122 @@ # boolean: T == S == bool # enum: T == S == Enum # number: T == Number type, S == Range[Number] -class ParameterOperatable(Protocol): +class ParameterOperatable: type QuantityLike = Quantity | NotImplementedType type Number = int | float | QuantityLike - type NonParamNumber = Number | Set_[Number] + type NonParamNumber = Number | P_Set[Number] type NumberLike = ParameterOperatable | NonParamNumber - type NonParamBoolean = bool | Set_[bool] + type NonParamBoolean = bool | P_Set[bool] type BooleanLike = ParameterOperatable | NonParamBoolean - type NonParamEnum = Enum | Set_[Enum] + type NonParamEnum = Enum | P_Set[Enum] type EnumLike = ParameterOperatable | NonParamEnum type All = NumberLike | BooleanLike | EnumLike type NonParamSet = NonParamNumber | NonParamBoolean | NonParamEnum type Sets = All - def alias_is(self, other: All): ... - - def constrain_le(self, other: NumberLike): ... - - def constrain_ge(self, other: NumberLike): ... - - def constrain_lt(self, other: NumberLike): ... - - def constrain_gt(self, other: NumberLike): ... - - def constrain_ne(self, other: NumberLike): ... - - def constrain_subset(self, other: Sets): ... + operated_on: GraphInterface - def constrain_superset(self, other: Sets): ... + def operation_add(self, other: NumberLike) -> "Expression": + return Add(self, other) - def constrain_cardinality(self, other: int): ... + def operation_subtract(self, other: NumberLike) -> "Expression": + return Subtract(minuend=self, subtrahend=other) - def operation_add(self, other: NumberLike) -> "Expression": ... + def operation_multiply(self, other: NumberLike) -> "Expression": + return Multiply(self, other) - def operation_subtract(self, other: NumberLike) -> "Expression": ... + def operation_divide(self: NumberLike, other: NumberLike) -> "Expression": + return Divide(numerator=self, denominator=other) - def operation_multiply(self, other: NumberLike) -> "Expression": ... + def operation_power(self, other: NumberLike) -> "Expression": + return Power(base=self, exponent=other) - def operation_divide(self: NumberLike, other: NumberLike) -> "Expression": ... 
+ def operation_log(self) -> "Expression": + return Log(self) - def operation_power(self, other: NumberLike) -> "Expression": ... + def operation_sqrt(self) -> "Expression": + return Sqrt(self) - def operation_log(self) -> "Expression": ... + def operation_abs(self) -> "Expression": + return Abs(self) - def operation_sqrt(self) -> "Expression": ... + def operation_floor(self) -> "Expression": + return Floor(self) - def operation_abs(self) -> "Expression": ... + def operation_ceil(self) -> "Expression": + return Ceil(self) - def operation_floor(self) -> "Expression": ... + def operation_round(self) -> "Expression": + return Round(self) - def operation_ceil(self) -> "Expression": ... + def operation_sin(self) -> "Expression": + return Sin(self) - def operation_round(self) -> "Expression": ... + def operation_cos(self) -> "Expression": + return Cos(self) - def operation_sin(self) -> "Expression": ... + def operation_union(self, other: Sets) -> "Expression": + return Union(self, other) - def operation_cos(self) -> "Expression": ... + def operation_intersection(self, other: Sets) -> "Expression": + return Intersection(self, other) - def operation_union(self, other: Sets) -> "Expression": ... + def operation_difference(self, other: Sets) -> "Expression": + return Difference(minuend=self, subtrahend=other) - def operation_intersection(self, other: Sets) -> "Expression": ... + def operation_symmetric_difference(self, other: Sets) -> "Expression": + return SymmetricDifference(self, other) - def operation_difference(self, other: Sets) -> "Expression": ... + def operation_and(self, other: BooleanLike) -> "Logic": + return And(self, other) - def operation_symmetric_difference(self, other: Sets) -> "Expression": ... + def operation_or(self, other: BooleanLike) -> "Logic": + return Or(self, other) - def operation_and(self, other: BooleanLike) -> "Predicate": ... + def operation_not(self) -> "Logic": + return Not(self) - def operation_or(self, other: BooleanLike) -> "Predicate": ... + def operation_xor(self, other: BooleanLike) -> "Logic": + return Xor(left=self, right=other) - def operation_not(self) -> "Predicate": ... + def operation_implies(self, other: BooleanLike) -> "Logic": + return Implies(condition=self, implication=other) - def operation_xor(self, other: BooleanLike) -> "Predicate": ... + def operation_is_le(self, other: NumberLike) -> "NumericPredicate": + return LessOrEqual(constraint=False, left=self, right=other) - def operation_implies(self, other: BooleanLike) -> "Predicate": ... + def operation_is_ge(self, other: NumberLike) -> "NumericPredicate": + return GreaterOrEqual(constraint=False, left=self, right=other) - def operation_is_le(self, other: NumberLike) -> "Predicate": ... + def operation_is_lt(self, other: NumberLike) -> "NumericPredicate": + return LessThan(constraint=False, left=self, right=other) - def operation_is_ge(self, other: NumberLike) -> "Predicate": ... + def operation_is_gt(self, other: NumberLike) -> "NumericPredicate": + return GreaterThan(constraint=False, left=self, right=other) - def operation_is_lt(self, other: NumberLike) -> "Predicate": ... + def operation_is_ne(self, other: NumberLike) -> "NumericPredicate": + return NotEqual(constraint=False, left=self, right=other) - def operation_is_gt(self, other: NumberLike) -> "Predicate": ... + def operation_is_subset(self, other: Sets) -> "SeticPredicate": + return IsSubset(constraint=False, left=self, right=other) - def operation_is_ne(self, other: NumberLike) -> "Predicate": ... 
+ def operation_is_superset(self, other: Sets) -> "SeticPredicate": + return IsSuperset(constraint=False, left=self, right=other) - def operation_is_subset(self, other: Sets) -> "Predicate": ... + # TODO implement + def inspect_known_min(self: NumberLike) -> Number: + return 1 / 0 + # raise NotImplementedError() - def operation_is_superset(self, other: Sets) -> "Predicate": ... + def inspect_known_max(self: NumberLike) -> Number: + return 1 / 0 + # raise NotImplementedError() - def inspect_known_min(self: NumberLike) -> Number: ... - - def inspect_known_max(self: NumberLike) -> Number: ... - - def inspect_known_values(self: BooleanLike) -> Set_[bool]: ... + def inspect_known_values(self: BooleanLike) -> P_Set[bool]: + return 1 / 0 + # raise NotImplementedError() # Run by the solver on finalization inspect_final: Callable[[Self], None] = lambda _: None @@ -131,7 +151,7 @@ def new(self2): self.inspect_final = new # def inspect_num_known_supersets(self) -> int: ... - # def inspect_get_known_supersets(self) -> Iterable[Set_]: ... + # def inspect_get_known_supersets(self) -> Iterable[P_Set]: ... # ---------------------------------------------------------------------------------- def __add__(self, other: NumberLike): @@ -191,8 +211,6 @@ def __rxor__(self, other: BooleanLike): # ---------------------------------------------------------------------------------- - # TODO: move - # should be eager, in the sense that, if the outcome is known, the callable is # called immediately, without storing an expression # we must force a value (at the end of solving at the least) @@ -201,16 +219,13 @@ def if_then_else( if_true: Callable[[], Any], if_false: Callable[[], Any], preference: bool | None = None, - ) -> None: ... - - # the way this is used right now (for testing) is problematic - # we don't want to add a constraint, because that would force it to hold - # instead we want to make an inspection at the "final" stage during solving - # could still be useful if we want to abort early with an error - def assert_true( - self, error: Callable[[], None] = lambda: raise_(ValueError()) ) -> None: - self.if_then_else(lambda: None, error, True) + IfThenElse(self, if_true, if_false, preference) + + # def assert_true( + # self, error: Callable[[], None] = lambda: raise_(ValueError()) + # ) -> None: + # self.if_then_else(lambda: None, error, True) # def assert_false( # self, error: Callable[[], None] = lambda: raise_(ValueError()) @@ -224,13 +239,70 @@ def assert_true( # ) -> None: ... 
+class Constrainable: + type All = ParameterOperatable.All + type Sets = ParameterOperatable.Sets + type NumberLike = ParameterOperatable.NumberLike + + constraints: GraphInterface + + def _constrain(self, constraint: "Predicate"): + self.constraints.connect(constraint.constrains) + + def alias_is(self, other: All): + self._constrain(Is(constraint=True, left=self, right=other)) + + def constrain_le(self, other: NumberLike): + self._constrain(LessOrEqual(constraint=True, left=self, right=other)) + + def constrain_ge(self, other: NumberLike): + self._constrain(GreaterOrEqual(constraint=True, left=self, right=other)) + + def constrain_lt(self, other: NumberLike): + self._constrain(LessThan(constraint=True, left=self, right=other)) + + def constrain_gt(self, other: NumberLike): + self._constrain(GreaterThan(constraint=True, left=self, right=other)) + + def constrain_ne(self, other: NumberLike): + self._constrain(NotEqual(constraint=True, left=self, right=other)) + + def constrain_subset(self, other: Sets): + self._constrain(IsSubset(constraint=True, left=self, right=other)) + + def constrain_superset(self, other: Sets): + self._constrain(IsSuperset(constraint=True, left=self, right=other)) + + def constrain_cardinality(self, other: int): + self._constrain(Cardinality(constraint=True, left=self, right=other)) + + # shortcuts + def constraint_true(self): + self.alias_is(True) + + def constraint_false(self): + self.alias_is(False) + + @abstract class Expression(Node, ParameterOperatable): - pass + operates_on: GraphInterface + operated_on: GraphInterface + def __init__(self, *operatable_operands: "Parameter | Expression"): + super().__init__() + for op in operatable_operands: + self.operates_on.connect(op.operated_on) -class Arithmetic(HasUnit, Expression): - def __init__(self, *operands): + +@abstract +class ConstrainableExpression(Expression, Constrainable): + constraints: GraphInterface + + +@abstract +class Arithmetic(ConstrainableExpression, HasUnit): + def __init__(self, *operands: ParameterOperatable.NumberLike): types = [int, float, Quantity, Parameter, Arithmetic] if any(type(op) not in types for op in operands): raise ValueError( @@ -245,12 +317,11 @@ def __init__(self, *operands): self.operands = operands +@abstract class Additive(Arithmetic): def __init__(self, *operands): super().__init__(*operands) - units = [ - op.units if isinstance(op, HasUnit) else dimensionless for op in operands - ] + units = [HasUnit.get_units_or_dimensionless(op) for op in operands] self.units = units[0] if not all(u.is_compatible_with(self.units) for u in units): raise ValueError("All operands must have compatible units") @@ -262,22 +333,20 @@ def __init__(self, *operands): class Subtract(Additive): - def __init__(self, *operands): - super().__init__(*operands) + def __init__(self, minuend, subtrahend): + super().__init__(minuend, subtrahend) class Multiply(Arithmetic): def __init__(self, *operands): super().__init__(*operands) - units = [ - op.units if isinstance(op, HasUnit) else dimensionless for op in operands - ] + units = [HasUnit.get_units_or_dimensionless(op) for op in operands] self.units = units[0] for u in units[1:]: self.units *= u -class Divide(Multiply): +class Divide(Arithmetic): def __init__(self, numerator, denominator): super().__init__(numerator, denominator) self.units = numerator.units / denominator.units @@ -296,7 +365,7 @@ def __init__(self, base, exponent: int): dimensionless ): raise ValueError("exponent must have dimensionless unit") - units = base.units**exponent if 
isinstance(base, HasUnit) else dimensionless + units = HasUnit.get_units_or_dimensionless(base) ** exponent assert isinstance(units, Unit) self.units = units @@ -349,7 +418,7 @@ def __init__(self, operand): self.units = operand.units -class Logic(Expression): +class Logic(ConstrainableExpression): def __init__(self, *operands): types = [bool, Parameter, Logic, Predicate] if any(type(op) not in types for op in operands): @@ -382,11 +451,19 @@ def __init__(self, left, right): class Implies(Logic): - def __init__(self, left, right): - super().__init__(left, right) + def __init__(self, condition, implication): + super().__init__(condition, implication) -class Setic(Expression): +class IfThenElse(Expression): + def __init__(self, condition, if_true, if_false, preference: bool | None = None): + super().__init__(condition) + self.preference = preference + self.if_true = if_true + self.if_false = if_false + + +class Setic(ConstrainableExpression): def __init__(self, *operands): super().__init__(*operands) types = [Parameter, ParameterOperatable.Sets] @@ -455,10 +532,12 @@ def __init__(self, enum_t: type[Enum]): class Predicate(Expression): + constrains: GraphInterface + def __init__(self, constraint: bool, left, right): self._constraint = constraint - l_units = left.units if isinstance(left, HasUnit) else dimensionless - r_units = right.units if isinstance(right, HasUnit) else dimensionless + l_units = HasUnit.get_units_or_dimensionless(left) + r_units = HasUnit.get_units_or_dimensionless(right) if not l_units.is_compatible_with(r_units): raise ValueError("operands must have compatible units") self.operands = [left, right] @@ -498,7 +577,7 @@ class GreaterOrEqual(NumericPredicate): pass -class NotEqual(Predicate): +class NotEqual(NumericPredicate): pass @@ -523,6 +602,10 @@ class IsSuperset(SeticPredicate): pass +class Cardinality(SeticPredicate): + pass + + class Is(Predicate): def __init__(self, constraint: bool, left, right): super().__init__(constraint, left, right) @@ -594,13 +677,13 @@ class Set(Namespace): SYMMETRIC_DIFFERENCE = SymmetricDifference -class Parameter(Node, ParameterOperatable): +class Parameter(Node, ParameterOperatable, Constrainable): def __init__( self, *, units: Unit | Quantity | None = dimensionless, # hard constraints - within: Range | None = None, + within: Ranges | Range | None = None, domain: Domain = Numbers(negative=False), # soft constraints soft_set: Range | None = None, @@ -615,7 +698,7 @@ def __init__( ): super().__init__() if within is None: - within = Range() + within = Empty(units) if not within.units.is_compatible_with(units): raise ValueError("incompatible units") @@ -630,9 +713,15 @@ def __init__( self.likely_constrained = likely_constrained self.cardinality = cardinality - # ---------------------------------------------------------------------------------- - # TODO implement ParameterOperatable functions - # ---------------------------------------------------------------------------------- + # Type forwards + type All = ParameterOperatable.All + type NumberLike = ParameterOperatable.NumberLike + type Sets = ParameterOperatable.Sets + type BooleanLike = ParameterOperatable.BooleanLike + type Number = ParameterOperatable.Number + + constraints: GraphInterface + operated_on: GraphInterface p_field = f_field(Parameter) diff --git a/src/faebryk/library/Resistor.py b/src/faebryk/library/Resistor.py index 54593f6f..d079252b 100644 --- a/src/faebryk/library/Resistor.py +++ b/src/faebryk/library/Resistor.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: MIT from 
more_itertools import raise_ + import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L @@ -52,7 +53,9 @@ def do_replace(): self.add(has_part_picked_remove()) self.resistance.operation_is_superset(0.0 * P.ohm).if_then_else( - lambda: do_replace(), lambda: raise_(PickError("", self)) + lambda: do_replace(), + lambda: raise_(PickError("", self)), + preference=True, ) self.add( diff --git a/src/faebryk/libs/picker/picker.py b/src/faebryk/libs/picker/picker.py index e194289c..2eb5e863 100644 --- a/src/faebryk/libs/picker/picker.py +++ b/src/faebryk/libs/picker/picker.py @@ -12,12 +12,12 @@ from rich.progress import Progress -from faebryk.core.solver import Solver import faebryk.library._F as F from faebryk.core.module import Module from faebryk.core.moduleinterface import ModuleInterface from faebryk.core.parameter import Parameter, ParameterOperatable, Predicate -from faebryk.libs.util import cast_assert, flatten, not_none +from faebryk.core.solver import Solver +from faebryk.libs.util import flatten, not_none logger = logging.getLogger(__name__) diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index d23abba9..45ed4a50 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -15,7 +15,8 @@ def is_empty(self) -> bool: ... def __contains__(self, item: T) -> bool: ... -class P_UnitSet[T](P_Set[T], HasUnit, Protocol): ... +class P_UnitSet[T](P_Set[T], Protocol): + units: Unit # -------------------------------------------------------------------------------------- @@ -406,7 +407,7 @@ def __contains__(self, item: Any) -> bool: # yucky with floats def __eq__(self, value: Any) -> bool: - if not isinstance(value, HasUnit): + if not HasUnit.check(value): return False if not self.units.is_compatible_with(value.units): return False @@ -436,9 +437,7 @@ def __init__( *ranges: Range[QuantityT] | "Ranges[QuantityT]", units: Unit | None = None, ): - range_units = [ - r.units if isinstance(r, HasUnit) else dimensionless for r in ranges - ] + range_units = [HasUnit.get_units_or_dimensionless(r) for r in ranges] if len(range_units) == 0 and units is None: raise ValueError("units must be provided for empty union") self.units = units or range_units[0] @@ -516,7 +515,7 @@ def __contains__(self, item: Any) -> bool: return False def __eq__(self, value: Any) -> bool: - if not isinstance(value, HasUnit): + if not HasUnit.check(value): return False if not self.units.is_compatible_with(value.units): return False @@ -532,7 +531,9 @@ def __repr__(self) -> str: return f"_RangeUnion({', '.join(f"[{self.base_to_units(r.min)}, {self.base_to_units(r.max)}]" for r in self._ranges.ranges)} | {self.units})" -def Empty(units: Unit) -> Ranges[QuantityT]: +def Empty(units: Unit | None = None) -> Ranges[QuantityT]: + if units is None: + units = dimensionless return Ranges(units=units) diff --git a/src/faebryk/libs/units.py b/src/faebryk/libs/units.py index 8b1c22d0..8aeabae2 100644 --- a/src/faebryk/libs/units.py +++ b/src/faebryk/libs/units.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: MIT # re-exporting Quantity in-case we ever want to change it -from typing import Protocol, runtime_checkable +from typing import Any from pint import Quantity as _Quantity # noqa: F401 from pint import UndefinedUnitError, Unit, UnitRegistry # noqa: F401 @@ -17,10 +17,17 @@ dimensionless = cast_assert(Unit, P.dimensionless) -@runtime_checkable -class HasUnit(Protocol): +class HasUnit: units: Unit + @staticmethod + def check(obj: Any) -> bool: + return hasattr(obj, "units") + 
+ @staticmethod + def get_units_or_dimensionless(obj: Any) -> Unit: + return obj.units if HasUnit.check(obj) else dimensionless + def to_si_str( value: Quantity | float | int, diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index e253a911..c6cd9386 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -3,7 +3,8 @@ import logging -from faebryk.core.parameter import Domains, Parameter +from faebryk.core.parameter import Parameter +from faebryk.libs.library import L from faebryk.libs.sets import Range from faebryk.libs.units import P @@ -12,8 +13,8 @@ def test_new_definitions(): _ = Parameter( - unit=P.ohm, - domain=Domains.Numbers.Reals.Positive(), + units=P.ohm, + domain=L.Domains.Numbers.REAL(negative=False), soft_set=Range(1 * P.ohm, 10 * P.Mohm), likely_constrained=True, ) From 9b52eea8d43d152b1917489bc9806327baa23b5b Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Mon, 21 Oct 2024 18:13:50 +0200 Subject: [PATCH 45/80] Minor fixes; visualize example --- src/faebryk/core/parameter.py | 33 +++++++++++++++++++++++++-------- test/core/test_parameters.py | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+), 8 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index c14cfcc1..6bbca25d 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -8,7 +8,6 @@ from faebryk.core.core import Namespace from faebryk.core.graphinterface import GraphInterface -from faebryk.core.link import LinkParent from faebryk.core.node import Node, f_field from faebryk.libs.sets import Empty, P_Set, Range, Ranges from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless @@ -289,9 +288,14 @@ class Expression(Node, ParameterOperatable): operates_on: GraphInterface operated_on: GraphInterface - def __init__(self, *operatable_operands: "Parameter | Expression"): + def __init__(self, *operands: ParameterOperatable.All): super().__init__() - for op in operatable_operands: + self.operatable_operands = { + op for op in operands if isinstance(op, (Parameter, Expression)) + } + + def __preinit__(self): + for op in self.operatable_operands: self.operates_on.connect(op.operated_on) @@ -303,13 +307,14 @@ class ConstrainableExpression(Expression, Constrainable): @abstract class Arithmetic(ConstrainableExpression, HasUnit): def __init__(self, *operands: ParameterOperatable.NumberLike): + super().__init__(*operands) types = [int, float, Quantity, Parameter, Arithmetic] if any(type(op) not in types for op in operands): raise ValueError( "operands must be int, float, Quantity, Parameter, or Expression" ) if any( - param.domain not in [Numbers, ESeries] + not isinstance(param.domain, (Numbers, ESeries)) for param in operands if isinstance(param, Parameter) ): @@ -420,6 +425,7 @@ def __init__(self, operand): class Logic(ConstrainableExpression): def __init__(self, *operands): + super().__init__(*operands) types = [bool, Parameter, Logic, Predicate] if any(type(op) not in types for op in operands): raise ValueError("operands must be bool, Parameter, Logic, or Predicate") @@ -535,6 +541,7 @@ class Predicate(Expression): constrains: GraphInterface def __init__(self, constraint: bool, left, right): + super().__init__(left, right) self._constraint = constraint l_units = HasUnit.get_units_or_dimensionless(left) r_units = HasUnit.get_units_or_dimensionless(right) @@ -555,10 +562,20 @@ def is_constraint(self): class NumericPredicate(Predicate): def __init__(self, constraint: bool, left, right): 
super().__init__(constraint, left, right) - if isinstance(left, Parameter) and left.domain not in [Numbers, ESeries]: - raise ValueError("left operand must have domain Numbers or ESeries") - if isinstance(right, Parameter) and right.domain not in [Numbers, ESeries]: - raise ValueError("right operand must have domain Numbers or ESeries") + if isinstance(left, Parameter) and not isinstance( + left.domain, (Numbers, ESeries) + ): + raise ValueError( + "left operand must have domain Numbers or ESeries," + f" not {type(left.domain)}" + ) + if isinstance(right, Parameter) and not isinstance( + right.domain, (Numbers, ESeries) + ): + raise ValueError( + "right operand must have domain Numbers or ESeries," + f" not {type(right.domain)}" + ) class LessThan(NumericPredicate): diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index c6cd9386..e082b30a 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -3,6 +3,7 @@ import logging +from faebryk.core.node import Node from faebryk.core.parameter import Parameter from faebryk.libs.library import L from faebryk.libs.sets import Range @@ -18,3 +19,35 @@ def test_new_definitions(): soft_set=Range(1 * P.ohm, 10 * P.Mohm), likely_constrained=True, ) + + +def test_visualize(): + """ + Creates webserver that opens automatically if run in jupyter notebook + """ + from faebryk.exporters.visualize.interactive_graph import interactive_graph + + class App(Node): + p1 = L.f_field(Parameter)(units=P.ohm) + + app = App() + + p2 = Parameter(units=P.ohm) + + app.p1.constrain_ge(p2 * 5) + + G = app.get_graph() + interactive_graph(G) + + +# TODO remove +if __name__ == "__main__": + # if run in jupyter notebook + import sys + + if "ipykernel" in sys.modules: + test_visualize() + else: + import typer + + typer.run(test_visualize) From a09ff7de3fb25a2e29d1b4e12d8f8ad826f6d9fa Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 22 Oct 2024 12:37:09 +0200 Subject: [PATCH 46/80] Fix: Node __preinit__ multicall --- src/faebryk/core/node.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/src/faebryk/core/node.py b/src/faebryk/core/node.py index fc9d93ef..204fa93e 100644 --- a/src/faebryk/core/node.py +++ b/src/faebryk/core/node.py @@ -485,12 +485,17 @@ def _setup(self) -> None: # Call 2-stage constructors if self._init: - for base in reversed(type(self).mro()): - if hasattr(base, "__preinit__"): - base.__preinit__(self) - for base in reversed(type(self).mro()): - if hasattr(base, "__postinit__"): - base.__postinit__(self) + bases = list(reversed(type(self).mro())) + preinits = { + base.__preinit__ for base in bases if hasattr(base, "__preinit__") + } + postinits = { + base.__postinit__ for base in bases if hasattr(base, "__postinit__") + } + for preinit in preinits: + preinit(self) + for postinit in postinits: + postinit(self) def __init__(self): assert not hasattr(self, "_is_setup") From 7c0a01691b5543c58719e0d516d4fd1d11bcd948 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 22 Oct 2024 12:37:46 +0200 Subject: [PATCH 47/80] backport: lazy_mifs interactive graph --- src/faebryk/core/graph_backends/graphpy.py | 4 + src/faebryk/core/node.py | 6 +- .../exporters/visualize/interactive_graph.py | 399 +++++++++++------- 3 files changed, 256 insertions(+), 153 deletions(-) diff --git a/src/faebryk/core/graph_backends/graphpy.py b/src/faebryk/core/graph_backends/graphpy.py index 8eb45bd4..97d227e2 100644 --- a/src/faebryk/core/graph_backends/graphpy.py +++ 
b/src/faebryk/core/graph_backends/graphpy.py @@ -119,6 +119,10 @@ def is_connected(self, from_obj: T, to_obj: T) -> "Link | None": def get_edges(self, obj: T) -> Mapping[T, L]: return self().edges(obj) + @property + def edges(self) -> list[tuple[T, T, L]]: + return self()._e + @staticmethod def _union(rep: GI, old: GI): # merge big into small diff --git a/src/faebryk/core/node.py b/src/faebryk/core/node.py index 204fa93e..39aac1d3 100644 --- a/src/faebryk/core/node.py +++ b/src/faebryk/core/node.py @@ -541,8 +541,10 @@ def get_graph(self): def get_parent(self): return self.parent.get_parent() - def get_name(self): + def get_name(self, accept_no_parent: bool = False): p = self.get_parent() + if not p and accept_no_parent: + return f"<{hex(id(self))[-3:]}>" if not p: raise NodeNoParent(self, "Parent required for name") return p[1] @@ -550,7 +552,7 @@ def get_name(self): def get_hierarchy(self) -> list[tuple["Node", str]]: parent = self.get_parent() if not parent: - return [(self, "*")] + return [(self, f"<{hex(id(self))[-3:]}>")] parent_obj, name = parent return parent_obj.get_hierarchy() + [(self, name)] diff --git a/src/faebryk/exporters/visualize/interactive_graph.py b/src/faebryk/exporters/visualize/interactive_graph.py index 5b78d9ee..0868e16b 100644 --- a/src/faebryk/exporters/visualize/interactive_graph.py +++ b/src/faebryk/exporters/visualize/interactive_graph.py @@ -1,87 +1,94 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -import rich -import rich.text +from typing import Collection, Iterable -from faebryk.core.graph import Graph -from faebryk.core.graphinterface import GraphInterface +import dash_cytoscape as cyto +from dash import Dash, html +from rich.console import Console +from rich.table import Table + +# import faebryk.library._F as F +from faebryk.core.graphinterface import Graph, GraphInterface from faebryk.core.link import Link +from faebryk.core.module import Module +from faebryk.core.moduleinterface import ModuleInterface from faebryk.core.node import Node -from faebryk.exporters.visualize.util import IDSet, generate_pastel_palette +from faebryk.core.parameter import Expression, Parameter, Predicate +from faebryk.core.trait import Trait +from faebryk.exporters.visualize.util import generate_pastel_palette +from faebryk.libs.util import KeyErrorAmbiguous, find_or -def interactive_graph(G: Graph): - import dash_cytoscape as cyto - from dash import Dash, html +def typename(obj): + if isinstance(obj, type): + return obj.__name__ + return type(obj).__name__ - # Register the fcose layout - cyto.load_extra_layouts() - app = Dash(__name__) +# Transformers ------------------------------------------------------------------------- +def _gif(gif: GraphInterface): + return { + "data": { + "id": id(gif), + "label": gif.name, + "type": typename(gif), + "parent": id(gif.node), + } + } - node_types: set[str] = set() - groups = {} - - def _group(gif: GraphInterface) -> str: - node = gif.node - my_node_id = str(id(node)) - if my_node_id not in groups: - label = f"{node.get_full_name()} ({type(node).__name__})" - groups[my_node_id] = { - "data": { - "id": my_node_id, - "label": label, - "type": "group", - } - } - return my_node_id - - def _node(node: Node): - full_name = node.get_full_name() - type_name = type(node).__name__ - node_types.add(type_name) - data = {"id": str(id(node)), "label": full_name, "type": type_name} - if isinstance(node, GraphInterface): - data["parent"] = _group(node) - return {"data": data} - - link_types: set[str] = set() - 
links_touched = IDSet[Link]() - - def _link(link: Link): - if link in links_touched: - return None - links_touched.add(link) - - try: - source, target = tuple(str(id(n)) for n in link.get_connections()) - except ValueError: - return None - - type_name = type(link).__name__ - link_types.add(type_name) - - return {"data": {"source": source, "target": target, "type": type_name}} - - def _not_none(x): - return x is not None - - elements = [ - *(filter(_not_none, (_node(gif) for gif in G))), - *( - filter( - _not_none, - (_link(link) for gif in G for link in gif.get_links()), - ) - ), - *( - groups.values() - ), # must go after nodes because the node iteration creates the groups - ] +def _link(source, target, link: Link): + return { + "data": { + "source": id(source), + "target": id(target), + "type": typename(link), + } + } + + +_GROUP_TYPES = { + Predicate: "#FCF3CF", # Very light goldenrod + Expression: "#D1F2EB", # Very soft turquoise + Parameter: "#FFD9DE", # Very light pink + Module: "#E0F0FF", # Very light blue + Trait: "#FCFCFF", # Almost white + # F.Electrical: "#D1F2EB", # Very soft turquoise + # F.ElectricPower: "#FCF3CF", # Very light goldenrod + # F.ElectricLogic: "#EBE1F1", # Very soft lavender + # Defaults + ModuleInterface: "#DFFFE4", # Very light green + Node: "#FCFCFF", # Almost white +} + + +def _group(node: Node): + try: + subtype = find_or(_GROUP_TYPES, lambda t: isinstance(node, t), default=Node) + except KeyErrorAmbiguous as e: + subtype = e.duplicates[0] + + return { + "data": { + "id": id(node), + "label": f"{node.get_name(accept_no_parent=True)}\n({typename(node)})", + "type": "group", + "subtype": typename(subtype), + "parent": id(p[0]) if (p := node.get_parent()) else None, + } + } + + +# Style -------------------------------------------------------------------------------- - stylesheet = [ + +def _with_pastels[T](iterable: Collection[T]): + return zip(sorted(iterable), generate_pastel_palette(len(iterable))) + + +class _Stylesheet: + _BASE = [ { "selector": "node", "style": { @@ -89,7 +96,13 @@ def _not_none(x): "text-opacity": 0.8, "text-valign": "center", "text-halign": "center", + "font-size": "0.5em", "background-color": "#BFD7B5", + "text-outline-color": "#FFFFFF", + "text-outline-width": 0.5, + "border-width": 1, + "border-color": "#888888", + "border-opacity": 0.5, }, }, { @@ -101,22 +114,10 @@ def _not_none(x): "target-arrow-shape": "triangle", "arrow-scale": 1, "target-arrow-color": "#A3C4BC", + "text-outline-color": "#FFFFFF", + "text-outline-width": 2, }, }, - ] - - def _pastels(iterable): - return zip(iterable, generate_pastel_palette(len(iterable))) - - for node_type, color in _pastels(node_types): - stylesheet.append( - { - "selector": f'node[type = "{node_type}"]', - "style": {"background-color": color}, - } - ) - - stylesheet.append( { "selector": 'node[type = "group"]', "style": { @@ -124,88 +125,184 @@ def _pastels(iterable): "font-weight": "bold", "font-size": "1.5em", "text-valign": "top", + "text-outline-color": "#FFFFFF", + "text-outline-width": 1.5, + "text-wrap": "wrap", + "border-width": 4, }, - } - ) + }, + ] + + def __init__(self): + self.stylesheet = list(self._BASE) + + def add_node_type(self, node_type: str, color: str): + self.stylesheet.append( + { + "selector": f'node[type = "{node_type}"]', + "style": {"background-color": color}, + } + ) - for link_type, color in _pastels(link_types): - stylesheet.append( + def add_link_type(self, link_type: str, color: str): + self.stylesheet.append( { "selector": f'edge[type = "{link_type}"]', 
"style": {"line-color": color, "target-arrow-color": color}, } ) - container_style = { - "position": "fixed", - "display": "flex", - "flex-direction": "column", - "height": "100%", - "width": "100%", - } + def add_group_type(self, group_type: str, color: str): + self.stylesheet.append( + { + "selector": f'node[subtype = "{group_type}"]', + "style": {"background-color": color}, + } + ) - graph_view_style = { - "position": "absolute", - "width": "100%", - "height": "100%", - "zIndex": 999, - } - _cyto = cyto.Cytoscape( - id="graph-view", - stylesheet=stylesheet, - style=graph_view_style, - elements=elements, - layout={ - "name": "fcose", - "quality": "proof", - "animate": False, - "randomize": False, - "fit": True, - "padding": 50, - "nodeDimensionsIncludeLabels": True, - "uniformNodeDimensions": False, - "packComponents": True, - "nodeRepulsion": 8000, - "idealEdgeLength": 50, - "edgeElasticity": 0.45, - "nestingFactor": 0.1, - "gravity": 0.25, - "numIter": 2500, - "tile": True, - "tilingPaddingVertical": 10, - "tilingPaddingHorizontal": 10, - "gravityRangeCompound": 1.5, - "gravityCompound": 1.0, - "gravityRange": 3.8, - "initialEnergyOnIncremental": 0.5, +def _Layout(stylesheet: _Stylesheet, elements: list[dict[str, dict]]): + return html.Div( + style={ + "position": "fixed", + "display": "flex", + "flex-direction": "column", + "height": "100%", + "width": "100%", }, - ) - - app.layout = html.Div( - style=container_style, children=[ html.Div( className="cy-container", style={"flex": "1", "position": "relative"}, - children=[_cyto], + children=[ + cyto.Cytoscape( + id="graph-view", + stylesheet=stylesheet.stylesheet, + style={ + "position": "absolute", + "width": "100%", + "height": "100%", + "zIndex": 999, + }, + elements=elements, + layout={ + "name": "fcose", + "quality": "proof", + "animate": False, + "randomize": False, + "fit": True, + "padding": 50, + "nodeDimensionsIncludeLabels": True, + "uniformNodeDimensions": False, + "packComponents": True, + "nodeRepulsion": 1000, + "idealEdgeLength": 50, + "edgeElasticity": 0.45, + "nestingFactor": 0.1, + "gravity": 0.25, + "numIter": 2500, + "tile": True, + "tilingPaddingVertical": 10, + "tilingPaddingHorizontal": 10, + "gravityRangeCompound": 1.5, + "gravityCompound": 1.5, + "gravityRange": 3.8, + "initialEnergyOnIncremental": 0.5, + "componentSpacing": 40, + }, + ) + ], ), ], ) - # print the color palette - print("Node types:") - for node_type, color in _pastels(node_types): - colored_text = rich.text.Text(f"{node_type}: {color}") - colored_text.stylize(f"on {color}") - rich.print(colored_text) - print("\n") - - print("Link types:") - for link_type, color in _pastels(link_types): - colored_text = rich.text.Text(f"{link_type}: {color}") - colored_text.stylize(f"on {color}") - rich.print(colored_text) - print("\n") - - app.run() + +# -------------------------------------------------------------------------------------- + + +def interactive_subgraph( + edges: Iterable[tuple[GraphInterface, GraphInterface, Link]], + gifs: list[GraphInterface], + nodes: Iterable[Node], + height: int | None = None, +): + links = [link for _, _, link in edges] + link_types = {typename(link) for link in links} + gif_types = {typename(gif) for gif in gifs} + + elements = ( + [_gif(gif) for gif in gifs] + + [_link(*edge) for edge in edges] + + [_group(node) for node in nodes] + ) + + # Build stylesheet + stylesheet = _Stylesheet() + + gif_type_colors = list(_with_pastels(gif_types)) + link_type_colors = list(_with_pastels(link_types)) + group_types_colors = [ + 
(typename(group_type), color) for group_type, color in _GROUP_TYPES.items() + ] + + for gif_type, color in gif_type_colors: + stylesheet.add_node_type(gif_type, color) + + for link_type, color in link_type_colors: + stylesheet.add_link_type(link_type, color) + + for group_type, color in group_types_colors: + stylesheet.add_group_type(group_type, color) + + # Register the fcose layout + cyto.load_extra_layouts() + app = Dash(__name__) + app.layout = _Layout(stylesheet, elements) + + # Print legend + console = Console() + + for typegroup, colors in [ + ("GIF", gif_type_colors), + ("Link", link_type_colors), + ("Node", group_types_colors), + ]: + table = Table(title="Legend") + table.add_column("Type", style="cyan") + table.add_column("Color", style="green") + table.add_column("Name") + + for text, color in colors: + table.add_row(typegroup, f"[on {color}] [/]", text) + + console.print(table) + + # + app.run(jupyter_height=height or 1000) + + +def interactive_graph( + G: Graph, + node_types: tuple[type[Node], ...] | None = None, + depth: int = 0, + filter_unconnected: bool = True, + height: int | None = None, +): + if node_types is None: + node_types = (Node,) + + # Build elements + nodes = G.nodes_of_types(node_types) + if depth > 0: + nodes = [node for node in nodes if len(node.get_hierarchy()) <= depth] + + gifs = [gif for gif in G if gif.node in nodes] + if filter_unconnected: + gifs = [gif for gif in gifs if len(gif.edges) > 1] + + edges = [ + (edge[0], edge[1], edge[2]) + for edge in G.edges + if edge[0] in gifs and edge[1] in gifs + ] + return interactive_subgraph(edges, gifs, nodes, height=height) From 0471c9f3e6c27c01ed787c294cab602c09682e4d Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 22 Oct 2024 12:38:39 +0200 Subject: [PATCH 48/80] predicates have to be constrain() --- src/faebryk/core/parameter.py | 130 +++++++++++++++++++--------------- test/core/test_parameters.py | 10 ++- 2 files changed, 80 insertions(+), 60 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 6bbca25d..4ac3bd97 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -9,9 +9,9 @@ from faebryk.core.core import Namespace from faebryk.core.graphinterface import GraphInterface from faebryk.core.node import Node, f_field -from faebryk.libs.sets import Empty, P_Set, Range, Ranges +from faebryk.libs.sets import P_Set, Range, Ranges from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless -from faebryk.libs.util import abstract +from faebryk.libs.util import abstract, cast_assert logger = logging.getLogger(__name__) @@ -104,37 +104,37 @@ def operation_implies(self, other: BooleanLike) -> "Logic": return Implies(condition=self, implication=other) def operation_is_le(self, other: NumberLike) -> "NumericPredicate": - return LessOrEqual(constraint=False, left=self, right=other) + return LessOrEqual(left=self, right=other) def operation_is_ge(self, other: NumberLike) -> "NumericPredicate": - return GreaterOrEqual(constraint=False, left=self, right=other) + return GreaterOrEqual(left=self, right=other) def operation_is_lt(self, other: NumberLike) -> "NumericPredicate": - return LessThan(constraint=False, left=self, right=other) + return LessThan(left=self, right=other) def operation_is_gt(self, other: NumberLike) -> "NumericPredicate": - return GreaterThan(constraint=False, left=self, right=other) + return GreaterThan(left=self, right=other) def operation_is_ne(self, other: NumberLike) -> "NumericPredicate": - return 
NotEqual(constraint=False, left=self, right=other) + return NotEqual(left=self, right=other) def operation_is_subset(self, other: Sets) -> "SeticPredicate": - return IsSubset(constraint=False, left=self, right=other) + return IsSubset(left=self, right=other) def operation_is_superset(self, other: Sets) -> "SeticPredicate": - return IsSuperset(constraint=False, left=self, right=other) + return IsSuperset(left=self, right=other) # TODO implement def inspect_known_min(self: NumberLike) -> Number: - return 1 / 0 + raise Exception("not implemented") # raise NotImplementedError() def inspect_known_max(self: NumberLike) -> Number: - return 1 / 0 + raise Exception("not implemented") # raise NotImplementedError() def inspect_known_values(self: BooleanLike) -> P_Set[bool]: - return 1 / 0 + raise Exception("not implemented") # raise NotImplementedError() # Run by the solver on finalization @@ -187,6 +187,27 @@ def __abs__(self): def __round__(self): return self.operation_round() + def __floor__(self): + return self.operation_floor() + + def __ceil__(self): + return self.operation_ceil() + + def __le__(self, other: NumberLike): + return self.operation_is_le(other) + + def __ge__(self, other: NumberLike): + return self.operation_is_ge(other) + + def __lt__(self, other: NumberLike): + return self.operation_is_lt(other) + + def __gt__(self, other: NumberLike): + return self.operation_is_gt(other) + + def __ne__(self, other: NumberLike): + return self.operation_is_ne(other) + # bitwise and def __and__(self, other: BooleanLike): # TODO could be set intersection @@ -243,50 +264,54 @@ class Constrainable: type Sets = ParameterOperatable.Sets type NumberLike = ParameterOperatable.NumberLike - constraints: GraphInterface + def __init__(self): + super().__init__() + self.constrained: bool = False def _constrain(self, constraint: "Predicate"): - self.constraints.connect(constraint.constrains) + constraint.constrain() + def _get(self) -> ParameterOperatable: + return cast_assert(ParameterOperatable, self) + + # Generic def alias_is(self, other: All): - self._constrain(Is(constraint=True, left=self, right=other)) + return self._constrain(Is(left=self, right=other)) + # Numberlike def constrain_le(self, other: NumberLike): - self._constrain(LessOrEqual(constraint=True, left=self, right=other)) + return self._constrain(self._get().operation_is_le(other)) def constrain_ge(self, other: NumberLike): - self._constrain(GreaterOrEqual(constraint=True, left=self, right=other)) + return self._constrain(self._get().operation_is_ge(other)) def constrain_lt(self, other: NumberLike): - self._constrain(LessThan(constraint=True, left=self, right=other)) + return self._constrain(self._get().operation_is_lt(other)) def constrain_gt(self, other: NumberLike): - self._constrain(GreaterThan(constraint=True, left=self, right=other)) + return self._constrain(self._get().operation_is_gt(other)) def constrain_ne(self, other: NumberLike): - self._constrain(NotEqual(constraint=True, left=self, right=other)) + return self._constrain(self._get().operation_is_ne(other)) + # Setlike def constrain_subset(self, other: Sets): - self._constrain(IsSubset(constraint=True, left=self, right=other)) + return self._constrain(self._get().operation_is_subset(other)) def constrain_superset(self, other: Sets): - self._constrain(IsSuperset(constraint=True, left=self, right=other)) + return self._constrain(self._get().operation_is_superset(other)) def constrain_cardinality(self, other: int): - self._constrain(Cardinality(constraint=True, left=self, 
right=other)) + return self._constrain(Cardinality(self._get(), other)) # shortcuts - def constraint_true(self): - self.alias_is(True) - - def constraint_false(self): - self.alias_is(False) + def constrain(self): + self.constrained = True @abstract class Expression(Node, ParameterOperatable): operates_on: GraphInterface - operated_on: GraphInterface def __init__(self, *operands: ParameterOperatable.All): super().__init__() @@ -301,7 +326,7 @@ def __preinit__(self): @abstract class ConstrainableExpression(Expression, Constrainable): - constraints: GraphInterface + pass @abstract @@ -348,7 +373,7 @@ def __init__(self, *operands): units = [HasUnit.get_units_or_dimensionless(op) for op in operands] self.units = units[0] for u in units[1:]: - self.units *= u + self.units = cast_assert(Unit, self.units * u) class Divide(Arithmetic): @@ -475,7 +500,7 @@ def __init__(self, *operands): types = [Parameter, ParameterOperatable.Sets] if any(type(op) not in types for op in operands): raise ValueError("operands must be Parameter or Set") - units = [op.units for op in operands] + units = [HasUnit.get_units_or_dimensionless(op) for op in operands] self.units = units[0] for u in units[1:]: if not self.units.is_compatible_with(u): @@ -537,31 +562,22 @@ def __init__(self, enum_t: type[Enum]): self.enum_t = enum_t -class Predicate(Expression): - constrains: GraphInterface - - def __init__(self, constraint: bool, left, right): +class Predicate(ConstrainableExpression): + def __init__(self, left, right): super().__init__(left, right) - self._constraint = constraint l_units = HasUnit.get_units_or_dimensionless(left) r_units = HasUnit.get_units_or_dimensionless(right) if not l_units.is_compatible_with(r_units): raise ValueError("operands must have compatible units") self.operands = [left, right] - def constrain(self): - self._constraint = True - - def is_constraint(self): - return self._constraint - - # def run_when_known(self, f: Callable[[bool], None]): - # getattr(self, "run_when_known_funcs", []).append(f) + def __bool__(self): + raise ValueError("Predicate cannot be converted to bool") class NumericPredicate(Predicate): - def __init__(self, constraint: bool, left, right): - super().__init__(constraint, left, right) + def __init__(self, left, right): + super().__init__(left, right) if isinstance(left, Parameter) and not isinstance( left.domain, (Numbers, ESeries) ): @@ -599,8 +615,8 @@ class NotEqual(NumericPredicate): class SeticPredicate(Predicate): - def __init__(self, constraint: bool, left, right): - super().__init__(constraint, left, right) + def __init__(self, left, right): + super().__init__(left, right) types = [Parameter, ParameterOperatable.Sets] if any(type(op) not in types for op in self.operands): raise ValueError("operands must be Parameter or Set") @@ -620,12 +636,15 @@ class IsSuperset(SeticPredicate): class Cardinality(SeticPredicate): - pass + def __init__( + self, set: ParameterOperatable.Sets, cardinality: ParameterOperatable.NumberLike + ): + super().__init__(set, cardinality) class Is(Predicate): - def __init__(self, constraint: bool, left, right): - super().__init__(constraint, left, right) + def __init__(self, left, right): + super().__init__(left, right) # TODO rename? 
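A minimal usage sketch of the reworked API, assuming only what the hunks above introduce (comparison operators returning predicate expressions, constrain() on Constrainable, and the constrain_* shortcuts); this mirrors the test change further down in this patch:

    from faebryk.core.parameter import Parameter
    from faebryk.libs.units import P

    p1 = Parameter(units=P.ohm)
    p2 = Parameter(units=P.ohm)

    # __ge__ now only builds a GreaterOrEqual predicate; nothing is constrained yet
    pred = p1 >= p2 * 5

    # the predicate becomes an actual constraint only once constrain() is called
    pred.constrain()

    # equivalent shortcut: builds the predicate via operation_is_ge() and constrains it
    p1.constrain_ge(p2 * 5)
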
@@ -714,9 +733,7 @@ def __init__( cardinality: int | None = None, ): super().__init__() - if within is None: - within = Empty(units) - if not within.units.is_compatible_with(units): + if within is not None and not within.units.is_compatible_with(units): raise ValueError("incompatible units") if not isinstance(units, Unit): @@ -737,8 +754,5 @@ def __init__( type BooleanLike = ParameterOperatable.BooleanLike type Number = ParameterOperatable.Number - constraints: GraphInterface - operated_on: GraphInterface - p_field = f_field(Parameter) diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index e082b30a..1e2dac80 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -3,6 +3,8 @@ import logging +import pytest + from faebryk.core.node import Node from faebryk.core.parameter import Parameter from faebryk.libs.library import L @@ -34,10 +36,14 @@ class App(Node): p2 = Parameter(units=P.ohm) - app.p1.constrain_ge(p2 * 5) + # app.p1.constrain_ge(p2 * 5) + # app.p1.operation_is_ge(p2 * 5).constrain() + (app.p1 >= p2 * 5).constrain() + + # pytest.raises(ValueError, bool, app.p1 >= p2 * 5) G = app.get_graph() - interactive_graph(G) + interactive_graph(G, height=1400) # TODO remove From 5fa829e15638c01e8cfd21071132b983c19e5d88 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 22 Oct 2024 13:30:10 +0200 Subject: [PATCH 49/80] genF --- src/faebryk/library/_F.py | 37 ++++++++++++++++--------------------- 1 file changed, 16 insertions(+), 21 deletions(-) diff --git a/src/faebryk/library/_F.py b/src/faebryk/library/_F.py index 6a6b214c..8f3eea9c 100644 --- a/src/faebryk/library/_F.py +++ b/src/faebryk/library/_F.py @@ -15,21 +15,18 @@ # flake8: noqa: I001 # flake8: noqa: E501 -from faebryk.library.TBD import TBD -from faebryk.library.Range import Range from faebryk.library.has_designator_prefix import has_designator_prefix -from faebryk.library.Constant import Constant +from faebryk.library.Electrical import Electrical from faebryk.library.has_esphome_config import has_esphome_config from faebryk.library.is_esphome_bus import is_esphome_bus from faebryk.library.has_pcb_position import has_pcb_position +from faebryk.library.Constant import Constant from faebryk.library.has_single_electric_reference import has_single_electric_reference from faebryk.library.Power import Power from faebryk.library.Signal import Signal -from faebryk.library.has_construction_dependency import has_construction_dependency from faebryk.library.has_footprint import has_footprint from faebryk.library.Mechanical import Mechanical from faebryk.library.has_overriden_name import has_overriden_name -from faebryk.library.Operation import Operation from faebryk.library.has_linked_pad import has_linked_pad from faebryk.library.has_reference import has_reference from faebryk.library.can_bridge import can_bridge @@ -37,6 +34,7 @@ from faebryk.library.has_descriptive_properties import has_descriptive_properties from faebryk.library.has_simple_value_representation import has_simple_value_representation from faebryk.library.has_capacitance import has_capacitance +from faebryk.library.has_construction_dependency import has_construction_dependency from faebryk.library.has_datasheet import has_datasheet from faebryk.library.has_footprint_requirement import has_footprint_requirement from faebryk.library.has_kicad_ref import has_kicad_ref @@ -46,10 +44,9 @@ from faebryk.library.has_resistance import has_resistance from faebryk.library.has_single_connection import has_single_connection from 
faebryk.library.is_representable_by_single_value import is_representable_by_single_value -from faebryk.library.ANY import ANY -from faebryk.library.Electrical import Electrical from faebryk.library.has_designator_prefix_defined import has_designator_prefix_defined -from faebryk.library.Set import Set +from faebryk.library.XtalIF import XtalIF +from faebryk.library.has_pin_association_heuristic import has_pin_association_heuristic from faebryk.library.has_esphome_config_defined import has_esphome_config_defined from faebryk.library.is_esphome_bus_defined import is_esphome_bus_defined from faebryk.library.has_pcb_position_defined import has_pcb_position_defined @@ -61,6 +58,7 @@ from faebryk.library.Footprint import Footprint from faebryk.library.has_overriden_name_defined import has_overriden_name_defined from faebryk.library.has_linked_pad_defined import has_linked_pad_defined +from faebryk.library.Symbol import Symbol from faebryk.library.can_bridge_defined import can_bridge_defined from faebryk.library.has_designator_defined import has_designator_defined from faebryk.library.has_descriptive_properties_defined import has_descriptive_properties_defined @@ -72,23 +70,23 @@ from faebryk.library.has_pcb_layout_defined import has_pcb_layout_defined from faebryk.library.has_single_connection_impl import has_single_connection_impl from faebryk.library.is_representable_by_single_value_defined import is_representable_by_single_value_defined -from faebryk.library.Symbol import Symbol -from faebryk.library.XtalIF import XtalIF -from faebryk.library.has_pin_association_heuristic import has_pin_association_heuristic from faebryk.library.Header import Header from faebryk.library.PJ398SM import PJ398SM from faebryk.library.RJ45_Receptacle import RJ45_Receptacle from faebryk.library.Relay import Relay +from faebryk.library.has_pin_association_heuristic_lookup_table import has_pin_association_heuristic_lookup_table from faebryk.library.LogicOps import LogicOps from faebryk.library.can_attach_to_footprint import can_attach_to_footprint from faebryk.library.can_attach_via_pinmap import can_attach_via_pinmap from faebryk.library.has_footprint_impl import has_footprint_impl from faebryk.library.has_kicad_footprint import has_kicad_footprint from faebryk.library.Pad import Pad +from faebryk.library.has_symbol_layout import has_symbol_layout from faebryk.library.Button import Button from faebryk.library.GDT import GDT -from faebryk.library.has_symbol_layout import has_symbol_layout -from faebryk.library.has_pin_association_heuristic_lookup_table import has_pin_association_heuristic_lookup_table +from faebryk.library.BJT import BJT +from faebryk.library.Diode import Diode +from faebryk.library.MOSFET import MOSFET from faebryk.library.LogicGate import LogicGate from faebryk.library.has_footprint_defined import has_footprint_defined from faebryk.library.Net import Net @@ -97,9 +95,7 @@ from faebryk.library.has_kicad_manual_footprint import has_kicad_manual_footprint from faebryk.library.has_pcb_routing_strategy_greedy_direct_line import has_pcb_routing_strategy_greedy_direct_line from faebryk.library.has_symbol_layout_defined import has_symbol_layout_defined -from faebryk.library.BJT import BJT -from faebryk.library.Diode import Diode -from faebryk.library.MOSFET import MOSFET +from faebryk.library.TVS import TVS from faebryk.library.LogicGates import LogicGates from faebryk.library.can_attach_to_footprint_symmetrically import can_attach_to_footprint_symmetrically from 
faebryk.library.can_attach_to_footprint_via_pinmap import can_attach_to_footprint_via_pinmap @@ -109,7 +105,8 @@ from faebryk.library.has_equal_pins_in_ifs import has_equal_pins_in_ifs from faebryk.library.has_kicad_footprint_equal_ifs import has_kicad_footprint_equal_ifs from faebryk.library.KicadFootprint import KicadFootprint -from faebryk.library.TVS import TVS +from faebryk.library.can_be_surge_protected import can_be_surge_protected +from faebryk.library.is_surge_protected import is_surge_protected from faebryk.library.Capacitor import Capacitor from faebryk.library.Crystal import Crystal from faebryk.library.Fuse import Fuse @@ -125,8 +122,7 @@ from faebryk.library.SOIC import SOIC from faebryk.library.has_kicad_footprint_equal_ifs_defined import has_kicad_footprint_equal_ifs_defined from faebryk.library.Mounting_Hole import Mounting_Hole -from faebryk.library.can_be_surge_protected import can_be_surge_protected -from faebryk.library.is_surge_protected import is_surge_protected +from faebryk.library.is_surge_protected_defined import is_surge_protected_defined from faebryk.library.MultiCapacitor import MultiCapacitor from faebryk.library.can_be_decoupled import can_be_decoupled from faebryk.library.is_decoupled import is_decoupled @@ -135,9 +131,8 @@ from faebryk.library.Potentiometer import Potentiometer from faebryk.library.ResistorVoltageDivider import ResistorVoltageDivider from faebryk.library.Resistor_Voltage_Divider import Resistor_Voltage_Divider -from faebryk.library.is_surge_protected_defined import is_surge_protected_defined -from faebryk.library.is_decoupled_nodes import is_decoupled_nodes from faebryk.library.can_be_surge_protected_defined import can_be_surge_protected_defined +from faebryk.library.is_decoupled_nodes import is_decoupled_nodes from faebryk.library.can_be_decoupled_defined import can_be_decoupled_defined from faebryk.library.ElectricPower import ElectricPower from faebryk.library.B0505S_1WR3 import B0505S_1WR3 From 3e8e89f1a4c6e2a9c57e20d7c5f2a35e8c618bcb Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 22 Oct 2024 13:56:14 +0200 Subject: [PATCH 50/80] Fix import errors --- src/faebryk/core/parameter.py | 3 + src/faebryk/library/Comparator.py | 4 +- src/faebryk/library/Constant.py | 110 -- src/faebryk/library/LDO.py | 4 +- src/faebryk/library/_F.py | 1 - src/faebryk/libs/e_series.py | 1137 +++++++------- src/faebryk/libs/picker/jlcpcb/jlcpcb.py | 1392 +++++++++--------- src/faebryk/libs/picker/jlcpcb/picker_lib.py | 10 +- src/faebryk/libs/sets.py | 7 +- src/faebryk/libs/test/solver.py | 3 +- 10 files changed, 1310 insertions(+), 1361 deletions(-) delete mode 100644 src/faebryk/library/Constant.py diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 4ac3bd97..6233e59a 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -9,6 +9,7 @@ from faebryk.core.core import Namespace from faebryk.core.graphinterface import GraphInterface from faebryk.core.node import Node, f_field +from faebryk.core.trait import Trait from faebryk.libs.sets import P_Set, Range, Ranges from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless from faebryk.libs.util import abstract, cast_assert @@ -714,6 +715,8 @@ class Set(Namespace): class Parameter(Node, ParameterOperatable, Constrainable): + class TraitT(Trait): ... 
+ def __init__( self, *, diff --git a/src/faebryk/library/Comparator.py b/src/faebryk/library/Comparator.py index c14cf8b8..780acc41 100644 --- a/src/faebryk/library/Comparator.py +++ b/src/faebryk/library/Comparator.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P +from faebryk.libs.units import P, Quantity class Comparator(Module): @@ -18,7 +18,7 @@ class OutputType(Enum): common_mode_rejection_ratio = L.p_field( units=P.dB, likely_constrained=True, - soft_set=L.Range(60 * P.dB, 120 * P.dB), + soft_set=L.Range(Quantity(60, P.dB), Quantity(120, P.dB)), tolerance_guess=10 * P.percent, ) input_bias_current = L.p_field( diff --git a/src/faebryk/library/Constant.py b/src/faebryk/library/Constant.py deleted file mode 100644 index f949a316..00000000 --- a/src/faebryk/library/Constant.py +++ /dev/null @@ -1,110 +0,0 @@ -# This file is part of the faebryk project -# SPDX-License-Identifier: MIT - -from enum import Enum -from typing import Self, SupportsAbs - -import numpy as np - -from faebryk.core.parameter import Parameter, _resolved -from faebryk.libs.units import Quantity, UnitsContainer, to_si_str - - -class Constant[PV](Parameter[PV], Parameter[PV].SupportsSetOps): - type LIT_OR_PARAM = Parameter[PV].LIT_OR_PARAM - - def __init__(self, value: LIT_OR_PARAM) -> None: - super().__init__() - self.value = value - - def _pretty_val(self): - val = repr(self.value) - # TODO - if isinstance(self.value, Quantity): - val = f"{self.value:.2f#~P}" - return val - - def __str__(self) -> str: - return super().__str__() + f"({self._pretty_val()})" - - def __repr__(self): - return super().__repr__() + f"({self._pretty_val()})" - - @_resolved - def __eq__(self, other) -> bool: - if not isinstance(other, Constant): - return False - - try: - return np.allclose(self.value, other.value) - except (TypeError, np.exceptions.DTypePromotionError): - ... 
- - return self.value == other.value - - def __hash__(self) -> int: - return hash(self.value) - - # comparison operators - @_resolved - def __le__(self, other) -> bool: - if isinstance(other, Constant): - return self.value <= other.value - return other >= self.value - - @_resolved - def __lt__(self, other) -> bool: - if isinstance(other, Constant): - return self.value < other.value - return other > self.value - - @_resolved - def __ge__(self, other) -> bool: - if isinstance(other, Constant): - return self.value >= other.value - return other <= self.value - - @_resolved - def __gt__(self, other) -> bool: - if isinstance(other, Constant): - return self.value > other.value - return other < self.value - - def __abs__(self): - assert isinstance(self.value, SupportsAbs) - return Constant(abs(self.value)) - - def __format__(self, format_spec): - return f"{super().__str__()}({format(self.value, format_spec)})" - - def copy(self) -> Self: - return type(self)(self.value) - - def unpack(self): - if isinstance(self.value, Constant): - return self.value.unpack() - - return self.value - - def __int__(self): - return int(self.value) - - @_resolved - def __contains__(self, other: Parameter[PV]) -> bool: - if not isinstance(other, Constant): - return False - return other.value == self.value - - def try_compress(self) -> Parameter[PV]: - if isinstance(self.value, Parameter): - return self.value - return super().try_compress() - - def _max(self): - return self.value - - def _as_unit(self, unit: UnitsContainer, base: int, required: bool) -> str: - return to_si_str(self.value, unit) - - def _enum_parameter_representation(self, required: bool) -> str: - return self.value.name if isinstance(self.value, Enum) else str(self.value) diff --git a/src/faebryk/library/LDO.py b/src/faebryk/library/LDO.py index c6791d73..c69562cb 100644 --- a/src/faebryk/library/LDO.py +++ b/src/faebryk/library/LDO.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P +from faebryk.libs.units import P, Quantity from faebryk.libs.util import assert_once, join_if_non_empty @@ -46,7 +46,7 @@ class OutputPolarity(Enum): psrr = L.p_field( units=P.dB, likely_constrained=True, - soft_set=L.Range(1 * P.dB, 100 * P.dB), + soft_set=L.Range(Quantity(1, P.dB), Quantity(100, P.dB)), ) output_polarity = L.p_field( domain=L.Domains.ENUM(OutputPolarity), diff --git a/src/faebryk/library/_F.py b/src/faebryk/library/_F.py index 8f3eea9c..ce6d2ed2 100644 --- a/src/faebryk/library/_F.py +++ b/src/faebryk/library/_F.py @@ -20,7 +20,6 @@ from faebryk.library.has_esphome_config import has_esphome_config from faebryk.library.is_esphome_bus import is_esphome_bus from faebryk.library.has_pcb_position import has_pcb_position -from faebryk.library.Constant import Constant from faebryk.library.has_single_electric_reference import has_single_electric_reference from faebryk.library.Power import Power from faebryk.library.Signal import Signal diff --git a/src/faebryk/libs/e_series.py b/src/faebryk/libs/e_series.py index 728cd2db..ccadcaf8 100644 --- a/src/faebryk/libs/e_series.py +++ b/src/faebryk/libs/e_series.py @@ -1,587 +1,604 @@ -import copy -import logging -import math -from math import ceil, floor, log10 -from typing import Tuple - -import faebryk.library._F as F -from faebryk.core.parameter import Parameter -from faebryk.libs.units import Quantity - -logger = logging.getLogger(__name__) - -E_SERIES = set[float] - - -class E_SERIES_VALUES: - E192 = { - 1.00, - 
1.01, - 1.02, - 1.04, - 1.05, - 1.06, - 1.07, - 1.09, - 1.10, - 1.11, - 1.13, - 1.14, - 1.15, - 1.17, - 1.18, - 1.20, - 1.21, - 1.23, - 1.24, - 1.26, - 1.27, - 1.29, - 1.30, - 1.32, - 1.33, - 1.35, - 1.37, - 1.38, - 1.40, - 1.42, - 1.43, - 1.45, - 1.47, - 1.49, - 1.50, - 1.52, - 1.54, - 1.56, - 1.58, - 1.60, - 1.62, - 1.64, - 1.65, - 1.67, - 1.69, - 1.72, - 1.74, - 1.76, - 1.78, - 1.80, - 1.82, - 1.84, - 1.87, - 1.89, - 1.91, - 1.93, - 1.96, - 1.98, - 2.00, - 2.03, - 2.05, - 2.08, - 2.10, - 2.13, - 2.15, - 2.18, - 2.21, - 2.23, - 2.26, - 2.29, - 2.32, - 2.34, - 2.37, - 2.40, - 2.43, - 2.46, - 2.49, - 2.52, - 2.55, - 2.58, - 2.61, - 2.64, - 2.67, - 2.71, - 2.74, - 2.77, - 2.80, - 2.84, - 2.87, - 2.91, - 2.94, - 2.98, - 3.01, - 3.05, - 3.09, - 3.12, - 3.16, - 3.20, - 3.24, - 3.28, - 3.32, - 3.36, - 3.40, - 3.44, - 3.48, - 3.52, - 3.57, - 3.61, - 3.65, - 3.70, - 3.74, - 3.79, - 3.83, - 3.88, - 3.92, - 3.97, - 4.02, - 4.07, - 4.12, - 4.17, - 4.22, - 4.27, - 4.32, - 4.37, - 4.42, - 4.48, - 4.53, - 4.59, - 4.64, - 4.70, - 4.75, - 4.81, - 4.87, - 4.93, - 4.99, - 5.05, - 5.11, - 5.17, - 5.23, - 5.30, - 5.36, - 5.42, - 5.49, - 5.56, - 5.62, - 5.69, - 5.76, - 5.83, - 5.90, - 5.97, - 6.04, - 6.12, - 6.19, - 6.26, - 6.34, - 6.42, - 6.49, - 6.57, - 6.65, - 6.73, - 6.81, - 6.90, - 6.98, - 7.06, - 7.15, - 7.23, - 7.32, - 7.41, - 7.50, - 7.59, - 7.68, - 7.77, - 7.87, - 7.96, - 8.06, - 8.16, - 8.25, - 8.35, - 8.45, - 8.56, - 8.66, - 8.76, - 8.87, - 8.98, - 9.09, - 9.20, - 9.31, - 9.42, - 9.53, - 9.65, - 9.76, - 9.88, - } - - E96 = { - 1.00, - 1.02, - 1.05, - 1.07, - 1.10, - 1.13, - 1.15, - 1.18, - 1.21, - 1.24, - 1.27, - 1.30, - 1.33, - 1.37, - 1.40, - 1.43, - 1.47, - 1.50, - 1.54, - 1.58, - 1.62, - 1.65, - 1.69, - 1.74, - 1.78, - 1.82, - 1.87, - 1.91, - 1.96, - 2.00, - 2.05, - 2.10, - 2.15, - 2.21, - 2.26, - 2.32, - 2.37, - 2.43, - 2.49, - 2.55, - 2.61, - 2.67, - 2.74, - 2.80, - 2.87, - 2.94, - 3.01, - 3.09, - 3.16, - 3.24, - 3.32, - 3.40, - 3.48, - 3.57, - 3.65, - 3.74, - 3.83, - 3.92, - 4.02, - 4.12, - 4.22, - 4.32, - 4.42, - 4.53, - 4.64, - 4.75, - 4.87, - 4.99, - 5.11, - 5.23, - 5.36, - 5.49, - 5.62, - 5.76, - 5.90, - 6.04, - 6.19, - 6.34, - 6.49, - 6.65, - 6.81, - 6.98, - 7.15, - 7.32, - 7.50, - 7.68, - 7.87, - 8.06, - 8.25, - 8.45, - 8.66, - 8.87, - 9.09, - 9.31, - 9.53, - 9.76, - } - - E48 = { - 1.00, - 1.05, - 1.10, - 1.15, - 1.21, - 1.27, - 1.33, - 1.40, - 1.47, - 1.54, - 1.62, - 1.69, - 1.78, - 1.87, - 1.96, - 2.05, - 2.15, - 2.26, - 2.37, - 2.49, - 2.61, - 2.74, - 2.87, - 3.01, - 3.16, - 3.32, - 3.48, - 3.65, - 3.83, - 4.02, - 4.22, - 4.42, - 4.64, - 4.87, - 5.11, - 5.36, - 5.62, - 5.90, - 6.19, - 6.49, - 6.81, - 7.15, - 7.50, - 7.87, - 8.25, - 8.66, - 9.09, - 9.53, - } - - E24 = { - 1.0, - 1.1, - 1.2, - 1.3, - 1.5, - 1.6, - 1.8, - 2.0, - 2.2, - 2.4, - 2.7, - 3.0, - 3.3, - 3.6, - 3.9, - 4.3, - 4.7, - 5.1, - 5.6, - 6.2, - 6.8, - 7.5, - 8.2, - 9.1, - } - - E12 = { - 1.0, - 1.2, - 1.5, - 1.8, - 2.2, - 2.7, - 3.3, - 3.9, - 4.7, - 5.6, - 6.8, - 8.2, - } - - E6 = { - 1.0, - 1.5, - 2.2, - 3.3, - 4.7, - 6.8, - } - - E3 = { - 1.0, - 2.2, - 4.7, - } - - E_ALL = set(sorted(E24 | E192)) - - -def repeat_set_over_base( - values: set[float], base: int, exp_range: range, n_decimals: int = 13 -) -> set[float]: - assert all(v >= 1 and v < base for v in values) - return set( - [round(val * base**exp, n_decimals) for val in values for exp in exp_range] - ) - - -class ParamNotResolvedError(Exception): ... 
- - -_e_series_cache: list[tuple[Parameter, int, set]] = [] - - -def e_series_intersect[T: float | Quantity]( - value: Parameter[T], e_series: E_SERIES = E_SERIES_VALUES.E_ALL -) -> F.Set[T]: - # TODO this got really uglu, need to clean up - - value = value.get_most_narrow() - - for k, i, v in _e_series_cache: - if k == value and i == id(e_series): - return F.Set(v) - - if isinstance(value, F.Constant): - value = F.Range(value) - elif isinstance(value, F.Set): - raise NotImplementedError - elif isinstance(value, (F.Operation, F.TBD)): - raise ParamNotResolvedError() - elif isinstance(value, F.ANY): - # TODO - raise ParamNotResolvedError() - - assert isinstance(value, F.Range) - - min_val = value.min - max_val = value.max - unit = 1 - - if not isinstance(min_val, F.Constant) or not isinstance(max_val, F.Constant): - # TODO - raise Exception() - - min_val = min_val.value - max_val = max_val.value - - if isinstance(min_val, Quantity): - assert isinstance(max_val, Quantity) - - min_val_q = min_val.to_compact() - - unit = min_val_q.units - max_val_q = max_val.to(unit) - assert max_val_q.units == unit - - min_val: float = min_val_q.magnitude - max_val: float = max_val_q.magnitude - - assert isinstance(min_val, (float, int)) and isinstance(max_val, (float, int)) - - # TODO ugly - if max_val == math.inf: - max_val = min_val * 10e3 +from faebryk.libs.picker.jlcpcb.jlcpcb import _RaiseType + + +class E_SERIES_VALUES(_RaiseType): ... + + +def e_series_intersect(*args, **kwargs): + raise NotImplementedError() + + +def e_series_ratio(*args, **kwargs): + raise NotImplementedError() + + +def e_series_discretize_to_nearest(*args, **kwargs): + raise NotImplementedError() + + +# TODO +if False: + import copy + import logging + import math + from math import ceil, floor, log10 + from typing import Tuple + + import faebryk.library._F as F + from faebryk.core.parameter import Parameter + from faebryk.libs.units import Quantity + + logger = logging.getLogger(__name__) + + E_SERIES = set[float] + + class E_SERIES_VALUES: + E192 = { + 1.00, + 1.01, + 1.02, + 1.04, + 1.05, + 1.06, + 1.07, + 1.09, + 1.10, + 1.11, + 1.13, + 1.14, + 1.15, + 1.17, + 1.18, + 1.20, + 1.21, + 1.23, + 1.24, + 1.26, + 1.27, + 1.29, + 1.30, + 1.32, + 1.33, + 1.35, + 1.37, + 1.38, + 1.40, + 1.42, + 1.43, + 1.45, + 1.47, + 1.49, + 1.50, + 1.52, + 1.54, + 1.56, + 1.58, + 1.60, + 1.62, + 1.64, + 1.65, + 1.67, + 1.69, + 1.72, + 1.74, + 1.76, + 1.78, + 1.80, + 1.82, + 1.84, + 1.87, + 1.89, + 1.91, + 1.93, + 1.96, + 1.98, + 2.00, + 2.03, + 2.05, + 2.08, + 2.10, + 2.13, + 2.15, + 2.18, + 2.21, + 2.23, + 2.26, + 2.29, + 2.32, + 2.34, + 2.37, + 2.40, + 2.43, + 2.46, + 2.49, + 2.52, + 2.55, + 2.58, + 2.61, + 2.64, + 2.67, + 2.71, + 2.74, + 2.77, + 2.80, + 2.84, + 2.87, + 2.91, + 2.94, + 2.98, + 3.01, + 3.05, + 3.09, + 3.12, + 3.16, + 3.20, + 3.24, + 3.28, + 3.32, + 3.36, + 3.40, + 3.44, + 3.48, + 3.52, + 3.57, + 3.61, + 3.65, + 3.70, + 3.74, + 3.79, + 3.83, + 3.88, + 3.92, + 3.97, + 4.02, + 4.07, + 4.12, + 4.17, + 4.22, + 4.27, + 4.32, + 4.37, + 4.42, + 4.48, + 4.53, + 4.59, + 4.64, + 4.70, + 4.75, + 4.81, + 4.87, + 4.93, + 4.99, + 5.05, + 5.11, + 5.17, + 5.23, + 5.30, + 5.36, + 5.42, + 5.49, + 5.56, + 5.62, + 5.69, + 5.76, + 5.83, + 5.90, + 5.97, + 6.04, + 6.12, + 6.19, + 6.26, + 6.34, + 6.42, + 6.49, + 6.57, + 6.65, + 6.73, + 6.81, + 6.90, + 6.98, + 7.06, + 7.15, + 7.23, + 7.32, + 7.41, + 7.50, + 7.59, + 7.68, + 7.77, + 7.87, + 7.96, + 8.06, + 8.16, + 8.25, + 8.35, + 8.45, + 8.56, + 8.66, + 8.76, + 8.87, + 8.98, + 9.09, + 9.20, + 9.31, + 9.42, + 
9.53, + 9.65, + 9.76, + 9.88, + } + + E96 = { + 1.00, + 1.02, + 1.05, + 1.07, + 1.10, + 1.13, + 1.15, + 1.18, + 1.21, + 1.24, + 1.27, + 1.30, + 1.33, + 1.37, + 1.40, + 1.43, + 1.47, + 1.50, + 1.54, + 1.58, + 1.62, + 1.65, + 1.69, + 1.74, + 1.78, + 1.82, + 1.87, + 1.91, + 1.96, + 2.00, + 2.05, + 2.10, + 2.15, + 2.21, + 2.26, + 2.32, + 2.37, + 2.43, + 2.49, + 2.55, + 2.61, + 2.67, + 2.74, + 2.80, + 2.87, + 2.94, + 3.01, + 3.09, + 3.16, + 3.24, + 3.32, + 3.40, + 3.48, + 3.57, + 3.65, + 3.74, + 3.83, + 3.92, + 4.02, + 4.12, + 4.22, + 4.32, + 4.42, + 4.53, + 4.64, + 4.75, + 4.87, + 4.99, + 5.11, + 5.23, + 5.36, + 5.49, + 5.62, + 5.76, + 5.90, + 6.04, + 6.19, + 6.34, + 6.49, + 6.65, + 6.81, + 6.98, + 7.15, + 7.32, + 7.50, + 7.68, + 7.87, + 8.06, + 8.25, + 8.45, + 8.66, + 8.87, + 9.09, + 9.31, + 9.53, + 9.76, + } + + E48 = { + 1.00, + 1.05, + 1.10, + 1.15, + 1.21, + 1.27, + 1.33, + 1.40, + 1.47, + 1.54, + 1.62, + 1.69, + 1.78, + 1.87, + 1.96, + 2.05, + 2.15, + 2.26, + 2.37, + 2.49, + 2.61, + 2.74, + 2.87, + 3.01, + 3.16, + 3.32, + 3.48, + 3.65, + 3.83, + 4.02, + 4.22, + 4.42, + 4.64, + 4.87, + 5.11, + 5.36, + 5.62, + 5.90, + 6.19, + 6.49, + 6.81, + 7.15, + 7.50, + 7.87, + 8.25, + 8.66, + 9.09, + 9.53, + } + + E24 = { + 1.0, + 1.1, + 1.2, + 1.3, + 1.5, + 1.6, + 1.8, + 2.0, + 2.2, + 2.4, + 2.7, + 3.0, + 3.3, + 3.6, + 3.9, + 4.3, + 4.7, + 5.1, + 5.6, + 6.2, + 6.8, + 7.5, + 8.2, + 9.1, + } + + E12 = { + 1.0, + 1.2, + 1.5, + 1.8, + 2.2, + 2.7, + 3.3, + 3.9, + 4.7, + 5.6, + 6.8, + 8.2, + } + + E6 = { + 1.0, + 1.5, + 2.2, + 3.3, + 4.7, + 6.8, + } + + E3 = { + 1.0, + 2.2, + 4.7, + } + + E_ALL = set(sorted(E24 | E192)) + + def repeat_set_over_base( + values: set[float], base: int, exp_range: range, n_decimals: int = 13 + ) -> set[float]: + assert all(v >= 1 and v < base for v in values) + return set( + [round(val * base**exp, n_decimals) for val in values for exp in exp_range] + ) - e_series_values = repeat_set_over_base( - e_series, 10, range(floor(log10(min_val)), ceil(log10(max_val)) + 1) - ) - out = value & {e * unit for e in e_series_values} - _e_series_cache.append((copy.copy(value), id(e_series), out.params)) - return out + class ParamNotResolvedError(Exception): ... 
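# Note (illustrative, values assumed): repeat_set_over_base() above tiles a
# decade set across powers of the base, e.g.
#   repeat_set_over_base({1.0, 2.2, 4.7}, 10, range(0, 2))
#   -> {1.0, 2.2, 4.7, 10.0, 22.0, 47.0}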
+ _e_series_cache: list[tuple[Parameter, int, set]] = [] -def e_series_discretize_to_nearest( - value: Parameter, e_series: E_SERIES = E_SERIES_VALUES.E_ALL -) -> F.Constant: - if not isinstance(value, (F.Constant, F.Range)): - raise NotImplementedError + def e_series_intersect[T: float | Quantity]( + value: Parameter[T], e_series: E_SERIES = E_SERIES_VALUES.E_ALL + ) -> F.Set[T]: + # TODO this got really uglu, need to clean up - target = value.value if isinstance(value, F.Constant) else sum(value.as_tuple()) / 2 + value = value.get_most_narrow() - e_series_values = repeat_set_over_base( - e_series, 10, range(floor(log10(target)), ceil(log10(target)) + 1) - ) + for k, i, v in _e_series_cache: + if k == value and i == id(e_series): + return F.Set(v) - return F.Constant(min(e_series_values, key=lambda x: abs(x - target))) + if isinstance(value, F.Constant): + value = F.Range(value) + elif isinstance(value, F.Set): + raise NotImplementedError + elif isinstance(value, (F.Operation, F.TBD)): + raise ParamNotResolvedError() + elif isinstance(value, F.ANY): + # TODO + raise ParamNotResolvedError() + assert isinstance(value, F.Range) -def e_series_ratio( - RH: Parameter, - RL: Parameter, - output_input_ratio: Parameter, - e_values: E_SERIES = E_SERIES_VALUES.E_ALL, -) -> Tuple[float, float]: - """ - Calculates the values for two components in the E series range which are bound by a - ratio. + min_val = value.min + max_val = value.max + unit = 1 - RH and RL define the contstraints for the components, and output_input_ratio is the - output/input voltage ratio as defined below. - RH and output_input_ratio must be constrained to a range or constant, but RL can be - ANY. + if not isinstance(min_val, F.Constant) or not isinstance(max_val, F.Constant): + # TODO + raise Exception() - output_input_ratio = RL/(RH + RL) - RL/oir = RH + RL - RL * (1/oir -1) = RH - RL = RH / (1/oir -1) + min_val = min_val.value + max_val = max_val.value - Returns a tuple of RH/RL values. + if isinstance(min_val, Quantity): + assert isinstance(max_val, Quantity) - Can be used for a resistive divider. 
- """ + min_val_q = min_val.to_compact() - if ( - not isinstance(RH, (F.Constant, F.Range)) - or not isinstance(RL, (F.Constant, F.Range, F.ANY)) - or not isinstance(output_input_ratio, (F.Constant, F.Range)) - ): - raise NotImplementedError + unit = min_val_q.units + max_val_q = max_val.to(unit) + assert max_val_q.units == unit - if not output_input_ratio.is_subset_of(F.Range(0, 1)): - raise ValueError("Invalid output/input voltage ratio") + min_val: float = min_val_q.magnitude + max_val: float = max_val_q.magnitude - rh = F.Range(RH.value, RH.value) if isinstance(RH, F.Constant) else RH - rl = F.Range(RL.value, RL.value) if isinstance(RL, F.Constant) else RL - oir = ( - F.Range(output_input_ratio.value, output_input_ratio.value) - if isinstance(output_input_ratio, F.Constant) - else output_input_ratio - ) + assert isinstance(min_val, (float, int)) and isinstance(max_val, (float, int)) - rh_values = e_series_intersect(rh, e_values) - rl_values = e_series_intersect(rl, e_values) if isinstance(rl, F.Range) else None + # TODO ugly + if max_val == math.inf: + max_val = min_val * 10e3 - target_ratio = oir.as_center_tuple()[0] + e_series_values = repeat_set_over_base( + e_series, 10, range(floor(log10(min_val)), ceil(log10(max_val)) + 1) + ) + out = value & {e * unit for e in e_series_values} + _e_series_cache.append((copy.copy(value), id(e_series), out.params)) + return out + + def e_series_discretize_to_nearest( + value: Parameter, e_series: E_SERIES = E_SERIES_VALUES.E_ALL + ) -> F.Constant: + if not isinstance(value, (F.Constant, F.Range)): + raise NotImplementedError + + target = ( + value.value if isinstance(value, F.Constant) else sum(value.as_tuple()) / 2 + ) - solutions = [] + e_series_values = repeat_set_over_base( + e_series, 10, range(floor(log10(target)), ceil(log10(target)) + 1) + ) - for rh_val in rh_values.params: - rl_ideal = rh_val / (F.Constant(1) / target_ratio - 1) + return F.Constant(min(e_series_values, key=lambda x: abs(x - target))) + + def e_series_ratio( + RH: Parameter, + RL: Parameter, + output_input_ratio: Parameter, + e_values: E_SERIES = E_SERIES_VALUES.E_ALL, + ) -> Tuple[float, float]: + """ + Calculates the values for two components in the E series range which are bound by a + ratio. + + RH and RL define the contstraints for the components, and output_input_ratio is the + output/input voltage ratio as defined below. + RH and output_input_ratio must be constrained to a range or constant, but RL can be + ANY. + + output_input_ratio = RL/(RH + RL) + RL/oir = RH + RL + RL * (1/oir -1) = RH + RL = RH / (1/oir -1) + + Returns a tuple of RH/RL values. + + Can be used for a resistive divider. 
+ """ + + if ( + not isinstance(RH, (F.Constant, F.Range)) + or not isinstance(RL, (F.Constant, F.Range, F.ANY)) + or not isinstance(output_input_ratio, (F.Constant, F.Range)) + ): + raise NotImplementedError + + if not output_input_ratio.is_subset_of(F.Range(0, 1)): + raise ValueError("Invalid output/input voltage ratio") + + rh = F.Range(RH.value, RH.value) if isinstance(RH, F.Constant) else RH + rl = F.Range(RL.value, RL.value) if isinstance(RL, F.Constant) else RL + oir = ( + F.Range(output_input_ratio.value, output_input_ratio.value) + if isinstance(output_input_ratio, F.Constant) + else output_input_ratio + ) - rl_nearest_e_val = ( - min(rl_values.params, key=lambda x: abs(x - rl_ideal)) - if rl_values - else e_series_discretize_to_nearest(rl_ideal, e_values) + rh_values = e_series_intersect(rh, e_values) + rl_values = ( + e_series_intersect(rl, e_values) if isinstance(rl, F.Range) else None ) - real_ratio = rl_nearest_e_val / (rh_val + rl_nearest_e_val) - solutions.append((real_ratio, (rh_val, rl_nearest_e_val))) + target_ratio = oir.as_center_tuple()[0] - optimum = min(solutions, key=lambda x: abs(x[0] - target_ratio)) + solutions = [] - logger.debug( - f"{target_ratio=}, {optimum[0]=}, {oir}, " - f"error: {abs(optimum[0]/ target_ratio - 1)*100:.4f}%" - ) + for rh_val in rh_values.params: + rl_ideal = rh_val / (F.Constant(1) / target_ratio - 1) - if optimum[0] not in oir: - raise ArithmeticError( - "Calculated optimum RH RL value pair gives output/input voltage ratio " - "outside of specified range. Consider relaxing the constraints" + rl_nearest_e_val = ( + min(rl_values.params, key=lambda x: abs(x - rl_ideal)) + if rl_values + else e_series_discretize_to_nearest(rl_ideal, e_values) + ) + real_ratio = rl_nearest_e_val / (rh_val + rl_nearest_e_val) + + solutions.append((real_ratio, (rh_val, rl_nearest_e_val))) + + optimum = min(solutions, key=lambda x: abs(x[0] - target_ratio)) + + logger.debug( + f"{target_ratio=}, {optimum[0]=}, {oir}, " + f"error: {abs(optimum[0]/ target_ratio - 1)*100:.4f}%" ) - return optimum[1] + if optimum[0] not in oir: + raise ArithmeticError( + "Calculated optimum RH RL value pair gives output/input voltage ratio " + "outside of specified range. 
Consider relaxing the constraints" + ) + + return optimum[1] diff --git a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py index 721754a8..cda85cd1 100644 --- a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py +++ b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py @@ -1,761 +1,799 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -import asyncio -import datetime -import logging -import os -import struct -import sys -from dataclasses import dataclass -from pathlib import Path -from textwrap import indent -from typing import Any, Callable, Generator, Self, Sequence - -import patoolib -import requests -from pint import DimensionalityError -from rich.progress import track -from tortoise import Tortoise -from tortoise.expressions import Q -from tortoise.fields import CharField, IntField, JSONField -from tortoise.models import Model - -import faebryk.library._F as F -from faebryk.core.module import Module -from faebryk.core.parameter import Parameter -from faebryk.libs.e_series import ( - E_SERIES_VALUES, - ParamNotResolvedError, - e_series_intersect, -) -from faebryk.libs.picker.lcsc import ( - LCSC_NoDataException, - LCSC_Part, - LCSC_PinmapException, - attach, -) -from faebryk.libs.picker.picker import ( - DescriptiveProperties, - PickError, - has_part_picked_defined, -) -from faebryk.libs.units import P, Quantity, UndefinedUnitError, to_si_str -from faebryk.libs.util import at_exit, cast_assert, try_or - -logger = logging.getLogger(__name__) - -# TODO dont hardcode relative paths -BUILD_FOLDER = Path("./build") -CACHE_FOLDER = BUILD_FOLDER / Path("cache") - - -class JLCPCB_Part(LCSC_Part): - def __init__(self, partno: str) -> None: - super().__init__(partno=partno) - - -class TBD_ParseError(F.TBD): - """ - Wrapper for TBD that behaves exactly like TBD for the core and picker - But gives us the possibility to attach parser errors to it for deferred - error logging - """ - - def __init__(self, e: Exception, msg: str): - self.e = e - self.msg = msg - super().__init__() - - def __repr__(self): - return f"{super().__repr__()}({self.msg}: {self.e})" - - -@dataclass -class MappingParameterDB: - param_name: str - attr_keys: list[str] - attr_tolerance_key: str | None = None - transform_fn: Callable[[str], Parameter] | None = None - ignore_at: bool = True - - -class Category(Model): - id = IntField(primary_key=True) - category = CharField(max_length=255) - subcategory = CharField(max_length=255) - - class Meta: - table = "categories" - - async def get_ids( - self, category: str = "", subcategory: str = "" - ) -> list[dict[str, Any]]: - """ - Get the category ids for the given category and subcategory - - :param category: The category to search for, use "" for any - :param subcategory: The subcategory to search for, use "" for any - :return: A list of category ids for the JLCPCB database Component id field +class _RaiseType: + def __init__(self): + raise NotImplementedError() + + +class JLCPCB_DB(_RaiseType): + class config: + class db_path: + @staticmethod + def exists() -> bool: + return False + + +class ComponentQuery(_RaiseType): ... + + +class Component(_RaiseType): ... + + +class MappingParameterDB(_RaiseType): ... 
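# Worked check (illustrative values, not from the patch) of the divider
# rearrangement documented in e_series_ratio above:
#   oir = RL / (RH + RL)  =>  RL = RH / (1/oir - 1)
oir = 0.25                      # target output/input ratio
RH = 30_000                     # ohm
RL = RH / (1 / oir - 1)         # = 10_000 ohm
assert RL == 10_000 and abs(RL / (RH + RL) - oir) < 1e-12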
+ + +# TODO +if False: + import asyncio + import datetime + import logging + import os + import struct + import sys + from dataclasses import dataclass + from pathlib import Path + from textwrap import indent + from typing import Any, Callable, Generator, Self, Sequence + + import patoolib + import requests + from pint import DimensionalityError + from rich.progress import track + from tortoise import Tortoise + from tortoise.expressions import Q + from tortoise.fields import CharField, IntField, JSONField + from tortoise.models import Model + + import faebryk.library._F as F + from faebryk.core.module import Module + from faebryk.core.parameter import Parameter + from faebryk.libs.e_series import ( + E_SERIES_VALUES, + ParamNotResolvedError, + e_series_intersect, + ) + from faebryk.libs.picker.lcsc import ( + LCSC_NoDataException, + LCSC_Part, + LCSC_PinmapException, + attach, + ) + from faebryk.libs.picker.picker import ( + DescriptiveProperties, + PickError, + has_part_picked_defined, + ) + from faebryk.libs.units import P, Quantity, UndefinedUnitError, to_si_str + from faebryk.libs.util import at_exit, cast_assert, try_or + + logger = logging.getLogger(__name__) + + # TODO dont hardcode relative paths + BUILD_FOLDER = Path("./build") + CACHE_FOLDER = BUILD_FOLDER / Path("cache") + + class JLCPCB_Part(LCSC_Part): + def __init__(self, partno: str) -> None: + super().__init__(partno=partno) + + class TBD_ParseError(F.TBD): + """ + Wrapper for TBD that behaves exactly like TBD for the core and picker + But gives us the possibility to attach parser errors to it for deferred + error logging """ - filter_query = Q() - if category != "": - filter_query &= Q(category__icontains=category) - if subcategory != "": - filter_query &= Q(subcategory__icontains=subcategory) - category_ids = await self.filter(filter_query).values("id") - if len(category_ids) < 1: - raise LookupError( - f"Could not find a match for category {category} " - f"and subcategory {subcategory}", - ) - return [c["id"] for c in category_ids] + def __init__(self, e: Exception, msg: str): + self.e = e + self.msg = msg + super().__init__() -class Manufacturers(Model): - id = IntField(primary_key=True) - name = CharField(max_length=255) + def __repr__(self): + return f"{super().__repr__()}({self.msg}: {self.e})" - class Meta: - table = "manufacturers" + @dataclass + class MappingParameterDB: + param_name: str + attr_keys: list[str] + attr_tolerance_key: str | None = None + transform_fn: Callable[[str], Parameter] | None = None + ignore_at: bool = True + + class Category(Model): + id = IntField(primary_key=True) + category = CharField(max_length=255) + subcategory = CharField(max_length=255) + + class Meta: + table = "categories" + + async def get_ids( + self, category: str = "", subcategory: str = "" + ) -> list[dict[str, Any]]: + """ + Get the category ids for the given category and subcategory + + :param category: The category to search for, use "" for any + :param subcategory: The subcategory to search for, use "" for any + + :return: A list of category ids for the JLCPCB database Component id field + """ + filter_query = Q() + if category != "": + filter_query &= Q(category__icontains=category) + if subcategory != "": + filter_query &= Q(subcategory__icontains=subcategory) + category_ids = await self.filter(filter_query).values("id") + if len(category_ids) < 1: + raise LookupError( + f"Could not find a match for category {category} " + f"and subcategory {subcategory}", + ) + return [c["id"] for c in category_ids] - async def 
get_ids(self, manufacturer: str) -> list[int]: - """ - Get the manufacturer ids for the given manufacturer + class Manufacturers(Model): + id = IntField(primary_key=True) + name = CharField(max_length=255) - :param manufacturer: The manufacturer to search for + class Meta: + table = "manufacturers" - :return: A list of manufacturer ids for the JLCPCB database Component id field - """ - manufacturer_ids = await self.filter(name__icontains=manufacturer).values("id") - if len(manufacturer_ids) < 1: - raise LookupError(f"Could not find a match for manufacturer {manufacturer}") - return [m["id"] for m in manufacturer_ids] - - async def get_from_id(self, manufacturer_id: int) -> str: - return (await self.get(id=manufacturer_id)).name - - -class Component(Model): - lcsc = IntField(primary_key=True) - category_id = IntField() - mfr = CharField(max_length=255) - package = CharField(max_length=255) - joints = IntField() - manufacturer_id = IntField() - basic = IntField() - description = CharField(max_length=255) - datasheet = CharField(max_length=255) - stock = IntField() - price = JSONField() - last_update = IntField() - extra = JSONField() - flag = IntField() - last_on_stock = IntField() - preferred = IntField() - - class Meta: - table = "components" - - class ParseError(Exception): - pass - - @property - def partno(self): - return f"C{self.lcsc}" - - def get_price(self, qty: int = 1) -> float: - """ - Get the price for qty of the component including handling fees + async def get_ids(self, manufacturer: str) -> list[int]: + """ + Get the manufacturer ids for the given manufacturer - For handling fees and component price classifications, see: - https://jlcpcb.com/help/article/pcb-assembly-faqs - """ - BASIC_HANDLING_FEE = 0 - PREFERRED_HANDLING_FEE = 0 - EXTENDED_HANDLING_FEE = 3 - - if qty < 1: - raise ValueError("Quantity must be greater than 0") - - if self.basic: - handling_fee = BASIC_HANDLING_FEE - elif self.preferred: - handling_fee = PREFERRED_HANDLING_FEE - else: - handling_fee = EXTENDED_HANDLING_FEE - - unit_price = float("inf") - try: - for p in self.price: - if p["qTo"] is None or qty < p["qTo"]: - unit_price = float(p["price"]) - unit_price = float(self.price[-1]["price"]) - except LookupError: - pass + :param manufacturer: The manufacturer to search for - return unit_price * qty + handling_fee + :return: A list of manufacturer ids for the JLCPCB database Component id field + """ + manufacturer_ids = await self.filter(name__icontains=manufacturer).values( + "id" + ) + if len(manufacturer_ids) < 1: + raise LookupError( + f"Could not find a match for manufacturer {manufacturer}" + ) + return [m["id"] for m in manufacturer_ids] + + async def get_from_id(self, manufacturer_id: int) -> str: + return (await self.get(id=manufacturer_id)).name + + class Component(Model): + lcsc = IntField(primary_key=True) + category_id = IntField() + mfr = CharField(max_length=255) + package = CharField(max_length=255) + joints = IntField() + manufacturer_id = IntField() + basic = IntField() + description = CharField(max_length=255) + datasheet = CharField(max_length=255) + stock = IntField() + price = JSONField() + last_update = IntField() + extra = JSONField() + flag = IntField() + last_on_stock = IntField() + preferred = IntField() + + class Meta: + table = "components" + + class ParseError(Exception): + pass - def attribute_to_parameter( - self, attribute_name: str, use_tolerance: bool = False, ignore_at: bool = True - ) -> Parameter: - """ - Convert a component value in the extra['attributes'] dict to a 
parameter + @property + def partno(self): + return f"C{self.lcsc}" - :param attribute_name: The key in the extra['attributes'] dict to convert - :param use_tolerance: Whether to use the tolerance field in the component + def get_price(self, qty: int = 1) -> float: + """ + Get the price for qty of the component including handling fees - :return: The parameter representing the attribute value - """ - assert isinstance(self.extra, dict) and "attributes" in self.extra - - value_field = self.extra["attributes"][attribute_name] - # parse fields like "850mV@1A" - # TODO better to actually parse this - if ignore_at: - value_field = value_field.split("@")[0] - - value_field = value_field.replace("cd", "candela") - - # parse fields like "1.5V~2.5V" - if "~" in value_field: - values = value_field.split("~") - if len(values) != 2: - raise ValueError(f"Invalid range from value '{value_field}'") - return F.Range(*(P.Quantity(v) for v in values)) - - # unit hacks - - try: - value = P.Quantity(value_field) - except UndefinedUnitError as e: - raise ValueError(f"Could not parse value field '{value_field}'") from e - - if not use_tolerance: - return F.Constant(value) - - if "Tolerance" not in self.extra["attributes"]: - raise ValueError(f"No Tolerance field in component (lcsc: {self.lcsc})") - if "ppm" in self.extra["attributes"]["Tolerance"]: - tolerance = float(self.extra["attributes"]["Tolerance"].strip("±pm")) / 1e6 - elif "%~+" in self.extra["attributes"]["Tolerance"]: - tolerances = self.extra["attributes"]["Tolerance"].split("~") - tolerances = [float(t.strip("%+-")) for t in tolerances] - tolerance = max(tolerances) / 100 - elif "%" in self.extra["attributes"]["Tolerance"]: - tolerance = float(self.extra["attributes"]["Tolerance"].strip("%±")) / 100 - else: - raise ValueError( - "Could not parse tolerance field " - f"'{self.extra['attributes']['Tolerance']}'" - ) + For handling fees and component price classifications, see: + https://jlcpcb.com/help/article/pcb-assembly-faqs + """ + BASIC_HANDLING_FEE = 0 + PREFERRED_HANDLING_FEE = 0 + EXTENDED_HANDLING_FEE = 3 - return F.Range.from_center_rel(value, tolerance) + if qty < 1: + raise ValueError("Quantity must be greater than 0") - def get_parameter(self, m: MappingParameterDB) -> Parameter: - """ - Transform a component attribute to a parameter + if self.basic: + handling_fee = BASIC_HANDLING_FEE + elif self.preferred: + handling_fee = PREFERRED_HANDLING_FEE + else: + handling_fee = EXTENDED_HANDLING_FEE - :param attribute_search_keys: The key in the component's extra['attributes'] - dict that holds the value to check - :param tolerance_search_key: The key in the component's extra['attributes'] dict - that holds the tolerance value - :param parser: A function to convert the attribute value to the correct type + unit_price = float("inf") + try: + for p in self.price: + if p["qTo"] is None or qty < p["qTo"]: + unit_price = float(p["price"]) + unit_price = float(self.price[-1]["price"]) + except LookupError: + pass + + return unit_price * qty + handling_fee + + def attribute_to_parameter( + self, + attribute_name: str, + use_tolerance: bool = False, + ignore_at: bool = True, + ) -> Parameter: + """ + Convert a component value in the extra['attributes'] dict to a parameter + + :param attribute_name: The key in the extra['attributes'] dict to convert + :param use_tolerance: Whether to use the tolerance field in the component + + :return: The parameter representing the attribute value + """ + assert isinstance(self.extra, dict) and "attributes" in 
self.extra + + value_field = self.extra["attributes"][attribute_name] + # parse fields like "850mV@1A" + # TODO better to actually parse this + if ignore_at: + value_field = value_field.split("@")[0] + + value_field = value_field.replace("cd", "candela") + + # parse fields like "1.5V~2.5V" + if "~" in value_field: + values = value_field.split("~") + if len(values) != 2: + raise ValueError(f"Invalid range from value '{value_field}'") + return F.Range(*(P.Quantity(v) for v in values)) + + # unit hacks - :return: The parameter representing the attribute value - """ + try: + value = P.Quantity(value_field) + except UndefinedUnitError as e: + raise ValueError(f"Could not parse value field '{value_field}'") from e + + if not use_tolerance: + return F.Constant(value) + + if "Tolerance" not in self.extra["attributes"]: + raise ValueError(f"No Tolerance field in component (lcsc: {self.lcsc})") + if "ppm" in self.extra["attributes"]["Tolerance"]: + tolerance = ( + float(self.extra["attributes"]["Tolerance"].strip("±pm")) / 1e6 + ) + elif "%~+" in self.extra["attributes"]["Tolerance"]: + tolerances = self.extra["attributes"]["Tolerance"].split("~") + tolerances = [float(t.strip("%+-")) for t in tolerances] + tolerance = max(tolerances) / 100 + elif "%" in self.extra["attributes"]["Tolerance"]: + tolerance = ( + float(self.extra["attributes"]["Tolerance"].strip("%±")) / 100 + ) + else: + raise ValueError( + "Could not parse tolerance field " + f"'{self.extra['attributes']['Tolerance']}'" + ) - attribute_search_keys = m.attr_keys - tolerance_search_key = m.attr_tolerance_key - parser = m.transform_fn + return F.Range.from_center_rel(value, tolerance) - if tolerance_search_key is not None and parser is not None: - raise NotImplementedError( - "Cannot provide both tolerance_search_key and parser arguments" - ) + def get_parameter(self, m: MappingParameterDB) -> Parameter: + """ + Transform a component attribute to a parameter - assert isinstance(self.extra, dict) + :param attribute_search_keys: The key in the component's extra['attributes'] + dict that holds the value to check + :param tolerance_search_key: The key in the component's extra['attributes'] dict + that holds the tolerance value + :param parser: A function to convert the attribute value to the correct type - attr_key = next( - (k for k in attribute_search_keys if k in self.extra.get("attributes", "")), - None, - ) + :return: The parameter representing the attribute value + """ - if "attributes" not in self.extra: - raise LookupError("does not have any attributes") - if attr_key is None: - raise LookupError( - f"does not have any of required attribute fields: " - f"{attribute_search_keys} in {self.extra['attributes']}" - ) - if ( - tolerance_search_key is not None - and tolerance_search_key not in self.extra["attributes"] - ): - raise LookupError( - f"does not have any of required tolerance fields: " - f"{tolerance_search_key}" - ) + attribute_search_keys = m.attr_keys + tolerance_search_key = m.attr_tolerance_key + parser = m.transform_fn - if parser is not None: - return parser(self.extra["attributes"][attr_key]) + if tolerance_search_key is not None and parser is not None: + raise NotImplementedError( + "Cannot provide both tolerance_search_key and parser arguments" + ) - return self.attribute_to_parameter( - attr_key, tolerance_search_key is not None, m.ignore_at - ) + assert isinstance(self.extra, dict) - def get_params(self, mapping: list[MappingParameterDB]) -> list[Parameter]: - return [ - try_or( - lambda: self.get_parameter(m), - 
default_f=lambda e: TBD_ParseError( - e, f"Failed to parse {m.param_name}" + attr_key = next( + ( + k + for k in attribute_search_keys + if k in self.extra.get("attributes", "") ), - catch=(LookupError, ValueError, AssertionError), - ) - for m in mapping - ] - - def attach( - self, - module: Module, - mapping: list[MappingParameterDB], - qty: int = 1, - allow_TBD: bool = False, - ): - params = self.get_params(mapping) - - if not allow_TBD and any(isinstance(p, TBD_ParseError) for p in params): - params_str = indent( - "\n" - + "\n".join(repr(p) for p in params if isinstance(p, TBD_ParseError)), - " " * 4, + None, ) - raise Component.ParseError( - f"Failed to parse parameters for component {self.partno}: {params_str}" + + if "attributes" not in self.extra: + raise LookupError("does not have any attributes") + if attr_key is None: + raise LookupError( + f"does not have any of required attribute fields: " + f"{attribute_search_keys} in {self.extra['attributes']}" + ) + if ( + tolerance_search_key is not None + and tolerance_search_key not in self.extra["attributes"] + ): + raise LookupError( + f"does not have any of required tolerance fields: " + f"{tolerance_search_key}" + ) + + if parser is not None: + return parser(self.extra["attributes"][attr_key]) + + return self.attribute_to_parameter( + attr_key, tolerance_search_key is not None, m.ignore_at ) - for name, value in zip([m.param_name for m in mapping], params): - getattr(module, name).override(value) + def get_params(self, mapping: list[MappingParameterDB]) -> list[Parameter]: + return [ + try_or( + lambda: self.get_parameter(m), + default_f=lambda e: TBD_ParseError( + e, f"Failed to parse {m.param_name}" + ), + catch=(LookupError, ValueError, AssertionError), + ) + for m in mapping + ] + + def attach( + self, + module: Module, + mapping: list[MappingParameterDB], + qty: int = 1, + allow_TBD: bool = False, + ): + params = self.get_params(mapping) - module.add( - F.has_descriptive_properties_defined( - { - DescriptiveProperties.partno: self.mfr, - DescriptiveProperties.manufacturer: asyncio.run( - Manufacturers().get_from_id(self.manufacturer_id) + if not allow_TBD and any(isinstance(p, TBD_ParseError) for p in params): + params_str = indent( + "\n" + + "\n".join( + repr(p) for p in params if isinstance(p, TBD_ParseError) ), - DescriptiveProperties.datasheet: self.datasheet, - "JLCPCB stock": str(self.stock), - "JLCPCB price": f"{self.get_price(qty):.4f}", - "JLCPCB description": self.description, - "JLCPCB Basic": str(bool(self.basic)), - "JLCPCB Preferred": str(bool(self.preferred)), - }, - ) - ) - - attach(module, self.partno) - module.add(has_part_picked_defined(JLCPCB_Part(self.partno))) - if logger.isEnabledFor(logging.DEBUG): - logger.debug( - f"Attached component {self.partno} to module {module}: \n" - f"{indent(str(params), ' '*4)}\n--->\n" - f"{indent(module.pretty_params(), ' '*4)}" - ) + " " * 4, + ) + raise Component.ParseError( + f"Failed to parse parameters for component {self.partno}: {params_str}" + ) - @property - def mfr_name(self) -> str: - return asyncio.run(Manufacturers().get_from_id(self.manufacturer_id)) + for name, value in zip([m.param_name for m in mapping], params): + getattr(module, name).override(value) + + module.add( + F.has_descriptive_properties_defined( + { + DescriptiveProperties.partno: self.mfr, + DescriptiveProperties.manufacturer: asyncio.run( + Manufacturers().get_from_id(self.manufacturer_id) + ), + DescriptiveProperties.datasheet: self.datasheet, + "JLCPCB stock": str(self.stock), + "JLCPCB 
price": f"{self.get_price(qty):.4f}", + "JLCPCB description": self.description, + "JLCPCB Basic": str(bool(self.basic)), + "JLCPCB Preferred": str(bool(self.preferred)), + }, + ) + ) + attach(module, self.partno) + module.add(has_part_picked_defined(JLCPCB_Part(self.partno))) + if logger.isEnabledFor(logging.DEBUG): + logger.debug( + f"Attached component {self.partno} to module {module}: \n" + f"{indent(str(params), ' '*4)}\n--->\n" + f"{indent(module.pretty_params(), ' '*4)}" + ) -class ComponentQuery: - class Error(Exception): ... + @property + def mfr_name(self) -> str: + return asyncio.run(Manufacturers().get_from_id(self.manufacturer_id)) - class ParamError(Error): - def __init__(self, param: Parameter, msg: str): - self.param = param - self.msg = msg - super().__init__(f"{msg} for parameter {param!r}") + class ComponentQuery: + class Error(Exception): ... - def __init__(self): - # init db connection - JLCPCB_DB() + class ParamError(Error): + def __init__(self, param: Parameter, msg: str): + self.param = param + self.msg = msg + super().__init__(f"{msg} for parameter {param!r}") - self.Q: Q | None = Q() - self.results: list[Component] | None = None + def __init__(self): + # init db connection + JLCPCB_DB() - async def exec(self) -> list[Component]: - queryset = Component.filter(self.Q) - logger.debug(f"Query results: {await queryset.count()}") - self.results = await queryset - self.Q = None - return self.results + self.Q: Q | None = Q() + self.results: list[Component] | None = None - def get(self) -> list[Component]: - if self.results is not None: + async def exec(self) -> list[Component]: + queryset = Component.filter(self.Q) + logger.debug(f"Query results: {await queryset.count()}") + self.results = await queryset + self.Q = None return self.results - return asyncio.run(self.exec()) - - def filter_by_stock(self, qty: int) -> Self: - assert self.Q - self.Q &= Q(stock__gte=qty) - return self - - def filter_by_description(self, *keywords: str) -> Self: - assert self.Q - - logger.debug(f"Possible keywords: {keywords}") - description_query = Q() - for keyword in keywords: - description_query |= Q(description__contains=keyword) - self.Q &= description_query - - return self - - def filter_by_value( - self, - value: Parameter[Quantity], - si_unit: str, - e_series: set[float] | None = None, - ) -> Self: - assert self.Q - value = value.get_most_narrow() - - if logger.isEnabledFor(logging.DEBUG): - logger.debug( - f"Filtering by value:\n{indent(value.get_tree_param().pretty(), ' '*4)}" - ) - if isinstance(value, F.ANY): + def get(self) -> list[Component]: + if self.results is not None: + return self.results + return asyncio.run(self.exec()) + + def filter_by_stock(self, qty: int) -> Self: + assert self.Q + self.Q &= Q(stock__gte=qty) return self - assert not self.results - try: - intersection = F.Set( - [e_series_intersect(value, e_series or E_SERIES_VALUES.E_ALL)] - ).params - except ParamNotResolvedError as e: - raise ComponentQuery.ParamError( - value, f"Could not run e_series_intersect: {e}" - ) from e - si_vals = [ - to_si_str(cast_assert(F.Constant, r).value, si_unit) - .replace("µ", "u") - .replace("inf", "∞") - for r in intersection - ] - return self.filter_by_description(*si_vals) - - def filter_by_category(self, category: str, subcategory: str) -> Self: - assert self.Q - category_ids = asyncio.run(Category().get_ids(category, subcategory)) - self.Q &= Q(category_id__in=category_ids) - return self - - def filter_by_footprint( - self, footprint_candidates: Sequence[tuple[str, int]] | 
None - ) -> Self: - assert self.Q - if not footprint_candidates: + + def filter_by_description(self, *keywords: str) -> Self: + assert self.Q + + logger.debug(f"Possible keywords: {keywords}") + description_query = Q() + for keyword in keywords: + description_query |= Q(description__contains=keyword) + self.Q &= description_query + return self - footprint_query = Q() - if footprint_candidates is not None: - for footprint, pin_count in footprint_candidates: - footprint_query |= Q(description__icontains=footprint) & Q( - joints=pin_count + + def filter_by_value( + self, + value: Parameter[Quantity], + si_unit: str, + e_series: set[float] | None = None, + ) -> Self: + assert self.Q + value = value.get_most_narrow() + + if logger.isEnabledFor(logging.DEBUG): + logger.debug( + f"Filtering by value:\n{indent(value.get_tree_param().pretty(), ' '*4)}" ) - self.Q &= footprint_query - return self - - def filter_by_traits(self, obj: Module) -> Self: - out = self - if obj.has_trait(F.has_footprint_requirement): - out = self.filter_by_footprint( - obj.get_trait(F.has_footprint_requirement).get_footprint_requirement() - ) - return out + if isinstance(value, F.ANY): + return self + assert not self.results + try: + intersection = F.Set( + [e_series_intersect(value, e_series or E_SERIES_VALUES.E_ALL)] + ).params + except ParamNotResolvedError as e: + raise ComponentQuery.ParamError( + value, f"Could not run e_series_intersect: {e}" + ) from e + si_vals = [ + to_si_str(cast_assert(F.Constant, r).value, si_unit) + .replace("µ", "u") + .replace("inf", "∞") + for r in intersection + ] + return self.filter_by_description(*si_vals) + + def filter_by_category(self, category: str, subcategory: str) -> Self: + assert self.Q + category_ids = asyncio.run(Category().get_ids(category, subcategory)) + self.Q &= Q(category_id__in=category_ids) + return self - def sort_by_price(self, qty: int = 1) -> Self: - self.get().sort(key=lambda x: x.get_price(qty)) - return self + def filter_by_footprint( + self, footprint_candidates: Sequence[tuple[str, int]] | None + ) -> Self: + assert self.Q + if not footprint_candidates: + return self + footprint_query = Q() + if footprint_candidates is not None: + for footprint, pin_count in footprint_candidates: + footprint_query |= Q(description__icontains=footprint) & Q( + joints=pin_count + ) + self.Q &= footprint_query + return self - def filter_by_lcsc_pn(self, partnumber: str) -> Self: - assert self.Q - self.Q &= Q(lcsc=partnumber.strip("C")) - return self + def filter_by_traits(self, obj: Module) -> Self: + out = self + if obj.has_trait(F.has_footprint_requirement): + out = self.filter_by_footprint( + obj.get_trait( + F.has_footprint_requirement + ).get_footprint_requirement() + ) - def filter_by_manufacturer_pn(self, partnumber: str) -> Self: - assert self.Q - self.Q &= Q(mfr__icontains=partnumber) - return self + return out - def filter_by_manufacturer(self, manufacturer: str) -> Self: - assert self.Q - if not manufacturer: + def sort_by_price(self, qty: int = 1) -> Self: + self.get().sort(key=lambda x: x.get_price(qty)) return self - manufacturer_ids = asyncio.run(Manufacturers().get_ids(manufacturer)) - self.Q &= Q(manufacturer_id__in=manufacturer_ids) - return self - - def filter_by_module_params( - self, - module: Module, - mapping: list[MappingParameterDB], - ) -> Generator[Component, None, None]: - """ - Filter the results by the parameters of the module - This should be used as the last step before attaching the component to the - module - - :param module: The module to 
filter by - :param mapping: The mapping of module parameters to component attributes - :param qty: The quantity of components needed - :param attach_first: Whether to attach the first component that matches the - parameters and return immediately + def filter_by_lcsc_pn(self, partnumber: str) -> Self: + assert self.Q + self.Q &= Q(lcsc=partnumber.strip("C")) + return self - :return: The first component that matches the parameters - """ + def filter_by_manufacturer_pn(self, partnumber: str) -> Self: + assert self.Q + self.Q &= Q(mfr__icontains=partnumber) + return self - for c in self.get(): - params = c.get_params(mapping) + def filter_by_manufacturer(self, manufacturer: str) -> Self: + assert self.Q + if not manufacturer: + return self + manufacturer_ids = asyncio.run(Manufacturers().get_ids(manufacturer)) + self.Q &= Q(manufacturer_id__in=manufacturer_ids) + return self - if not all( - pm := [ - try_or( - lambda: p.is_subset_of(getattr(module, m.param_name)), - default=False, - catch=DimensionalityError, + def filter_by_module_params( + self, + module: Module, + mapping: list[MappingParameterDB], + ) -> Generator[Component, None, None]: + """ + Filter the results by the parameters of the module + + This should be used as the last step before attaching the component to the + module + + :param module: The module to filter by + :param mapping: The mapping of module parameters to component attributes + :param qty: The quantity of components needed + :param attach_first: Whether to attach the first component that matches the + parameters and return immediately + + :return: The first component that matches the parameters + """ + + for c in self.get(): + params = c.get_params(mapping) + + if not all( + pm := [ + try_or( + lambda: p.is_subset_of(getattr(module, m.param_name)), + default=False, + catch=DimensionalityError, + ) + for p, m in zip(params, mapping) + ] + ): + logger.debug( + f"Component {c.lcsc} doesn't match: " + f"{[p for p, v in zip(params, pm) if not v]}" ) - for p, m in zip(params, mapping) - ] - ): + continue + logger.debug( - f"Component {c.lcsc} doesn't match: " - f"{[p for p, v in zip(params, pm) if not v]}" + f"Found part {c.lcsc:8} " + f"Basic: {bool(c.basic)}, Preferred: {bool(c.preferred)}, " + f"Price: ${c.get_price(1):2.4f}, " + f"{c.description:15}," ) - continue - - logger.debug( - f"Found part {c.lcsc:8} " - f"Basic: {bool(c.basic)}, Preferred: {bool(c.preferred)}, " - f"Price: ${c.get_price(1):2.4f}, " - f"{c.description:15}," - ) - yield c + yield c - def filter_by_module_params_and_attach( - self, module: Module, mapping: list[MappingParameterDB], qty: int = 1 - ): - # TODO if no modules without TBD, rerun with TBD allowed + def filter_by_module_params_and_attach( + self, module: Module, mapping: list[MappingParameterDB], qty: int = 1 + ): + # TODO if no modules without TBD, rerun with TBD allowed + + failures = [] + for c in self.filter_by_module_params(module, mapping): + try: + c.attach(module, mapping, qty, allow_TBD=False) + return self + except (ValueError, Component.ParseError) as e: + failures.append((c, e)) + except LCSC_NoDataException as e: + failures.append((c, e)) + except LCSC_PinmapException as e: + failures.append((c, e)) + + if failures: + fail_str = indent( + "\n" + f"{'\n'.join(f'{c}: {e}' for c, e in failures)}", " " * 4 + ) - failures = [] - for c in self.filter_by_module_params(module, mapping): - try: - c.attach(module, mapping, qty, allow_TBD=False) - return self - except (ValueError, Component.ParseError) as e: - failures.append((c, e)) - 
except LCSC_NoDataException as e: - failures.append((c, e)) - except LCSC_PinmapException as e: - failures.append((c, e)) - - if failures: - fail_str = indent( - "\n" + f"{'\n'.join(f'{c}: {e}' for c, e in failures)}", " " * 4 - ) + raise PickError( + f"Failed to attach any components to module {module}: {len(failures)}" + f" {fail_str}", + module, + ) raise PickError( - f"Failed to attach any components to module {module}: {len(failures)}" - f" {fail_str}", + "No components found that match the parameters and that can be attached", module, ) - raise PickError( - "No components found that match the parameters and that can be attached", - module, - ) - - -class JLCPCB_DB: - @dataclass - class Config: - db_path: Path = CACHE_FOLDER / Path("jlcpcb_part_database") - no_download_prompt: bool = False - force_db_update: bool = False - - config = Config() - _instance: "JLCPCB_DB | None" = None - failed: Exception | None = None - - @staticmethod - def get() -> "JLCPCB_DB": - return JLCPCB_DB.__new__(JLCPCB_DB) - - def __new__(cls) -> "JLCPCB_DB": - if cls.failed: - raise cls.failed - if not JLCPCB_DB._instance: - instance = super(JLCPCB_DB, cls).__new__(cls) - try: - instance.init() - except FileNotFoundError as e: - cls.failed = e - raise e - - JLCPCB_DB._instance = instance - at_exit(JLCPCB_DB.close) - return JLCPCB_DB._instance - - @staticmethod - def close(): - if not JLCPCB_DB._instance: - return - instance = JLCPCB_DB._instance - JLCPCB_DB._instance = None - del instance - - def init(self) -> None: - config = self.config - self.db_path = config.db_path - self.db_file = config.db_path / Path("cache.sqlite3") - self.connected = False - - no_download_prompt = config.no_download_prompt - - if not sys.stdin.isatty(): - no_download_prompt = True - - if config.force_db_update: - self.download() - elif not self.has_db(): - if no_download_prompt or self.prompt_db_update( - f"No JLCPCB database found at {self.db_file}, download now?" - ): - self.download() - else: - raise FileNotFoundError(f"No JLCPCB database found at {self.db_file}") - elif not self.is_db_up_to_date(): - if not no_download_prompt and self.prompt_db_update( - f"JLCPCB database at {self.db_file} is older than 7 days, update?" 
- ): + class JLCPCB_DB: + @dataclass + class Config: + db_path: Path = CACHE_FOLDER / Path("jlcpcb_part_database") + no_download_prompt: bool = False + force_db_update: bool = False + + config = Config() + _instance: "JLCPCB_DB | None" = None + failed: Exception | None = None + + @staticmethod + def get() -> "JLCPCB_DB": + return JLCPCB_DB.__new__(JLCPCB_DB) + + def __new__(cls) -> "JLCPCB_DB": + if cls.failed: + raise cls.failed + if not JLCPCB_DB._instance: + instance = super(JLCPCB_DB, cls).__new__(cls) + try: + instance.init() + except FileNotFoundError as e: + cls.failed = e + raise e + + JLCPCB_DB._instance = instance + at_exit(JLCPCB_DB.close) + return JLCPCB_DB._instance + + @staticmethod + def close(): + if not JLCPCB_DB._instance: + return + instance = JLCPCB_DB._instance + JLCPCB_DB._instance = None + del instance + + def init(self) -> None: + config = self.config + self.db_path = config.db_path + self.db_file = config.db_path / Path("cache.sqlite3") + self.connected = False + + no_download_prompt = config.no_download_prompt + + if not sys.stdin.isatty(): + no_download_prompt = True + + if config.force_db_update: self.download() - else: - logger.warning("Continuing with outdated JLCPCB database") - - asyncio.run(self._init_db()) - - def __del__(self): - if self.connected: - asyncio.run(self._close_db()) - - async def _init_db(self): - await Tortoise.init( - db_url=f"sqlite://{self.db_path}/cache.sqlite3", - modules={ - "models": [__name__] - }, # Use __name__ to refer to the current module - ) - self.connected = True - - async def _close_db(self): - from tortoise.log import logger as tortoise_logger - - # suppress close ORM info - tortoise_logger.setLevel(logging.WARNING) - await Tortoise.close_connections() - self.connected = False - - def has_db(self) -> bool: - return self.db_path.is_dir() and self.db_file.is_file() - - def is_db_up_to_date( - self, max_timediff: datetime.timedelta = datetime.timedelta(days=7) - ) -> bool: - if not self.has_db(): - return False - - return ( - datetime.datetime.fromtimestamp( - self.db_file.stat().st_mtime, tz=datetime.timezone.utc - ) - >= datetime.datetime.now(tz=datetime.timezone.utc) - max_timediff - ) - - def prompt_db_update(self, prompt: str = "Update JLCPCB database?") -> bool: - ans = input(prompt + " [y/N]:").lower() - return ans == "y" - - def download( - self, - ): - def download_file(url, output_path: Path): - with requests.get(url, stream=True) as r: - r.raise_for_status() - with open(output_path, "wb") as f: - for chunk in r.iter_content(chunk_size=8192): - f.write(chunk) - - def get_number_of_volumes(zip_path): - with open(zip_path, "rb") as f: - f.seek(-22, os.SEEK_END) # Go to the end of the file minus 22 bytes - end_of_central_dir = f.read(22) - - if len(end_of_central_dir) != 22 or not end_of_central_dir.startswith( - b"PK\x05\x06" + elif not self.has_db(): + if no_download_prompt or self.prompt_db_update( + f"No JLCPCB database found at {self.db_file}, download now?" ): - # Not a valid ZIP file or the end of central directory signature is - # missing - raise ValueError( - "Invalid ZIP file or End of Central Directory signature not " - "found" + self.download() + else: + raise FileNotFoundError( + f"No JLCPCB database found at {self.db_file}" ) + elif not self.is_db_up_to_date(): + if not no_download_prompt and self.prompt_db_update( + f"JLCPCB database at {self.db_file} is older than 7 days, update?" 
+ ): + self.download() + else: + logger.warning("Continuing with outdated JLCPCB database") + + asyncio.run(self._init_db()) + + def __del__(self): + if self.connected: + asyncio.run(self._close_db()) + + async def _init_db(self): + await Tortoise.init( + db_url=f"sqlite://{self.db_path}/cache.sqlite3", + modules={ + "models": [__name__] + }, # Use __name__ to refer to the current module + ) + self.connected = True - # Unpack the number of this volume (should be 0 if single part zip) - current_volume, volume_with_central_dir = struct.unpack( - " bool: + return self.db_path.is_dir() and self.db_file.is_file() - zip_file = self.db_path / Path("cache.zip") - base_url = "https://yaqwsx.github.io/jlcparts/data/" + def is_db_up_to_date( + self, max_timediff: datetime.timedelta = datetime.timedelta(days=7) + ) -> bool: + if not self.has_db(): + return False - logger.info(f"Downloading {base_url}cache.zip to {zip_file}") - download_file(base_url + "cache.zip", zip_file) + return ( + datetime.datetime.fromtimestamp( + self.db_file.stat().st_mtime, tz=datetime.timezone.utc + ) + >= datetime.datetime.now(tz=datetime.timezone.utc) - max_timediff + ) - num_volumes = get_number_of_volumes(zip_file) - assert num_volumes <= 99 - logger.info(f"Number of volumes: {num_volumes}") + def prompt_db_update(self, prompt: str = "Update JLCPCB database?") -> bool: + ans = input(prompt + " [y/N]:").lower() + return ans == "y" - # Download the additional volume files - for volume_num in track( - range(num_volumes), description="Downloading and appending zip volumes" + def download( + self, ): - # Skip .zip file since it is already downloaded - if volume_num == 0: - continue + def download_file(url, output_path: Path): + with requests.get(url, stream=True) as r: + r.raise_for_status() + with open(output_path, "wb") as f: + for chunk in r.iter_content(chunk_size=8192): + f.write(chunk) + + def get_number_of_volumes(zip_path): + with open(zip_path, "rb") as f: + f.seek(-22, os.SEEK_END) # Go to the end of the file minus 22 bytes + end_of_central_dir = f.read(22) + + if len( + end_of_central_dir + ) != 22 or not end_of_central_dir.startswith(b"PK\x05\x06"): + # Not a valid ZIP file or the end of central directory signature is + # missing + raise ValueError( + "Invalid ZIP file or End of Central Directory signature not " + "found" + ) + + # Unpack the number of this volume (should be 0 if single part zip) + current_volume, volume_with_central_dir = struct.unpack( + " L.Single[T]: +def str_to_enum[T: Enum](enum: type[T], x: str) -> L.PlainSet[T]: name = x.replace(" ", "_").replace("-", "_").upper() if name not in [e.name for e in enum]: raise ValueError(f"Enum translation error: {x}[={name}] not in {enum}") - return L.Single(enum[name]) + return L.PlainSet(enum[name]) -def str_to_enum_func[T: Enum](enum: type[T]) -> Callable[[str], L.Single[T]]: - def f(x: str) -> L.Single[T]: +def str_to_enum_func[T: Enum](enum: type[T]) -> Callable[[str], L.PlainSet[T]]: + def f(x: str) -> L.PlainSet[T]: return str_to_enum(enum, x) return f diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 45ed4a50..ac1cfda7 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: MIT from collections.abc import Generator, Iterable -from typing import Any, Protocol, TypeVar, cast +from typing import Any, Generic, Protocol, TypeVar, cast from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless @@ -427,8 +427,9 @@ def __repr__(self) -> str: return 
f"Range({self.base_to_units(self._range.min)}, {self.base_to_units(self._range.max)} | {self.units})" -def Single(value: QuantityT) -> Range[QuantityT]: - return Range(value, value) +class Single(Range[QuantityT]): + def __init__(self, value: QuantityT): + super().__init__(value, value) class Ranges(P_UnitSet[QuantityT]): diff --git a/src/faebryk/libs/test/solver.py b/src/faebryk/libs/test/solver.py index 70f2aeb8..9beb6e9c 100644 --- a/src/faebryk/libs/test/solver.py +++ b/src/faebryk/libs/test/solver.py @@ -1,6 +1,7 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from faebryk.core.graphinterface import Graph, Node +from faebryk.core.graphinterface import Graph +from faebryk.core.node import Node from faebryk.core.parameter import ParameterOperatable from faebryk.core.solver import DefaultSolver from faebryk.libs.sets import PlainSet From 050fb015c12fd7db287421c2142bc6e02d89f8c7 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 22 Oct 2024 14:01:14 +0200 Subject: [PATCH 51/80] fix dB units --- src/faebryk/library/Comparator.py | 4 ++-- src/faebryk/library/LDO.py | 4 ++-- src/faebryk/libs/units.py | 4 ++++ 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/faebryk/library/Comparator.py b/src/faebryk/library/Comparator.py index 780acc41..04ff6d75 100644 --- a/src/faebryk/library/Comparator.py +++ b/src/faebryk/library/Comparator.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P, quantity class Comparator(Module): @@ -18,7 +18,7 @@ class OutputType(Enum): common_mode_rejection_ratio = L.p_field( units=P.dB, likely_constrained=True, - soft_set=L.Range(Quantity(60, P.dB), Quantity(120, P.dB)), + soft_set=L.Range(quantity(60, P.dB), quantity(120, P.dB)), tolerance_guess=10 * P.percent, ) input_bias_current = L.p_field( diff --git a/src/faebryk/library/LDO.py b/src/faebryk/library/LDO.py index c69562cb..ef66096d 100644 --- a/src/faebryk/library/LDO.py +++ b/src/faebryk/library/LDO.py @@ -6,7 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity +from faebryk.libs.units import P, quantity from faebryk.libs.util import assert_once, join_if_non_empty @@ -46,7 +46,7 @@ class OutputPolarity(Enum): psrr = L.p_field( units=P.dB, likely_constrained=True, - soft_set=L.Range(Quantity(1, P.dB), Quantity(100, P.dB)), + soft_set=L.Range(quantity(1, P.dB), quantity(100, P.dB)), ) output_polarity = L.p_field( domain=L.Domains.ENUM(OutputPolarity), diff --git a/src/faebryk/libs/units.py b/src/faebryk/libs/units.py index 8aeabae2..5cfd5d6c 100644 --- a/src/faebryk/libs/units.py +++ b/src/faebryk/libs/units.py @@ -17,6 +17,10 @@ dimensionless = cast_assert(Unit, P.dimensionless) +def quantity(value: float | int, unit: UnitsContainer | Unit | Quantity) -> Quantity: + return P.Quantity(value, unit) + + class HasUnit: units: Unit From 698ac2a4bdb30924bb17ebfc50183a8999680385 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 22 Oct 2024 14:23:12 +0200 Subject: [PATCH 52/80] fix some unit arithmetic --- src/faebryk/core/parameter.py | 70 +++++++++++++++++------------------ src/faebryk/libs/units.py | 4 +- test/core/test_parameters.py | 7 +++- 3 files changed, 43 insertions(+), 38 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 6233e59a..81ae33b2 100644 --- 
a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -22,7 +22,7 @@ # enum: T == S == Enum # number: T == Number type, S == Range[Number] class ParameterOperatable: - type QuantityLike = Quantity | NotImplementedType + type QuantityLike = Quantity | Unit | NotImplementedType type Number = int | float | QuantityLike type NonParamNumber = Number | P_Set[Number] @@ -38,91 +38,91 @@ class ParameterOperatable: operated_on: GraphInterface - def operation_add(self, other: NumberLike) -> "Expression": + def operation_add(self, other: NumberLike): return Add(self, other) - def operation_subtract(self, other: NumberLike) -> "Expression": + def operation_subtract(self: NumberLike, other: NumberLike): return Subtract(minuend=self, subtrahend=other) - def operation_multiply(self, other: NumberLike) -> "Expression": + def operation_multiply(self, other: NumberLike): return Multiply(self, other) - def operation_divide(self: NumberLike, other: NumberLike) -> "Expression": + def operation_divide(self: NumberLike, other: NumberLike): return Divide(numerator=self, denominator=other) - def operation_power(self, other: NumberLike) -> "Expression": + def operation_power(self, other: NumberLike): return Power(base=self, exponent=other) - def operation_log(self) -> "Expression": + def operation_log(self): return Log(self) - def operation_sqrt(self) -> "Expression": + def operation_sqrt(self): return Sqrt(self) - def operation_abs(self) -> "Expression": + def operation_abs(self): return Abs(self) - def operation_floor(self) -> "Expression": + def operation_floor(self): return Floor(self) - def operation_ceil(self) -> "Expression": + def operation_ceil(self): return Ceil(self) - def operation_round(self) -> "Expression": + def operation_round(self): return Round(self) - def operation_sin(self) -> "Expression": + def operation_sin(self): return Sin(self) - def operation_cos(self) -> "Expression": + def operation_cos(self): return Cos(self) - def operation_union(self, other: Sets) -> "Expression": + def operation_union(self, other: Sets): return Union(self, other) - def operation_intersection(self, other: Sets) -> "Expression": + def operation_intersection(self, other: Sets): return Intersection(self, other) - def operation_difference(self, other: Sets) -> "Expression": + def operation_difference(self, other: Sets): return Difference(minuend=self, subtrahend=other) - def operation_symmetric_difference(self, other: Sets) -> "Expression": + def operation_symmetric_difference(self, other: Sets): return SymmetricDifference(self, other) - def operation_and(self, other: BooleanLike) -> "Logic": + def operation_and(self, other: BooleanLike): return And(self, other) - def operation_or(self, other: BooleanLike) -> "Logic": + def operation_or(self, other: BooleanLike): return Or(self, other) - def operation_not(self) -> "Logic": + def operation_not(self): return Not(self) - def operation_xor(self, other: BooleanLike) -> "Logic": + def operation_xor(self, other: BooleanLike): return Xor(left=self, right=other) - def operation_implies(self, other: BooleanLike) -> "Logic": + def operation_implies(self, other: BooleanLike): return Implies(condition=self, implication=other) - def operation_is_le(self, other: NumberLike) -> "NumericPredicate": + def operation_is_le(self, other: NumberLike): return LessOrEqual(left=self, right=other) - def operation_is_ge(self, other: NumberLike) -> "NumericPredicate": + def operation_is_ge(self, other: NumberLike): return GreaterOrEqual(left=self, right=other) - def 
operation_is_lt(self, other: NumberLike) -> "NumericPredicate": + def operation_is_lt(self, other: NumberLike): return LessThan(left=self, right=other) - def operation_is_gt(self, other: NumberLike) -> "NumericPredicate": + def operation_is_gt(self, other: NumberLike): return GreaterThan(left=self, right=other) - def operation_is_ne(self, other: NumberLike) -> "NumericPredicate": + def operation_is_ne(self, other: NumberLike): return NotEqual(left=self, right=other) - def operation_is_subset(self, other: Sets) -> "SeticPredicate": + def operation_is_subset(self, other: Sets): return IsSubset(left=self, right=other) - def operation_is_superset(self, other: Sets) -> "SeticPredicate": + def operation_is_superset(self, other: Sets): return IsSuperset(left=self, right=other) # TODO implement @@ -165,7 +165,7 @@ def __sub__(self, other: NumberLike): return self.operation_subtract(other) def __rsub__(self, other: NumberLike): - return self.operation_subtract(other) + return type(self).operation_subtract(other, self) def __mul__(self, other: NumberLike): return self.operation_multiply(other) @@ -334,8 +334,8 @@ class ConstrainableExpression(Expression, Constrainable): class Arithmetic(ConstrainableExpression, HasUnit): def __init__(self, *operands: ParameterOperatable.NumberLike): super().__init__(*operands) - types = [int, float, Quantity, Parameter, Arithmetic] - if any(type(op) not in types for op in operands): + types = int, float, Quantity, Unit, Parameter, Arithmetic + if any(not isinstance(op, types) for op in operands): raise ValueError( "operands must be int, float, Quantity, Parameter, or Expression" ) @@ -452,8 +452,8 @@ def __init__(self, operand): class Logic(ConstrainableExpression): def __init__(self, *operands): super().__init__(*operands) - types = [bool, Parameter, Logic, Predicate] - if any(type(op) not in types for op in operands): + types = bool, Parameter, Logic, Predicate + if any(not isinstance(op, types) for op in operands): raise ValueError("operands must be bool, Parameter, Logic, or Predicate") if any( param.domain != Boolean or not param.units.is_compatible_with(dimensionless) diff --git a/src/faebryk/libs/units.py b/src/faebryk/libs/units.py index 5cfd5d6c..526e161a 100644 --- a/src/faebryk/libs/units.py +++ b/src/faebryk/libs/units.py @@ -26,10 +26,12 @@ class HasUnit: @staticmethod def check(obj: Any) -> bool: - return hasattr(obj, "units") + return hasattr(obj, "units") or isinstance(obj, Unit) @staticmethod def get_units_or_dimensionless(obj: Any) -> Unit: + if isinstance(obj, Unit): + return obj return obj.units if HasUnit.check(obj) else dimensionless diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index 1e2dac80..b93fbf15 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -30,16 +30,19 @@ def test_visualize(): from faebryk.exporters.visualize.interactive_graph import interactive_graph class App(Node): - p1 = L.f_field(Parameter)(units=P.ohm) + p1 = L.f_field(Parameter)(units=P.V) app = App() - p2 = Parameter(units=P.ohm) + p2 = Parameter(units=P.V) + p3 = Parameter(units=P.A) # app.p1.constrain_ge(p2 * 5) # app.p1.operation_is_ge(p2 * 5).constrain() (app.p1 >= p2 * 5).constrain() + (p2 * p3 + app.p1 * 1 * P.A <= 10 * P.W).constrain() + # pytest.raises(ValueError, bool, app.p1 >= p2 * 5) G = app.get_graph() From 7ed6fa78bf603ad1f62c3878b7ff1ea7b399cfa3 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Tue, 22 Oct 2024 14:32:55 +0200 Subject: [PATCH 53/80] flat param graph --- 
.../exporters/visualize/interactive_graph.py | 17 ++++++++++++++--- test/core/test_parameters.py | 4 ++-- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/src/faebryk/exporters/visualize/interactive_graph.py b/src/faebryk/exporters/visualize/interactive_graph.py index 0868e16b..280bc047 100644 --- a/src/faebryk/exporters/visualize/interactive_graph.py +++ b/src/faebryk/exporters/visualize/interactive_graph.py @@ -63,16 +63,21 @@ def _link(source, target, link: Link): } -def _group(node: Node): +def _group(node: Node, root: bool): try: subtype = find_or(_GROUP_TYPES, lambda t: isinstance(node, t), default=Node) except KeyErrorAmbiguous as e: subtype = e.duplicates[0] + if root: + label = node.get_full_name(types=True) + else: + label = f"{node.get_name(accept_no_parent=True)}\n({typename(node)})" + return { "data": { "id": id(node), - "label": f"{node.get_name(accept_no_parent=True)}\n({typename(node)})", + "label": label, "type": "group", "subtype": typename(subtype), "parent": id(p[0]) if (p := node.get_parent()) else None, @@ -230,10 +235,16 @@ def interactive_subgraph( link_types = {typename(link) for link in links} gif_types = {typename(gif) for gif in gifs} + def node_has_parent_in_graph(node: Node) -> bool: + p = node.get_parent() + if not p: + return False + return p[0] in nodes + elements = ( [_gif(gif) for gif in gifs] + [_link(*edge) for edge in edges] - + [_group(node) for node in nodes] + + [_group(node, root=not node_has_parent_in_graph(node)) for node in nodes] ) # Build stylesheet diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index b93fbf15..abdf6382 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -6,7 +6,7 @@ import pytest from faebryk.core.node import Node -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import Expression, Parameter from faebryk.libs.library import L from faebryk.libs.sets import Range from faebryk.libs.units import P @@ -46,7 +46,7 @@ class App(Node): # pytest.raises(ValueError, bool, app.p1 >= p2 * 5) G = app.get_graph() - interactive_graph(G, height=1400) + interactive_graph(G, height=1400, node_types=(Parameter, Expression)) # TODO remove From 5967776d1e9896f40e19c15414ac5a59852f4a13 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Tue, 22 Oct 2024 13:42:21 +0200 Subject: [PATCH 54/80] s/F.Range/L.Range/ --- examples/iterative_design_nand.py | 2 +- examples/route.py | 2 +- src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py | 2 +- src/faebryk/library/ESP32_C3.py | 2 +- src/faebryk/library/INA228_ReferenceDesign.py | 4 ++-- src/faebryk/library/QWIIC_Connector.py | 2 +- src/faebryk/library/RS485_Bus_Protection.py | 4 ++-- src/faebryk/library/SP3243E_ReferenceDesign.py | 4 ++-- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/examples/iterative_design_nand.py b/examples/iterative_design_nand.py index 5989cca5..951ff40a 100644 --- a/examples/iterative_design_nand.py +++ b/examples/iterative_design_nand.py @@ -122,7 +122,7 @@ def App(): for _, t in app.get_graph().nodes_with_trait(F.ElectricLogic.has_pulls): for pull_resistor in (r for r in t.get_pulls() if r): pull_resistor.resistance.constrain_subset( - F.Range.from_center_rel(100 * P.kohm, 0.05) + L.Range.from_center_rel(100 * P.kohm, 0.05) ) power_source.power.voltage.constrain_subset(L.Range.from_center_rel(3 * P.V, 0.05)) led.led.led.brightness.constrain_subset( diff --git a/examples/route.py b/examples/route.py index 47c67516..410ad0b8 
100644 --- a/examples/route.py +++ b/examples/route.py @@ -34,7 +34,7 @@ def __init__(self, extrude_y: float): def __preinit__(self): for resistor in self.resistors: resistor.resistance.constrain_subset( - F.Range.from_center_rel(1000 * P.ohm, 0.05) + L.Range.from_center_rel(1000 * P.ohm, 0.05) ) resistor.unnamed[0].connect(self.unnamed[0]) resistor.unnamed[1].connect(self.unnamed[1]) diff --git a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py index 590d51c3..ca737cce 100644 --- a/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py +++ b/src/faebryk/library/Diodes_Incorporated_AP2552W6_7.py @@ -42,7 +42,7 @@ def set_current_limit(self, current: ParameterOperatable.NumberLike) -> None: # Rlim = Range(Rlim_min, Rlim_max) # Rlim = F.Constant(51 * P.kohm) # TODO: remove: ~0.52A typical current limit - # if not Rlim.is_subset_of(F.Range(10 * P.kohm, 210 * P.kohm)): + # if not Rlim.is_subset_of(L.Range(10 * P.kohm, 210 * P.kohm)): # raise ModuleException( # self, # f"Rlim must be in the range 10kOhm to 210kOhm but is {Rlim.get_most_narrow()}", # noqa: E501 diff --git a/src/faebryk/library/ESP32_C3.py b/src/faebryk/library/ESP32_C3.py index 6eff224e..8078c7cf 100644 --- a/src/faebryk/library/ESP32_C3.py +++ b/src/faebryk/library/ESP32_C3.py @@ -174,7 +174,7 @@ def __preinit__(self): # ] # ] # + [ - # F.Range(10 * P.khertz, 800 * P.khertz) + # L.Range(10 * P.khertz, 800 * P.khertz) # ], # TODO: should be range 200k-800k, but breaks parameter merge # ) # ) diff --git a/src/faebryk/library/INA228_ReferenceDesign.py b/src/faebryk/library/INA228_ReferenceDesign.py index 84794368..a6a88ea5 100644 --- a/src/faebryk/library/INA228_ReferenceDesign.py +++ b/src/faebryk/library/INA228_ReferenceDesign.py @@ -48,8 +48,8 @@ def __preinit__(self): # filter_cap = self.add(F.Capacitor()) # filter_resistors = L.list_field(2, F.Resistor) # - # filter_cap.capacitance.merge(F.Range.from_center_rel(0.1 * P.uF, 0.01)) - # filter_cap.max_voltage.merge(F.Range.from_center_rel(170 * P.V, 0.01) + # filter_cap.capacitance.merge(L.Range.from_center_rel(0.1 * P.uF, 0.01)) + # filter_cap.max_voltage.merge(L.Range.from_center_rel(170 * P.V, 0.01) # for res in filter_resistors: # res.resistance.merge(10 * P.kohm) # TODO: auto calculate, see: https://www.ti.com/lit/ug/tidu473/tidu473.pdf diff --git a/src/faebryk/library/QWIIC_Connector.py b/src/faebryk/library/QWIIC_Connector.py index 85904157..f6721958 100644 --- a/src/faebryk/library/QWIIC_Connector.py +++ b/src/faebryk/library/QWIIC_Connector.py @@ -61,4 +61,4 @@ def can_attach_to_footprint(self): def __preinit__(self): self.power.voltage.constrain_subset(L.Range.from_center(3.3 * P.V, 0.3 * P.V)) - # self.power.max_current.merge(F.Range.from_center_rel(226 * P.mA, 0.05)) + # self.power.max_current.merge(L.Range.from_center_rel(226 * P.mA, 0.05)) diff --git a/src/faebryk/library/RS485_Bus_Protection.py b/src/faebryk/library/RS485_Bus_Protection.py index bf998d04..9552c994 100644 --- a/src/faebryk/library/RS485_Bus_Protection.py +++ b/src/faebryk/library/RS485_Bus_Protection.py @@ -49,8 +49,8 @@ def __preinit__(self): self.tvs.reverse_working_voltage.constrain_subset( L.Range.from_center_rel(8.5 * P.V, 0.05) ) - # self.tvs.max_current.merge(F.Range.from_center_rel(41.7*P.A, 0.05)) - # self.tvs.forward_voltage.merge(F.Range(9.44*P.V, 10.40*P.V)) + # self.tvs.max_current.merge(L.Range.from_center_rel(41.7*P.A, 0.05)) + # self.tvs.forward_voltage.merge(L.Range(9.44*P.V, 10.40*P.V)) for diode in self.clamping_diodes: 
diode.forward_voltage.constrain_subset( diff --git a/src/faebryk/library/SP3243E_ReferenceDesign.py b/src/faebryk/library/SP3243E_ReferenceDesign.py index a242ac9e..2b0342e3 100644 --- a/src/faebryk/library/SP3243E_ReferenceDesign.py +++ b/src/faebryk/library/SP3243E_ReferenceDesign.py @@ -48,7 +48,7 @@ def __preinit__(self): # if isinstance(pwr.voltage.get_most_narrow(), F.TBD): # pwr.voltage.merge( - # F.Constant(8 * P.V) - # # F.Range.lower_bound(16 * P.V) + # L.Single(8 * P.V) + # # L.Range.lower_bound(16 * P.V) # ) # TODO: fix merge # # TODO: merge conflict From a0b6d0e8928cd6eac5ed63e9db2dcf0209ae48d2 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Wed, 23 Oct 2024 11:42:00 +0200 Subject: [PATCH 55/80] fix e_series with new sets --- src/faebryk/libs/e_series.py | 1107 ++++++++++++++++----------------- src/faebryk/libs/library/L.py | 9 +- src/faebryk/libs/sets.py | 308 ++++++--- test/libs/test_e_series.py | 107 ++-- test/libs/test_sets.py | 42 +- 5 files changed, 828 insertions(+), 745 deletions(-) diff --git a/src/faebryk/libs/e_series.py b/src/faebryk/libs/e_series.py index ccadcaf8..87168545 100644 --- a/src/faebryk/libs/e_series.py +++ b/src/faebryk/libs/e_series.py @@ -1,604 +1,563 @@ -from faebryk.libs.picker.jlcpcb.jlcpcb import _RaiseType - - -class E_SERIES_VALUES(_RaiseType): ... - - -def e_series_intersect(*args, **kwargs): - raise NotImplementedError() - - -def e_series_ratio(*args, **kwargs): - raise NotImplementedError() - - -def e_series_discretize_to_nearest(*args, **kwargs): - raise NotImplementedError() - - -# TODO -if False: - import copy - import logging - import math - from math import ceil, floor, log10 - from typing import Tuple - - import faebryk.library._F as F - from faebryk.core.parameter import Parameter - from faebryk.libs.units import Quantity - - logger = logging.getLogger(__name__) - - E_SERIES = set[float] - - class E_SERIES_VALUES: - E192 = { - 1.00, - 1.01, - 1.02, - 1.04, - 1.05, - 1.06, - 1.07, - 1.09, - 1.10, - 1.11, - 1.13, - 1.14, - 1.15, - 1.17, - 1.18, - 1.20, - 1.21, - 1.23, - 1.24, - 1.26, - 1.27, - 1.29, - 1.30, - 1.32, - 1.33, - 1.35, - 1.37, - 1.38, - 1.40, - 1.42, - 1.43, - 1.45, - 1.47, - 1.49, - 1.50, - 1.52, - 1.54, - 1.56, - 1.58, - 1.60, - 1.62, - 1.64, - 1.65, - 1.67, - 1.69, - 1.72, - 1.74, - 1.76, - 1.78, - 1.80, - 1.82, - 1.84, - 1.87, - 1.89, - 1.91, - 1.93, - 1.96, - 1.98, - 2.00, - 2.03, - 2.05, - 2.08, - 2.10, - 2.13, - 2.15, - 2.18, - 2.21, - 2.23, - 2.26, - 2.29, - 2.32, - 2.34, - 2.37, - 2.40, - 2.43, - 2.46, - 2.49, - 2.52, - 2.55, - 2.58, - 2.61, - 2.64, - 2.67, - 2.71, - 2.74, - 2.77, - 2.80, - 2.84, - 2.87, - 2.91, - 2.94, - 2.98, - 3.01, - 3.05, - 3.09, - 3.12, - 3.16, - 3.20, - 3.24, - 3.28, - 3.32, - 3.36, - 3.40, - 3.44, - 3.48, - 3.52, - 3.57, - 3.61, - 3.65, - 3.70, - 3.74, - 3.79, - 3.83, - 3.88, - 3.92, - 3.97, - 4.02, - 4.07, - 4.12, - 4.17, - 4.22, - 4.27, - 4.32, - 4.37, - 4.42, - 4.48, - 4.53, - 4.59, - 4.64, - 4.70, - 4.75, - 4.81, - 4.87, - 4.93, - 4.99, - 5.05, - 5.11, - 5.17, - 5.23, - 5.30, - 5.36, - 5.42, - 5.49, - 5.56, - 5.62, - 5.69, - 5.76, - 5.83, - 5.90, - 5.97, - 6.04, - 6.12, - 6.19, - 6.26, - 6.34, - 6.42, - 6.49, - 6.57, - 6.65, - 6.73, - 6.81, - 6.90, - 6.98, - 7.06, - 7.15, - 7.23, - 7.32, - 7.41, - 7.50, - 7.59, - 7.68, - 7.77, - 7.87, - 7.96, - 8.06, - 8.16, - 8.25, - 8.35, - 8.45, - 8.56, - 8.66, - 8.76, - 8.87, - 8.98, - 9.09, - 9.20, - 9.31, - 9.42, - 9.53, - 9.65, - 9.76, - 9.88, - } - - E96 = { - 1.00, - 1.02, - 1.05, - 1.07, - 1.10, - 
1.13, - 1.15, - 1.18, - 1.21, - 1.24, - 1.27, - 1.30, - 1.33, - 1.37, - 1.40, - 1.43, - 1.47, - 1.50, - 1.54, - 1.58, - 1.62, - 1.65, - 1.69, - 1.74, - 1.78, - 1.82, - 1.87, - 1.91, - 1.96, - 2.00, - 2.05, - 2.10, - 2.15, - 2.21, - 2.26, - 2.32, - 2.37, - 2.43, - 2.49, - 2.55, - 2.61, - 2.67, - 2.74, - 2.80, - 2.87, - 2.94, - 3.01, - 3.09, - 3.16, - 3.24, - 3.32, - 3.40, - 3.48, - 3.57, - 3.65, - 3.74, - 3.83, - 3.92, - 4.02, - 4.12, - 4.22, - 4.32, - 4.42, - 4.53, - 4.64, - 4.75, - 4.87, - 4.99, - 5.11, - 5.23, - 5.36, - 5.49, - 5.62, - 5.76, - 5.90, - 6.04, - 6.19, - 6.34, - 6.49, - 6.65, - 6.81, - 6.98, - 7.15, - 7.32, - 7.50, - 7.68, - 7.87, - 8.06, - 8.25, - 8.45, - 8.66, - 8.87, - 9.09, - 9.31, - 9.53, - 9.76, - } - - E48 = { - 1.00, - 1.05, - 1.10, - 1.15, - 1.21, - 1.27, - 1.33, - 1.40, - 1.47, - 1.54, - 1.62, - 1.69, - 1.78, - 1.87, - 1.96, - 2.05, - 2.15, - 2.26, - 2.37, - 2.49, - 2.61, - 2.74, - 2.87, - 3.01, - 3.16, - 3.32, - 3.48, - 3.65, - 3.83, - 4.02, - 4.22, - 4.42, - 4.64, - 4.87, - 5.11, - 5.36, - 5.62, - 5.90, - 6.19, - 6.49, - 6.81, - 7.15, - 7.50, - 7.87, - 8.25, - 8.66, - 9.09, - 9.53, - } - - E24 = { - 1.0, - 1.1, - 1.2, - 1.3, - 1.5, - 1.6, - 1.8, - 2.0, - 2.2, - 2.4, - 2.7, - 3.0, - 3.3, - 3.6, - 3.9, - 4.3, - 4.7, - 5.1, - 5.6, - 6.2, - 6.8, - 7.5, - 8.2, - 9.1, - } - - E12 = { - 1.0, - 1.2, - 1.5, - 1.8, - 2.2, - 2.7, - 3.3, - 3.9, - 4.7, - 5.6, - 6.8, - 8.2, - } - - E6 = { - 1.0, - 1.5, - 2.2, - 3.3, - 4.7, - 6.8, - } - - E3 = { - 1.0, - 2.2, - 4.7, - } - - E_ALL = set(sorted(E24 | E192)) - - def repeat_set_over_base( - values: set[float], base: int, exp_range: range, n_decimals: int = 13 - ) -> set[float]: - assert all(v >= 1 and v < base for v in values) - return set( - [round(val * base**exp, n_decimals) for val in values for exp in exp_range] +from collections.abc import Iterator, Sequence +import copy +import logging +import math +from math import ceil, floor, log10 +from typing import Tuple, TypeVar, cast + +import faebryk.library._F as F +from faebryk.core.parameter import Parameter +from faebryk.libs.library import L +from faebryk.libs.sets import Range, Ranges +from faebryk.libs.units import Quantity, Unit, dimensionless + +logger = logging.getLogger(__name__) + +E_SERIES = set[float] + + +class E_SERIES_VALUES: + E192 = { + 1.00, + 1.01, + 1.02, + 1.04, + 1.05, + 1.06, + 1.07, + 1.09, + 1.10, + 1.11, + 1.13, + 1.14, + 1.15, + 1.17, + 1.18, + 1.20, + 1.21, + 1.23, + 1.24, + 1.26, + 1.27, + 1.29, + 1.30, + 1.32, + 1.33, + 1.35, + 1.37, + 1.38, + 1.40, + 1.42, + 1.43, + 1.45, + 1.47, + 1.49, + 1.50, + 1.52, + 1.54, + 1.56, + 1.58, + 1.60, + 1.62, + 1.64, + 1.65, + 1.67, + 1.69, + 1.72, + 1.74, + 1.76, + 1.78, + 1.80, + 1.82, + 1.84, + 1.87, + 1.89, + 1.91, + 1.93, + 1.96, + 1.98, + 2.00, + 2.03, + 2.05, + 2.08, + 2.10, + 2.13, + 2.15, + 2.18, + 2.21, + 2.23, + 2.26, + 2.29, + 2.32, + 2.34, + 2.37, + 2.40, + 2.43, + 2.46, + 2.49, + 2.52, + 2.55, + 2.58, + 2.61, + 2.64, + 2.67, + 2.71, + 2.74, + 2.77, + 2.80, + 2.84, + 2.87, + 2.91, + 2.94, + 2.98, + 3.01, + 3.05, + 3.09, + 3.12, + 3.16, + 3.20, + 3.24, + 3.28, + 3.32, + 3.36, + 3.40, + 3.44, + 3.48, + 3.52, + 3.57, + 3.61, + 3.65, + 3.70, + 3.74, + 3.79, + 3.83, + 3.88, + 3.92, + 3.97, + 4.02, + 4.07, + 4.12, + 4.17, + 4.22, + 4.27, + 4.32, + 4.37, + 4.42, + 4.48, + 4.53, + 4.59, + 4.64, + 4.70, + 4.75, + 4.81, + 4.87, + 4.93, + 4.99, + 5.05, + 5.11, + 5.17, + 5.23, + 5.30, + 5.36, + 5.42, + 5.49, + 5.56, + 5.62, + 5.69, + 5.76, + 5.83, + 5.90, + 5.97, + 6.04, + 6.12, + 6.19, + 6.26, + 6.34, + 6.42, + 
6.49, + 6.57, + 6.65, + 6.73, + 6.81, + 6.90, + 6.98, + 7.06, + 7.15, + 7.23, + 7.32, + 7.41, + 7.50, + 7.59, + 7.68, + 7.77, + 7.87, + 7.96, + 8.06, + 8.16, + 8.25, + 8.35, + 8.45, + 8.56, + 8.66, + 8.76, + 8.87, + 8.98, + 9.09, + 9.20, + 9.31, + 9.42, + 9.53, + 9.65, + 9.76, + 9.88, + } + + E96 = { + 1.00, + 1.02, + 1.05, + 1.07, + 1.10, + 1.13, + 1.15, + 1.18, + 1.21, + 1.24, + 1.27, + 1.30, + 1.33, + 1.37, + 1.40, + 1.43, + 1.47, + 1.50, + 1.54, + 1.58, + 1.62, + 1.65, + 1.69, + 1.74, + 1.78, + 1.82, + 1.87, + 1.91, + 1.96, + 2.00, + 2.05, + 2.10, + 2.15, + 2.21, + 2.26, + 2.32, + 2.37, + 2.43, + 2.49, + 2.55, + 2.61, + 2.67, + 2.74, + 2.80, + 2.87, + 2.94, + 3.01, + 3.09, + 3.16, + 3.24, + 3.32, + 3.40, + 3.48, + 3.57, + 3.65, + 3.74, + 3.83, + 3.92, + 4.02, + 4.12, + 4.22, + 4.32, + 4.42, + 4.53, + 4.64, + 4.75, + 4.87, + 4.99, + 5.11, + 5.23, + 5.36, + 5.49, + 5.62, + 5.76, + 5.90, + 6.04, + 6.19, + 6.34, + 6.49, + 6.65, + 6.81, + 6.98, + 7.15, + 7.32, + 7.50, + 7.68, + 7.87, + 8.06, + 8.25, + 8.45, + 8.66, + 8.87, + 9.09, + 9.31, + 9.53, + 9.76, + } + + E48 = { + 1.00, + 1.05, + 1.10, + 1.15, + 1.21, + 1.27, + 1.33, + 1.40, + 1.47, + 1.54, + 1.62, + 1.69, + 1.78, + 1.87, + 1.96, + 2.05, + 2.15, + 2.26, + 2.37, + 2.49, + 2.61, + 2.74, + 2.87, + 3.01, + 3.16, + 3.32, + 3.48, + 3.65, + 3.83, + 4.02, + 4.22, + 4.42, + 4.64, + 4.87, + 5.11, + 5.36, + 5.62, + 5.90, + 6.19, + 6.49, + 6.81, + 7.15, + 7.50, + 7.87, + 8.25, + 8.66, + 9.09, + 9.53, + } + + E24 = { + 1.0, + 1.1, + 1.2, + 1.3, + 1.5, + 1.6, + 1.8, + 2.0, + 2.2, + 2.4, + 2.7, + 3.0, + 3.3, + 3.6, + 3.9, + 4.3, + 4.7, + 5.1, + 5.6, + 6.2, + 6.8, + 7.5, + 8.2, + 9.1, + } + + E12 = { + 1.0, + 1.2, + 1.5, + 1.8, + 2.2, + 2.7, + 3.3, + 3.9, + 4.7, + 5.6, + 6.8, + 8.2, + } + + E6 = { + 1.0, + 1.5, + 2.2, + 3.3, + 4.7, + 6.8, + } + + E3 = { + 1.0, + 2.2, + 4.7, + } + + E_ALL = set(sorted(E24 | E192)) + + +QuantityT = TypeVar("QuantityT", int, float, Quantity) + + +def repeat_set_over_base( + values: set[float], + base: int, + exp_range: Sequence[int], + unit: Unit, + n_decimals: int = 13, +) -> L.Singles[QuantityT]: + assert all(v >= 1 and v < base for v in values) + return L.Singles[QuantityT]( + *( + round(val * base**exp, n_decimals) * unit + for val in values + for exp in exp_range ) + ) - class ParamNotResolvedError(Exception): ... - _e_series_cache: list[tuple[Parameter, int, set]] = [] +class ParamNotResolvedError(Exception): ... 
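
For orientation, a minimal sketch of how the new repeat_set_over_base helper expands a normalized E series across decades. This is illustrative only and not part of the patch; it assumes the module paths shown in the diff headers and that P.ohm is available from faebryk.libs.units, as it is elsewhere in the tree.

    from faebryk.libs.e_series import E_SERIES_VALUES, repeat_set_over_base
    from faebryk.libs.units import P

    # E3 = {1.0, 2.2, 4.7}; repeating it over 10**0 .. 10**2 gives the nine values
    # 1, 2.2, 4.7, 10, 22, 47, 100, 220 and 470 ohm as a Singles set of quantities.
    decade_values = repeat_set_over_base(
        values=E_SERIES_VALUES.E3,
        base=10,
        exp_range=range(0, 3),
        unit=P.ohm,
    )
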
- def e_series_intersect[T: float | Quantity]( - value: Parameter[T], e_series: E_SERIES = E_SERIES_VALUES.E_ALL - ) -> F.Set[T]: - # TODO this got really uglu, need to clean up - value = value.get_most_narrow() +_e_series_cache: dict[tuple[Ranges, int], L.Ranges] = {} - for k, i, v in _e_series_cache: - if k == value and i == id(e_series): - return F.Set(v) - if isinstance(value, F.Constant): - value = F.Range(value) - elif isinstance(value, F.Set): - raise NotImplementedError - elif isinstance(value, (F.Operation, F.TBD)): - raise ParamNotResolvedError() - elif isinstance(value, F.ANY): - # TODO - raise ParamNotResolvedError() +def e_series_intersect( + value_set: Range[QuantityT] | Ranges[QuantityT], + e_series: E_SERIES = E_SERIES_VALUES.E_ALL, +) -> L.Ranges[QuantityT]: + if isinstance(value_set, Range): + value_set = Ranges(value_set) - assert isinstance(value, F.Range) + if (value_set, id(e_series)) in _e_series_cache: + return _e_series_cache[(value_set, id(e_series))] - min_val = value.min - max_val = value.max - unit = 1 + if ( + value_set.is_empty() + or value_set.min_elem() < 0 + or value_set.max_elem() == float("inf") + ): + raise ValueError("Need positive finite set") - if not isinstance(min_val, F.Constant) or not isinstance(max_val, F.Constant): - # TODO - raise Exception() + out = L.Empty(value_set.units) - min_val = min_val.value - max_val = max_val.value + for sub_range in value_set: + min_val_q = sub_range.min_elem().to_compact() + max_val_q = sub_range.max_elem().to(min_val_q.units) - if isinstance(min_val, Quantity): - assert isinstance(max_val, Quantity) + min_val = min_val_q.magnitude + max_val = max_val_q.magnitude - min_val_q = min_val.to_compact() + e_series_values = repeat_set_over_base( + values=e_series, + base=10, + exp_range=range(floor(log10(min_val)), ceil(log10(max_val)) + 1), + unit=min_val_q.units, + ) + out = out.op_union_ranges(e_series_values.op_intersect_range(sub_range)) + _e_series_cache[(value_set, id(e_series))] = out + return out - unit = min_val_q.units - max_val_q = max_val.to(unit) - assert max_val_q.units == unit - min_val: float = min_val_q.magnitude - max_val: float = max_val_q.magnitude +def e_series_discretize_to_nearest( + value: Range[Quantity], e_series: E_SERIES = E_SERIES_VALUES.E_ALL +) -> Quantity: + target = cast(Quantity, (value.min_elem() + value.max_elem())) / 2 - assert isinstance(min_val, (float, int)) and isinstance(max_val, (float, int)) + e_series_values = repeat_set_over_base( + e_series, 10, range(floor(log10(target)), ceil(log10(target)) + 1), target.units + ) - # TODO ugly - if max_val == math.inf: - max_val = min_val * 10e3 + return min(e_series_values, key=lambda x: abs(x - target)) - e_series_values = repeat_set_over_base( - e_series, 10, range(floor(log10(min_val)), ceil(log10(max_val)) + 1) - ) - out = value & {e * unit for e in e_series_values} - _e_series_cache.append((copy.copy(value), id(e_series), out.params)) - return out - - def e_series_discretize_to_nearest( - value: Parameter, e_series: E_SERIES = E_SERIES_VALUES.E_ALL - ) -> F.Constant: - if not isinstance(value, (F.Constant, F.Range)): - raise NotImplementedError - - target = ( - value.value if isinstance(value, F.Constant) else sum(value.as_tuple()) / 2 - ) - e_series_values = repeat_set_over_base( - e_series, 10, range(floor(log10(target)), ceil(log10(target)) + 1) - ) +def e_series_ratio( + RH: Range[float], + RL: Range[float], + output_input_ratio: Range[float], + e_values: E_SERIES = E_SERIES_VALUES.E_ALL, +) -> Tuple[float, float]: + """ + 
Calculates the values for two components in the E series range which are bound by a + ratio. - return F.Constant(min(e_series_values, key=lambda x: abs(x - target))) - - def e_series_ratio( - RH: Parameter, - RL: Parameter, - output_input_ratio: Parameter, - e_values: E_SERIES = E_SERIES_VALUES.E_ALL, - ) -> Tuple[float, float]: - """ - Calculates the values for two components in the E series range which are bound by a - ratio. - - RH and RL define the contstraints for the components, and output_input_ratio is the - output/input voltage ratio as defined below. - RH and output_input_ratio must be constrained to a range or constant, but RL can be - ANY. - - output_input_ratio = RL/(RH + RL) - RL/oir = RH + RL - RL * (1/oir -1) = RH - RL = RH / (1/oir -1) - - Returns a tuple of RH/RL values. - - Can be used for a resistive divider. - """ - - if ( - not isinstance(RH, (F.Constant, F.Range)) - or not isinstance(RL, (F.Constant, F.Range, F.ANY)) - or not isinstance(output_input_ratio, (F.Constant, F.Range)) - ): - raise NotImplementedError - - if not output_input_ratio.is_subset_of(F.Range(0, 1)): - raise ValueError("Invalid output/input voltage ratio") - - rh = F.Range(RH.value, RH.value) if isinstance(RH, F.Constant) else RH - rl = F.Range(RL.value, RL.value) if isinstance(RL, F.Constant) else RL - oir = ( - F.Range(output_input_ratio.value, output_input_ratio.value) - if isinstance(output_input_ratio, F.Constant) - else output_input_ratio - ) + RH and RL define the contstraints for the components, and output_input_ratio is the + output/input voltage ratio as defined below. - rh_values = e_series_intersect(rh, e_values) - rl_values = ( - e_series_intersect(rl, e_values) if isinstance(rl, F.Range) else None - ) + output_input_ratio = RL/(RH + RL) + RL/oir = RH + RL + RL * (1/oir -1) = RH + RL = RH / (1/oir -1) - target_ratio = oir.as_center_tuple()[0] + Returns a tuple of RH/RL values. - solutions = [] + Can be used for a resistive divider. + """ - for rh_val in rh_values.params: - rl_ideal = rh_val / (F.Constant(1) / target_ratio - 1) + rh_factor = output_input_ratio.op_invert().op_subtract_ranges( + L.Singles(1.0 * dimensionless) + ) - rl_nearest_e_val = ( - min(rl_values.params, key=lambda x: abs(x - rl_ideal)) - if rl_values - else e_series_discretize_to_nearest(rl_ideal, e_values) - ) - real_ratio = rl_nearest_e_val / (rh_val + rl_nearest_e_val) + rh = Ranges(RH).op_intersect_ranges(rh_factor.op_mul_ranges(Ranges(RL))) + rh_e = e_series_intersect(rh, e_values) + rl = Ranges(RL).op_intersect_ranges( + rh_factor.op_invert().op_mul_ranges(Ranges(rh_e)) + ) + rl_e = e_series_intersect(rl, e_values) - solutions.append((real_ratio, (rh_val, rl_nearest_e_val))) + target_ratio = ( + cast(Quantity, (output_input_ratio.min_elem() + output_input_ratio.max_elem())) + / 2 + ) - optimum = min(solutions, key=lambda x: abs(x[0] - target_ratio)) + solutions = [] - logger.debug( - f"{target_ratio=}, {optimum[0]=}, {oir}, " - f"error: {abs(optimum[0]/ target_ratio - 1)*100:.4f}%" - ) + for rh_range in rh_e: + rh_val = rh_range.min_elem() + rl_ideal = rh_val / (1 / target_ratio - 1) + + rl_nearest_e_val = rl_e.closest_elem(rl_ideal) + real_ratio = rl_nearest_e_val / (rh_val + rl_nearest_e_val) + + solutions.append((real_ratio, (float(rh_val), float(rl_nearest_e_val)))) - if optimum[0] not in oir: - raise ArithmeticError( - "Calculated optimum RH RL value pair gives output/input voltage ratio " - "outside of specified range. 
Consider relaxing the constraints" - ) + optimum = min(solutions, key=lambda x: abs(x[0] - target_ratio)) + + logger.debug( + f"{target_ratio=}, {optimum[0]=}, {output_input_ratio}, " + f"error: {abs(optimum[0]/ target_ratio - 1)*100:.4f}%" + ) + + if optimum[0] not in output_input_ratio: + raise ArithmeticError( + "Calculated optimum RH RL value pair gives output/input voltage ratio " + "outside of specified range. Consider relaxing the constraints" + ) - return optimum[1] + return optimum[1] diff --git a/src/faebryk/libs/library/L.py b/src/faebryk/libs/library/L.py index 7593f69f..92bf5367 100644 --- a/src/faebryk/libs/library/L.py +++ b/src/faebryk/libs/library/L.py @@ -15,7 +15,14 @@ ) from faebryk.core.parameter import R, p_field # noqa: F401 from faebryk.core.reference import reference # noqa: F401 -from faebryk.libs.sets import PlainSet, Range, Single # noqa: F401 +from faebryk.libs.sets import ( # noqa: F401 + Empty, + PlainSet, + Range, + Ranges, + Single, + Singles, +) class AbstractclassError(Exception): ... diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index ac1cfda7..db75f2f8 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -1,8 +1,9 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from collections.abc import Generator, Iterable -from typing import Any, Generic, Protocol, TypeVar, cast +from bisect import bisect +from collections.abc import Generator, Iterable, Iterator +from typing import Any, Protocol, Type, TypeVar, cast from faebryk.libs.units import HasUnit, Quantity, Unit, dimensionless @@ -15,10 +16,16 @@ def is_empty(self) -> bool: ... def __contains__(self, item: T) -> bool: ... +class P_IterableSet[T, IterT](P_Set[T], Iterable[IterT], Protocol): ... + + class P_UnitSet[T](P_Set[T], Protocol): units: Unit +class P_IterableUnitSet[T, IterT](P_UnitSet[T], Iterable[IterT], Protocol): ... 
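
The divider algebra in the e_series_ratio docstring above is easy to sanity-check numerically. A small illustration with arbitrarily chosen values (not taken from the patch):

    # output_input_ratio (oir) = RL / (RH + RL), so RL = RH / (1/oir - 1).
    # Picking a target oir of 0.25 and RH = 30 kOhm:
    oir = 0.25
    RH = 30_000.0
    RL = RH / (1 / oir - 1)          # 30 kOhm / 3 = 10 kOhm
    assert abs(RL / (RH + RL) - oir) < 1e-12
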
+ + # -------------------------------------------------------------------------------------- # Types -------------------------------------------------------------------------------- @@ -42,7 +49,7 @@ def base_units(units: Unit) -> Unit: # Generic ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -class PlainSet[U](P_Set[U]): +class PlainSet[U](P_IterableSet[U, U]): def __init__(self, *elements: U): self.elements = set(elements) @@ -63,6 +70,9 @@ def __hash__(self) -> int: def __repr__(self) -> str: return f"PlainSet({', '.join(repr(e) for e in self.elements)})" + def __iter__(self) -> Iterator[U]: + return self.elements.__iter__() + # Numeric ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -71,20 +81,25 @@ class _N_Range(P_Set[NumericT]): def __init__(self, min: NumericT, max: NumericT): if not min <= max: raise ValueError("min must be less than or equal to max") - self.min = min - self.max = max + if min == float("inf") or max == float("-inf"): + raise ValueError("min or max has bad infinite value") + self._min = min + self._max = max def is_empty(self) -> bool: return False def min_elem(self) -> NumericT: - return self.min + return self._min + + def max_elem(self) -> NumericT: + return self._max def op_add_range(self, other: "_N_Range[NumericT]") -> "_N_Range[NumericT]": - return _N_Range(self.min + other.min, self.max + other.max) + return _N_Range(self._min + other._min, self._max + other._max) def op_negate(self) -> "_N_Range[NumericT]": - return _N_Range(-self.max, -self.min) + return _N_Range(-self._max, -self._min) def op_subtract_range(self, other: "_N_Range[NumericT]") -> "_N_Range[NumericT]": return self.op_add_range(other.op_negate()) @@ -92,33 +107,33 @@ def op_subtract_range(self, other: "_N_Range[NumericT]") -> "_N_Range[NumericT]" def op_mul_range(self, other: "_N_Range[NumericT]") -> "_N_Range[NumericT]": return _N_Range( min( - self.min * other.min, - self.min * other.max, - self.max * other.min, - self.max * other.max, + self._min * other._min, + self._min * other._max, + self._max * other._min, + self._max * other._max, ), max( - self.min * other.min, - self.min * other.max, - self.max * other.min, - self.max * other.max, + self._min * other._min, + self._min * other._max, + self._max * other._min, + self._max * other._max, ), ) def op_invert(self) -> "_N_Ranges[float]": - if self.min == 0 == self.max: + if self._min == 0 == self._max: return _N_Empty() - if self.min < 0 < self.max: + if self._min < 0 < self._max: return _N_Ranges( - _N_Range(float("-inf"), 1 / self.min), - _N_Range(1 / self.max, float("inf")), + _N_Range(float("-inf"), 1 / self._min), + _N_Range(1 / self._max, float("inf")), ) - elif self.min < 0 == self.max: - return _N_Ranges(_N_Range(float("-inf"), 1 / self.min)) - elif self.min == 0 < self.max: - return _N_Ranges(_N_Range(1 / self.max, float("inf"))) + elif self._min < 0 == self._max: + return _N_Ranges(_N_Range(float("-inf"), 1 / self._min)) + elif self._min == 0 < self._max: + return _N_Ranges(_N_Range(1 / self._max, float("inf"))) else: - return _N_Ranges(_N_Range(1 / self.max, 1 / self.min)) + return _N_Ranges(_N_Range(1 / self._max, 1 / self._min)) def op_div_range( self: "_N_Range[float]", other: "_N_Range[float]" @@ -126,8 +141,8 @@ def op_div_range( return _N_Ranges(*(self.op_mul_range(o) for o in other.op_invert().ranges)) def op_intersect_range(self, other: "_N_Range[NumericT]") -> "_N_Ranges[NumericT]": - min_ = max(self.min, other.min) - max_ = min(self.max, other.max) + 
min_ = max(self._min, other._min) + max_ = min(self._max, other._max) if min_ <= max_: return _N_Ranges(_N_Range(min_, max_)) return _N_Empty() @@ -135,39 +150,39 @@ def op_intersect_range(self, other: "_N_Range[NumericT]") -> "_N_Ranges[NumericT def maybe_merge_range( self, other: "_N_Range[NumericT]" ) -> list["_N_Range[NumericT]"]: - is_left = self.min <= other.min + is_left = self._min <= other._min left = self if is_left else other right = other if is_left else self - if right.min in self: - return [_N_Range(left.min, max(left.max, right.max))] + if right._min in self: + return [_N_Range(left._min, max(left._max, right._max))] return [left, right] def __eq__(self, other: Any) -> bool: if not isinstance(other, _N_Range): return False - return self.min == other.min and self.max == other.max + return self._min == other._min and self._max == other._max def __contains__(self, item: NumericT) -> bool: - return self.min <= item <= self.max + return self._min <= item <= self._max def __hash__(self) -> int: - return hash((self.min, self.max)) + return hash((self._min, self._max)) def __repr__(self) -> str: - return f"_Range({self.min}, {self.max})" + return f"_Range({self._min}, {self._max})" def _N_Single(value: NumericT) -> _N_Range[NumericT]: return _N_Range(value, value) -class _N_Ranges(P_Set[NumericT]): - def __init__(self, *ranges: _N_Range[NumericT] | "_N_Ranges[NumericT]"): +class _N_NonIterableRanges(P_Set[NumericT]): + def __init__(self, *ranges: _N_Range[NumericT] | "_N_NonIterableRanges[NumericT]"): def gen_flat_non_empty() -> Generator[_N_Range[NumericT]]: for r in ranges: if r.is_empty(): continue - if isinstance(r, _N_Ranges): + if isinstance(r, _N_NonIterableRanges): yield from r.ranges else: assert isinstance(r, _N_Range) @@ -197,33 +212,82 @@ def min_elem(self) -> NumericT: raise ValueError("empty range cannot have min element") return self.ranges[0].min_elem() - def op_add_ranges(self, other: "_N_Ranges[NumericT]") -> "_N_Ranges[NumericT]": - return _N_Ranges( + def max_elem(self) -> NumericT: + if self.is_empty(): + raise ValueError("empty range cannot have max element") + return self.ranges[-1].max_elem() + + def closest_elem(self, target: NumericT) -> NumericT: + if self.is_empty(): + raise ValueError("empty range cannot have closest element") + index = bisect(self.ranges, target, key=lambda r: r.min_elem()) + left = self.ranges[index - 1] if index > 0 else None + if left and target in left: + return target + left_bound = left.max_elem() if left else None + right_bound = ( + self.ranges[index].min_elem() if index < len(self.ranges) else None + ) + try: + [one] = [b for b in [left_bound, right_bound] if b is not None] + return one + except ValueError: + assert left_bound and right_bound + if target - left_bound < right_bound - target: + return left_bound + return right_bound + assert False # unreachable + + def op_intersect_range( + self, other: "_N_Range[NumericT]" + ) -> "_N_NonIterableRanges[NumericT]": + return _N_NonIterableRanges(*(r.op_intersect_range(other) for r in self.ranges)) + + def op_intersect_ranges( + self, other: "_N_NonIterableRanges[NumericT]" + ) -> "_N_NonIterableRanges[NumericT]": + # TODO currently quadratic + # lists are sorted, so this could be linear + return _N_NonIterableRanges( + *(r.op_intersect_range(o) for r in self.ranges for o in other.ranges) + ) + + def op_union_ranges( + self, other: "_N_NonIterableRanges[NumericT]" + ) -> "_N_NonIterableRanges[NumericT]": + return _N_NonIterableRanges(*self.ranges, *other.ranges) + + def 
op_add_ranges( + self, other: "_N_NonIterableRanges[NumericT]" + ) -> "_N_NonIterableRanges[NumericT]": + return _N_NonIterableRanges( *(r.op_add_range(o) for r in self.ranges for o in other.ranges) ) - def op_negate(self) -> "_N_Ranges[NumericT]": - return _N_Ranges(*(r.op_negate() for r in self.ranges)) + def op_negate(self) -> "_N_NonIterableRanges[NumericT]": + return _N_NonIterableRanges(*(r.op_negate() for r in self.ranges)) - def op_subtract_ranges(self, other: "_N_Ranges[NumericT]") -> "_N_Ranges[NumericT]": + def op_subtract_ranges( + self, other: "_N_NonIterableRanges[NumericT]" + ) -> "_N_NonIterableRanges[NumericT]": return self.op_add_ranges(other.op_negate()) - def op_mul_ranges(self, other: "_N_Ranges[NumericT]") -> "_N_Ranges[NumericT]": - return _N_Ranges( + def op_mul_ranges( + self, other: "_N_NonIterableRanges[NumericT]" + ) -> "_N_NonIterableRanges[NumericT]": + return _N_NonIterableRanges( *(r.op_mul_range(o) for r in self.ranges for o in other.ranges) ) - def op_invert(self) -> "_N_Ranges[float]": - return _N_Ranges(*(r.op_invert() for r in self.ranges)) + def op_invert(self) -> "_N_NonIterableRanges[float]": + return _N_NonIterableRanges(*(r.op_invert() for r in self.ranges)) def op_div_ranges( - self: "_N_Ranges[float]", other: "_N_Ranges[float]" - ) -> "_N_Ranges[float]": + self: "_N_NonIterableRanges[float]", other: "_N_NonIterableRanges[float]" + ) -> "_N_NonIterableRanges[float]": return self.op_mul_ranges(other.op_invert()) def __contains__(self, item: NumericT) -> bool: - from bisect import bisect - index = bisect(self.ranges, item, key=lambda r: r.min_elem()) if index == 0: @@ -231,7 +295,7 @@ def __contains__(self, item: NumericT) -> bool: return item in self.ranges[index - 1] def __eq__(self, value: Any) -> bool: - if not isinstance(value, _N_Ranges): + if not isinstance(value, _N_NonIterableRanges): return False if len(self.ranges) != len(value.ranges): return False @@ -244,21 +308,21 @@ def __hash__(self) -> int: return hash(tuple(hash(r) for r in self.ranges)) def __repr__(self) -> str: - return f"_RangeUnion({', '.join(f"[{r.min}, {r.max}]" for r in self.ranges)})" + return f"_N_Ranges({', '.join(f"[{r._min}, {r._max}]" for r in self.ranges)})" -class _N_RangesIterable(_N_Ranges[NumericT], Iterable[_N_Range[NumericT]]): +class _N_Ranges(_N_NonIterableRanges[NumericT], Iterable[_N_Range[NumericT]]): def __iter__(self) -> Generator[_N_Range[NumericT]]: yield from self.ranges -class _N_Singles(_N_Ranges[NumericT], Iterable[NumericT]): +class _N_Singles(_N_NonIterableRanges[NumericT], Iterable[NumericT]): def __init__(self, *values: NumericT): super().__init__(*(_N_Single(v) for v in values)) def __iter__(self) -> Generator[NumericT]: for r in self.ranges: - yield r.min + yield r._min def _N_Empty() -> _N_Ranges: @@ -339,8 +403,8 @@ def from_center_rel(center: QuantityT, rel_tol: float) -> "Range[QuantityT]": @staticmethod def _from_range(range: _N_Range[NumericT], units: Unit) -> "Range[QuantityT]": return Range( - min=Quantity(range.min, base_units(units)), - max=Quantity(range.max, base_units(units)), + min=Quantity(range._min, base_units(units)), + max=Quantity(range._max, base_units(units)), units=units, ) @@ -348,16 +412,21 @@ def base_to_units(self, value: NumericT) -> Quantity: return Quantity(value, self.range_units).to(self.units) def min_elem(self) -> Quantity: - return self.base_to_units(self._range.min) + return self.base_to_units(self._range.min_elem()) + + def max_elem(self) -> Quantity: + return self.base_to_units(self._range.max_elem()) 
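# Editorial sketch (not part of the patch): how the closest_elem added above is
# expected to behave on a disjoint union of ranges. This follows the
# bisect-over-min_elem logic in _N_NonIterableRanges.closest_elem and the
# accompanying test_set_closest_elem; Ranges/dimensionless are assumed to come
# from faebryk.libs.sets and faebryk.libs.units as in the tests.
#
#   x = Ranges((1, 2), (5, 6))
#   x.closest_elem(1.5 * dimensionless)  # -> 1.5  (target lies inside [1, 2])
#   x.closest_elem(3.9 * dimensionless)  # -> 5    (right bound 5 is nearer than left bound 2)
#   x.closest_elem(2.4 * dimensionless)  # -> 2    (left bound 2 is nearer)
#   x.closest_elem(0 * dimensionless)    # -> 1    (nothing to the left, snap to first min)
#   x.closest_elem(7 * dimensionless)    # -> 6    (nothing to the right, snap to last max)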
def is_empty(self) -> bool: return self._range.is_empty() - def op_intersect_range(self, other: "Range[QuantityT]") -> "Ranges[QuantityT]": + def op_intersect_range( + self, other: "Range[QuantityT]" + ) -> "NonIterableRanges[QuantityT]": if not self.units.is_compatible_with(other.units): - return Ranges(units=self.units) + return NonIterableRanges(units=self.units) _range = self._range.op_intersect_range(other._range) - return Ranges._from_ranges(_range, self.units) + return NonIterableRanges._from_ranges(_range, self.units) def op_add_range(self, other: "Range[QuantityT]") -> "Range[QuantityT]": if not self.units.is_compatible_with(other.units): @@ -383,9 +452,9 @@ def op_invert(self) -> "Ranges[QuantityT]": _range = self._range.op_invert() return Ranges._from_ranges(_range, 1 / self.units) - def op_div_range(self, other: "Range[QuantityT]") -> "Ranges[QuantityT]": + def op_div_range(self, other: "Range[QuantityT]") -> "NonIterableRanges[QuantityT]": _range = self._range.op_div_range(other._range) - return Ranges._from_ranges(_range, self.units / other.units) + return NonIterableRanges._from_ranges(_range, self.units / other.units) # def __copy__(self) -> Self: # r = Range.__new__(Range) @@ -413,7 +482,7 @@ def __eq__(self, value: Any) -> bool: return False if isinstance(value, Range): return self._range == value._range - if isinstance(value, Ranges) and len(value._ranges.ranges) == 1: + if isinstance(value, NonIterableRanges) and len(value._ranges.ranges) == 1: return self._range == value._ranges.ranges[0] return False @@ -423,22 +492,33 @@ def __eq__(self, value: Any) -> bool: def __repr__(self) -> str: if self.units.is_compatible_with(dimensionless): - return f"Range({self._range.min}, {self._range.max})" - return f"Range({self.base_to_units(self._range.min)}, {self.base_to_units(self._range.max)} | {self.units})" + return f"Range({self._range._min}, {self._range._max})" + return f"Range({self.base_to_units(self._range._min)}, {self.base_to_units(self._range._max)} | {self.units})" class Single(Range[QuantityT]): def __init__(self, value: QuantityT): super().__init__(value, value) + def __iter__(self) -> Generator[Quantity]: + yield self.min_elem() + -class Ranges(P_UnitSet[QuantityT]): +NonIterableRangesT = TypeVar("NonIterableRangesT", bound="NonIterableRanges") + + +class NonIterableRanges(P_UnitSet[QuantityT]): def __init__( self, - *ranges: Range[QuantityT] | "Ranges[QuantityT]", + *ranges: Range[QuantityT] + | "NonIterableRanges[QuantityT]" + | tuple[QuantityT, QuantityT], units: Unit | None = None, ): - range_units = [HasUnit.get_units_or_dimensionless(r) for r in ranges] + proper_ranges = [ + Range(r[0], r[1]) if isinstance(r, tuple) else r for r in ranges + ] + range_units = [HasUnit.get_units_or_dimensionless(r) for r in proper_ranges] if len(range_units) == 0 and units is None: raise ValueError("units must be provided for empty union") self.units = units or range_units[0] @@ -446,17 +526,21 @@ def __init__( if not all(self.units.is_compatible_with(u) for u in range_units): raise ValueError("all elements must have compatible units") - def get_backing(r: Range[QuantityT] | "Ranges[QuantityT]"): + def get_backing(r: Range[QuantityT] | "NonIterableRanges[QuantityT]"): if isinstance(r, Range): return r._range else: return r._ranges - self._ranges = _N_Ranges(*(get_backing(r) for r in ranges)) + self._ranges = _N_Ranges(*(get_backing(r) for r in proper_ranges)) - @staticmethod - def _from_ranges(ranges: "_N_Ranges[NumericT]", units: Unit) -> "Ranges[QuantityT]": - r = 
Ranges.__new__(Ranges) + @classmethod + def _from_ranges( + cls: Type[NonIterableRangesT], + ranges: "_N_NonIterableRanges[NumericT]", + units: Unit, + ) -> NonIterableRangesT: + r = cls.__new__(cls) r._ranges = ranges r.units = units r.range_units = base_units(units) @@ -468,12 +552,48 @@ def is_empty(self) -> bool: def base_to_units(self, value: NumericT) -> Quantity: return Quantity(value, self.range_units).to(self.units) - def min_elem(self) -> QuantityT: + def min_elem(self) -> Quantity: if self.is_empty(): raise ValueError("empty range cannot have min element") return self.base_to_units(self._ranges.min_elem()) - def op_add_ranges(self, other: "Ranges[QuantityT]") -> "Ranges[QuantityT]": + def max_elem(self) -> Quantity: + if self.is_empty(): + raise ValueError("empty range cannot have max element") + return self.base_to_units(self._ranges.max_elem()) + + def closest_elem(self, target: Quantity) -> Quantity: + if not self.units.is_compatible_with(target.units): + raise ValueError("incompatible units") + return self.base_to_units( + self._ranges.closest_elem(target.to(self.range_units).magnitude) + ) + + def op_intersect_range(self, other: "Range[QuantityT]") -> "Ranges[QuantityT]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._ranges.op_intersect_range(other._range) + return Ranges._from_ranges(_range, self.units) + + def op_intersect_ranges( + self, other: "NonIterableRanges[QuantityT]" + ) -> "Ranges[QuantityT]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._ranges.op_intersect_ranges(other._ranges) + return Ranges._from_ranges(_range, self.units) + + def op_union_ranges( + self, other: "NonIterableRanges[QuantityT]" + ) -> "Ranges[QuantityT]": + if not self.units.is_compatible_with(other.units): + raise ValueError("incompatible units") + _range = self._ranges.op_union_ranges(other._ranges) + return Ranges._from_ranges(_range, self.units) + + def op_add_ranges( + self, other: "NonIterableRanges[QuantityT]" + ) -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._ranges.op_add_ranges(other._ranges) @@ -483,13 +603,17 @@ def op_negate(self) -> "Ranges[QuantityT]": _range = self._ranges.op_negate() return Ranges._from_ranges(_range, self.units) - def op_subtract_ranges(self, other: "Ranges[QuantityT]") -> "Ranges[QuantityT]": + def op_subtract_ranges( + self, other: "NonIterableRanges[QuantityT]" + ) -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._ranges.op_subtract_ranges(other._ranges) return Ranges._from_ranges(_range, self.units) - def op_mul_ranges(self, other: "Ranges[QuantityT]") -> "Ranges[QuantityT]": + def op_mul_ranges( + self, other: "NonIterableRanges[QuantityT]" + ) -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._ranges.op_mul_ranges(other._ranges) @@ -499,7 +623,9 @@ def op_invert(self) -> "Ranges[QuantityT]": _range = self._ranges.op_invert() return Ranges._from_ranges(_range, 1 / self.units) - def op_div_ranges(self, other: "Ranges[QuantityT]") -> "Ranges[QuantityT]": + def op_div_ranges( + self, other: "NonIterableRanges[QuantityT]" + ) -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") _range = self._ranges.op_div_ranges(other._ranges) @@ 
-520,16 +646,25 @@ def __eq__(self, value: Any) -> bool: return False if not self.units.is_compatible_with(value.units): return False - if isinstance(value, Ranges): + if isinstance(value, NonIterableRanges): return self._ranges == value._ranges if isinstance(value, Range) and len(self._ranges.ranges) == 1: return self._ranges.ranges[0] == value._range return False + def __hash__(self) -> int: + return hash((self._ranges, self.units)) + def __repr__(self) -> str: if self.units.is_compatible_with(dimensionless): - return f"_RangeUnion({', '.join(f"[{r.min}, {r.max}]" for r in self._ranges.ranges)})" - return f"_RangeUnion({', '.join(f"[{self.base_to_units(r.min)}, {self.base_to_units(r.max)}]" for r in self._ranges.ranges)} | {self.units})" + return f"_RangeUnion({', '.join(f"[{r._min}, {r._max}]" for r in self._ranges.ranges)})" + return f"_RangeUnion({', '.join(f"[{self.base_to_units(r._min)}, {self.base_to_units(r._max)}]" for r in self._ranges.ranges)} | {self.units})" + + +class Ranges(NonIterableRanges[QuantityT], Iterable[Range[QuantityT]]): + def __iter__(self) -> Generator[Range[QuantityT]]: + for r in self._ranges.ranges: + yield Range._from_range(r, self.units) def Empty(units: Unit | None = None) -> Ranges[QuantityT]: @@ -538,5 +673,10 @@ def Empty(units: Unit | None = None) -> Ranges[QuantityT]: return Ranges(units=units) -def Singles(*values: QuantityT, units: Unit | None = None) -> Ranges[QuantityT]: - return Ranges(*(Single(v) for v in values), units=units) +class Singles(NonIterableRanges[QuantityT]): + def __init__(self, *values: QuantityT, units: Unit | None = None): + super().__init__(*(Single(v) for v in values), units=units) + + def __iter__(self) -> Generator[Quantity]: + for r in self._ranges.ranges: + yield self.base_to_units(r._min) diff --git a/test/libs/test_e_series.py b/test/libs/test_e_series.py index 36a2b7f5..60d0feae 100644 --- a/test/libs/test_e_series.py +++ b/test/libs/test_e_series.py @@ -1,7 +1,6 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -import unittest from itertools import pairwise import faebryk.library._F as F @@ -10,69 +9,43 @@ e_series_intersect, e_series_ratio, ) - - -class TestESeries(unittest.TestCase): - def test_intersect(self): - self.assertEqual( - e_series_intersect(F.Range(1, 10), {1, 2, 3}), - F.Set([F.Constant(1), F.Constant(2), F.Constant(3), F.Constant(10)]), - ) - self.assertEqual( - e_series_intersect(F.Range(3, 10), {1, 8, 9}), - F.Set([F.Constant(8), F.Constant(9), F.Constant(10)]), - ) - self.assertEqual( - e_series_intersect(F.Range(10, 1e3), {1, 1.5, 8, 9.9}), - F.Set( - [ - F.Constant(10), - F.Constant(15), - F.Constant(80), - F.Constant(99), - F.Constant(100), - F.Constant(150), - F.Constant(800), - F.Constant(990), - F.Constant(1000), - ] - ), - ) - self.assertEqual( - e_series_intersect(F.Range(2.1e3, 7.9e3), {1, 2, 8, 9}), - F.Set([]), - ) - - def test_ratio(self): - self.assertEqual( - e_series_ratio( - F.Range(100, 10e3), - F.Range(100, 10e3), - F.Constant(1 / 5), - E_SERIES_VALUES.E24, - ), - (F.Constant(1.2e3), F.Constant(300)), - ) - self.assertEqual( - e_series_ratio( - F.Range(100, 10e3), - F.Range(100, 10e3), - F.Range.from_center(0.0123, 0.0123 / 10), - E_SERIES_VALUES.E48, - ), - (F.Constant(9.09e3), F.Constant(115)), - ) - - def test_sets(self): - E = E_SERIES_VALUES - EVs24 = [3 * 2**i for i in range(4)] - EVs192 = [3 * 2**i for i in range(4, 6)] - for EVs in [EVs24, EVs192]: - for i1, i2 in pairwise(EVs): - e1 = getattr(E, f"E{i1}") - e2 = getattr(E, f"E{i2}") - 
self.assertTrue(e1 < e2, f"{i1} < {i2}") - - -if __name__ == "__main__": - unittest.main() +from faebryk.libs.library import L +from faebryk.libs.units import dimensionless + + +def test_intersect(): + assert e_series_intersect(L.Range(1, 10), {1, 2, 3}) == L.Singles(1, 2, 3, 10) + assert e_series_intersect(L.Range(3, 10), {1, 8, 9}) == L.Singles(8, 9, 10) + assert e_series_intersect(L.Range(10, 1e3), {1, 1.5, 8, 9.9}) == L.Singles( + 10, 15, 80, 99, 100, 150, 800, 990, 1000 + ) + assert e_series_intersect(L.Range(2.1e3, 7.9e3), {1, 2, 8, 9}) == L.Empty( + units=dimensionless + ) + + +def test_ratio(): + assert e_series_ratio( + L.Range(100, 10e3), + L.Range(100, 10e3), + L.Single(1 / 5), + E_SERIES_VALUES.E24, + ) == (1.2e3, 300) + + assert e_series_ratio( + L.Range(100, 10e3), + L.Range(100, 10e3), + L.Range.from_center(0.0123, 0.0123 / 10), + E_SERIES_VALUES.E48, + ) == (9.09e3, 115) + + +def test_sets(): + E = E_SERIES_VALUES + EVs24 = [3 * 2**i for i in range(4)] + EVs192 = [3 * 2**i for i in range(4, 6)] + for EVs in [EVs24, EVs192]: + for i1, i2 in pairwise(EVs): + e1 = getattr(E, f"E{i1}") + e2 = getattr(E, f"E{i2}") + assert e1 < e2, f"{i1} < {i2}" diff --git a/test/libs/test_sets.py b/test/libs/test_sets.py index 9138a7d0..55b07e72 100644 --- a/test/libs/test_sets.py +++ b/test/libs/test_sets.py @@ -52,6 +52,16 @@ def test_set_min_elem(): assert x.min_elem() == 1 +def test_set_closest_elem(): + x = Ranges((5, 6), (7, 8), Singles(2, 4, 1)) + assert x.closest_elem(0 * dimensionless) == 1 + assert x.closest_elem(1 * dimensionless) == 1 + assert x.closest_elem(5.1 * dimensionless) == 5.1 * dimensionless + assert x.closest_elem(4.9 * dimensionless) == 5 * dimensionless + assert x.closest_elem(4.1 * dimensionless) == 4 * dimensionless + assert x.closest_elem(6.9 * dimensionless) == 7 * dimensionless + + def test_set_contains(): x = Singles(5, 3, 2, 4, 1) assert 3 * dimensionless in x @@ -60,8 +70,8 @@ def test_set_contains(): def test_union_min_elem(): x = Ranges( - Range(4, 5), - Range(3, 7), + (4, 5), + (3, 7), Single(9), Ranges(Range(1, 2), Ranges(Range(0, 1))), ) @@ -70,10 +80,10 @@ def test_union_min_elem(): def test_union_contains(): x = Ranges( - Range(4, 5), - Range(3, 7), + (4, 5), + (3, 7), Single(9), - Ranges(Range(1, 2), Ranges(Range(0, 1))), + Ranges((1, 2), Ranges((0, 1))), ) assert 0 * dimensionless in x assert 1 * dimensionless in x @@ -110,20 +120,16 @@ def test_union_empty(): def test_add_empty(): - assert (Empty(dimensionless).op_add_ranges(Ranges(Range(0, 1)))) == Empty( - dimensionless - ) + assert (Empty(dimensionless).op_add_ranges(Ranges((0, 1)))) == Empty(dimensionless) def test_addition(): assert Range(0, 1).op_add_range(Range(2, 3)) == Range(2, 4) assert Range(0, 1).op_add_range(Single(2)) == Range(2, 3) - assert Ranges(Single(2), Single(3)).op_add_ranges(Ranges(Range(0, 1))) == Range( - 2, 4 - ) + assert Ranges(Single(2), Single(3)).op_add_ranges(Ranges((0, 1))) == Range(2, 4) assert Ranges(Single(10), Range(20, 21)).op_add_ranges( - Ranges(Range(0, 1), Range(100, 101)) - ) == Ranges(Range(10, 11), Range(110, 111), Range(20, 22), Range(120, 122)) + Ranges((0, 1), (100, 101)) + ) == Ranges((10, 11), (110, 111), (20, 22), (120, 122)) def test_addition_unit(): @@ -150,8 +156,8 @@ def test_multiplication(): assert Range(-1, 1).op_mul_range(Range(2, 4)) == Range(-4, 4) assert Singles(0, 1).op_mul_ranges(Singles(2, 3)) == Singles(0, 2, 3) assert Singles(0, 1).op_mul_ranges(Singles(2, 3)).op_mul_ranges( - Ranges(Range(-1, 0)) - ) == Ranges(Range(0, 0), Range(-2, 
0), Range(-3, 0)) + Ranges((-1, 0)) + ) == Ranges((0, 0), (-2, 0), (-3, 0)) def test_multiplication_unit(): @@ -163,10 +169,8 @@ def test_multiplication_unit(): def test_invert(): assert Range(1, 2).op_invert() == Range(0.5, 1) assert Range(-2, -1).op_invert() == Range(-1, -0.5) - assert Range(-1, 1).op_invert() == Ranges( - Range(float("-inf"), -1), Range(1, float("inf")) - ) - assert Ranges(Range(-4, 2), Range(-1, 3)).op_invert() == Ranges( + assert Range(-1, 1).op_invert() == Ranges((float("-inf"), -1), (1, float("inf"))) + assert Ranges((-4, 2), (-1, 3)).op_invert() == Ranges( Range(max=-0.25), Range(min=1 / 3) ) From 5afd0eae7f720b07b54ae9d7f0d40a1a04465e27 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 12:13:09 +0200 Subject: [PATCH 56/80] interactive layouts --- .../exporters/visualize/interactive_graph.py | 421 ++++++++++++++++-- test/core/test_parameters.py | 18 + 2 files changed, 407 insertions(+), 32 deletions(-) diff --git a/src/faebryk/exporters/visualize/interactive_graph.py b/src/faebryk/exporters/visualize/interactive_graph.py index 280bc047..3dcbd9be 100644 --- a/src/faebryk/exporters/visualize/interactive_graph.py +++ b/src/faebryk/exporters/visualize/interactive_graph.py @@ -1,23 +1,27 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from typing import Collection, Iterable +from gc import is_finalized +from itertools import pairwise +from typing import Any, Callable, Collection, Iterable +import dash_core_components as dcc import dash_cytoscape as cyto from dash import Dash, html +from dash.dependencies import Input, Output, State from rich.console import Console from rich.table import Table # import faebryk.library._F as F from faebryk.core.graphinterface import Graph, GraphInterface -from faebryk.core.link import Link +from faebryk.core.link import Link, LinkSibling from faebryk.core.module import Module from faebryk.core.moduleinterface import ModuleInterface from faebryk.core.node import Node from faebryk.core.parameter import Expression, Parameter, Predicate from faebryk.core.trait import Trait from faebryk.exporters.visualize.util import generate_pastel_palette -from faebryk.libs.util import KeyErrorAmbiguous, find_or +from faebryk.libs.util import KeyErrorAmbiguous, cast_assert, find, find_or, groupby def typename(obj): @@ -30,10 +34,10 @@ def typename(obj): def _gif(gif: GraphInterface): return { "data": { - "id": id(gif), + "id": str(id(gif)), "label": gif.name, "type": typename(gif), - "parent": id(gif.node), + "parent": str(id(gif.node)), } } @@ -41,8 +45,8 @@ def _gif(gif: GraphInterface): def _link(source, target, link: Link): return { "data": { - "source": id(source), - "target": id(target), + "source": str(id(source)), + "target": str(id(target)), "type": typename(link), } } @@ -70,17 +74,22 @@ def _group(node: Node, root: bool): subtype = e.duplicates[0] if root: - label = node.get_full_name(types=True) + hier = node.get_hierarchy() + type_hier = [t for t, _ in hier] + name_hier = [n for _, n in hier] + name = ".".join(name_hier) + types = "|".join(typename(t) for t in type_hier) + label = f"{name}\n({types})" else: label = f"{node.get_name(accept_no_parent=True)}\n({typename(node)})" return { "data": { - "id": id(node), + "id": str(id(node)), "label": label, "type": "group", "subtype": typename(subtype), - "parent": id(p[0]) if (p := node.get_parent()) else None, + "parent": str(id(p[0])) if (p := node.get_parent()) else None, } } @@ -88,7 +97,7 @@ def _group(node: Node, root: bool): # Style 
-------------------------------------------------------------------------------- -def _with_pastels[T](iterable: Collection[T]): +def _with_pastels(iterable: Collection[str]): return zip(sorted(iterable), generate_pastel_palette(len(iterable))) @@ -153,7 +162,12 @@ def add_link_type(self, link_type: str, color: str): self.stylesheet.append( { "selector": f'edge[type = "{link_type}"]', - "style": {"line-color": color, "target-arrow-color": color}, + "style": { + "line-color": color, + "target-arrow-color": color, + # "target-arrow-shape": "none", + # "source-arrow-shape": "none", + }, } ) @@ -166,7 +180,11 @@ def add_group_type(self, group_type: str, color: str): ) -def _Layout(stylesheet: _Stylesheet, elements: list[dict[str, dict]]): +def _Layout( + stylesheet: _Stylesheet, elements: list[dict[str, dict]], extra: dict | None = None +): + if not extra: + extra = {} return html.Div( style={ "position": "fixed", @@ -192,29 +210,61 @@ def _Layout(stylesheet: _Stylesheet, elements: list[dict[str, dict]]): elements=elements, layout={ "name": "fcose", + # 'draft', 'default' or 'proof' + # - "draft" only applies spectral layout + # - "default" improves the quality with incremental layout + # (fast cooling rate) + # - "proof" improves the quality with incremental layout + # (slow cooling rate) "quality": "proof", + # Whether or not to animate the layout "animate": False, + # Use random node positions at beginning of layout + # if this is set to false, + # then quality option must be "proof" "randomize": False, + # Fit the viewport to the repositioned nodes "fit": True, + # Padding around layout "padding": 50, + # Whether to include labels in node dimensions. + # Valid in "proof" quality "nodeDimensionsIncludeLabels": True, - "uniformNodeDimensions": False, - "packComponents": True, - "nodeRepulsion": 1000, - "idealEdgeLength": 50, - "edgeElasticity": 0.45, - "nestingFactor": 0.1, - "gravity": 0.25, - "numIter": 2500, - "tile": True, - "tilingPaddingVertical": 10, - "tilingPaddingHorizontal": 10, - "gravityRangeCompound": 1.5, - "gravityCompound": 1.5, + # Whether or not simple nodes (non-compound nodes) + # are of uniform dimensions + "uniformNodeDimensions": True, + # Whether to pack disconnected components - + # cytoscape-layout-utilities extension should + # be registered and initialized + "packComponents": False, # Graph is never disconnected + # Node repulsion (non overlapping) multiplier + "nodeRepulsion": 100, + # Ideal edge (non nested) length + "idealEdgeLength": 100, + # Divisor to compute edge forces + "edgeElasticity": 0.2, + # Nesting factor (multiplier) to compute ideal edge length + # for nested edges + "nestingFactor": 0.0001, + # Maximum number of iterations to perform - + # this is a suggested value and might be adjusted by the + # algorithm as required + "numIter": 2500 * 4, + # For enabling tiling + "tile": False, # No unconnected nodes in Graph + # Gravity force (constant) + "gravity": 0, + # Gravity range (constant) "gravityRange": 3.8, + # Gravity force (constant) for compounds + "gravityCompound": 20, + # Gravity range (constant) for compounds + "gravityRangeCompound": 0.5, + # Initial cooling factor for incremental layout "initialEnergyOnIncremental": 0.5, "componentSpacing": 40, - }, + } + | extra, ) ], ), @@ -222,6 +272,304 @@ def _Layout(stylesheet: _Stylesheet, elements: list[dict[str, dict]]): ) +def _get_layout(app: Dash) -> dict[str, Any]: + for html_node in cast_assert(list, cast_assert(html.Div, app.layout).children): + if not isinstance(html_node, html.Div): + 
continue + for child in cast_assert(list, html_node.children): + if not isinstance(child, cyto.Cytoscape): + continue + return child.layout + raise ValueError("No Cytoscape found in layout") + + +# -------------------------------------------------------------------------------------- + + +class Layout: + type ID_or_OBJECT = object | str + + def __init__(self, app: Dash, elements: list[dict], nodes: list[Node]): + self.app = app + self.layout = _get_layout(app) + self.ids = { + element["data"]["id"] for element in elements if "id" in element["data"] + } + + self.div_children = cast_assert( + list, cast_assert(html.Div, app.layout).children + ) + self.nodes = nodes + + def is_filtered(self, elem: ID_or_OBJECT) -> bool: + if not isinstance(elem, str): + elem = self.id_of(elem) + return elem not in self.ids + + def nodes_of_type[T: Node](self, node_type: type[T]) -> set[T]: + return { + n + for n in self.nodes + if isinstance(n, node_type) and not self.is_filtered(n.self_gif) + } + + @staticmethod + def id_of(obj: ID_or_OBJECT) -> str: + if isinstance(obj, str): + return obj + return str(id(obj)) + + def add_rel_constraint( + self, + source: ID_or_OBJECT, + target: ID_or_OBJECT, + gap_x: int | None = None, + gap_y: int | None = None, + layout: dict | None = None, + ): + if not layout: + layout = self.layout + + if "relativePlacementConstraint" not in layout: + layout["relativePlacementConstraint"] = [] + rel_placement_constraints = cast_assert( + list, layout["relativePlacementConstraint"] + ) + + if self.is_filtered(source) or self.is_filtered(target): + return + if gap_y is not None: + top, bot = (source, target) if gap_y >= 0 else (target, source) + + # if isinstance(top, GraphInterface) and isinstance(bot, GraphInterface): + # print(f"{top}\n v\n{bot}") + + rel_placement_constraints.append( + { + "top": self.id_of(top), + "bottom": self.id_of(bot), + "gap": abs(gap_y), + } + ) + if gap_x is not None: + left, right = (source, target) if gap_x >= 0 else (target, source) + + # if isinstance(left, GraphInterface) and isinstance(right, GraphInterface): + # print(f"{left} > {right}") + + rel_placement_constraints.append( + { + "left": self.id_of(left), + "right": self.id_of(right), + "gap": abs(gap_x), + } + ) + + def add_rel_top_bot( + self, + top: ID_or_OBJECT, + bot: ID_or_OBJECT, + gap: int = 0, + layout: dict | None = None, + ): + assert gap >= 0 + self.add_rel_constraint(top, bot, gap_y=gap, layout=layout) + + def add_rel_left_right( + self, + left: ID_or_OBJECT, + right: ID_or_OBJECT, + gap: int = 0, + layout: dict | None = None, + ): + assert gap >= 0 + self.add_rel_constraint(left, right, gap_x=gap, layout=layout) + + def add_order( + self, + *nodes: ID_or_OBJECT, + horizontal: bool, + gap: int = 0, + layout: dict | None = None, + ): + if not layout: + layout = self.layout + for n1, n2 in pairwise(nodes): + if horizontal: + self.add_rel_left_right(n1, n2, gap=gap, layout=layout) + else: + self.add_rel_top_bot(n1, n2, gap=gap, layout=layout) + + def add_align( + self, *nodes: ID_or_OBJECT, horizontal: bool, layout: dict | None = None + ): + if not layout: + layout = self.layout + direction = "horizontal" if horizontal else "vertical" + nodes = tuple(n for n in nodes if not self.is_filtered(n)) + if len(nodes) <= 1: + return + + if all(isinstance(n, GraphInterface) for n in nodes): + print(f"align {direction}: {nodes}") + + if "alignmentConstraint" not in layout: + layout["alignmentConstraint"] = {} + if direction not in layout["alignmentConstraint"]: + 
layout["alignmentConstraint"][direction] = [] + + align = cast_assert(dict, layout["alignmentConstraint"]) + align[direction].append([self.id_of(n) for n in nodes]) + + def add_same_height[T: Node]( + self, + nodes: Iterable[T], + gif_key: Callable[[T], GraphInterface], + layout: dict | None = None, + ): + if not layout: + layout = self.layout + self.add_align(*(gif_key(n) for n in nodes), horizontal=True, layout=layout) + + +def buttons(layout: Layout): + app = layout.app + html_controls = html.Div( + className="controls", + style={"padding": "10px", "background-color": "#f0f0f0"}, + children=[ + # html.Label("Node Repulsion:"), + # dcc.Slider( + # id="node-repulsion-slider", + # min=500, + # max=5000, + # step=100, + # value=1000, + # marks={i: str(i) for i in range(500, 5001, 500)}, + # ), + # html.Label("Edge Elasticity:"), + # dcc.Slider( + # id="edge-elasticity-slider", + # min=0, + # max=1, + # step=0.05, + # value=0.45, + # marks={i / 10: str(i / 10) for i in range(0, 11, 1)}, + # ), + dcc.Checklist( + id="layout-checkbox", + options=[{"label": "Parameters", "value": "parameters"}], + ), + html.Button("Apply Changes", id="apply-changes-button"), + ], + ) + layout.div_children.insert(-2, html_controls) + + @app.callback( + Output("graph-view", "layout"), + Input("apply-changes-button", "n_clicks"), + State("layout-checkbox", "value"), + State("graph-view", "layout"), + ) + def absolute_layout(n_clicks, layout_checkbox, current_layout): + print(layout_checkbox) + if "parameters" in (layout_checkbox or []): + params_top(layout, current_layout) + + return current_layout + + +def params_top(layout: Layout, _layout: dict | None = None): + params = layout.nodes_of_type(Parameter) + expressions = layout.nodes_of_type(Expression) + predicates = layout.nodes_of_type(Predicate) + non_predicate_expressions = expressions - predicates + + def depth(expr: Expression) -> int: + operates_on = expressions & { + e.node + for e, li in expr.operates_on.edges.items() + if not isinstance(li, LinkSibling) and e.node is not expr + } + + # direct parameter or constants only + if not operates_on: + return 1 + return 1 + max(depth(o) for o in operates_on) + + expressions_by_depth = groupby(non_predicate_expressions, depth) + + def same_height[T: Parameter | Expression](nodes: Iterable[T]): + layout.add_same_height(nodes, lambda pe: pe.self_gif, layout=_layout) + layout.add_same_height(nodes, lambda pe: pe.operated_on, layout=_layout) + + # All params same height + same_height(params) + + # predicates same height + same_height(predicates) + + for _, exprs in expressions_by_depth.items(): + same_height(exprs) + + # predicate expressions below other expressions + if predicates: + for expr in non_predicate_expressions: + layout.add_rel_top_bot( + expr.operates_on, next(iter(predicates)).self_gif, gap=200 + ) + + # Expressions below params + if params: + for expr in expressions: + layout.add_rel_top_bot( + next(iter(params)).operated_on, expr.self_gif, gap=200 + ) + + # Expression tree + for expr in expressions: + operates_on = (params | expressions) & { + e.node + for e, li in expr.operates_on.edges.items() + if not isinstance(li, LinkSibling) and e.node is not expr + } + for o in operates_on: + layout.add_rel_top_bot(o.operated_on, expr.self_gif, gap=100) + + +def layout_constraints(layout: Layout, _layout: dict | None = None): + for n in layout.nodes: + # only to save on some printing + if layout.is_filtered(n.self_gif): + continue + + siblings = { + o + for o, li in n.self_gif.edges.items() + if isinstance(li, 
LinkSibling) and not layout.is_filtered(o) + } + + # siblings below self + for o in siblings: + layout.add_rel_top_bot(n.self_gif, o, gap=50, layout=_layout) + + # siblings on same level within node + layout.add_align(*siblings, horizontal=True, layout=_layout) + + order = list(sorted(siblings, key=lambda o: o.name)) + middle_i = len(order) // 2 + if len(siblings) % 2 == 1: + # sibling directly below self + layout.add_align( + n.self_gif, order[middle_i], horizontal=False, layout=_layout + ) + order.pop(middle_i) + + # self inbetween siblings + order.insert(middle_i, n.self_gif) + layout.add_order(*order, horizontal=True, gap=25, layout=_layout) + + # -------------------------------------------------------------------------------------- @@ -270,7 +618,15 @@ def node_has_parent_in_graph(node: Node) -> bool: app = Dash(__name__) app.layout = _Layout(stylesheet, elements) - # Print legend + # Extra layouting + layout = Layout(app, elements, list(nodes)) + layout_constraints(layout) + buttons(layout) + # TODO remove + print("params_top", "-" * 80) + params_top(layout) + + # Print legend --------------------------------------------------------------------- console = Console() for typegroup, colors in [ @@ -288,7 +644,8 @@ def node_has_parent_in_graph(node: Node) -> bool: console.print(table) - # + # Run ------------------------------------------------------------------------------ + app.run(jupyter_height=height or 1000) @@ -307,13 +664,13 @@ def interactive_graph( if depth > 0: nodes = [node for node in nodes if len(node.get_hierarchy()) <= depth] - gifs = [gif for gif in G if gif.node in nodes] + gifs = {gif for gif in G if gif.node in nodes} if filter_unconnected: - gifs = [gif for gif in gifs if len(gif.edges) > 1] + gifs = [gif for gif in gifs if len(gif.edges.keys() & gifs) > 1] edges = [ (edge[0], edge[1], edge[2]) for edge in G.edges if edge[0] in gifs and edge[1] in gifs ] - return interactive_subgraph(edges, gifs, nodes, height=height) + return interactive_subgraph(edges, list(gifs), nodes, height=height) diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index abdf6382..499babd6 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: MIT import logging +from itertools import pairwise import pytest @@ -10,6 +11,7 @@ from faebryk.libs.library import L from faebryk.libs.sets import Range from faebryk.libs.units import P +from faebryk.libs.util import times logger = logging.getLogger(__name__) @@ -49,6 +51,22 @@ class App(Node): interactive_graph(G, height=1400, node_types=(Parameter, Expression)) +def test_visualize_chain(): + from faebryk.exporters.visualize.interactive_graph import interactive_graph + + params = times(10, Parameter) + sums = [p1 + p2 for p1, p2 in pairwise(params)] + products = [p1 * p2 for p1, p2 in pairwise(sums)] + bigsum = sum(products) + + predicates = [bigsum <= 100] + for p in predicates: + p.constrain() + + G = params[0].get_graph() + interactive_graph(G, height=1400, node_types=(Parameter, Expression)) + + # TODO remove if __name__ == "__main__": # if run in jupyter notebook From 6ff340d10e8651527a7ae82f9c21cae7e77b2fbc Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 13:23:29 +0200 Subject: [PATCH 57/80] interactive params --- .../exporters/visualize/interactive_graph.py | 171 +++++++----- .../exporters/visualize/interactive_params.py | 248 ++++++++++++++++++ src/faebryk/libs/util.py | 6 + test/core/test_parameters.py | 14 +- 4 files changed, 363 
insertions(+), 76 deletions(-) create mode 100644 src/faebryk/exporters/visualize/interactive_params.py diff --git a/src/faebryk/exporters/visualize/interactive_graph.py b/src/faebryk/exporters/visualize/interactive_graph.py index 3dcbd9be..2a88e121 100644 --- a/src/faebryk/exporters/visualize/interactive_graph.py +++ b/src/faebryk/exporters/visualize/interactive_graph.py @@ -1,7 +1,6 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from gc import is_finalized from itertools import pairwise from typing import Any, Callable, Collection, Iterable @@ -21,13 +20,7 @@ from faebryk.core.parameter import Expression, Parameter, Predicate from faebryk.core.trait import Trait from faebryk.exporters.visualize.util import generate_pastel_palette -from faebryk.libs.util import KeyErrorAmbiguous, cast_assert, find, find_or, groupby - - -def typename(obj): - if isinstance(obj, type): - return obj.__name__ - return type(obj).__name__ +from faebryk.libs.util import KeyErrorAmbiguous, cast_assert, find_or, groupby, typename # Transformers ------------------------------------------------------------------------- @@ -210,59 +203,6 @@ def _Layout( elements=elements, layout={ "name": "fcose", - # 'draft', 'default' or 'proof' - # - "draft" only applies spectral layout - # - "default" improves the quality with incremental layout - # (fast cooling rate) - # - "proof" improves the quality with incremental layout - # (slow cooling rate) - "quality": "proof", - # Whether or not to animate the layout - "animate": False, - # Use random node positions at beginning of layout - # if this is set to false, - # then quality option must be "proof" - "randomize": False, - # Fit the viewport to the repositioned nodes - "fit": True, - # Padding around layout - "padding": 50, - # Whether to include labels in node dimensions. 
- # Valid in "proof" quality - "nodeDimensionsIncludeLabels": True, - # Whether or not simple nodes (non-compound nodes) - # are of uniform dimensions - "uniformNodeDimensions": True, - # Whether to pack disconnected components - - # cytoscape-layout-utilities extension should - # be registered and initialized - "packComponents": False, # Graph is never disconnected - # Node repulsion (non overlapping) multiplier - "nodeRepulsion": 100, - # Ideal edge (non nested) length - "idealEdgeLength": 100, - # Divisor to compute edge forces - "edgeElasticity": 0.2, - # Nesting factor (multiplier) to compute ideal edge length - # for nested edges - "nestingFactor": 0.0001, - # Maximum number of iterations to perform - - # this is a suggested value and might be adjusted by the - # algorithm as required - "numIter": 2500 * 4, - # For enabling tiling - "tile": False, # No unconnected nodes in Graph - # Gravity force (constant) - "gravity": 0, - # Gravity range (constant) - "gravityRange": 3.8, - # Gravity force (constant) for compounds - "gravityCompound": 20, - # Gravity range (constant) for compounds - "gravityRangeCompound": 0.5, - # Initial cooling factor for incremental layout - "initialEnergyOnIncremental": 0.5, - "componentSpacing": 40, } | extra, ) @@ -410,8 +350,8 @@ def add_align( if len(nodes) <= 1: return - if all(isinstance(n, GraphInterface) for n in nodes): - print(f"align {direction}: {nodes}") + # if all(isinstance(n, GraphInterface) for n in nodes): + # print(f"align {direction}: {nodes}") if "alignmentConstraint" not in layout: layout["alignmentConstraint"] = {} @@ -431,6 +371,76 @@ def add_same_height[T: Node]( layout = self.layout self.add_align(*(gif_key(n) for n in nodes), horizontal=True, layout=layout) + def set_type(self, t: str, layout: dict | None = None): + if not layout: + layout = self.layout + if t == "fcose" or t is None: + _layout = { + "name": "fcose", + # 'draft', 'default' or 'proof' + # - "draft" only applies spectral layout + # - "default" improves the quality with incremental layout + # (fast cooling rate) + # - "proof" improves the quality with incremental layout + # (slow cooling rate) + "quality": "proof", + # Whether or not to animate the layout + "animate": False, + # Use random node positions at beginning of layout + # if this is set to false, + # then quality option must be "proof" + "randomize": False, + # Fit the viewport to the repositioned nodes + "fit": True, + # Padding around layout + "padding": 50, + # Whether to include labels in node dimensions. 
+ # Valid in "proof" quality + "nodeDimensionsIncludeLabels": True, + # Whether or not simple nodes (non-compound nodes) + # are of uniform dimensions + "uniformNodeDimensions": True, + # Whether to pack disconnected components - + # cytoscape-layout-utilities extension should + # be registered and initialized + "packComponents": False, # Graph is never disconnected + # Node repulsion (non overlapping) multiplier + "nodeRepulsion": 100, + # Ideal edge (non nested) length + "idealEdgeLength": 100, + # Divisor to compute edge forces + "edgeElasticity": 0.2, + # Nesting factor (multiplier) to compute ideal edge length + # for nested edges + "nestingFactor": 0.0001, + # Maximum number of iterations to perform - + # this is a suggested value and might be adjusted by the + # algorithm as required + "numIter": 2500 * 4, + # For enabling tiling + "tile": False, # No unconnected nodes in Graph + # Gravity force (constant) + "gravity": 0, + # Gravity range (constant) + "gravityRange": 3.8, + # Gravity force (constant) for compounds + "gravityCompound": 20, + # Gravity range (constant) for compounds + "gravityRangeCompound": 0.5, + # Initial cooling factor for incremental layout + "initialEnergyOnIncremental": 0.5, + "componentSpacing": 40, + } + elif t == "dagre": + _layout = { + "name": "dagre", + } + else: + raise ValueError(f"Unknown layout: {t}") + + layout.clear() + layout.update(_layout) + def buttons(layout: Layout): app = layout.app @@ -456,6 +466,21 @@ def buttons(layout: Layout): # value=0.45, # marks={i / 10: str(i / 10) for i in range(0, 11, 1)}, # ), + dcc.RadioItems( + id="layout-radio", + options=[ + {"label": "fcose", "value": "fcose"}, + {"label": "dagre", "value": "dagre"}, + ], + ), + dcc.RadioItems( + id="layout-dagre-ranker", + options=[ + {"label": "network-simplex", "value": "network-simplex"}, + {"label": "tight-tree", "value": "tight-tree"}, + {"label": "longest-path", "value": "longest-path"}, + ], + ), dcc.Checklist( id="layout-checkbox", options=[{"label": "Parameters", "value": "parameters"}], @@ -469,13 +494,25 @@ def buttons(layout: Layout): Output("graph-view", "layout"), Input("apply-changes-button", "n_clicks"), State("layout-checkbox", "value"), + State("layout-radio", "value"), + State("layout-dagre-ranker", "value"), State("graph-view", "layout"), ) - def absolute_layout(n_clicks, layout_checkbox, current_layout): - print(layout_checkbox) + def absolute_layout( + n_clicks, layout_checkbox, layout_radio, layout_dagre_ranker, current_layout + ): + print(layout_checkbox, layout_radio, layout_dagre_ranker) + layout.set_type(layout_radio, current_layout) + + if layout_radio == "fcose": + layout_constraints(layout, current_layout) + if "parameters" in (layout_checkbox or []): params_top(layout, current_layout) + if layout_dagre_ranker: + current_layout["ranker"] = layout_dagre_ranker + return current_layout @@ -620,11 +657,7 @@ def node_has_parent_in_graph(node: Node) -> bool: # Extra layouting layout = Layout(app, elements, list(nodes)) - layout_constraints(layout) buttons(layout) - # TODO remove - print("params_top", "-" * 80) - params_top(layout) # Print legend --------------------------------------------------------------------- console = Console() diff --git a/src/faebryk/exporters/visualize/interactive_params.py b/src/faebryk/exporters/visualize/interactive_params.py new file mode 100644 index 00000000..a21aa06f --- /dev/null +++ b/src/faebryk/exporters/visualize/interactive_params.py @@ -0,0 +1,248 @@ +# This file is part of the faebryk project +# 
SPDX-License-Identifier: MIT + +from dataclasses import dataclass +from typing import cast + +import dash_core_components as dcc +import dash_cytoscape as cyto +from dash import Dash, html +from dash.dependencies import Input, Output, State + +# import faebryk.library._F as F +from faebryk.core.graphinterface import Graph +from faebryk.core.link import LinkSibling +from faebryk.core.node import Node +from faebryk.core.parameter import Expression, Parameter +from faebryk.exporters.visualize.interactive_graph import ( + _GROUP_TYPES, + Layout, + _Layout, +) +from faebryk.libs.util import ( + KeyErrorAmbiguous, + find_or, + typename, +) + +Operand = Parameter | Expression + + +@dataclass(eq=True, frozen=True) +class ParamLink: + operator: Expression + operand: Operand + + +def _node(node: Operand): + try: + subtype = find_or(_GROUP_TYPES, lambda t: isinstance(node, t), default=Node) + except KeyErrorAmbiguous as e: + subtype = e.duplicates[0] + + hier = node.get_hierarchy() + type_hier = [t for t, _ in hier] + name_hier = [n for _, n in hier] + name = ".".join(name_hier) + types = "|".join(typename(t) for t in type_hier) + label = f"{name}\n({types})" + + return { + "data": { + "id": str(id(node)), + "label": label, + "type": typename(subtype), + } + } + + +def _link(link: ParamLink): + return { + "data": { + "source": str(id(link.operand)), + "target": str(id(link.operator)), + } + } + + +class _Stylesheet: + _BASE = [ + { + "selector": "node", + "style": { + "content": "data(label)", + "text-opacity": 0.8, + "text-valign": "center", + "text-halign": "center", + "font-size": "0.3em", + "background-color": "#BFD7B5", + "text-outline-color": "#FFFFFF", + "text-outline-width": 0.5, + "border-width": 1, + "border-color": "#888888", + "border-opacity": 0.5, + # group + "font-weight": "bold", + # "font-size": "1.5em", + # "text-valign": "top", + # "text-outline-color": "#FFFFFF", + # "text-outline-width": 1.5, + "text-wrap": "wrap", + # "border-width": 4, + }, + }, + { + "selector": "edge", + "style": { + "width": 1, + "line-color": "#A3C4BC", + "curve-style": "bezier", + "target-arrow-shape": "triangle", + "arrow-scale": 1, + "target-arrow-color": "#A3C4BC", + "text-outline-color": "#FFFFFF", + "text-outline-width": 2, + }, + }, + ] + + def __init__(self): + self.stylesheet = list(self._BASE) + + def add_node_type(self, node_type: str, color: str): + self.stylesheet.append( + { + "selector": f'node[type = "{node_type}"]', + "style": {"background-color": color}, + } + ) + + +DAGRE_LAYOUT = { + # Dagre algorithm options (uses default value if undefined) + "name": "dagre", + # Separation between adjacent nodes in the same rank + "nodeSep": None, + # Separation between adjacent edges in the same rank + "edgeSep": None, + # Separation between each rank in the layout + "rankSep": None, + # 'TB' for top to bottom flow, 'LR' for left to right + "rankDir": None, + # Alignment for rank nodes. 
Can be 'UL', 'UR', 'DL', or 'DR' + "align": None, + # If 'greedy', uses heuristic to find feedback arc set + "acyclicer": None, + # Algorithm to assign rank to nodes: 'network-simplex', 'tight-tree' or 'longest-path' + "ranker": None, + # Number of ranks to keep between source and target of the edge + # "minLen": lambda edge: 1, + # Higher weight edges are generally made shorter and straighter + # "edgeWeight": lambda edge: 1, + # General layout options + # Whether to fit to viewport + "fit": True, + # Fit padding + "padding": 30, + # Factor to expand/compress overall area nodes take up + "spacingFactor": None, + # Include labels in node space calculation + "nodeDimensionsIncludeLabels": False, + # Whether to transition node positions + "animate": False, + # Whether to animate specific nodes + # "animateFilter": lambda node, i: True, + # Duration of animation in ms if enabled + "animationDuration": 500, + # Easing of animation if enabled + "animationEasing": None, + # Constrain layout bounds: {x1, y1, x2, y2} or {x1, y1, w, h} + "boundingBox": None, + # Function to transform final node position + # "transform": lambda node, pos: pos, + # Callback on layoutready + # "ready": lambda: None, + # Sorting function to order nodes and edges + # "sort": None, + # Callback on layoutstop + # "stop": lambda: None, +} + + +def buttons(layout: Layout): + app = layout.app + html_controls = html.Div( + className="controls", + style={"padding": "10px", "background-color": "#f0f0f0"}, + children=[ + dcc.RadioItems( + id="layout-radio", + options=[ + {"label": "fcose", "value": "fcose"}, + {"label": "dagre", "value": "dagre"}, + ], + ), + dcc.RadioItems( + id="layout-dagre-ranker", + options=[ + {"label": "network-simplex", "value": "network-simplex"}, + {"label": "tight-tree", "value": "tight-tree"}, + {"label": "longest-path", "value": "longest-path"}, + ], + ), + html.Button("Apply Changes", id="apply-changes-button"), + ], + ) + layout.div_children.insert(-2, html_controls) + + @app.callback( + Output("graph-view", "layout"), + Input("apply-changes-button", "n_clicks"), + State("layout-radio", "value"), + State("layout-dagre-ranker", "value"), + State("graph-view", "layout"), + ) + def absolute_layout(n_clicks, layout_radio, layout_dagre_ranker, current_layout): + print(layout_radio, layout_dagre_ranker) + layout.set_type(layout_radio, current_layout) + + if layout_dagre_ranker: + current_layout["ranker"] = layout_dagre_ranker + + return current_layout + + +def visualize_parameters(G: Graph, height: int | None = None): + Operand_ = (Parameter, Expression) + nodes = G.nodes_of_types(Operand_) + nodes = cast(list[Operand], nodes) + + edges = { + ParamLink(n, e.node) + for n in nodes + if isinstance(n, Expression) + for e, li in n.operates_on.edges.items() + if not isinstance(li, LinkSibling) + and e.node is not n + and isinstance(e.node, Operand_) + } + + elements = [_node(n) for n in nodes] + [_link(li) for li in edges] + stylesheet = _Stylesheet() + + node_types_colors = [ + (typename(group_type), color) for group_type, color in _GROUP_TYPES.items() + ] + + for node_type, color in node_types_colors: + stylesheet.add_node_type(node_type, color) + + cyto.load_extra_layouts() + app = Dash(__name__) + app.layout = _Layout(stylesheet, elements, extra=DAGRE_LAYOUT) + + # Extra layouting + layout = Layout(app, elements, list(nodes)) + buttons(layout) + + app.run(jupyter_height=height or 1400) diff --git a/src/faebryk/libs/util.py b/src/faebryk/libs/util.py index 9a1f696f..ffb2e0b9 100644 --- 
a/src/faebryk/libs/util.py +++ b/src/faebryk/libs/util.py @@ -1130,3 +1130,9 @@ def _new(cls_, *args, **kwargs): cls.__new__ = _new return cls + + +def typename(obj): + if isinstance(obj, type): + return obj.__name__ + return type(obj).__name__ diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index 499babd6..834ff9d8 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -7,7 +7,7 @@ import pytest from faebryk.core.node import Node -from faebryk.core.parameter import Expression, Parameter +from faebryk.core.parameter import Parameter from faebryk.libs.library import L from faebryk.libs.sets import Range from faebryk.libs.units import P @@ -29,7 +29,7 @@ def test_visualize(): """ Creates webserver that opens automatically if run in jupyter notebook """ - from faebryk.exporters.visualize.interactive_graph import interactive_graph + from faebryk.exporters.visualize.interactive_params import visualize_parameters class App(Node): p1 = L.f_field(Parameter)(units=P.V) @@ -45,14 +45,14 @@ class App(Node): (p2 * p3 + app.p1 * 1 * P.A <= 10 * P.W).constrain() - # pytest.raises(ValueError, bool, app.p1 >= p2 * 5) + pytest.raises(ValueError, bool, app.p1 >= p2 * 5) G = app.get_graph() - interactive_graph(G, height=1400, node_types=(Parameter, Expression)) + visualize_parameters(G, height=1400) def test_visualize_chain(): - from faebryk.exporters.visualize.interactive_graph import interactive_graph + from faebryk.exporters.visualize.interactive_params import visualize_parameters params = times(10, Parameter) sums = [p1 + p2 for p1, p2 in pairwise(params)] @@ -64,7 +64,7 @@ def test_visualize_chain(): p.constrain() G = params[0].get_graph() - interactive_graph(G, height=1400, node_types=(Parameter, Expression)) + visualize_parameters(G, height=1400) # TODO remove @@ -77,4 +77,4 @@ def test_visualize_chain(): else: import typer - typer.run(test_visualize) + typer.run(test_visualize_chain) From 3262529d37910e1cd8782eef7237318ae7d96ada Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 17:05:45 +0200 Subject: [PATCH 58/80] direct changes in ui --- src/faebryk/exporters/visualize/interactive_params.py | 10 ++++------ test/core/test_parameters.py | 2 +- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/faebryk/exporters/visualize/interactive_params.py b/src/faebryk/exporters/visualize/interactive_params.py index a21aa06f..f039d90d 100644 --- a/src/faebryk/exporters/visualize/interactive_params.py +++ b/src/faebryk/exporters/visualize/interactive_params.py @@ -134,7 +134,7 @@ def add_node_type(self, node_type: str, color: str): # If 'greedy', uses heuristic to find feedback arc set "acyclicer": None, # Algorithm to assign rank to nodes: 'network-simplex', 'tight-tree' or 'longest-path' - "ranker": None, + "ranker": "tight-tree", # Number of ranks to keep between source and target of the edge # "minLen": lambda edge: 1, # Higher weight edges are generally made shorter and straighter @@ -190,19 +190,17 @@ def buttons(layout: Layout): {"label": "longest-path", "value": "longest-path"}, ], ), - html.Button("Apply Changes", id="apply-changes-button"), ], ) layout.div_children.insert(-2, html_controls) @app.callback( Output("graph-view", "layout"), - Input("apply-changes-button", "n_clicks"), - State("layout-radio", "value"), - State("layout-dagre-ranker", "value"), + Input("layout-radio", "value"), + Input("layout-dagre-ranker", "value"), State("graph-view", "layout"), ) - def absolute_layout(n_clicks, layout_radio, 
layout_dagre_ranker, current_layout): + def absolute_layout(layout_radio, layout_dagre_ranker, current_layout): print(layout_radio, layout_dagre_ranker) layout.set_type(layout_radio, current_layout) diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index 834ff9d8..7ea32e01 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -77,4 +77,4 @@ def test_visualize_chain(): else: import typer - typer.run(test_visualize_chain) + typer.run(test_visualize) From 4cb7f84a9a9deb6ae0629e433022e19e211994c5 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Wed, 23 Oct 2024 18:04:18 +0200 Subject: [PATCH 59/80] iterate on jlbpcb set types/approach --- src/faebryk/core/parameter.py | 8 +- src/faebryk/core/solver.py | 6 +- src/faebryk/libs/library/L.py | 2 + src/faebryk/libs/picker/jlcpcb/jlcpcb.py | 1441 +++++++++--------- src/faebryk/libs/picker/jlcpcb/picker_lib.py | 16 +- src/faebryk/libs/sets.py | 48 +- 6 files changed, 790 insertions(+), 731 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 81ae33b2..a7f1879b 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -1,6 +1,7 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +from collections.abc import Iterable import logging from enum import Enum, auto from types import NotImplementedType @@ -150,8 +151,13 @@ def new(self2): self.inspect_final = new - # def inspect_num_known_supersets(self) -> int: ... + # Could be exponentially many + def inspect_num_known_supersets(self) -> int: + raise Exception("not implemented") + # def inspect_get_known_supersets(self) -> Iterable[P_Set]: ... + def inspect_get_known_superranges(self: NumberLike) -> Iterable[Ranges]: + raise Exception("not implemented") # ---------------------------------------------------------------------------------- def __add__(self, other: NumberLike): diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index 28916984..a5d203cf 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -36,16 +36,12 @@ def get_any_single( constrain_result: bool = True, ) -> tuple[Any, list[Parameter]]: ... # TODO Any -> NumberLike? - # make at least one of the passed predicates true + # make at least one of the passed predicates true, unless that is impossible # while trying to minimize the value of the optional minimize expression # there is no specific order in which the predicates are solved # suppose_constraint can be added, which by constraining the solution further can make solving easier # it is only in effect for the duration of the solve call # constrain_solved will add the solutions as constraints - # returns a tuple of two lists: - # - the first list contains the predicates that were actually solved, i.e. 
they are true/false - # - the second list contains the expressions that remain unknown - # - the third list contains the parameters that have an empty solution set def assert_any_predicate[ArgType]( self, G: Graph, diff --git a/src/faebryk/libs/library/L.py b/src/faebryk/libs/library/L.py index 92bf5367..9b1c7ab9 100644 --- a/src/faebryk/libs/library/L.py +++ b/src/faebryk/libs/library/L.py @@ -17,6 +17,8 @@ from faebryk.core.reference import reference # noqa: F401 from faebryk.libs.sets import ( # noqa: F401 Empty, + P_Set, + P_UnitSet, PlainSet, Range, Ranges, diff --git a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py index cda85cd1..3fa9c976 100644 --- a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py +++ b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py @@ -1,799 +1,816 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT - -class _RaiseType: - def __init__(self): - raise NotImplementedError() - - -class JLCPCB_DB(_RaiseType): - class config: - class db_path: - @staticmethod - def exists() -> bool: - return False - - -class ComponentQuery(_RaiseType): ... - - -class Component(_RaiseType): ... - - -class MappingParameterDB(_RaiseType): ... - - -# TODO -if False: - import asyncio - import datetime - import logging - import os - import struct - import sys - from dataclasses import dataclass - from pathlib import Path - from textwrap import indent - from typing import Any, Callable, Generator, Self, Sequence - - import patoolib - import requests - from pint import DimensionalityError - from rich.progress import track - from tortoise import Tortoise - from tortoise.expressions import Q - from tortoise.fields import CharField, IntField, JSONField - from tortoise.models import Model - - import faebryk.library._F as F - from faebryk.core.module import Module - from faebryk.core.parameter import Parameter - from faebryk.libs.e_series import ( - E_SERIES_VALUES, - ParamNotResolvedError, - e_series_intersect, - ) - from faebryk.libs.picker.lcsc import ( - LCSC_NoDataException, - LCSC_Part, - LCSC_PinmapException, - attach, - ) - from faebryk.libs.picker.picker import ( - DescriptiveProperties, - PickError, - has_part_picked_defined, - ) - from faebryk.libs.units import P, Quantity, UndefinedUnitError, to_si_str - from faebryk.libs.util import at_exit, cast_assert, try_or - - logger = logging.getLogger(__name__) - - # TODO dont hardcode relative paths - BUILD_FOLDER = Path("./build") - CACHE_FOLDER = BUILD_FOLDER / Path("cache") - - class JLCPCB_Part(LCSC_Part): - def __init__(self, partno: str) -> None: - super().__init__(partno=partno) - - class TBD_ParseError(F.TBD): - """ - Wrapper for TBD that behaves exactly like TBD for the core and picker - But gives us the possibility to attach parser errors to it for deferred - error logging +import asyncio +import datetime +import logging +import os +import struct +import sys +from dataclasses import dataclass +from pathlib import Path +from textwrap import indent +from typing import Any, Callable, Generator, Self, Sequence, cast + +from more_itertools import take +import patoolib +import requests +from pint import DimensionalityError +from rich.progress import track +from tortoise import Tortoise +from tortoise.expressions import Q +from tortoise.fields import CharField, IntField, JSONField +from tortoise.models import Model + +from faebryk.core.solver import Solver +import faebryk.library._F as F +from faebryk.core.module import Module +from faebryk.core.parameter import Parameter, ParameterOperatable 
+from faebryk.libs.e_series import ( + E_SERIES_VALUES, + ParamNotResolvedError, + e_series_intersect, +) +from faebryk.libs.library import L +from faebryk.libs.picker.lcsc import ( + LCSC_NoDataException, + LCSC_Part, + LCSC_PinmapException, + attach, +) +from faebryk.libs.picker.picker import ( + DescriptiveProperties, + PickError, + has_part_picked_defined, +) +from faebryk.libs.units import P, Quantity, UndefinedUnitError, to_si_str +from faebryk.libs.util import at_exit, cast_assert, try_or + +logger = logging.getLogger(__name__) + +# TODO dont hardcode relative paths +BUILD_FOLDER = Path("./build") +CACHE_FOLDER = BUILD_FOLDER / Path("cache") + +INSPECT_KNOWN_SUPERSETS_LIMIT = 100 + + +class JLCPCB_Part(LCSC_Part): + def __init__(self, partno: str) -> None: + super().__init__(partno=partno) + + +class TBD_ParseError(L.P_UnitSet): + """ + Wrapper for TBD that behaves exactly like TBD for the core and picker + But gives us the possibility to attach parser errors to it for deferred + error logging + """ + + def __init__(self, e: Exception, msg: str): + self.e = e + self.msg = msg + super().__init__() + + def __repr__(self): + return f"{super().__repr__()}({self.msg}: {self.e})" + + def __getattr__(self, name: str) -> Any: + import traceback + + print("__getattr__", name) + traceback.print_stack() + return None + + def __setattr__(self, name: str, value: Any) -> None: + import traceback + + print("__setattr__", name, value) + traceback.print_stack() + + def is_empty(self) -> bool: + self.__getattr__("is_empty") + return True + + def __contains__(self, item: Any) -> bool: + self.__getattr__("__contains__") + return False + + @property + def units(self): + self.__getattr__("units") + import faebryk.libs.units + + return faebryk.libs.units.dimensionless + + @units.setter + def units(self, value: Unit): + self.__setattr__("units", value) + + +@dataclass(frozen=True) +class MappingParameterDB: + param_name: str + attr_keys: list[str] + attr_tolerance_key: str | None = None + transform_fn: Callable[[str], L.Range] | None = None + ignore_at: bool = True + + +class Category(Model): + id = IntField(primary_key=True) + category = CharField(max_length=255) + subcategory = CharField(max_length=255) + + class Meta: + table = "categories" + + async def get_ids( + self, category: str = "", subcategory: str = "" + ) -> list[dict[str, Any]]: """ + Get the category ids for the given category and subcategory - def __init__(self, e: Exception, msg: str): - self.e = e - self.msg = msg - super().__init__() + :param category: The category to search for, use "" for any + :param subcategory: The subcategory to search for, use "" for any - def __repr__(self): - return f"{super().__repr__()}({self.msg}: {self.e})" - - @dataclass - class MappingParameterDB: - param_name: str - attr_keys: list[str] - attr_tolerance_key: str | None = None - transform_fn: Callable[[str], Parameter] | None = None - ignore_at: bool = True - - class Category(Model): - id = IntField(primary_key=True) - category = CharField(max_length=255) - subcategory = CharField(max_length=255) - - class Meta: - table = "categories" - - async def get_ids( - self, category: str = "", subcategory: str = "" - ) -> list[dict[str, Any]]: - """ - Get the category ids for the given category and subcategory - - :param category: The category to search for, use "" for any - :param subcategory: The subcategory to search for, use "" for any - - :return: A list of category ids for the JLCPCB database Component id field - """ - filter_query = Q() - if category != "": 
- filter_query &= Q(category__icontains=category) - if subcategory != "": - filter_query &= Q(subcategory__icontains=subcategory) - category_ids = await self.filter(filter_query).values("id") - if len(category_ids) < 1: - raise LookupError( - f"Could not find a match for category {category} " - f"and subcategory {subcategory}", - ) - return [c["id"] for c in category_ids] + :return: A list of category ids for the JLCPCB database Component id field + """ + filter_query = Q() + if category != "": + filter_query &= Q(category__icontains=category) + if subcategory != "": + filter_query &= Q(subcategory__icontains=subcategory) + category_ids = await self.filter(filter_query).values("id") + if len(category_ids) < 1: + raise LookupError( + f"Could not find a match for category {category} " + f"and subcategory {subcategory}", + ) + return [c["id"] for c in category_ids] - class Manufacturers(Model): - id = IntField(primary_key=True) - name = CharField(max_length=255) - class Meta: - table = "manufacturers" +class Manufacturers(Model): + id = IntField(primary_key=True) + name = CharField(max_length=255) - async def get_ids(self, manufacturer: str) -> list[int]: - """ - Get the manufacturer ids for the given manufacturer + class Meta: + table = "manufacturers" - :param manufacturer: The manufacturer to search for + async def get_ids(self, manufacturer: str) -> list[int]: + """ + Get the manufacturer ids for the given manufacturer - :return: A list of manufacturer ids for the JLCPCB database Component id field - """ - manufacturer_ids = await self.filter(name__icontains=manufacturer).values( - "id" - ) - if len(manufacturer_ids) < 1: - raise LookupError( - f"Could not find a match for manufacturer {manufacturer}" - ) - return [m["id"] for m in manufacturer_ids] - - async def get_from_id(self, manufacturer_id: int) -> str: - return (await self.get(id=manufacturer_id)).name - - class Component(Model): - lcsc = IntField(primary_key=True) - category_id = IntField() - mfr = CharField(max_length=255) - package = CharField(max_length=255) - joints = IntField() - manufacturer_id = IntField() - basic = IntField() - description = CharField(max_length=255) - datasheet = CharField(max_length=255) - stock = IntField() - price = JSONField() - last_update = IntField() - extra = JSONField() - flag = IntField() - last_on_stock = IntField() - preferred = IntField() - - class Meta: - table = "components" - - class ParseError(Exception): - pass + :param manufacturer: The manufacturer to search for - @property - def partno(self): - return f"C{self.lcsc}" + :return: A list of manufacturer ids for the JLCPCB database Component id field + """ + manufacturer_ids = await self.filter(name__icontains=manufacturer).values("id") + if len(manufacturer_ids) < 1: + raise LookupError(f"Could not find a match for manufacturer {manufacturer}") + return [m["id"] for m in manufacturer_ids] + + async def get_from_id(self, manufacturer_id: int) -> str: + return (await self.get(id=manufacturer_id)).name + + +class Component(Model): + lcsc = IntField(primary_key=True) + category_id = IntField() + mfr = CharField(max_length=255) + package = CharField(max_length=255) + joints = IntField() + manufacturer_id = IntField() + basic = IntField() + description = CharField(max_length=255) + datasheet = CharField(max_length=255) + stock = IntField() + price = JSONField() + last_update = IntField() + extra = JSONField() + flag = IntField() + last_on_stock = IntField() + preferred = IntField() + + class Meta: + table = "components" + + class 
ParseError(Exception): + pass + + @property + def partno(self): + return f"C{self.lcsc}" + + def get_price(self, qty: int = 1) -> float: + """ + Get the price for qty of the component including handling fees - def get_price(self, qty: int = 1) -> float: - """ - Get the price for qty of the component including handling fees + For handling fees and component price classifications, see: + https://jlcpcb.com/help/article/pcb-assembly-faqs + """ + BASIC_HANDLING_FEE = 0 + PREFERRED_HANDLING_FEE = 0 + EXTENDED_HANDLING_FEE = 3 + + if qty < 1: + raise ValueError("Quantity must be greater than 0") + + if self.basic: + handling_fee = BASIC_HANDLING_FEE + elif self.preferred: + handling_fee = PREFERRED_HANDLING_FEE + else: + handling_fee = EXTENDED_HANDLING_FEE + + unit_price = float("inf") + try: + for p in self.price: + if p["qTo"] is None or qty < p["qTo"]: + unit_price = float(p["price"]) + unit_price = float(self.price[-1]["price"]) + except LookupError: + pass - For handling fees and component price classifications, see: - https://jlcpcb.com/help/article/pcb-assembly-faqs - """ - BASIC_HANDLING_FEE = 0 - PREFERRED_HANDLING_FEE = 0 - EXTENDED_HANDLING_FEE = 3 + return unit_price * qty + handling_fee - if qty < 1: - raise ValueError("Quantity must be greater than 0") + def attribute_to_set( + self, attribute_name: str, use_tolerance: bool = False, ignore_at: bool = True + ) -> L.Range[Quantity]: + """ + Convert a component value in the extra['attributes'] dict to a parameter - if self.basic: - handling_fee = BASIC_HANDLING_FEE - elif self.preferred: - handling_fee = PREFERRED_HANDLING_FEE - else: - handling_fee = EXTENDED_HANDLING_FEE + :param attribute_name: The key in the extra['attributes'] dict to convert + :param use_tolerance: Whether to use the tolerance field in the component - unit_price = float("inf") - try: - for p in self.price: - if p["qTo"] is None or qty < p["qTo"]: - unit_price = float(p["price"]) - unit_price = float(self.price[-1]["price"]) - except LookupError: - pass - - return unit_price * qty + handling_fee - - def attribute_to_parameter( - self, - attribute_name: str, - use_tolerance: bool = False, - ignore_at: bool = True, - ) -> Parameter: - """ - Convert a component value in the extra['attributes'] dict to a parameter - - :param attribute_name: The key in the extra['attributes'] dict to convert - :param use_tolerance: Whether to use the tolerance field in the component - - :return: The parameter representing the attribute value - """ - assert isinstance(self.extra, dict) and "attributes" in self.extra - - value_field = self.extra["attributes"][attribute_name] - # parse fields like "850mV@1A" - # TODO better to actually parse this - if ignore_at: - value_field = value_field.split("@")[0] - - value_field = value_field.replace("cd", "candela") - - # parse fields like "1.5V~2.5V" - if "~" in value_field: - values = value_field.split("~") - if len(values) != 2: - raise ValueError(f"Invalid range from value '{value_field}'") - return F.Range(*(P.Quantity(v) for v in values)) - - # unit hacks + :return: The parameter representing the attribute value + """ + assert isinstance(self.extra, dict) and "attributes" in self.extra + + value_field = self.extra["attributes"][attribute_name] + # parse fields like "850mV@1A" + # TODO better to actually parse this + if ignore_at: + value_field = value_field.split("@")[0] + + value_field = value_field.replace("cd", "candela") + + # parse fields like "1.5V~2.5V" + if "~" in value_field: + values = value_field.split("~") + if len(values) != 
2: + raise ValueError(f"Invalid range from value '{value_field}'") + return L.Range(*(P.Quantity(v) for v in values)) + + # unit hacks + + try: + value = P.Quantity(value_field) + except UndefinedUnitError as e: + raise ValueError(f"Could not parse value field '{value_field}'") from e + + if not use_tolerance: + return L.Single(value) + + if "Tolerance" not in self.extra["attributes"]: + raise ValueError(f"No Tolerance field in component (lcsc: {self.lcsc})") + if "ppm" in self.extra["attributes"]["Tolerance"]: + tolerance = float(self.extra["attributes"]["Tolerance"].strip("±pm")) / 1e6 + elif "%~+" in self.extra["attributes"]["Tolerance"]: + tolerances = self.extra["attributes"]["Tolerance"].split("~") + tolerances = [float(t.strip("%+-")) for t in tolerances] + tolerance = max(tolerances) / 100 + elif "%" in self.extra["attributes"]["Tolerance"]: + tolerance = float(self.extra["attributes"]["Tolerance"].strip("%±")) / 100 + else: + raise ValueError( + "Could not parse tolerance field " + f"'{self.extra['attributes']['Tolerance']}'" + ) - try: - value = P.Quantity(value_field) - except UndefinedUnitError as e: - raise ValueError(f"Could not parse value field '{value_field}'") from e - - if not use_tolerance: - return F.Constant(value) - - if "Tolerance" not in self.extra["attributes"]: - raise ValueError(f"No Tolerance field in component (lcsc: {self.lcsc})") - if "ppm" in self.extra["attributes"]["Tolerance"]: - tolerance = ( - float(self.extra["attributes"]["Tolerance"].strip("±pm")) / 1e6 - ) - elif "%~+" in self.extra["attributes"]["Tolerance"]: - tolerances = self.extra["attributes"]["Tolerance"].split("~") - tolerances = [float(t.strip("%+-")) for t in tolerances] - tolerance = max(tolerances) / 100 - elif "%" in self.extra["attributes"]["Tolerance"]: - tolerance = ( - float(self.extra["attributes"]["Tolerance"].strip("%±")) / 100 - ) - else: - raise ValueError( - "Could not parse tolerance field " - f"'{self.extra['attributes']['Tolerance']}'" - ) + return L.Range.from_center_rel(value, tolerance) - return F.Range.from_center_rel(value, tolerance) + def get_parameter(self, m: MappingParameterDB) -> L.Range[Quantity]: + """ + Transform a component attribute to a parameter - def get_parameter(self, m: MappingParameterDB) -> Parameter: - """ - Transform a component attribute to a parameter + :param attribute_search_keys: The key in the component's extra['attributes'] + dict that holds the value to check + :param tolerance_search_key: The key in the component's extra['attributes'] dict + that holds the tolerance value + :param parser: A function to convert the attribute value to the correct type - :param attribute_search_keys: The key in the component's extra['attributes'] - dict that holds the value to check - :param tolerance_search_key: The key in the component's extra['attributes'] dict - that holds the tolerance value - :param parser: A function to convert the attribute value to the correct type + :return: The parameter representing the attribute value + """ - :return: The parameter representing the attribute value - """ + attribute_search_keys = m.attr_keys + tolerance_search_key = m.attr_tolerance_key + parser = m.transform_fn - attribute_search_keys = m.attr_keys - tolerance_search_key = m.attr_tolerance_key - parser = m.transform_fn + if tolerance_search_key is not None and parser is not None: + raise NotImplementedError( + "Cannot provide both tolerance_search_key and parser arguments" + ) - if tolerance_search_key is not None and parser is not None: - raise 
NotImplementedError( - "Cannot provide both tolerance_search_key and parser arguments" - ) + assert isinstance(self.extra, dict) - assert isinstance(self.extra, dict) + attr_key = next( + (k for k in attribute_search_keys if k in self.extra.get("attributes", "")), + None, + ) - attr_key = next( - ( - k - for k in attribute_search_keys - if k in self.extra.get("attributes", "") - ), - None, + if "attributes" not in self.extra: + raise LookupError("does not have any attributes") + if attr_key is None: + raise LookupError( + f"does not have any of required attribute fields: " + f"{attribute_search_keys} in {self.extra['attributes']}" + ) + if ( + tolerance_search_key is not None + and tolerance_search_key not in self.extra["attributes"] + ): + raise LookupError( + f"does not have any of required tolerance fields: " + f"{tolerance_search_key}" ) - if "attributes" not in self.extra: - raise LookupError("does not have any attributes") - if attr_key is None: - raise LookupError( - f"does not have any of required attribute fields: " - f"{attribute_search_keys} in {self.extra['attributes']}" - ) - if ( - tolerance_search_key is not None - and tolerance_search_key not in self.extra["attributes"] - ): - raise LookupError( - f"does not have any of required tolerance fields: " - f"{tolerance_search_key}" - ) - - if parser is not None: - return parser(self.extra["attributes"][attr_key]) - - return self.attribute_to_parameter( - attr_key, tolerance_search_key is not None, m.ignore_at + if parser is not None: + return parser(self.extra["attributes"][attr_key]) + + return self.attribute_to_set( + attr_key, tolerance_search_key is not None, m.ignore_at + ) + + def get_params( + self, mapping: list[MappingParameterDB] + ) -> tuple[ + dict[MappingParameterDB, L.Range[Quantity]], dict[MappingParameterDB, Exception] + ]: + params = {} + exceptions = {} + for m in mapping: + try: + params[m] = self.get_parameter(m) + except LookupError | ValueError | AssertionError as e: + exceptions[m] = e + return params, exceptions + + def attach( + self, + module: Module, + mapping: list[MappingParameterDB], + qty: int = 1, + ignore_exceptions: bool = False, + ): + params, exceptions = self.get_params(mapping) + + if not ignore_exceptions and exceptions: + params_str = indent( + "\n" + "\n".join(repr(e) for e in exceptions.values()), + " " * 4, + ) + raise Component.ParseError( + f"Failed to parse parameters for component {self.partno}: {params_str}" ) - def get_params(self, mapping: list[MappingParameterDB]) -> list[Parameter]: - return [ - try_or( - lambda: self.get_parameter(m), - default_f=lambda e: TBD_ParseError( - e, f"Failed to parse {m.param_name}" + module.add( + F.has_descriptive_properties_defined( + { + DescriptiveProperties.partno: self.mfr, + DescriptiveProperties.manufacturer: asyncio.run( + Manufacturers().get_from_id(self.manufacturer_id) ), - catch=(LookupError, ValueError, AssertionError), - ) - for m in mapping - ] - - def attach( - self, - module: Module, - mapping: list[MappingParameterDB], - qty: int = 1, - allow_TBD: bool = False, - ): - params = self.get_params(mapping) + DescriptiveProperties.datasheet: self.datasheet, + "JLCPCB stock": str(self.stock), + "JLCPCB price": f"{self.get_price(qty):.4f}", + "JLCPCB description": self.description, + "JLCPCB Basic": str(bool(self.basic)), + "JLCPCB Preferred": str(bool(self.preferred)), + }, + ) + ) - if not allow_TBD and any(isinstance(p, TBD_ParseError) for p in params): - params_str = indent( - "\n" - + "\n".join( - repr(p) for p in params if 
isinstance(p, TBD_ParseError) - ), - " " * 4, - ) - raise Component.ParseError( - f"Failed to parse parameters for component {self.partno}: {params_str}" - ) + attach(module, self.partno) + module.add(has_part_picked_defined(JLCPCB_Part(self.partno))) - for name, value in zip([m.param_name for m in mapping], params): - getattr(module, name).override(value) - - module.add( - F.has_descriptive_properties_defined( - { - DescriptiveProperties.partno: self.mfr, - DescriptiveProperties.manufacturer: asyncio.run( - Manufacturers().get_from_id(self.manufacturer_id) - ), - DescriptiveProperties.datasheet: self.datasheet, - "JLCPCB stock": str(self.stock), - "JLCPCB price": f"{self.get_price(qty):.4f}", - "JLCPCB description": self.description, - "JLCPCB Basic": str(bool(self.basic)), - "JLCPCB Preferred": str(bool(self.preferred)), - }, - ) + for name, value in params.items(): + getattr(module, name.param_name).alias_is(value) + + if logger.isEnabledFor(logging.DEBUG): + logger.debug( + f"Attached component {self.partno} to module {module}: \n" + f"{indent(str(params), ' '*4)}\n--->\n" + f"{indent(module.pretty_params(), ' '*4)}" ) - attach(module, self.partno) - module.add(has_part_picked_defined(JLCPCB_Part(self.partno))) - if logger.isEnabledFor(logging.DEBUG): - logger.debug( - f"Attached component {self.partno} to module {module}: \n" - f"{indent(str(params), ' '*4)}\n--->\n" - f"{indent(module.pretty_params(), ' '*4)}" - ) + @property + def mfr_name(self) -> str: + return asyncio.run(Manufacturers().get_from_id(self.manufacturer_id)) + - @property - def mfr_name(self) -> str: - return asyncio.run(Manufacturers().get_from_id(self.manufacturer_id)) +class ComponentQuery: + class Error(Exception): ... - class ComponentQuery: - class Error(Exception): ... 
+ class ParamError(Error): + def __init__(self, param: L.P_UnitSet, msg: str): + self.param = param + self.msg = msg + super().__init__(f"{msg} for parameter {param!r}") - class ParamError(Error): - def __init__(self, param: Parameter, msg: str): - self.param = param - self.msg = msg - super().__init__(f"{msg} for parameter {param!r}") + def __init__(self): + # init db connection + JLCPCB_DB() - def __init__(self): - # init db connection - JLCPCB_DB() + self.Q: Q | None = Q() + self.results: list[Component] | None = None - self.Q: Q | None = Q() - self.results: list[Component] | None = None + async def exec(self) -> list[Component]: + queryset = Component.filter(self.Q) + logger.debug(f"Query results: {await queryset.count()}") + self.results = await queryset + self.Q = None + return self.results - async def exec(self) -> list[Component]: - queryset = Component.filter(self.Q) - logger.debug(f"Query results: {await queryset.count()}") - self.results = await queryset - self.Q = None + def get(self) -> list[Component]: + if self.results is not None: return self.results + return asyncio.run(self.exec()) - def get(self) -> list[Component]: - if self.results is not None: - return self.results - return asyncio.run(self.exec()) - - def filter_by_stock(self, qty: int) -> Self: - assert self.Q - self.Q &= Q(stock__gte=qty) - return self + def filter_by_stock(self, qty: int) -> Self: + assert self.Q + self.Q &= Q(stock__gte=qty) + return self - def filter_by_description(self, *keywords: str) -> Self: - assert self.Q + def filter_by_description(self, *keywords: str) -> Self: + assert self.Q - logger.debug(f"Possible keywords: {keywords}") - description_query = Q() - for keyword in keywords: - description_query |= Q(description__contains=keyword) - self.Q &= description_query + logger.debug(f"Possible keywords: {keywords}") + description_query = Q() + for keyword in keywords: + description_query |= Q(description__contains=keyword) + self.Q &= description_query - return self + return self - def filter_by_value( - self, - value: Parameter[Quantity], - si_unit: str, - e_series: set[float] | None = None, - ) -> Self: - assert self.Q - value = value.get_most_narrow() + def filter_by_value( + self, + value: L.Ranges[Quantity], + si_unit: str, + e_series: set[float] | None = None, + ) -> Self: + assert self.Q - if logger.isEnabledFor(logging.DEBUG): - logger.debug( - f"Filtering by value:\n{indent(value.get_tree_param().pretty(), ' '*4)}" + if value == L.Range(min=float("-inf"), max=float("inf")): + return self + assert not self.results + try: + intersection = e_series_intersect(value, e_series or E_SERIES_VALUES.E_ALL) + except ParamNotResolvedError as e: + raise ComponentQuery.ParamError( + value, f"Could not run e_series_intersect: {e}" + ) from e + si_vals = [ + to_si_str(cast_assert(L.Single, r).get_value(), si_unit) + .replace("µ", "u") + .replace("inf", "∞") + for r in intersection + ] + return self.filter_by_description(*si_vals) + + def filter_by_category(self, category: str, subcategory: str) -> Self: + assert self.Q + category_ids = asyncio.run(Category().get_ids(category, subcategory)) + self.Q &= Q(category_id__in=category_ids) + return self + + def filter_by_footprint( + self, footprint_candidates: Sequence[tuple[str, int]] | None + ) -> Self: + assert self.Q + if not footprint_candidates: + return self + footprint_query = Q() + if footprint_candidates is not None: + for footprint, pin_count in footprint_candidates: + footprint_query |= Q(description__icontains=footprint) & Q( + joints=pin_count 
) + self.Q &= footprint_query + return self + + def filter_by_traits(self, obj: Module) -> Self: + out = self + if obj.has_trait(F.has_footprint_requirement): + out = self.filter_by_footprint( + obj.get_trait(F.has_footprint_requirement).get_footprint_requirement() + ) - if isinstance(value, F.ANY): - return self - assert not self.results - try: - intersection = F.Set( - [e_series_intersect(value, e_series or E_SERIES_VALUES.E_ALL)] - ).params - except ParamNotResolvedError as e: - raise ComponentQuery.ParamError( - value, f"Could not run e_series_intersect: {e}" - ) from e - si_vals = [ - to_si_str(cast_assert(F.Constant, r).value, si_unit) - .replace("µ", "u") - .replace("inf", "∞") - for r in intersection - ] - return self.filter_by_description(*si_vals) - - def filter_by_category(self, category: str, subcategory: str) -> Self: - assert self.Q - category_ids = asyncio.run(Category().get_ids(category, subcategory)) - self.Q &= Q(category_id__in=category_ids) - return self + return out - def filter_by_footprint( - self, footprint_candidates: Sequence[tuple[str, int]] | None - ) -> Self: - assert self.Q - if not footprint_candidates: - return self - footprint_query = Q() - if footprint_candidates is not None: - for footprint, pin_count in footprint_candidates: - footprint_query |= Q(description__icontains=footprint) & Q( - joints=pin_count - ) - self.Q &= footprint_query - return self + def sort_by_price(self, qty: int = 1) -> Self: + self.get().sort(key=lambda x: x.get_price(qty)) + return self - def filter_by_traits(self, obj: Module) -> Self: - out = self - if obj.has_trait(F.has_footprint_requirement): - out = self.filter_by_footprint( - obj.get_trait( - F.has_footprint_requirement - ).get_footprint_requirement() - ) + def filter_by_lcsc_pn(self, partnumber: str) -> Self: + assert self.Q + self.Q &= Q(lcsc=partnumber.strip("C")) + return self - return out + def filter_by_manufacturer_pn(self, partnumber: str) -> Self: + assert self.Q + self.Q &= Q(mfr__icontains=partnumber) + return self - def sort_by_price(self, qty: int = 1) -> Self: - self.get().sort(key=lambda x: x.get_price(qty)) + def filter_by_manufacturer(self, manufacturer: str) -> Self: + assert self.Q + if not manufacturer: return self + manufacturer_ids = asyncio.run(Manufacturers().get_ids(manufacturer)) + self.Q &= Q(manufacturer_id__in=manufacturer_ids) + return self + + def filter_by_module_params( + self, + module: Module, + mapping: list[MappingParameterDB], + solver: Solver, + ) -> Generator[Component, None, None]: + """ + Filter the results by the parameters of the module - def filter_by_lcsc_pn(self, partnumber: str) -> Self: - assert self.Q - self.Q &= Q(lcsc=partnumber.strip("C")) - return self + This should be used as the last step before attaching the component to the + module - def filter_by_manufacturer_pn(self, partnumber: str) -> Self: - assert self.Q - self.Q &= Q(mfr__icontains=partnumber) - return self + :param module: The module to filter by + :param mapping: The mapping of module parameters to component attributes + :param qty: The quantity of components needed + :param attach_first: Whether to attach the first component that matches the + parameters and return immediately - def filter_by_manufacturer(self, manufacturer: str) -> Self: - assert self.Q - if not manufacturer: - return self - manufacturer_ids = asyncio.run(Manufacturers().get_ids(manufacturer)) - self.Q &= Q(manufacturer_id__in=manufacturer_ids) - return self + :return: The first component that matches the parameters + """ - def 
filter_by_module_params( - self, - module: Module, - mapping: list[MappingParameterDB], - ) -> Generator[Component, None, None]: - """ - Filter the results by the parameters of the module - - This should be used as the last step before attaching the component to the - module - - :param module: The module to filter by - :param mapping: The mapping of module parameters to component attributes - :param qty: The quantity of components needed - :param attach_first: Whether to attach the first component that matches the - parameters and return immediately - - :return: The first component that matches the parameters - """ - - for c in self.get(): - params = c.get_params(mapping) - - if not all( - pm := [ - try_or( - lambda: p.is_subset_of(getattr(module, m.param_name)), - default=False, - catch=DimensionalityError, - ) - for p, m in zip(params, mapping) - ] - ): - logger.debug( - f"Component {c.lcsc} doesn't match: " - f"{[p for p, v in zip(params, pm) if not v]}" + for c in self.get(): + params, exceptions = c.get_params(mapping) + + if exceptions: # TODO + continue + + compatible = True + for m, p in params.items(): + mod_param = getattr(module, m.param_name) + known_superset = L.Ranges( + *take( + INSPECT_KNOWN_SUPERSETS_LIMIT, + mod_param.inspect_get_known_superranges(), ) - continue + ) + if not known_superset.is_superset_of(L.Ranges(p)): + compatible = False + break + + if compatible: + anded = True + for m, p in params.items(): + mod_param = cast(ParameterOperatable, getattr(module, m.param_name)) + anded = mod_param.operation_is_superset(p).operation_and(anded) + + result = solver.assert_any_predicate( + module.get_graph(), [(anded, None)], constrain_solved=False + ) + if len(result.true_predicates) == 0: + compatible = False + if not compatible: logger.debug( - f"Found part {c.lcsc:8} " - f"Basic: {bool(c.basic)}, Preferred: {bool(c.preferred)}, " - f"Price: ${c.get_price(1):2.4f}, " - f"{c.description:15}," + f"Component {c.lcsc} doesn't match: " + f"{[p for p, v in params.items()]}" ) + continue - yield c + logger.debug( + f"Found part {c.lcsc:8} " + f"Basic: {bool(c.basic)}, Preferred: {bool(c.preferred)}, " + f"Price: ${c.get_price(1):2.4f}, " + f"{c.description:15}," + ) - def filter_by_module_params_and_attach( - self, module: Module, mapping: list[MappingParameterDB], qty: int = 1 - ): - # TODO if no modules without TBD, rerun with TBD allowed - - failures = [] - for c in self.filter_by_module_params(module, mapping): - try: - c.attach(module, mapping, qty, allow_TBD=False) - return self - except (ValueError, Component.ParseError) as e: - failures.append((c, e)) - except LCSC_NoDataException as e: - failures.append((c, e)) - except LCSC_PinmapException as e: - failures.append((c, e)) - - if failures: - fail_str = indent( - "\n" + f"{'\n'.join(f'{c}: {e}' for c, e in failures)}", " " * 4 - ) + yield c - raise PickError( - f"Failed to attach any components to module {module}: {len(failures)}" - f" {fail_str}", - module, - ) + def filter_by_module_params_and_attach( + self, + module: Module, + mapping: list[MappingParameterDB], + solver: Solver, + qty: int = 1, + ): + # TODO if no modules without TBD, rerun with TBD allowed + + failures = [] + for c in self.filter_by_module_params(module, mapping, solver): + try: + c.attach(module, mapping, qty, ignore_exceptions=False) + return self + except (ValueError, Component.ParseError) as e: + failures.append((c, e)) + except LCSC_NoDataException as e: + failures.append((c, e)) + except LCSC_PinmapException as e: + failures.append((c, e)) + + 
if failures: + fail_str = indent( + "\n" + f"{'\n'.join(f'{c}: {e}' for c, e in failures)}", " " * 4 + ) raise PickError( - "No components found that match the parameters and that can be attached", + f"Failed to attach any components to module {module}: {len(failures)}" + f" {fail_str}", module, ) - class JLCPCB_DB: - @dataclass - class Config: - db_path: Path = CACHE_FOLDER / Path("jlcpcb_part_database") - no_download_prompt: bool = False - force_db_update: bool = False - - config = Config() - _instance: "JLCPCB_DB | None" = None - failed: Exception | None = None - - @staticmethod - def get() -> "JLCPCB_DB": - return JLCPCB_DB.__new__(JLCPCB_DB) - - def __new__(cls) -> "JLCPCB_DB": - if cls.failed: - raise cls.failed - if not JLCPCB_DB._instance: - instance = super(JLCPCB_DB, cls).__new__(cls) - try: - instance.init() - except FileNotFoundError as e: - cls.failed = e - raise e - - JLCPCB_DB._instance = instance - at_exit(JLCPCB_DB.close) - return JLCPCB_DB._instance - - @staticmethod - def close(): - if not JLCPCB_DB._instance: - return - instance = JLCPCB_DB._instance - JLCPCB_DB._instance = None - del instance - - def init(self) -> None: - config = self.config - self.db_path = config.db_path - self.db_file = config.db_path / Path("cache.sqlite3") - self.connected = False - - no_download_prompt = config.no_download_prompt - - if not sys.stdin.isatty(): - no_download_prompt = True - - if config.force_db_update: + raise PickError( + "No components found that match the parameters and that can be attached", + module, + ) + + +class JLCPCB_DB: + @dataclass + class Config: + db_path: Path = CACHE_FOLDER / Path("jlcpcb_part_database") + no_download_prompt: bool = False + force_db_update: bool = False + + config = Config() + _instance: "JLCPCB_DB | None" = None + failed: Exception | None = None + + @staticmethod + def get() -> "JLCPCB_DB": + return JLCPCB_DB.__new__(JLCPCB_DB) + + def __new__(cls) -> "JLCPCB_DB": + if cls.failed: + raise cls.failed + if not JLCPCB_DB._instance: + instance = super(JLCPCB_DB, cls).__new__(cls) + try: + instance.init() + except FileNotFoundError as e: + cls.failed = e + raise e + + JLCPCB_DB._instance = instance + at_exit(JLCPCB_DB.close) + return JLCPCB_DB._instance + + @staticmethod + def close(): + if not JLCPCB_DB._instance: + return + instance = JLCPCB_DB._instance + JLCPCB_DB._instance = None + del instance + + def init(self) -> None: + config = self.config + self.db_path = config.db_path + self.db_file = config.db_path / Path("cache.sqlite3") + self.connected = False + + no_download_prompt = config.no_download_prompt + + if not sys.stdin.isatty(): + no_download_prompt = True + + if config.force_db_update: + self.download() + elif not self.has_db(): + if no_download_prompt or self.prompt_db_update( + f"No JLCPCB database found at {self.db_file}, download now?" + ): + self.download() + else: + raise FileNotFoundError(f"No JLCPCB database found at {self.db_file}") + elif not self.is_db_up_to_date(): + if not no_download_prompt and self.prompt_db_update( + f"JLCPCB database at {self.db_file} is older than 7 days, update?" + ): self.download() - elif not self.has_db(): - if no_download_prompt or self.prompt_db_update( - f"No JLCPCB database found at {self.db_file}, download now?" 
+ else: + logger.warning("Continuing with outdated JLCPCB database") + + asyncio.run(self._init_db()) + + def __del__(self): + if self.connected: + asyncio.run(self._close_db()) + + async def _init_db(self): + await Tortoise.init( + db_url=f"sqlite://{self.db_path}/cache.sqlite3", + modules={ + "models": [__name__] + }, # Use __name__ to refer to the current module + ) + self.connected = True + + async def _close_db(self): + from tortoise.log import logger as tortoise_logger + + # suppress close ORM info + tortoise_logger.setLevel(logging.WARNING) + await Tortoise.close_connections() + self.connected = False + + def has_db(self) -> bool: + return self.db_path.is_dir() and self.db_file.is_file() + + def is_db_up_to_date( + self, max_timediff: datetime.timedelta = datetime.timedelta(days=7) + ) -> bool: + if not self.has_db(): + return False + + return ( + datetime.datetime.fromtimestamp( + self.db_file.stat().st_mtime, tz=datetime.timezone.utc + ) + >= datetime.datetime.now(tz=datetime.timezone.utc) - max_timediff + ) + + def prompt_db_update(self, prompt: str = "Update JLCPCB database?") -> bool: + ans = input(prompt + " [y/N]:").lower() + return ans == "y" + + def download( + self, + ): + def download_file(url, output_path: Path): + with requests.get(url, stream=True) as r: + r.raise_for_status() + with open(output_path, "wb") as f: + for chunk in r.iter_content(chunk_size=8192): + f.write(chunk) + + def get_number_of_volumes(zip_path): + with open(zip_path, "rb") as f: + f.seek(-22, os.SEEK_END) # Go to the end of the file minus 22 bytes + end_of_central_dir = f.read(22) + + if len(end_of_central_dir) != 22 or not end_of_central_dir.startswith( + b"PK\x05\x06" ): - self.download() - else: - raise FileNotFoundError( - f"No JLCPCB database found at {self.db_file}" + # Not a valid ZIP file or the end of central directory signature is + # missing + raise ValueError( + "Invalid ZIP file or End of Central Directory signature not " + "found" ) - elif not self.is_db_up_to_date(): - if not no_download_prompt and self.prompt_db_update( - f"JLCPCB database at {self.db_file} is older than 7 days, update?" 
- ): - self.download() - else: - logger.warning("Continuing with outdated JLCPCB database") - - asyncio.run(self._init_db()) - - def __del__(self): - if self.connected: - asyncio.run(self._close_db()) - - async def _init_db(self): - await Tortoise.init( - db_url=f"sqlite://{self.db_path}/cache.sqlite3", - modules={ - "models": [__name__] - }, # Use __name__ to refer to the current module - ) - self.connected = True - async def _close_db(self): - from tortoise.log import logger as tortoise_logger + # Unpack the number of this volume (should be 0 if single part zip) + current_volume, volume_with_central_dir = struct.unpack( + " bool: - return self.db_path.is_dir() and self.db_file.is_file() + self.db_path.mkdir(parents=True, exist_ok=True) - def is_db_up_to_date( - self, max_timediff: datetime.timedelta = datetime.timedelta(days=7) - ) -> bool: - if not self.has_db(): - return False + zip_file = self.db_path / Path("cache.zip") + base_url = "https://yaqwsx.github.io/jlcparts/data/" - return ( - datetime.datetime.fromtimestamp( - self.db_file.stat().st_mtime, tz=datetime.timezone.utc - ) - >= datetime.datetime.now(tz=datetime.timezone.utc) - max_timediff - ) + logger.info(f"Downloading {base_url}cache.zip to {zip_file}") + download_file(base_url + "cache.zip", zip_file) - def prompt_db_update(self, prompt: str = "Update JLCPCB database?") -> bool: - ans = input(prompt + " [y/N]:").lower() - return ans == "y" + num_volumes = get_number_of_volumes(zip_file) + assert num_volumes <= 99 + logger.info(f"Number of volumes: {num_volumes}") - def download( - self, + # Download the additional volume files + for volume_num in track( + range(num_volumes), description="Downloading and appending zip volumes" ): - def download_file(url, output_path: Path): - with requests.get(url, stream=True) as r: - r.raise_for_status() - with open(output_path, "wb") as f: - for chunk in r.iter_content(chunk_size=8192): - f.write(chunk) - - def get_number_of_volumes(zip_path): - with open(zip_path, "rb") as f: - f.seek(-22, os.SEEK_END) # Go to the end of the file minus 22 bytes - end_of_central_dir = f.read(22) - - if len( - end_of_central_dir - ) != 22 or not end_of_central_dir.startswith(b"PK\x05\x06"): - # Not a valid ZIP file or the end of central directory signature is - # missing - raise ValueError( - "Invalid ZIP file or End of Central Directory signature not " - "found" - ) - - # Unpack the number of this volume (should be 0 if single part zip) - current_volume, volume_with_central_dir = struct.unpack( - " NumericT: return right_bound assert False # unreachable + def is_superset_of(self, other: "_N_NonIterableRanges[NumericT]") -> bool: + return other == other.op_intersect_ranges(self) + + def is_subset_of(self, other: "_N_NonIterableRanges[NumericT]") -> bool: + return other.is_superset_of(self) + def op_intersect_range( self, other: "_N_Range[NumericT]" ) -> "_N_NonIterableRanges[NumericT]": @@ -246,11 +252,32 @@ def op_intersect_range( def op_intersect_ranges( self, other: "_N_NonIterableRanges[NumericT]" ) -> "_N_NonIterableRanges[NumericT]": - # TODO currently quadratic - # lists are sorted, so this could be linear - return _N_NonIterableRanges( - *(r.op_intersect_range(o) for r in self.ranges for o in other.ranges) - ) + result = [] + s, o = 0, 0 + while s < len(self.ranges) and o < len(other.ranges): + rs, ro = self.ranges[s], other.ranges[o] + intersect = rs.op_intersect_range(ro) + if not intersect.is_empty(): + result.append(intersect) + + if rs.max_elem() < ro.min_elem(): + # no remaining element in 
other list can intersect with rs + s += 1 + elif ro.max_elem() < rs.min_elem(): + # no remaining element in self list can intersect with ro + o += 1 + elif rs.max_elem() < ro.max_elem(): + # rs ends before ro, so move to next in self list + s += 1 + elif ro.max_elem() < rs.max_elem(): + # ro ends before rs, so move to next in other list + o += 1 + else: + # rs and ro end on same number, so move to next in both lists + s += 1 + o += 1 + + return _N_NonIterableRanges(*result) def op_union_ranges( self, other: "_N_NonIterableRanges[NumericT]" @@ -500,6 +527,9 @@ class Single(Range[QuantityT]): def __init__(self, value: QuantityT): super().__init__(value, value) + def get_value(self) -> QuantityT: + return self.min_elem() + def __iter__(self) -> Generator[Quantity]: yield self.min_elem() @@ -569,6 +599,14 @@ def closest_elem(self, target: Quantity) -> Quantity: self._ranges.closest_elem(target.to(self.range_units).magnitude) ) + def is_superset_of(self, other: "NonIterableRanges[QuantityT]") -> bool: + if not self.units.is_compatible_with(other.units): + return False + return self._ranges.is_superset_of(other._ranges) + + def is_subset_of(self, other: "NonIterableRanges[QuantityT]") -> bool: + return other.is_superset_of(self) + def op_intersect_range(self, other: "Range[QuantityT]") -> "Ranges[QuantityT]": if not self.units.is_compatible_with(other.units): raise ValueError("incompatible units") From a8ce5fe1615d635894960c4cbd8d86a53ab51159 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 18:10:45 +0200 Subject: [PATCH 60/80] viz: ui default options --- .../exporters/visualize/interactive_params.py | 73 +++++++++++-------- 1 file changed, 42 insertions(+), 31 deletions(-) diff --git a/src/faebryk/exporters/visualize/interactive_params.py b/src/faebryk/exporters/visualize/interactive_params.py index f039d90d..2a076662 100644 --- a/src/faebryk/exporters/visualize/interactive_params.py +++ b/src/faebryk/exporters/visualize/interactive_params.py @@ -122,42 +122,42 @@ def add_node_type(self, node_type: str, color: str): # Dagre algorithm options (uses default value if undefined) "name": "dagre", # Separation between adjacent nodes in the same rank - "nodeSep": None, + # "nodeSep": None, # Separation between adjacent edges in the same rank - "edgeSep": None, + # "edgeSep": None, # Separation between each rank in the layout - "rankSep": None, + # "rankSep": None, # 'TB' for top to bottom flow, 'LR' for left to right - "rankDir": None, + # "rankDir": None, # Alignment for rank nodes. 
Can be 'UL', 'UR', 'DL', or 'DR' - "align": None, + # "align": None, # If 'greedy', uses heuristic to find feedback arc set - "acyclicer": None, + # "acyclicer": None, # Algorithm to assign rank to nodes: 'network-simplex', 'tight-tree' or 'longest-path' - "ranker": "tight-tree", + # "ranker": "tight-tree", # Number of ranks to keep between source and target of the edge # "minLen": lambda edge: 1, # Higher weight edges are generally made shorter and straighter # "edgeWeight": lambda edge: 1, # General layout options # Whether to fit to viewport - "fit": True, + # "fit": True, # Fit padding - "padding": 30, + # "padding": 30, # Factor to expand/compress overall area nodes take up - "spacingFactor": None, + # "spacingFactor": None, # Include labels in node space calculation - "nodeDimensionsIncludeLabels": False, + # "nodeDimensionsIncludeLabels": False, # Whether to transition node positions - "animate": False, + # "animate": False, # Whether to animate specific nodes # "animateFilter": lambda node, i: True, # Duration of animation in ms if enabled - "animationDuration": 500, + # "animationDuration": 500, # Easing of animation if enabled - "animationEasing": None, + # "animationEasing": None, # Constrain layout bounds: {x1, y1, x2, y2} or {x1, y1, w, h} - "boundingBox": None, + # "boundingBox": None, # Function to transform final node position # "transform": lambda node, pos: pos, # Callback on layoutready @@ -171,25 +171,36 @@ def add_node_type(self, node_type: str, color: str): def buttons(layout: Layout): app = layout.app + + layout_chooser = dcc.RadioItems( + id="layout-radio", + options=[ + {"label": "fcose", "value": "fcose"}, + {"label": "dagre", "value": "dagre"}, + ], + value="dagre", + ) + + dagre_ranker = dcc.RadioItems( + id="layout-dagre-ranker", + options=[ + {"label": "network-simplex", "value": "network-simplex"}, + {"label": "tight-tree", "value": "tight-tree"}, + {"label": "longest-path", "value": "longest-path"}, + ], + value="tight-tree", + ) + html_controls = html.Div( className="controls", style={"padding": "10px", "background-color": "#f0f0f0"}, children=[ - dcc.RadioItems( - id="layout-radio", - options=[ - {"label": "fcose", "value": "fcose"}, - {"label": "dagre", "value": "dagre"}, - ], - ), - dcc.RadioItems( - id="layout-dagre-ranker", - options=[ - {"label": "network-simplex", "value": "network-simplex"}, - {"label": "tight-tree", "value": "tight-tree"}, - {"label": "longest-path", "value": "longest-path"}, - ], - ), + html.Table( + [ + html.Tr([html.Td("Layout:"), html.Td(layout_chooser)]), + html.Tr([html.Td("Dagre Ranker:"), html.Td(dagre_ranker)]), + ] + ) ], ) layout.div_children.insert(-2, html_controls) @@ -201,7 +212,7 @@ def buttons(layout: Layout): State("graph-view", "layout"), ) def absolute_layout(layout_radio, layout_dagre_ranker, current_layout): - print(layout_radio, layout_dagre_ranker) + # print(layout_radio, layout_dagre_ranker) layout.set_type(layout_radio, current_layout) if layout_dagre_ranker: From caa9c6cf47a1c80f6632464705e46f14b91542aa Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 18:14:00 +0200 Subject: [PATCH 61/80] run ruff --- examples/minimal_led_orderable.py | 1 - src/faebryk/core/parameter.py | 2 +- src/faebryk/core/solver.py | 12 ++++++++---- .../exporters/visualize/interactive_params.py | 3 ++- src/faebryk/library/has_multi_picker.py | 2 +- src/faebryk/libs/app/erc.py | 5 ++++- src/faebryk/libs/e_series.py | 6 +----- src/faebryk/libs/picker/jlcpcb/jlcpcb.py | 13 ++++--------- 
src/faebryk/libs/picker/jlcpcb/pickers.py | 2 +- src/faebryk/libs/sets.py | 14 +++++++++++--- test/libs/test_e_series.py | 1 - 11 files changed, 33 insertions(+), 28 deletions(-) diff --git a/examples/minimal_led_orderable.py b/examples/minimal_led_orderable.py index 09f8ada5..f8b6cc64 100644 --- a/examples/minimal_led_orderable.py +++ b/examples/minimal_led_orderable.py @@ -7,7 +7,6 @@ import logging from pathlib import Path -from tkinter import W import typer diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index a7f1879b..d5629307 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -1,8 +1,8 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from collections.abc import Iterable import logging +from collections.abc import Iterable from enum import Enum, auto from types import NotImplementedType from typing import Any, Callable, Self diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index a5d203cf..9786c6e8 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -23,10 +23,13 @@ class SolveResult[ArgType]: # solve for a single value for the given expression # while trying to minimize the value of the optional minimize expression - # suppose_constraint can be added, which by constraining the solution further can make solving easier + # suppose_constraint can be added, which by constraining the solution further can + # make solving easier # it is only in effect for the duration of the solve call - # constrain_result will make sure the result is actually part of the solution set of the expression - # returns a tuple of the value chosen and a list of parameters that have an empty solution set + # constrain_result will make sure the result is actually part of the solution set + # of the expression + # returns a tuple of the value chosen and a list of parameters that have an empty + # solution set def get_any_single( self, G: Graph, @@ -39,7 +42,8 @@ def get_any_single( # make at least one of the passed predicates true, unless that is impossible # while trying to minimize the value of the optional minimize expression # there is no specific order in which the predicates are solved - # suppose_constraint can be added, which by constraining the solution further can make solving easier + # suppose_constraint can be added, which by constraining the solution further can + # make solving easier # it is only in effect for the duration of the solve call # constrain_solved will add the solutions as constraints def assert_any_predicate[ArgType]( diff --git a/src/faebryk/exporters/visualize/interactive_params.py b/src/faebryk/exporters/visualize/interactive_params.py index 2a076662..18375d01 100644 --- a/src/faebryk/exporters/visualize/interactive_params.py +++ b/src/faebryk/exporters/visualize/interactive_params.py @@ -133,7 +133,8 @@ def add_node_type(self, node_type: str, color: str): # "align": None, # If 'greedy', uses heuristic to find feedback arc set # "acyclicer": None, - # Algorithm to assign rank to nodes: 'network-simplex', 'tight-tree' or 'longest-path' + # Algorithm to assign rank to nodes: 'network-simplex', 'tight-tree' + # or 'longest-path' # "ranker": "tight-tree", # Number of ranks to keep between source and target of the edge # "minLen": lambda edge: 1, diff --git a/src/faebryk/library/has_multi_picker.py b/src/faebryk/library/has_multi_picker.py index 066a5746..f95b96c0 100644 --- a/src/faebryk/library/has_multi_picker.py +++ 
b/src/faebryk/library/has_multi_picker.py @@ -6,10 +6,10 @@ from abc import abstractmethod from typing import Callable, Mapping -from faebryk.core.solver import Solver import faebryk.library._F as F from faebryk.core.module import Module from faebryk.core.node import Node +from faebryk.core.solver import Solver from faebryk.core.trait import TraitImpl from faebryk.libs.picker.picker import PickError diff --git a/src/faebryk/libs/app/erc.py b/src/faebryk/libs/app/erc.py index 5b84fa37..ff5eee2a 100644 --- a/src/faebryk/libs/app/erc.py +++ b/src/faebryk/libs/app/erc.py @@ -38,7 +38,10 @@ def __init__(self, faulting_ifs: Sequence[ModuleInterface], *args: object) -> No class ERCFaultElectricPowerUndefinedVoltage(ERCFault): def __init__(self, faulting_EP: F.ElectricPower, *args: object) -> None: - msg = f"ElectricPower with undefined or unsolved voltage: {faulting_EP}: {faulting_EP.voltage}" + msg = ( + f"ElectricPower with undefined or unsolved voltage: {faulting_EP}:" + f" {faulting_EP.voltage}" + ) super().__init__([faulting_EP], msg, *args) diff --git a/src/faebryk/libs/e_series.py b/src/faebryk/libs/e_series.py index 87168545..a110fa02 100644 --- a/src/faebryk/libs/e_series.py +++ b/src/faebryk/libs/e_series.py @@ -1,12 +1,8 @@ -from collections.abc import Iterator, Sequence -import copy import logging -import math +from collections.abc import Sequence from math import ceil, floor, log10 from typing import Tuple, TypeVar, cast -import faebryk.library._F as F -from faebryk.core.parameter import Parameter from faebryk.libs.library import L from faebryk.libs.sets import Range, Ranges from faebryk.libs.units import Quantity, Unit, dimensionless diff --git a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py index 3fa9c976..5969b5a7 100644 --- a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py +++ b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py @@ -12,20 +12,19 @@ from textwrap import indent from typing import Any, Callable, Generator, Self, Sequence, cast -from more_itertools import take import patoolib import requests -from pint import DimensionalityError +from more_itertools import take from rich.progress import track from tortoise import Tortoise from tortoise.expressions import Q from tortoise.fields import CharField, IntField, JSONField from tortoise.models import Model -from faebryk.core.solver import Solver import faebryk.library._F as F from faebryk.core.module import Module -from faebryk.core.parameter import Parameter, ParameterOperatable +from faebryk.core.parameter import ParameterOperatable +from faebryk.core.solver import Solver from faebryk.libs.e_series import ( E_SERIES_VALUES, ParamNotResolvedError, @@ -44,7 +43,7 @@ has_part_picked_defined, ) from faebryk.libs.units import P, Quantity, UndefinedUnitError, to_si_str -from faebryk.libs.util import at_exit, cast_assert, try_or +from faebryk.libs.util import at_exit, cast_assert logger = logging.getLogger(__name__) @@ -103,10 +102,6 @@ def units(self): return faebryk.libs.units.dimensionless - @units.setter - def units(self, value: Unit): - self.__setattr__("units", value) - @dataclass(frozen=True) class MappingParameterDB: diff --git a/src/faebryk/libs/picker/jlcpcb/pickers.py b/src/faebryk/libs/picker/jlcpcb/pickers.py index 63b57a25..5f11af62 100644 --- a/src/faebryk/libs/picker/jlcpcb/pickers.py +++ b/src/faebryk/libs/picker/jlcpcb/pickers.py @@ -1,9 +1,9 @@ import logging -from faebryk.core.solver import DefaultSolver import faebryk.library._F as F import faebryk.libs.picker.jlcpcb.picker_lib as P from 
faebryk.core.module import Module +from faebryk.core.solver import DefaultSolver from faebryk.libs.picker.jlcpcb.jlcpcb import JLCPCB_DB, ComponentQuery from faebryk.libs.picker.picker import PickError diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 1cfd43d5..e875a35d 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -520,7 +520,10 @@ def __eq__(self, value: Any) -> bool: def __repr__(self) -> str: if self.units.is_compatible_with(dimensionless): return f"Range({self._range._min}, {self._range._max})" - return f"Range({self.base_to_units(self._range._min)}, {self.base_to_units(self._range._max)} | {self.units})" + return ( + f"Range({self.base_to_units(self._range._min)}, " + f"{self.base_to_units(self._range._max)} | {self.units})" + ) class Single(Range[QuantityT]): @@ -695,8 +698,13 @@ def __hash__(self) -> int: def __repr__(self) -> str: if self.units.is_compatible_with(dimensionless): - return f"_RangeUnion({', '.join(f"[{r._min}, {r._max}]" for r in self._ranges.ranges)})" - return f"_RangeUnion({', '.join(f"[{self.base_to_units(r._min)}, {self.base_to_units(r._max)}]" for r in self._ranges.ranges)} | {self.units})" + inner = ", ".join(f"[{r._min}, {r._max}]" for r in self._ranges.ranges) + return f"_RangeUnion({inner})" + inner = ", ".join( + f"[{self.base_to_units(r._min)}, {self.base_to_units(r._max)}]" + for r in self._ranges.ranges + ) + return f"_RangeUnion({inner} | {self.units})" class Ranges(NonIterableRanges[QuantityT], Iterable[Range[QuantityT]]): diff --git a/test/libs/test_e_series.py b/test/libs/test_e_series.py index 60d0feae..87eb107a 100644 --- a/test/libs/test_e_series.py +++ b/test/libs/test_e_series.py @@ -3,7 +3,6 @@ from itertools import pairwise -import faebryk.library._F as F from faebryk.libs.e_series import ( E_SERIES_VALUES, e_series_intersect, From a3ea34df0fd9345da4d47216451451a9f9b98857 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 20:20:57 +0200 Subject: [PATCH 62/80] refine jlc & solver --- src/faebryk/core/parameter.py | 6 +- src/faebryk/core/solver.py | 59 ++++++++---- src/faebryk/libs/picker/jlcpcb/jlcpcb.py | 117 ++++++++--------------- src/faebryk/libs/sets.py | 11 +++ 4 files changed, 96 insertions(+), 97 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index d5629307..6e8b3ff8 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -152,10 +152,12 @@ def new(self2): self.inspect_final = new # Could be exponentially many - def inspect_num_known_supersets(self) -> int: + def inspect_known_supersets_are_few(self) -> bool: + raise Exception("not implemented") + + def inspect_get_known_supersets(self) -> Iterable[P_Set]: raise Exception("not implemented") - # def inspect_get_known_supersets(self) -> Iterable[P_Set]: ... 
def inspect_get_known_superranges(self: NumberLike) -> Iterable[Ranges]: raise Exception("not implemented") diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index 9786c6e8..87440452 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -21,15 +21,6 @@ class SolveResult[ArgType]: # in megabytes # memory: int - # solve for a single value for the given expression - # while trying to minimize the value of the optional minimize expression - # suppose_constraint can be added, which by constraining the solution further can - # make solving easier - # it is only in effect for the duration of the solve call - # constrain_result will make sure the result is actually part of the solution set - # of the expression - # returns a tuple of the value chosen and a list of parameters that have an empty - # solution set def get_any_single( self, G: Graph, @@ -37,15 +28,26 @@ def get_any_single( suppose_constraint: Predicate | None = None, minimize: Expression | None = None, constrain_result: bool = True, - ) -> tuple[Any, list[Parameter]]: ... # TODO Any -> NumberLike? - - # make at least one of the passed predicates true, unless that is impossible - # while trying to minimize the value of the optional minimize expression - # there is no specific order in which the predicates are solved - # suppose_constraint can be added, which by constraining the solution further can - # make solving easier - # it is only in effect for the duration of the solve call - # constrain_solved will add the solutions as constraints + ) -> tuple[Any, list[Parameter]]: # TODO Any -> NumberLike? + """ + Solve for a single value for the given expression. + + Args: + G: The graph to solve on. + expression: The expression to solve. + suppose_constraint: An optional constraint that can be added to make solving + easier. It is only in effect for the duration of the + solve call. + minimize: An optional expression to minimize while solving. + constrain_result: If True, ensure the result is part of the solution set of + the expression. + + Returns: + A tuple containing the chosen value and a list of parameters with empty + solution sets. + """ + ... + def assert_any_predicate[ArgType]( self, G: Graph, @@ -53,7 +55,26 @@ def assert_any_predicate[ArgType]( suppose_constraint: Predicate | None = None, minimize: Expression | None = None, constrain_solved: bool = True, - ) -> SolveResult[ArgType]: ... + ) -> SolveResult[ArgType]: + """ + Make at least one of the passed predicates true, unless that is impossible. + + Args: + G: The graph to solve on. + predicates: A list of predicates to solve. + suppose_constraint: An optional constraint that can be added to make solving + easier. It is only in effect for the duration of the + solve call. + minimize: An optional expression to minimize while solving. + constrain_solved: If True, add the solutions as constraints. + + Returns: + A SolveResult object containing the true, false, and unknown predicates. + + Note: + There is no specific order in which the predicates are solved. + """ + ... # run deferred work def finalize(self, G: Graph) -> None: ... 
diff --git a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py index 5969b5a7..c3fcd965 100644 --- a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py +++ b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py @@ -10,11 +10,10 @@ from dataclasses import dataclass from pathlib import Path from textwrap import indent -from typing import Any, Callable, Generator, Self, Sequence, cast +from typing import Any, Callable, Generator, Self, Sequence import patoolib import requests -from more_itertools import take from rich.progress import track from tortoise import Tortoise from tortoise.expressions import Q @@ -59,50 +58,6 @@ def __init__(self, partno: str) -> None: super().__init__(partno=partno) -class TBD_ParseError(L.P_UnitSet): - """ - Wrapper for TBD that behaves exactly like TBD for the core and picker - But gives us the possibility to attach parser errors to it for deferred - error logging - """ - - def __init__(self, e: Exception, msg: str): - self.e = e - self.msg = msg - super().__init__() - - def __repr__(self): - return f"{super().__repr__()}({self.msg}: {self.e})" - - def __getattr__(self, name: str) -> Any: - import traceback - - print("__getattr__", name) - traceback.print_stack() - return None - - def __setattr__(self, name: str, value: Any) -> None: - import traceback - - print("__setattr__", name, value) - traceback.print_stack() - - def is_empty(self) -> bool: - self.__getattr__("is_empty") - return True - - def __contains__(self, item: Any) -> bool: - self.__getattr__("__contains__") - return False - - @property - def units(self): - self.__getattr__("units") - import faebryk.libs.units - - return faebryk.libs.units.dimensionless - - @dataclass(frozen=True) class MappingParameterDB: param_name: str @@ -229,7 +184,7 @@ def get_price(self, qty: int = 1) -> float: return unit_price * qty + handling_fee - def attribute_to_set( + def attribute_to_range( self, attribute_name: str, use_tolerance: bool = False, ignore_at: bool = True ) -> L.Range[Quantity]: """ @@ -285,7 +240,7 @@ def attribute_to_set( return L.Range.from_center_rel(value, tolerance) - def get_parameter(self, m: MappingParameterDB) -> L.Range[Quantity]: + def get_range(self, m: MappingParameterDB) -> L.Range[Quantity]: """ Transform a component attribute to a parameter @@ -333,11 +288,11 @@ def get_parameter(self, m: MappingParameterDB) -> L.Range[Quantity]: if parser is not None: return parser(self.extra["attributes"][attr_key]) - return self.attribute_to_set( + return self.attribute_to_range( attr_key, tolerance_search_key is not None, m.ignore_at ) - def get_params( + def get_range_for_mappings( self, mapping: list[MappingParameterDB] ) -> tuple[ dict[MappingParameterDB, L.Range[Quantity]], dict[MappingParameterDB, Exception] @@ -346,7 +301,7 @@ def get_params( exceptions = {} for m in mapping: try: - params[m] = self.get_parameter(m) + params[m] = self.get_range(m) except LookupError | ValueError | AssertionError as e: exceptions[m] = e return params, exceptions @@ -358,7 +313,7 @@ def attach( qty: int = 1, ignore_exceptions: bool = False, ): - params, exceptions = self.get_params(mapping) + params, exceptions = self.get_range_for_mappings(mapping) if not ignore_exceptions and exceptions: params_str = indent( @@ -456,7 +411,7 @@ def filter_by_value( ) -> Self: assert self.Q - if value == L.Range(min=float("-inf"), max=float("inf")): + if value.is_unbounded(): return self assert not self.results try: @@ -466,9 +421,7 @@ def filter_by_value( value, f"Could not run e_series_intersect: {e}" ) from e 
si_vals = [ - to_si_str(cast_assert(L.Single, r).get_value(), si_unit) - .replace("µ", "u") - .replace("inf", "∞") + to_si_str(r.min_elem(), si_unit).replace("µ", "u").replace("inf", "∞") for r in intersection ] return self.filter_by_description(*si_vals) @@ -546,41 +499,52 @@ def filter_by_module_params( :return: The first component that matches the parameters """ + # iterate through all candidate components for c in self.get(): - params, exceptions = c.get_params(mapping) + range_mapping, exceptions = c.get_range_for_mappings(mapping) if exceptions: # TODO continue - compatible = True - for m, p in params.items(): - mod_param = getattr(module, m.param_name) - known_superset = L.Ranges( - *take( - INSPECT_KNOWN_SUPERSETS_LIMIT, - mod_param.inspect_get_known_superranges(), - ) + param_mapping = [ + ( + cast_assert(ParameterOperatable, getattr(module, m.param_name)), + c_range, ) - if not known_superset.is_superset_of(L.Ranges(p)): - compatible = False + for m, c_range in range_mapping.items() + ] + + known_incompatible = False + + # check for any param that has few supersets whether the component's range + # is compatible already instead of waiting for the solver + for m_param, c_range in param_mapping: + if not m_param.inspect_known_supersets_are_few(): + continue + + known_superset = L.Ranges(*m_param.inspect_get_known_superranges()) + if not known_superset.is_superset_of(L.Ranges(c_range)): + known_incompatible = True break - if compatible: + # check for every param whether the candidate component's range is + # compatible by querying the solver + if not known_incompatible: anded = True - for m, p in params.items(): - mod_param = cast(ParameterOperatable, getattr(module, m.param_name)) - anded = mod_param.operation_is_superset(p).operation_and(anded) + for m_param, c_range in param_mapping: + anded &= m_param.operation_is_superset(c_range) result = solver.assert_any_predicate( module.get_graph(), [(anded, None)], constrain_solved=False ) - if len(result.true_predicates) == 0: - compatible = False + if not result.true_predicates: + known_incompatible = True - if not compatible: + # debug + if known_incompatible: logger.debug( f"Component {c.lcsc} doesn't match: " - f"{[p for p, v in params.items()]}" + f"{[p for p, v in range_mapping.items()]}" ) continue @@ -600,7 +564,8 @@ def filter_by_module_params_and_attach( solver: Solver, qty: int = 1, ): - # TODO if no modules without TBD, rerun with TBD allowed + # TODO remove ignore_exceptions + # was used to handle TBDs failures = [] for c in self.filter_by_module_params(module, mapping, solver): diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index e875a35d..5cb8ecee 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -89,6 +89,9 @@ def __init__(self, min: NumericT, max: NumericT): def is_empty(self) -> bool: return False + def is_unbounded(self) -> bool: + return self._min == float("-inf") and self._max == float("inf") + def min_elem(self) -> NumericT: return self._min @@ -447,6 +450,9 @@ def max_elem(self) -> Quantity: def is_empty(self) -> bool: return self._range.is_empty() + def is_unbounded(self) -> bool: + return self._range.is_unbounded() + def op_intersect_range( self, other: "Range[QuantityT]" ) -> "NonIterableRanges[QuantityT]": @@ -712,6 +718,11 @@ def __iter__(self) -> Generator[Range[QuantityT]]: for r in self._ranges.ranges: yield Range._from_range(r, self.units) + def is_unbounded(self) -> bool: + if self.is_empty(): + return False + return next(iter(self)).is_unbounded() + def 
Empty(units: Unit | None = None) -> Ranges[QuantityT]: if units is None: From 2e8b2e2b8206986462b776fd63467213179407b2 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 20:39:53 +0200 Subject: [PATCH 63/80] e_series cache; minor range fix --- src/faebryk/libs/e_series.py | 838 ++++++++++++----------- src/faebryk/libs/picker/jlcpcb/jlcpcb.py | 14 +- src/faebryk/libs/sets.py | 13 +- test/libs/test_e_series.py | 16 +- 4 files changed, 446 insertions(+), 435 deletions(-) diff --git a/src/faebryk/libs/e_series.py b/src/faebryk/libs/e_series.py index a110fa02..a72afec4 100644 --- a/src/faebryk/libs/e_series.py +++ b/src/faebryk/libs/e_series.py @@ -6,416 +6,430 @@ from faebryk.libs.library import L from faebryk.libs.sets import Range, Ranges from faebryk.libs.units import Quantity, Unit, dimensionless +from faebryk.libs.util import once logger = logging.getLogger(__name__) -E_SERIES = set[float] +E_SERIES = frozenset[float] class E_SERIES_VALUES: - E192 = { - 1.00, - 1.01, - 1.02, - 1.04, - 1.05, - 1.06, - 1.07, - 1.09, - 1.10, - 1.11, - 1.13, - 1.14, - 1.15, - 1.17, - 1.18, - 1.20, - 1.21, - 1.23, - 1.24, - 1.26, - 1.27, - 1.29, - 1.30, - 1.32, - 1.33, - 1.35, - 1.37, - 1.38, - 1.40, - 1.42, - 1.43, - 1.45, - 1.47, - 1.49, - 1.50, - 1.52, - 1.54, - 1.56, - 1.58, - 1.60, - 1.62, - 1.64, - 1.65, - 1.67, - 1.69, - 1.72, - 1.74, - 1.76, - 1.78, - 1.80, - 1.82, - 1.84, - 1.87, - 1.89, - 1.91, - 1.93, - 1.96, - 1.98, - 2.00, - 2.03, - 2.05, - 2.08, - 2.10, - 2.13, - 2.15, - 2.18, - 2.21, - 2.23, - 2.26, - 2.29, - 2.32, - 2.34, - 2.37, - 2.40, - 2.43, - 2.46, - 2.49, - 2.52, - 2.55, - 2.58, - 2.61, - 2.64, - 2.67, - 2.71, - 2.74, - 2.77, - 2.80, - 2.84, - 2.87, - 2.91, - 2.94, - 2.98, - 3.01, - 3.05, - 3.09, - 3.12, - 3.16, - 3.20, - 3.24, - 3.28, - 3.32, - 3.36, - 3.40, - 3.44, - 3.48, - 3.52, - 3.57, - 3.61, - 3.65, - 3.70, - 3.74, - 3.79, - 3.83, - 3.88, - 3.92, - 3.97, - 4.02, - 4.07, - 4.12, - 4.17, - 4.22, - 4.27, - 4.32, - 4.37, - 4.42, - 4.48, - 4.53, - 4.59, - 4.64, - 4.70, - 4.75, - 4.81, - 4.87, - 4.93, - 4.99, - 5.05, - 5.11, - 5.17, - 5.23, - 5.30, - 5.36, - 5.42, - 5.49, - 5.56, - 5.62, - 5.69, - 5.76, - 5.83, - 5.90, - 5.97, - 6.04, - 6.12, - 6.19, - 6.26, - 6.34, - 6.42, - 6.49, - 6.57, - 6.65, - 6.73, - 6.81, - 6.90, - 6.98, - 7.06, - 7.15, - 7.23, - 7.32, - 7.41, - 7.50, - 7.59, - 7.68, - 7.77, - 7.87, - 7.96, - 8.06, - 8.16, - 8.25, - 8.35, - 8.45, - 8.56, - 8.66, - 8.76, - 8.87, - 8.98, - 9.09, - 9.20, - 9.31, - 9.42, - 9.53, - 9.65, - 9.76, - 9.88, - } - - E96 = { - 1.00, - 1.02, - 1.05, - 1.07, - 1.10, - 1.13, - 1.15, - 1.18, - 1.21, - 1.24, - 1.27, - 1.30, - 1.33, - 1.37, - 1.40, - 1.43, - 1.47, - 1.50, - 1.54, - 1.58, - 1.62, - 1.65, - 1.69, - 1.74, - 1.78, - 1.82, - 1.87, - 1.91, - 1.96, - 2.00, - 2.05, - 2.10, - 2.15, - 2.21, - 2.26, - 2.32, - 2.37, - 2.43, - 2.49, - 2.55, - 2.61, - 2.67, - 2.74, - 2.80, - 2.87, - 2.94, - 3.01, - 3.09, - 3.16, - 3.24, - 3.32, - 3.40, - 3.48, - 3.57, - 3.65, - 3.74, - 3.83, - 3.92, - 4.02, - 4.12, - 4.22, - 4.32, - 4.42, - 4.53, - 4.64, - 4.75, - 4.87, - 4.99, - 5.11, - 5.23, - 5.36, - 5.49, - 5.62, - 5.76, - 5.90, - 6.04, - 6.19, - 6.34, - 6.49, - 6.65, - 6.81, - 6.98, - 7.15, - 7.32, - 7.50, - 7.68, - 7.87, - 8.06, - 8.25, - 8.45, - 8.66, - 8.87, - 9.09, - 9.31, - 9.53, - 9.76, - } - - E48 = { - 1.00, - 1.05, - 1.10, - 1.15, - 1.21, - 1.27, - 1.33, - 1.40, - 1.47, - 1.54, - 1.62, - 1.69, - 1.78, - 1.87, - 1.96, - 2.05, - 2.15, - 2.26, - 2.37, - 2.49, - 2.61, - 2.74, - 2.87, - 3.01, - 3.16, - 3.32, - 3.48, - 3.65, - 3.83, - 
4.02, - 4.22, - 4.42, - 4.64, - 4.87, - 5.11, - 5.36, - 5.62, - 5.90, - 6.19, - 6.49, - 6.81, - 7.15, - 7.50, - 7.87, - 8.25, - 8.66, - 9.09, - 9.53, - } - - E24 = { - 1.0, - 1.1, - 1.2, - 1.3, - 1.5, - 1.6, - 1.8, - 2.0, - 2.2, - 2.4, - 2.7, - 3.0, - 3.3, - 3.6, - 3.9, - 4.3, - 4.7, - 5.1, - 5.6, - 6.2, - 6.8, - 7.5, - 8.2, - 9.1, - } - - E12 = { - 1.0, - 1.2, - 1.5, - 1.8, - 2.2, - 2.7, - 3.3, - 3.9, - 4.7, - 5.6, - 6.8, - 8.2, - } - - E6 = { - 1.0, - 1.5, - 2.2, - 3.3, - 4.7, - 6.8, - } - - E3 = { - 1.0, - 2.2, - 4.7, - } - - E_ALL = set(sorted(E24 | E192)) + E192 = frozenset( + [ + 1.00, + 1.01, + 1.02, + 1.04, + 1.05, + 1.06, + 1.07, + 1.09, + 1.10, + 1.11, + 1.13, + 1.14, + 1.15, + 1.17, + 1.18, + 1.20, + 1.21, + 1.23, + 1.24, + 1.26, + 1.27, + 1.29, + 1.30, + 1.32, + 1.33, + 1.35, + 1.37, + 1.38, + 1.40, + 1.42, + 1.43, + 1.45, + 1.47, + 1.49, + 1.50, + 1.52, + 1.54, + 1.56, + 1.58, + 1.60, + 1.62, + 1.64, + 1.65, + 1.67, + 1.69, + 1.72, + 1.74, + 1.76, + 1.78, + 1.80, + 1.82, + 1.84, + 1.87, + 1.89, + 1.91, + 1.93, + 1.96, + 1.98, + 2.00, + 2.03, + 2.05, + 2.08, + 2.10, + 2.13, + 2.15, + 2.18, + 2.21, + 2.23, + 2.26, + 2.29, + 2.32, + 2.34, + 2.37, + 2.40, + 2.43, + 2.46, + 2.49, + 2.52, + 2.55, + 2.58, + 2.61, + 2.64, + 2.67, + 2.71, + 2.74, + 2.77, + 2.80, + 2.84, + 2.87, + 2.91, + 2.94, + 2.98, + 3.01, + 3.05, + 3.09, + 3.12, + 3.16, + 3.20, + 3.24, + 3.28, + 3.32, + 3.36, + 3.40, + 3.44, + 3.48, + 3.52, + 3.57, + 3.61, + 3.65, + 3.70, + 3.74, + 3.79, + 3.83, + 3.88, + 3.92, + 3.97, + 4.02, + 4.07, + 4.12, + 4.17, + 4.22, + 4.27, + 4.32, + 4.37, + 4.42, + 4.48, + 4.53, + 4.59, + 4.64, + 4.70, + 4.75, + 4.81, + 4.87, + 4.93, + 4.99, + 5.05, + 5.11, + 5.17, + 5.23, + 5.30, + 5.36, + 5.42, + 5.49, + 5.56, + 5.62, + 5.69, + 5.76, + 5.83, + 5.90, + 5.97, + 6.04, + 6.12, + 6.19, + 6.26, + 6.34, + 6.42, + 6.49, + 6.57, + 6.65, + 6.73, + 6.81, + 6.90, + 6.98, + 7.06, + 7.15, + 7.23, + 7.32, + 7.41, + 7.50, + 7.59, + 7.68, + 7.77, + 7.87, + 7.96, + 8.06, + 8.16, + 8.25, + 8.35, + 8.45, + 8.56, + 8.66, + 8.76, + 8.87, + 8.98, + 9.09, + 9.20, + 9.31, + 9.42, + 9.53, + 9.65, + 9.76, + 9.88, + ] + ) + + E96 = frozenset( + [ + 1.00, + 1.02, + 1.05, + 1.07, + 1.10, + 1.13, + 1.15, + 1.18, + 1.21, + 1.24, + 1.27, + 1.30, + 1.33, + 1.37, + 1.40, + 1.43, + 1.47, + 1.50, + 1.54, + 1.58, + 1.62, + 1.65, + 1.69, + 1.74, + 1.78, + 1.82, + 1.87, + 1.91, + 1.96, + 2.00, + 2.05, + 2.10, + 2.15, + 2.21, + 2.26, + 2.32, + 2.37, + 2.43, + 2.49, + 2.55, + 2.61, + 2.67, + 2.74, + 2.80, + 2.87, + 2.94, + 3.01, + 3.09, + 3.16, + 3.24, + 3.32, + 3.40, + 3.48, + 3.57, + 3.65, + 3.74, + 3.83, + 3.92, + 4.02, + 4.12, + 4.22, + 4.32, + 4.42, + 4.53, + 4.64, + 4.75, + 4.87, + 4.99, + 5.11, + 5.23, + 5.36, + 5.49, + 5.62, + 5.76, + 5.90, + 6.04, + 6.19, + 6.34, + 6.49, + 6.65, + 6.81, + 6.98, + 7.15, + 7.32, + 7.50, + 7.68, + 7.87, + 8.06, + 8.25, + 8.45, + 8.66, + 8.87, + 9.09, + 9.31, + 9.53, + 9.76, + ] + ) + + E48 = frozenset( + [ + 1.00, + 1.05, + 1.10, + 1.15, + 1.21, + 1.27, + 1.33, + 1.40, + 1.47, + 1.54, + 1.62, + 1.69, + 1.78, + 1.87, + 1.96, + 2.05, + 2.15, + 2.26, + 2.37, + 2.49, + 2.61, + 2.74, + 2.87, + 3.01, + 3.16, + 3.32, + 3.48, + 3.65, + 3.83, + 4.02, + 4.22, + 4.42, + 4.64, + 4.87, + 5.11, + 5.36, + 5.62, + 5.90, + 6.19, + 6.49, + 6.81, + 7.15, + 7.50, + 7.87, + 8.25, + 8.66, + 9.09, + 9.53, + ] + ) + + E24 = frozenset( + [ + 1.0, + 1.1, + 1.2, + 1.3, + 1.5, + 1.6, + 1.8, + 2.0, + 2.2, + 2.4, + 2.7, + 3.0, + 3.3, + 3.6, + 3.9, + 4.3, + 4.7, + 5.1, + 5.6, + 6.2, + 6.8, + 7.5, + 8.2, + 9.1, + ] + ) 
+ + E12 = frozenset( + [ + 1.0, + 1.2, + 1.5, + 1.8, + 2.2, + 2.7, + 3.3, + 3.9, + 4.7, + 5.6, + 6.8, + 8.2, + ] + ) + + E6 = frozenset( + [ + 1.0, + 1.5, + 2.2, + 3.3, + 4.7, + 6.8, + ] + ) + + E3 = frozenset( + [ + 1.0, + 2.2, + 4.7, + ] + ) + E_ALL = frozenset(sorted(E24 | E192)) QuantityT = TypeVar("QuantityT", int, float, Quantity) @@ -438,22 +452,17 @@ def repeat_set_over_base( ) -class ParamNotResolvedError(Exception): ... - - -_e_series_cache: dict[tuple[Ranges, int], L.Ranges] = {} - - +@once def e_series_intersect( value_set: Range[QuantityT] | Ranges[QuantityT], - e_series: E_SERIES = E_SERIES_VALUES.E_ALL, + e_series: E_SERIES | None = None, ) -> L.Ranges[QuantityT]: + if e_series is None: + e_series = E_SERIES_VALUES.E_ALL + if isinstance(value_set, Range): value_set = Ranges(value_set) - if (value_set, id(e_series)) in _e_series_cache: - return _e_series_cache[(value_set, id(e_series))] - if ( value_set.is_empty() or value_set.min_elem() < 0 @@ -477,7 +486,6 @@ def e_series_intersect( unit=min_val_q.units, ) out = out.op_union_ranges(e_series_values.op_intersect_range(sub_range)) - _e_series_cache[(value_set, id(e_series))] = out return out diff --git a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py index c3fcd965..abffcad5 100644 --- a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py +++ b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py @@ -25,8 +25,7 @@ from faebryk.core.parameter import ParameterOperatable from faebryk.core.solver import Solver from faebryk.libs.e_series import ( - E_SERIES_VALUES, - ParamNotResolvedError, + E_SERIES, e_series_intersect, ) from faebryk.libs.library import L @@ -407,19 +406,16 @@ def filter_by_value( self, value: L.Ranges[Quantity], si_unit: str, - e_series: set[float] | None = None, + e_series: E_SERIES | None = None, ) -> Self: assert self.Q if value.is_unbounded(): return self assert not self.results - try: - intersection = e_series_intersect(value, e_series or E_SERIES_VALUES.E_ALL) - except ParamNotResolvedError as e: - raise ComponentQuery.ParamError( - value, f"Could not run e_series_intersect: {e}" - ) from e + intersection = e_series_intersect(value, e_series) + if intersection.is_empty(): + raise ComponentQuery.ParamError(value, "No intersection with E-series") si_vals = [ to_si_str(r.min_elem(), si_unit).replace("µ", "u").replace("inf", "∞") for r in intersection diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 5cb8ecee..2e907d84 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -13,6 +13,9 @@ class P_Set[T](Protocol): def is_empty(self) -> bool: ... + def __bool__(self) -> bool: + raise Exception("don't use bool to check for emptiness, use is_empty()") + def __contains__(self, item: T) -> bool: ... 
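# A minimal illustrative sketch of the emptiness-check convention that the
# P_Set.__bool__ guard added above enforces; the names used here (L.Empty,
# dimensionless) are taken from the test changes in this same patch.
from faebryk.libs.library import L
from faebryk.libs.units import dimensionless

empty = L.Empty(units=dimensionless)
assert empty.is_empty()  # the supported emptiness check
# Truthiness (`if empty:`) is deliberately not a valid emptiness check; that is
# exactly the misuse the __bool__ guard above is meant to surface.
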
@@ -225,9 +228,9 @@ def closest_elem(self, target: NumericT) -> NumericT: raise ValueError("empty range cannot have closest element") index = bisect(self.ranges, target, key=lambda r: r.min_elem()) left = self.ranges[index - 1] if index > 0 else None - if left and target in left: + if left is not None and target in left: return target - left_bound = left.max_elem() if left else None + left_bound = left.max_elem() if left is not None else None right_bound = ( self.ranges[index].min_elem() if index < len(self.ranges) else None ) @@ -520,8 +523,8 @@ def __eq__(self, value: Any) -> bool: return False # TODO, convert to base unit first - # def __hash__(self) -> int: - # return hash((self._range, self.units)) + def __hash__(self) -> int: + return hash((self._range, self.range_units)) def __repr__(self) -> str: if self.units.is_compatible_with(dimensionless): @@ -700,7 +703,7 @@ def __eq__(self, value: Any) -> bool: return False def __hash__(self) -> int: - return hash((self._ranges, self.units)) + return hash((self._ranges, self.range_units)) def __repr__(self) -> str: if self.units.is_compatible_with(dimensionless): diff --git a/test/libs/test_e_series.py b/test/libs/test_e_series.py index 87eb107a..332fca3f 100644 --- a/test/libs/test_e_series.py +++ b/test/libs/test_e_series.py @@ -13,14 +13,18 @@ def test_intersect(): - assert e_series_intersect(L.Range(1, 10), {1, 2, 3}) == L.Singles(1, 2, 3, 10) - assert e_series_intersect(L.Range(3, 10), {1, 8, 9}) == L.Singles(8, 9, 10) - assert e_series_intersect(L.Range(10, 1e3), {1, 1.5, 8, 9.9}) == L.Singles( - 10, 15, 80, 99, 100, 150, 800, 990, 1000 + assert e_series_intersect(L.Range(1, 10), frozenset({1, 2, 3})) == L.Singles( + 1, 2, 3, 10 ) - assert e_series_intersect(L.Range(2.1e3, 7.9e3), {1, 2, 8, 9}) == L.Empty( - units=dimensionless + assert e_series_intersect(L.Range(3, 10), frozenset({1, 8, 9})) == L.Singles( + 8, 9, 10 ) + assert e_series_intersect( + L.Range(10, 1e3), frozenset({1, 1.5, 8, 9.9}) + ) == L.Singles(10, 15, 80, 99, 100, 150, 800, 990, 1000) + assert e_series_intersect( + L.Range(2.1e3, 7.9e3), frozenset({1, 2, 8, 9}) + ) == L.Empty(units=dimensionless) def test_ratio(): From 2c931576bf6857a6a3293919084e0a10edc178d9 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 21:11:07 +0200 Subject: [PATCH 64/80] Esphome solver --- src/faebryk/core/solver.py | 14 +++++++++----- src/faebryk/exporters/esphome/esphome.py | 16 ++++++++++++++-- src/faebryk/library/BH1750FVI_TR.py | 6 ++---- src/faebryk/library/HLK_LD2410B_P.py | 7 ++----- src/faebryk/library/PM1006.py | 7 ++----- src/faebryk/library/SCD40.py | 7 ++----- src/faebryk/library/XL_3528RGBW_WS2812B.py | 6 ++---- src/faebryk/libs/examples/buildutil.py | 6 ++++-- src/faebryk/libs/examples/pickers.py | 3 +-- src/faebryk/libs/picker/jlcpcb/pickers.py | 5 ++--- src/faebryk/libs/util.py | 8 ++++++++ 11 files changed, 48 insertions(+), 37 deletions(-) diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index 87440452..912d475e 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -2,7 +2,7 @@ from typing import Any, Protocol from faebryk.core.graph import Graph -from faebryk.core.parameter import Expression, Parameter, ParameterOperatable, Predicate +from faebryk.core.parameter import Expression, ParameterOperatable, Predicate class Solver(Protocol): @@ -15,6 +15,12 @@ class SolveResult[ArgType]: false_predicates: list["Solver.PredicateWithInfo[ArgType]"] unknown_predicates: list["Solver.PredicateWithInfo[ArgType]"] + @dataclass 
+ class SolveResultSingle: + # TODO thinkn about failure case + value: Any # TODO Any -> NumberLike? + # parameters_with_empty_solution_sets: list[Parameter] + # timeout per solve call in milliseconds timeout: int # threads: int @@ -23,17 +29,15 @@ class SolveResult[ArgType]: def get_any_single( self, - G: Graph, - expression: Expression, + expression: ParameterOperatable, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, constrain_result: bool = True, - ) -> tuple[Any, list[Parameter]]: # TODO Any -> NumberLike? + ) -> SolveResultSingle: """ Solve for a single value for the given expression. Args: - G: The graph to solve on. expression: The expression to solve. suppose_constraint: An optional constraint that can be added to make solving easier. It is only in effect for the duration of the diff --git a/src/faebryk/exporters/esphome/esphome.py b/src/faebryk/exporters/esphome/esphome.py index 6832d632..4e8734d2 100644 --- a/src/faebryk/exporters/esphome/esphome.py +++ b/src/faebryk/exporters/esphome/esphome.py @@ -7,16 +7,28 @@ import faebryk.library._F as F from faebryk.core.graphinterface import Graph -from faebryk.libs.util import merge_dicts +from faebryk.core.parameter import Parameter +from faebryk.core.solver import Solver +from faebryk.libs.units import Quantity +from faebryk.libs.util import cast_assert, dict_value_visitor, merge_dicts logger = logging.getLogger(__name__) -def make_esphome_config(G: Graph) -> dict: +def make_esphome_config(G: Graph, solver: Solver) -> dict: esphome_components = G.nodes_with_trait(F.has_esphome_config) esphome_config = merge_dicts(*[t.get_config() for _, t in esphome_components]) + # deep find parameters in dict and solve + def solve_parameter(v): + if not isinstance(v, Parameter): + return v + + return str(cast_assert(Quantity, solver.get_any_single(v).value)) + + dict_value_visitor(esphome_config, lambda _, v: solve_parameter(v)) + return esphome_config diff --git a/src/faebryk/library/BH1750FVI_TR.py b/src/faebryk/library/BH1750FVI_TR.py index 9886f95e..a9a235f7 100644 --- a/src/faebryk/library/BH1750FVI_TR.py +++ b/src/faebryk/library/BH1750FVI_TR.py @@ -6,8 +6,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity -from faebryk.libs.util import cast_assert +from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -26,7 +25,6 @@ def __preinit__(self): def get_config(self) -> dict: obj = self.obj assert isinstance(obj, BH1750FVI_TR) - val = cast_assert(Quantity, self.update_interval.get_any_single()) i2c = F.is_esphome_bus.find_connected_bus(obj.i2c) @@ -37,7 +35,7 @@ def get_config(self) -> dict: "name": "BH1750 Illuminance", "address": "0x23", "i2c_id": i2c.get_trait(F.is_esphome_bus).get_bus_id(), - "update_interval": f"{val.to('s')}", + "update_interval": self.update_interval, } ] } diff --git a/src/faebryk/library/HLK_LD2410B_P.py b/src/faebryk/library/HLK_LD2410B_P.py index 4480176b..7e95e035 100644 --- a/src/faebryk/library/HLK_LD2410B_P.py +++ b/src/faebryk/library/HLK_LD2410B_P.py @@ -4,8 +4,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity -from faebryk.libs.util import cast_assert +from faebryk.libs.units import P class HLK_LD2410B_P(Module): @@ -17,8 +16,6 @@ class _ld2410b_esphome_config(F.has_esphome_config.impl()): ) def get_config(self) -> dict: - val = cast_assert(Quantity, 
self.throttle.get_any_single()) - obj = self.obj assert isinstance(obj, HLK_LD2410B_P) @@ -38,7 +35,7 @@ def get_config(self) -> dict: return { "ld2410": { - "throttle": f"{val.to('ms')}", + "throttle": self.throttle, "uart_id": uart_cfg["id"], }, "binary_sensor": [ diff --git a/src/faebryk/library/PM1006.py b/src/faebryk/library/PM1006.py index 7bb4dd60..c312d552 100644 --- a/src/faebryk/library/PM1006.py +++ b/src/faebryk/library/PM1006.py @@ -5,8 +5,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity -from faebryk.libs.util import cast_assert +from faebryk.libs.units import P class PM1006(Module): @@ -30,8 +29,6 @@ class _pm1006_esphome_config(F.has_esphome_config.impl()): update_interval = L.p_field(units=P.s, cardinality=1) def get_config(self) -> dict: - val = cast_assert(Quantity, self.update_interval.get_any_single()) - obj = self.obj assert isinstance(obj, PM1006), "This is not an PM1006!" @@ -41,7 +38,7 @@ def get_config(self) -> dict: "sensor": [ { "platform": "pm1006", - "update_interval": f"{val.value.to('s')}", + "update_interval": self.update_interval, "uart_id": uart.get_trait(F.is_esphome_bus).get_bus_id(), } ] diff --git a/src/faebryk/library/SCD40.py b/src/faebryk/library/SCD40.py index 3c1dabc5..0ccc4f3f 100644 --- a/src/faebryk/library/SCD40.py +++ b/src/faebryk/library/SCD40.py @@ -5,8 +5,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity -from faebryk.libs.util import cast_assert +from faebryk.libs.units import P class SCD40(Module): @@ -18,8 +17,6 @@ class _scd4x_esphome_config(F.has_esphome_config.impl()): update_interval = L.p_field(units=P.s, cardinality=1) def get_config(self) -> dict: - val = cast_assert(Quantity, self.update_interval.get_any_single()) - obj = self.get_obj(SCD40) i2c = F.is_esphome_bus.find_connected_bus(obj.i2c) @@ -39,7 +36,7 @@ def get_config(self) -> dict: }, "address": 0x62, "i2c_id": i2c.get_trait(F.is_esphome_bus).get_bus_id(), - "update_interval": f"{val.value.to('s')}", + "update_interval": self.update_interval, } ] } diff --git a/src/faebryk/library/XL_3528RGBW_WS2812B.py b/src/faebryk/library/XL_3528RGBW_WS2812B.py index dadc4d78..d48d91ea 100644 --- a/src/faebryk/library/XL_3528RGBW_WS2812B.py +++ b/src/faebryk/library/XL_3528RGBW_WS2812B.py @@ -4,8 +4,7 @@ import faebryk.library._F as F from faebryk.core.module import Module from faebryk.libs.library import L -from faebryk.libs.units import P, Quantity -from faebryk.libs.util import cast_assert +from faebryk.libs.units import P class XL_3528RGBW_WS2812B(Module): @@ -16,13 +15,12 @@ def get_config(self) -> dict: obj = self.get_obj(XL_3528RGBW_WS2812B) data_pin = F.is_esphome_bus.find_connected_bus(obj.di.signal) - val = cast_assert(Quantity, self.update_interval.get_any_single()) return { "light": [ { "platform": "esp32_rmt_led_strip", - "update_interval": f"{val.to('s')}", + "update_interval": self.update_interval, "num_leds": 1, # TODO: make dynamic "rmt_channel": 0, # TODO: make dynamic "chipset": "WS2812", diff --git a/src/faebryk/libs/examples/buildutil.py b/src/faebryk/libs/examples/buildutil.py index 7103f221..b58549e8 100644 --- a/src/faebryk/libs/examples/buildutil.py +++ b/src/faebryk/libs/examples/buildutil.py @@ -8,6 +8,7 @@ import faebryk.libs.picker.lcsc as lcsc from faebryk.core.module import Module +from faebryk.core.solver import DefaultSolver from 
faebryk.exporters.pcb.kicad.transformer import PCB_Transformer from faebryk.libs.app.checks import run_checks from faebryk.libs.app.pcb import apply_design @@ -54,15 +55,16 @@ def apply_design_to_pcb( # TODO this can be prettier # picking ---------------------------------------------------------------- modules = m.get_children_modules(types=Module) + solver = DefaultSolver() try: JLCPCB_DB() for n in modules: - add_jlcpcb_pickers(n, base_prio=-10) + add_jlcpcb_pickers(n, solver, base_prio=-10) except FileNotFoundError: logger.warning("JLCPCB database not found. Skipping JLCPCB pickers.") for n in modules: - add_example_pickers(n) + add_example_pickers(n, solver) pick_part_recursively(m) # ------------------------------------------------------------------------- diff --git a/src/faebryk/libs/examples/pickers.py b/src/faebryk/libs/examples/pickers.py index 178da375..bb6f0039 100644 --- a/src/faebryk/libs/examples/pickers.py +++ b/src/faebryk/libs/examples/pickers.py @@ -298,8 +298,7 @@ def pick_switch(module: "_TSwitch[F.Electrical]", solver: Solver): ) -def add_example_pickers(module: Module): - solver = DefaultSolver() +def add_example_pickers(module: Module, solver: Solver): lookup = { F.Resistor: pick_resistor, F.LED: pick_led, diff --git a/src/faebryk/libs/picker/jlcpcb/pickers.py b/src/faebryk/libs/picker/jlcpcb/pickers.py index 5f11af62..182dd278 100644 --- a/src/faebryk/libs/picker/jlcpcb/pickers.py +++ b/src/faebryk/libs/picker/jlcpcb/pickers.py @@ -3,7 +3,7 @@ import faebryk.library._F as F import faebryk.libs.picker.jlcpcb.picker_lib as P from faebryk.core.module import Module -from faebryk.core.solver import DefaultSolver +from faebryk.core.solver import Solver from faebryk.libs.picker.jlcpcb.jlcpcb import JLCPCB_DB, ComponentQuery from faebryk.libs.picker.picker import PickError @@ -81,13 +81,12 @@ def pick(self, module: Module): ) from e -def add_jlcpcb_pickers(module: Module, base_prio: int = 0) -> None: +def add_jlcpcb_pickers(module: Module, solver: Solver, base_prio: int = 0) -> None: # check if DB ok JLCPCB_DB() # Generic pickers prio = base_prio - solver = DefaultSolver() module.add( F.has_multi_picker(prio, JLCPCBPicker(P.find_and_attach_by_lcsc_id, solver)) ) diff --git a/src/faebryk/libs/util.py b/src/faebryk/libs/util.py index ffb2e0b9..caec1d74 100644 --- a/src/faebryk/libs/util.py +++ b/src/faebryk/libs/util.py @@ -1136,3 +1136,11 @@ def typename(obj): if isinstance(obj, type): return obj.__name__ return type(obj).__name__ + + +def dict_value_visitor(d: dict, visitor: Callable[[Any, Any], Any]): + for k, v in list(d.items()): + if isinstance(v, dict): + dict_value_visitor(v, visitor) + else: + d[k] = visitor(k, v) From 3ffec7486f1eb8a6de155c6dc7f300de082b32d3 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 21:48:29 +0200 Subject: [PATCH 65/80] remove graph from solver; add hint_param to ComponentQuery --- src/faebryk/core/solver.py | 13 ++++--------- src/faebryk/libs/picker/jlcpcb/jlcpcb.py | 9 ++++++++- src/faebryk/libs/picker/jlcpcb/picker_lib.py | 10 +++++----- src/faebryk/libs/picker/picker.py | 4 +--- 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index 912d475e..fb99c374 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -29,7 +29,7 @@ class SolveResultSingle: def get_any_single( self, - expression: ParameterOperatable, + operatable: ParameterOperatable, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, 
constrain_result: bool = True, @@ -38,7 +38,7 @@ def get_any_single( Solve for a single value for the given expression. Args: - expression: The expression to solve. + operatable: The expression or parameter to solve. suppose_constraint: An optional constraint that can be added to make solving easier. It is only in effect for the duration of the solve call. @@ -47,14 +47,12 @@ def get_any_single( the expression. Returns: - A tuple containing the chosen value and a list of parameters with empty - solution sets. + A SolveResultSingle object containing the chosen value. """ ... def assert_any_predicate[ArgType]( self, - G: Graph, predicates: list["Solver.PredicateWithInfo[ArgType]"], suppose_constraint: Predicate | None = None, minimize: Expression | None = None, @@ -64,7 +62,6 @@ def assert_any_predicate[ArgType]( Make at least one of the passed predicates true, unless that is impossible. Args: - G: The graph to solve on. predicates: A list of predicates to solve. suppose_constraint: An optional constraint that can be added to make solving easier. It is only in effect for the duration of the @@ -89,8 +86,7 @@ class DefaultSolver(Solver): def get_any_single( self, - G: Graph, - expression: Expression, + operatable: ParameterOperatable, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, constrain_result: bool = True, @@ -99,7 +95,6 @@ def get_any_single( def assert_any_predicate[ArgType]( self, - G: Graph, predicates: list["Solver.PredicateWithInfo[ArgType]"], suppose_constraint: Predicate | None = None, minimize: Expression | None = None, diff --git a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py index abffcad5..91882bdb 100644 --- a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py +++ b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py @@ -422,6 +422,13 @@ def filter_by_value( ] return self.filter_by_description(*si_vals) + def hint_filter_parameter( + self, param: ParameterOperatable, si_unit: str, e_series: E_SERIES | None = None + ) -> Self: + # TODO implement + raise NotImplementedError() + return self + def filter_by_category(self, category: str, subcategory: str) -> Self: assert self.Q category_ids = asyncio.run(Category().get_ids(category, subcategory)) @@ -531,7 +538,7 @@ def filter_by_module_params( anded &= m_param.operation_is_superset(c_range) result = solver.assert_any_predicate( - module.get_graph(), [(anded, None)], constrain_solved=False + [(anded, None)], constrain_solved=False ) if not result.true_predicates: known_incompatible = True diff --git a/src/faebryk/libs/picker/jlcpcb/picker_lib.py b/src/faebryk/libs/picker/jlcpcb/picker_lib.py index 08c91723..728d5ba0 100644 --- a/src/faebryk/libs/picker/jlcpcb/picker_lib.py +++ b/src/faebryk/libs/picker/jlcpcb/picker_lib.py @@ -191,7 +191,7 @@ def find_resistor(cmp: Module, solver: Solver): ComponentQuery() .filter_by_category("Resistors", "Chip Resistor - Surface Mount") .filter_by_stock(qty) - .filter_by_value(cmp.resistance, "Ω", E_SERIES_VALUES.E96) + .hint_filter_parameter(cmp.resistance, "Ω", E_SERIES_VALUES.E96) .filter_by_traits(cmp) .sort_by_price(qty) .filter_by_module_params_and_attach(cmp, mapping, solver, qty) @@ -229,7 +229,7 @@ def find_capacitor(cmp: Module, solver: Solver): ) .filter_by_stock(qty) .filter_by_traits(cmp) - .filter_by_value(cmp.capacitance, "F", E_SERIES_VALUES.E24) + .hint_filter_parameter(cmp.capacitance, "F", E_SERIES_VALUES.E24) .sort_by_price(qty) .filter_by_module_params_and_attach(cmp, mapping, solver, qty) ) @@ -273,7 +273,7 @@ def 
find_inductor(cmp: Module, solver: Solver): .filter_by_category("Inductors", "Inductors") .filter_by_stock(qty) .filter_by_traits(cmp) - .filter_by_value(cmp.inductance, "H", E_SERIES_VALUES.E24) + .hint_filter_parameter(cmp.inductance, "H", E_SERIES_VALUES.E24) .sort_by_price(qty) .filter_by_module_params_and_attach(cmp, mapping, solver, qty) ) @@ -356,8 +356,8 @@ def find_diode(cmp: Module, solver: Solver): ComponentQuery() .filter_by_category("Diodes", "") .filter_by_stock(qty) - .filter_by_value(cmp.max_current, "A", E_SERIES_VALUES.E3) - .filter_by_value(cmp.reverse_working_voltage, "V", E_SERIES_VALUES.E3) + .hint_filter_parameter(cmp.max_current, "A", E_SERIES_VALUES.E3) + .hint_filter_parameter(cmp.reverse_working_voltage, "V", E_SERIES_VALUES.E3) .filter_by_traits(cmp) .sort_by_price(qty) .filter_by_module_params_and_attach(cmp, mapping, solver, qty) diff --git a/src/faebryk/libs/picker/picker.py b/src/faebryk/libs/picker/picker.py index 2eb5e863..dddd53f7 100644 --- a/src/faebryk/libs/picker/picker.py +++ b/src/faebryk/libs/picker/picker.py @@ -178,9 +178,7 @@ def pick_module_by_params( if len(predicates) == 0: raise PickErrorParams(module, list(options)) - solve_result = solver.assert_any_predicate( - module.get_graph(), [(p, k) for k, p in predicates.items()] - ) + solve_result = solver.assert_any_predicate([(p, k) for k, p in predicates.items()]) # TODO handle failure parameters From 48b78ddcf49854f5274b5802373024b0dc299cd0 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Wed, 23 Oct 2024 21:54:55 +0200 Subject: [PATCH 66/80] comment on hint --- src/faebryk/libs/picker/jlcpcb/jlcpcb.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py index 91882bdb..74f07409 100644 --- a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py +++ b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py @@ -426,6 +426,12 @@ def hint_filter_parameter( self, param: ParameterOperatable, si_unit: str, e_series: E_SERIES | None = None ) -> Self: # TODO implement + # param will in the general case consist of multiple ranges + # we have to pick some range or make a new one to pre_filter our candidates + # we can try making a new range with inspect_min and max to filter out + # everything we already know won't fit + # then we can check the cardinality of the remaining candidates to see if we + # need to pick a range contained in the param to filter raise NotImplementedError() return self From a6a74ea7ac54441ef264a6344a267cfe0b577eae Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Thu, 24 Oct 2024 12:57:10 +0200 Subject: [PATCH 67/80] parameter report --- src/faebryk/core/parameter.py | 11 +- .../parameters/parameters_to_file.py | 135 +++++++++++++++++- .../exporters/visualize/interactive_params.py | 6 + src/faebryk/libs/util.py | 69 ++++++++- test/core/test_parameters.py | 10 ++ 5 files changed, 225 insertions(+), 6 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 6e8b3ff8..ef0df558 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -335,7 +335,9 @@ def __preinit__(self): @abstract class ConstrainableExpression(Expression, Constrainable): - pass + def __init__(self, *operands: ParameterOperatable.All): + Expression.__init__(self, *operands) + Constrainable.__init__(self) @abstract @@ -626,9 +628,10 @@ class NotEqual(NumericPredicate): class SeticPredicate(Predicate): def __init__(self, left, right): super().__init__(left, right) - types = [Parameter, 
ParameterOperatable.Sets] - if any(type(op) not in types for op in self.operands): - raise ValueError("operands must be Parameter or Set") + types = ParameterOperatable, P_Set + # TODO + # if any(not isinstance(op, types) for op in self.operands): + # raise ValueError("operands must be Parameter or Set") units = [op.units for op in self.operands] for u in units[1:]: if not units[0].is_compatible_with(u): diff --git a/src/faebryk/exporters/parameters/parameters_to_file.py b/src/faebryk/exporters/parameters/parameters_to_file.py index 4c4dabdd..1bb68485 100644 --- a/src/faebryk/exporters/parameters/parameters_to_file.py +++ b/src/faebryk/exporters/parameters/parameters_to_file.py @@ -3,13 +3,146 @@ import logging from pathlib import Path +from typing import Callable, Iterable +from faebryk.core.graphinterface import Graph from faebryk.core.module import Module -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import Expression, Is, Parameter, Predicate +from faebryk.libs.util import EquivalenceClasses, groupby, ind, typename logger = logging.getLogger(__name__) +def parameter_alias_classes(G: Graph) -> list[set[Parameter]]: + full_eq = EquivalenceClasses[Parameter](G.nodes_of_type(Parameter)) + + is_exprs = [e for e in G.nodes_of_type(Is) if e.constrained] + + for is_expr in is_exprs: + params_ops = [op for op in is_expr.operands if isinstance(op, Parameter)] + full_eq.add_eq(*params_ops) + + return full_eq.get() + + +def get_params_for_expr(expr: Expression) -> set[Parameter]: + param_ops = {op for op in expr.operatable_operands if isinstance(op, Parameter)} + expr_ops = {op for op in expr.operatable_operands if isinstance(op, Expression)} + + return param_ops | {op for e in expr_ops for op in get_params_for_expr(e)} + + +def parameter_equivalence_classes(G: Graph) -> list[set[Parameter]]: + related = EquivalenceClasses[Parameter](G.nodes_of_type(Parameter)) + + eq_exprs = [e for e in G.nodes_of_type(Predicate) if e.constrained] + + for eq_expr in eq_exprs: + params = get_params_for_expr(eq_expr) + related.add_eq(*params) + + return related.get() + + +def parameter_report(G: Graph, path: Path): + params = G.nodes_of_type(Parameter) + exprs = G.nodes_of_type(Expression) + predicates = {e for e in exprs if isinstance(e, Predicate)} + exprs.difference_update(predicates) + alias_classes = parameter_alias_classes(G) + eq_classes = parameter_equivalence_classes(G) + unused = [ + p + for p in params + if not any(isinstance(e.node, Expression) for e in p.operated_on.edges) + ] + + def non_empty(classes: list[set[Parameter]]): + return [c for c in classes if len(c) > 1] + + def bound(classes: list[set[Parameter]]): + return sum(len(c) for c in non_empty(classes)) + + infostr = ( + f"{len(params)} parameters" + f"\n {len(non_empty(alias_classes))}({bound(alias_classes)}) alias classes" + f"\n {len(non_empty(eq_classes))}({bound(eq_classes)}) equivalence classes" + f"\n {len(unused)} unused" + "\n" + ) + infostr += f"{len(exprs)} expressions, {len(predicates)} predicates" + + logger.info(f"Found {infostr}") + + out = "" + out += infostr + "\n" + + def block( + header: str, + f: Callable[[], str] | None = None, + lines: list[str] | list[list[str]] | None = None, + ): + nonlocal out + out_str = "" + if f: + out_str += f() + if lines: + lines = [n for n in lines if isinstance(n, str)] + [ + n for nested in lines if isinstance(nested, list) for n in nested + ] + out_str += "\n".join(lines) + + out += f"{header}{'-'*80}\n{ind(out_str)}\n" + + block( + "Parameters", + 
lines=sorted([p.get_full_name(types=True) for p in params]), + ) + + block( + "Unused", + lines=sorted([p.get_full_name(types=True) for p in unused]), + ) + + def Eq(classes: list[set[Parameter]]): + stream = "" + for eq_class in classes: + if len(eq_class) <= 1: + continue + stream += "\n ".join( + sorted([p.get_full_name(types=True) for p in eq_class]) + ) + stream += "\n" + return stream.removesuffix("\n") + + block( + "Fully aliased", + f=lambda: Eq(alias_classes), + ) + + block( + "Equivalence classes", + f=lambda: Eq(eq_classes), + ) + + def type_group(name: str, container: Iterable): + type_grouped = sorted( + groupby(container, lambda x: type(x)).items(), key=lambda x: typename(x[0]) + ) + block( + name, + lines=[ + f"{typename(type_)}: {len(list(group))}" + for type_, group in type_grouped + ], + ) + + type_group("Expressions", exprs) + type_group("Predicates", predicates) + + path.write_text(out) + + def export_parameters_to_file(module: Module, path: Path): """Write all parameters of the given module to a file.""" # {module_name: [{param_name: param_value}, {param_name: param_value},...]} diff --git a/src/faebryk/exporters/visualize/interactive_params.py b/src/faebryk/exporters/visualize/interactive_params.py index 18375d01..98cb4489 100644 --- a/src/faebryk/exporters/visualize/interactive_params.py +++ b/src/faebryk/exporters/visualize/interactive_params.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: MIT from dataclasses import dataclass +from pathlib import Path from typing import cast import dash_core_components as dcc @@ -14,6 +15,7 @@ from faebryk.core.link import LinkSibling from faebryk.core.node import Node from faebryk.core.parameter import Expression, Parameter +from faebryk.exporters.parameters.parameters_to_file import parameter_report from faebryk.exporters.visualize.interactive_graph import ( _GROUP_TYPES, Layout, @@ -237,6 +239,10 @@ def visualize_parameters(G: Graph, height: int | None = None): and isinstance(e.node, Operand_) } + # TODO filter equivalency classes + + parameter_report(G, Path("./build/params.txt")) + elements = [_node(n) for n in nodes] + [_link(li) for li in edges] stylesheet = _Stylesheet() diff --git a/src/faebryk/libs/util.py b/src/faebryk/libs/util.py index caec1d74..c0e82297 100644 --- a/src/faebryk/libs/util.py +++ b/src/faebryk/libs/util.py @@ -12,7 +12,8 @@ from dataclasses import dataclass, fields from enum import StrEnum from functools import cache -from itertools import chain +from genericpath import commonprefix +from itertools import chain, pairwise from textwrap import indent from typing import ( Any, @@ -1144,3 +1145,69 @@ def dict_value_visitor(d: dict, visitor: Callable[[Any, Any], Any]): dict_value_visitor(v, visitor) else: d[k] = visitor(k, v) + + +class DefaultFactoryDict[T, U](dict[T, U]): + def __init__(self, factory: Callable[[T], U], *args, **kwargs): + self.factory = factory + super().__init__(*args, **kwargs) + + def __missing__(self, key: T) -> U: + res = self.factory(key) + self[key] = res + return res + + +class EquivalenceClasses[T: Hashable]: + def __init__(self, base: Iterable[T] | None = None): + self.classes: dict[T, set[T]] = DefaultFactoryDict(lambda k: {k}) + for elem in base or []: + self.classes[elem] + + def add_eq(self, *values: T): + if len(values) < 2: + return + val1 = values[0] + for val in values[1:]: + self.classes[val1].update(self.classes[val]) + for v in self.classes[val]: + self.classes[v] = self.classes[val1] + + def get(self) -> list[set[T]]: + sets = {id(s): s for s in self.classes.values()} + 
return list(sets.values()) + + +def common_prefix_to_tree(iterable: list[str]) -> Iterable[str]: + """ + Turns: + + <760>|RP2040.adc[0]|ADC.reference|ElectricPower.max_current|Parameter + <760>|RP2040.adc[0]|ADC.reference|ElectricPower.voltage|Parameter + <760>|RP2040.adc[1]|ADC.reference|ElectricPower.max_current|Parameter + <760>|RP2040.adc[1]|ADC.reference|ElectricPower.voltage|Parameter + + Into: + + <760>|RP2040.adc[0]|ADC.reference|ElectricPower.max_current|Parameter + -----------------------------------------------.voltage|Parameter + -----------------1]|ADC.reference|ElectricPower.max_current|Parameter + -----------------------------------------------.voltage|Parameter + + Notes: + Recommended to sort the iterable first. + """ + yield iterable[0] + + for s1, s2 in pairwise(iterable): + prefix = commonprefix([s1, s2]) + prefix_length = len(prefix) + yield "-" * prefix_length + s2[prefix_length:] + + +def ind[T: str | list[str]](lines: T) -> T: + prefix = " " + if isinstance(lines, str): + return indent(lines, prefix=prefix) + if isinstance(lines, list): + return [f"{prefix}{line}" for line in lines] # type: ignore diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index 7ea32e01..1d6ae3a9 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -6,6 +6,7 @@ import pytest +import faebryk.library._F as F from faebryk.core.node import Node from faebryk.core.parameter import Parameter from faebryk.libs.library import L @@ -67,6 +68,15 @@ def test_visualize_chain(): visualize_parameters(G, height=1400) +def test_visualize_inspect_app(): + from faebryk.exporters.visualize.interactive_params import visualize_parameters + + rp2040 = F.RP2040() + + G = rp2040.get_graph() + visualize_parameters(G, height=1400) + + # TODO remove if __name__ == "__main__": # if run in jupyter notebook From f6a23a2d2c7e771b20510a9d6a9188afbff88756 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Thu, 31 Oct 2024 10:25:00 +0100 Subject: [PATCH 68/80] rename final --- src/faebryk/core/parameter.py | 10 +++++----- src/faebryk/core/solver.py | 4 ++-- src/faebryk/libs/app/erc.py | 2 +- src/faebryk/libs/examples/buildutil.py | 1 + src/faebryk/libs/examples/pickers.py | 2 +- src/faebryk/libs/test/solver.py | 4 ++-- 6 files changed, 12 insertions(+), 11 deletions(-) diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index ef0df558..51a0bd99 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -140,16 +140,16 @@ def inspect_known_values(self: BooleanLike) -> P_Set[bool]: # raise NotImplementedError() # Run by the solver on finalization - inspect_final: Callable[[Self], None] = lambda _: None + inspect_solution: Callable[[Self], None] = lambda _: None - def inspect_add_on_final(self, fun: Callable[[Self], None]) -> None: - current = self.inspect_final + def inspect_add_on_solution(self, fun: Callable[[Self], None]) -> None: + current = self.inspect_solution def new(self2): current(self2) fun(self2) - self.inspect_final = new + self.inspect_solution = new # Could be exponentially many def inspect_known_supersets_are_few(self) -> bool: @@ -628,7 +628,7 @@ class NotEqual(NumericPredicate): class SeticPredicate(Predicate): def __init__(self, left, right): super().__init__(left, right) - types = ParameterOperatable, P_Set + # types = ParameterOperatable, P_Set # TODO # if any(not isinstance(op, types) for op in self.operands): # raise ValueError("operands must be Parameter or Set") diff --git a/src/faebryk/core/solver.py 
b/src/faebryk/core/solver.py index fb99c374..cc3105e0 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -78,7 +78,7 @@ def assert_any_predicate[ArgType]( ... # run deferred work - def finalize(self, G: Graph) -> None: ... + def find_and_lock_solution(self, G: Graph) -> None: ... class DefaultSolver(Solver): @@ -102,5 +102,5 @@ def assert_any_predicate[ArgType]( ) -> Solver.SolveResult[ArgType]: raise NotImplementedError() - def finalize(self, G: Graph) -> None: + def find_and_lock_solution(self, G: Graph) -> None: raise NotImplementedError() diff --git a/src/faebryk/libs/app/erc.py b/src/faebryk/libs/app/erc.py index ff5eee2a..ab87ff46 100644 --- a/src/faebryk/libs/app/erc.py +++ b/src/faebryk/libs/app/erc.py @@ -78,7 +78,7 @@ def raise_on_limit(x): if x.inspect_known_max() > voltage_limit: raise ERCFaultElectricPowerUndefinedVoltage(ep) - ep.voltage.inspect_add_on_final(raise_on_limit) + ep.voltage.inspect_add_on_solution(raise_on_limit) # shorted nets nets = G.nodes_of_type(F.Net) diff --git a/src/faebryk/libs/examples/buildutil.py b/src/faebryk/libs/examples/buildutil.py index b58549e8..a0ee276c 100644 --- a/src/faebryk/libs/examples/buildutil.py +++ b/src/faebryk/libs/examples/buildutil.py @@ -66,6 +66,7 @@ def apply_design_to_pcb( for n in modules: add_example_pickers(n, solver) pick_part_recursively(m) + solver.find_and_lock_solution(G) # ------------------------------------------------------------------------- example_prj = Path(__file__).parent / Path("resources/example") diff --git a/src/faebryk/libs/examples/pickers.py b/src/faebryk/libs/examples/pickers.py index bb6f0039..c9b8d58c 100644 --- a/src/faebryk/libs/examples/pickers.py +++ b/src/faebryk/libs/examples/pickers.py @@ -10,7 +10,7 @@ import faebryk.library._F as F from faebryk.core.module import Module -from faebryk.core.solver import DefaultSolver, Solver +from faebryk.core.solver import Solver from faebryk.libs.library import L from faebryk.libs.picker.lcsc import LCSC_Part from faebryk.libs.picker.picker import PickerOption, pick_module_by_params diff --git a/src/faebryk/libs/test/solver.py b/src/faebryk/libs/test/solver.py index 9beb6e9c..110b675b 100644 --- a/src/faebryk/libs/test/solver.py +++ b/src/faebryk/libs/test/solver.py @@ -8,7 +8,7 @@ def solves_to(stmt: ParameterOperatable, result: bool): - stmt.inspect_add_on_final(lambda x: x.inspect_known_values() == PlainSet(result)) + stmt.inspect_add_on_solution(lambda x: x.inspect_known_values() == PlainSet(result)) def solve_and_test(G: Graph | Node, *stmts: ParameterOperatable): @@ -19,4 +19,4 @@ def solve_and_test(G: Graph | Node, *stmts: ParameterOperatable): solves_to(stmt, True) solver = DefaultSolver() - solver.finalize(G) + solver.find_and_lock_solution(G) From 08b8b89b82c396e2bea58b5ec3a8cb2441ba6f21 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Thu, 31 Oct 2024 10:38:06 +0100 Subject: [PATCH 69/80] explicit lock --- src/faebryk/core/solver.py | 12 ++++++------ src/faebryk/exporters/esphome/esphome.py | 2 +- src/faebryk/libs/picker/jlcpcb/jlcpcb.py | 4 +--- src/faebryk/libs/picker/picker.py | 4 +++- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index cc3105e0..dafa955b 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -30,9 +30,9 @@ class SolveResultSingle: def get_any_single( self, operatable: ParameterOperatable, + lock: bool, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, - constrain_result: 
bool = True, ) -> SolveResultSingle: """ Solve for a single value for the given expression. @@ -43,7 +43,7 @@ def get_any_single( easier. It is only in effect for the duration of the solve call. minimize: An optional expression to minimize while solving. - constrain_result: If True, ensure the result is part of the solution set of + lock: If True, ensure the result is part of the solution set of the expression. Returns: @@ -54,9 +54,9 @@ def get_any_single( def assert_any_predicate[ArgType]( self, predicates: list["Solver.PredicateWithInfo[ArgType]"], + lock: bool, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, - constrain_solved: bool = True, ) -> SolveResult[ArgType]: """ Make at least one of the passed predicates true, unless that is impossible. @@ -67,7 +67,7 @@ def assert_any_predicate[ArgType]( easier. It is only in effect for the duration of the solve call. minimize: An optional expression to minimize while solving. - constrain_solved: If True, add the solutions as constraints. + lock: If True, add the solutions as constraints. Returns: A SolveResult object containing the true, false, and unknown predicates. @@ -87,18 +87,18 @@ class DefaultSolver(Solver): def get_any_single( self, operatable: ParameterOperatable, + lock: bool, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, - constrain_result: bool = True, ): raise NotImplementedError() def assert_any_predicate[ArgType]( self, predicates: list["Solver.PredicateWithInfo[ArgType]"], + lock: bool, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, - constrain_solved: bool = True, ) -> Solver.SolveResult[ArgType]: raise NotImplementedError() diff --git a/src/faebryk/exporters/esphome/esphome.py b/src/faebryk/exporters/esphome/esphome.py index 4e8734d2..04afe799 100644 --- a/src/faebryk/exporters/esphome/esphome.py +++ b/src/faebryk/exporters/esphome/esphome.py @@ -25,7 +25,7 @@ def solve_parameter(v): if not isinstance(v, Parameter): return v - return str(cast_assert(Quantity, solver.get_any_single(v).value)) + return str(cast_assert(Quantity, solver.get_any_single(v, lock=True).value)) dict_value_visitor(esphome_config, lambda _, v: solve_parameter(v)) diff --git a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py index 74f07409..a6895f12 100644 --- a/src/faebryk/libs/picker/jlcpcb/jlcpcb.py +++ b/src/faebryk/libs/picker/jlcpcb/jlcpcb.py @@ -543,9 +543,7 @@ def filter_by_module_params( for m_param, c_range in param_mapping: anded &= m_param.operation_is_superset(c_range) - result = solver.assert_any_predicate( - [(anded, None)], constrain_solved=False - ) + result = solver.assert_any_predicate([(anded, None)], lock=False) if not result.true_predicates: known_incompatible = True diff --git a/src/faebryk/libs/picker/picker.py b/src/faebryk/libs/picker/picker.py index dddd53f7..061054d4 100644 --- a/src/faebryk/libs/picker/picker.py +++ b/src/faebryk/libs/picker/picker.py @@ -178,7 +178,9 @@ def pick_module_by_params( if len(predicates) == 0: raise PickErrorParams(module, list(options)) - solve_result = solver.assert_any_predicate([(p, k) for k, p in predicates.items()]) + solve_result = solver.assert_any_predicate( + [(p, k) for k, p in predicates.items()], lock=True + ) # TODO handle failure parameters From 17d896b1c9be2a496838cc18744d010b16e6fbf1 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Thu, 31 Oct 2024 10:48:41 +0100 Subject: [PATCH 70/80] SolverResult --- src/faebryk/core/solver.py | 29 
++++++++++++++++++------ src/faebryk/exporters/esphome/esphome.py | 4 +++- 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index dafa955b..6807a979 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -9,18 +9,33 @@ class Solver(Protocol): # TODO booleanlike is very permissive type PredicateWithInfo[ArgType] = tuple[ParameterOperatable.BooleanLike, ArgType] + class TimeoutError(Exception): ... + + @dataclass + class SolveResult: + timed_out: bool + @dataclass - class SolveResult[ArgType]: + class SolveResultAny[ArgType](SolveResult): true_predicates: list["Solver.PredicateWithInfo[ArgType]"] false_predicates: list["Solver.PredicateWithInfo[ArgType]"] unknown_predicates: list["Solver.PredicateWithInfo[ArgType]"] @dataclass - class SolveResultSingle: + class SolveResultSingle(SolveResult): # TODO thinkn about failure case value: Any # TODO Any -> NumberLike? # parameters_with_empty_solution_sets: list[Parameter] + def get(self) -> Any: + if self.timed_out: + raise Solver.TimeoutError() + return self.value + + @dataclass + class SolveResultAll(SolveResult): + has_solution: bool + # timeout per solve call in milliseconds timeout: int # threads: int @@ -57,7 +72,7 @@ def assert_any_predicate[ArgType]( lock: bool, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, - ) -> SolveResult[ArgType]: + ) -> SolveResultAny[ArgType]: """ Make at least one of the passed predicates true, unless that is impossible. @@ -78,7 +93,7 @@ def assert_any_predicate[ArgType]( ... # run deferred work - def find_and_lock_solution(self, G: Graph) -> None: ... + def find_and_lock_solution(self, G: Graph) -> SolveResultAll: ... class DefaultSolver(Solver): @@ -90,7 +105,7 @@ def get_any_single( lock: bool, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, - ): + ) -> Solver.SolveResultSingle: raise NotImplementedError() def assert_any_predicate[ArgType]( @@ -99,8 +114,8 @@ def assert_any_predicate[ArgType]( lock: bool, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, - ) -> Solver.SolveResult[ArgType]: + ) -> Solver.SolveResultAny[ArgType]: raise NotImplementedError() - def find_and_lock_solution(self, G: Graph) -> None: + def find_and_lock_solution(self, G: Graph) -> Solver.SolveResultAll: raise NotImplementedError() diff --git a/src/faebryk/exporters/esphome/esphome.py b/src/faebryk/exporters/esphome/esphome.py index 04afe799..d218cf24 100644 --- a/src/faebryk/exporters/esphome/esphome.py +++ b/src/faebryk/exporters/esphome/esphome.py @@ -25,7 +25,9 @@ def solve_parameter(v): if not isinstance(v, Parameter): return v - return str(cast_assert(Quantity, solver.get_any_single(v, lock=True).value)) + return str( + cast_assert(Quantity, solver.get_any_single(v, lock=True).value).get() + ) dict_value_visitor(esphome_config, lambda _, v: solve_parameter(v)) From e58ef002a853e315ac59b5f89717899eca5fb958 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Thu, 31 Oct 2024 16:24:50 +0100 Subject: [PATCH 71/80] Begin phase 1 impl: Alias classes --- src/faebryk/core/defaultsolver.py | 246 ++++++++++++++++++ src/faebryk/core/graphinterface.py | 8 + src/faebryk/core/node.py | 4 +- src/faebryk/core/parameter.py | 21 +- src/faebryk/core/solver.py | 56 ++-- src/faebryk/exporters/esphome/esphome.py | 4 +- .../parameters/parameters_to_file.py | 4 +- src/faebryk/library/BH1750FVI_TR.py | 4 +- src/faebryk/library/HLK_LD2410B_P.py | 2 +- 
src/faebryk/library/PM1006.py | 2 +- src/faebryk/library/SCD40.py | 2 +- src/faebryk/library/XL_3528RGBW_WS2812B.py | 2 +- src/faebryk/libs/examples/buildutil.py | 2 +- src/faebryk/libs/sets.py | 19 +- src/faebryk/libs/test/solver.py | 2 +- test/core/test_parameters.py | 35 ++- 16 files changed, 342 insertions(+), 71 deletions(-) create mode 100644 src/faebryk/core/defaultsolver.py diff --git a/src/faebryk/core/defaultsolver.py b/src/faebryk/core/defaultsolver.py new file mode 100644 index 00000000..afce30c7 --- /dev/null +++ b/src/faebryk/core/defaultsolver.py @@ -0,0 +1,246 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + +import logging +from statistics import median +from typing import Any + +from faebryk.core.graphinterface import Graph, GraphInterfaceSelf +from faebryk.core.parameter import ( + Expression, + Is, + Parameter, + ParameterOperatable, + Predicate, +) +from faebryk.core.solver import Solver +from faebryk.libs.sets import Ranges +from faebryk.libs.util import EquivalenceClasses + +logger = logging.getLogger(__name__) + + +def parameter_alias_classes(G: Graph) -> list[set[Parameter]]: + full_eq = EquivalenceClasses[Parameter](G.nodes_of_type(Parameter)) + + is_exprs = [e for e in G.nodes_of_type(Is) if e.constrained] + + for is_expr in is_exprs: + params_ops = [op for op in is_expr.operands if isinstance(op, Parameter)] + full_eq.add_eq(*params_ops) + + return full_eq.get() + + +def get_params_for_expr(expr: Expression) -> set[Parameter]: + param_ops = {op for op in expr.operatable_operands if isinstance(op, Parameter)} + expr_ops = {op for op in expr.operatable_operands if isinstance(op, Expression)} + + return param_ops | {op for e in expr_ops for op in get_params_for_expr(e)} + + +def get_constrained_predicates_involved_in( + p: Parameter | Expression, +) -> list[Predicate]: + # p.self -> p.operated_on -> e1.operates_on -> e1.self + dependants = p.bfs_node( + lambda path, _: isinstance(path[-1].node, ParameterOperatable) + and ( + # self + isinstance(path[-1], GraphInterfaceSelf) + # operated on + or path[-1].node.operated_on is path[-1] + # operated on -> operates on + or ( + len(path) >= 2 + and isinstance(path[-2].node, ParameterOperatable) + and path[-2].node.operated_on is path[-2] + and isinstance(path[-1].node, Expression) + and path[-1].node.operates_on is path[-1] + ) + ) + ) + return [p for p in dependants if isinstance(p, Predicate) and p.constrained] + + +def parameter_dependency_classes(G: Graph) -> list[set[Parameter]]: + related = EquivalenceClasses[Parameter](G.nodes_of_type(Parameter)) + + eq_exprs = [e for e in G.nodes_of_type(Predicate) if e.constrained] + + for eq_expr in eq_exprs: + params = get_params_for_expr(eq_expr) + related.add_eq(*params) + + return related.get() + + +def resolve_alias_classes(G: Graph) -> dict[ParameterOperatable, ParameterOperatable]: + params = [ + p + for p in G.nodes_of_type(Parameter) + if get_constrained_predicates_involved_in(p) + ] + exprs = G.nodes_of_type(Expression) + predicates = {e for e in exprs if isinstance(e, Predicate)} + exprs.difference_update(predicates) + + p_alias_classes = parameter_alias_classes(G) + dependency_classes = parameter_dependency_classes(G) + + infostr = ( + f"{len(params)} parameters" + f"\n {len(p_alias_classes)} alias classes" + f"\n {len(dependency_classes)} dependency classes" + "\n" + ) + logger.info("Phase 1 Solving: Alias classes") + logger.info(infostr) + + repr_map: dict[ParameterOperatable, ParameterOperatable] = {} + + part_of_class_params = {p 
for c in p_alias_classes for p in c} + not_part_of_class_params = set(params).difference(part_of_class_params) + + # Make new param repre for alias classes + for alias_class in p_alias_classes: + # single unit + unit_candidates = {p.units for p in alias_class} + if len(unit_candidates) > 1: + raise ValueError("Incompatible units in alias class") + + # single domain + domain_candidates = {p.domain for p in alias_class} + if len(domain_candidates) > 1: + raise ValueError("Incompatible domains in alias class") + + # intersect ranges + within_ranges = {p.within for p in alias_class if p.within is not None} + within = None + if within_ranges: + within = Ranges.op_intersect_ranges(*within_ranges) + + # heuristic: + # intersect soft sets + soft_sets = {p.soft_set for p in alias_class if p.soft_set is not None} + soft_set = None + if soft_sets: + soft_set = Ranges.op_intersect_ranges(*soft_sets) + + # heuristic: + # get median + guesses = {p.guess for p in alias_class if p.guess is not None} + guess = None + if guesses: + guess = median(guesses) # type: ignore + + # heuristic: + # max tolerance guess + tolerance_guesses = { + p.tolerance_guess for p in alias_class if p.tolerance_guess is not None + } + tolerance_guess = None + if tolerance_guesses: + tolerance_guess = max(tolerance_guesses) + + likely_constrained = any(p.likely_constrained for p in alias_class) + + representative = Parameter( + units=unit_candidates.pop(), + within=within, + soft_set=soft_set, + guess=guess, + tolerance_guess=tolerance_guess, + likely_constrained=likely_constrained, + ) + repr_map.update({p: representative for p in alias_class}) + + # copy non alias params (that are in use) + for p in not_part_of_class_params: + repr_map[p] = Parameter( + units=p.units, + within=p.within, + domain=p.domain, + soft_set=p.soft_set, + guess=p.guess, + tolerance_guess=p.tolerance_guess, + likely_constrained=p.likely_constrained, + ) + + # replace parameters in expressions and predicates + for expr in exprs | predicates: + + def try_replace(o: ParameterOperatable.All): + if not isinstance(o, ParameterOperatable): + return o + if o in repr_map: + return repr_map[o] + # TODO + raise Exception() + + # filter alias class Is + if isinstance(expr, Is): + if all(isinstance(o, Parameter) for o in expr.operands): + continue + + operands = [try_replace(o) for o in expr.operands] + new_expr = type(expr)(*operands) + logger.info(f"{expr}[{expr.operands}] ->\n {new_expr}[{new_expr.operands}]") + repr_map[expr] = new_expr + + return repr_map + + +class DefaultSolver(Solver): + timeout: int = 1000 + + def phase_one_no_guess_solving(self, G: Graph) -> None: + logger.info(f"Phase 1 Solving: No guesses {'-' * 80}") + + # strategies + # https://miro.com/app/board/uXjVLV3O2BQ=/ + # compress expressions inside alias classes + # x / y => x / x + # associativity + # (x + a) + b => x + a + b [for +,*] + # compress expressions that are using literals + # x + 1 + 5 => x + 6 + # x + 0 => x + # x * 1 => x + # x * 0 => 0 + # x / 1 => x + # compress calculatable expressions + # x / x => 1 + # x + x => 2*x + # x - x => 0 + + # as long as progress iterate + + repr_map = resolve_alias_classes(G) + + graphs = {p.get_graph() for p in repr_map.values()} + + logger.info(f"{len(graphs)} new graphs") + for s, d in repr_map.items(): + logger.info(f"{s} -> {d}") + + def get_any_single( + self, + operatable: ParameterOperatable, + lock: bool, + suppose_constraint: Predicate | None = None, + minimize: Expression | None = None, + ) -> Any: + raise NotImplementedError() + + def 
assert_any_predicate[ArgType]( + self, + predicates: list["Solver.PredicateWithInfo[ArgType]"], + lock: bool, + suppose_constraint: Predicate | None = None, + minimize: Expression | None = None, + ) -> Solver.SolveResultAny[ArgType]: + raise NotImplementedError() + + def find_and_lock_solution(self, G: Graph) -> Solver.SolveResultAll: + raise NotImplementedError() diff --git a/src/faebryk/core/graphinterface.py b/src/faebryk/core/graphinterface.py index 6b1f4dc5..08480948 100644 --- a/src/faebryk/core/graphinterface.py +++ b/src/faebryk/core/graphinterface.py @@ -68,6 +68,14 @@ def nodes_of_type[T: "Node"](self, t: type[T]) -> set[T]: def nodes_of_types(self, t: tuple[type["Node"], ...]) -> set["Node"]: return {n for n in self.node_projection() if isinstance(n, t)} + def __hash__(self) -> int: + return id(self()) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Graph): + return False + return self() == other() + class GraphInterface(FaebrykLibObject): GT = Graph diff --git a/src/faebryk/core/node.py b/src/faebryk/core/node.py index 39aac1d3..1dea158a 100644 --- a/src/faebryk/core/node.py +++ b/src/faebryk/core/node.py @@ -544,7 +544,7 @@ def get_parent(self): def get_name(self, accept_no_parent: bool = False): p = self.get_parent() if not p and accept_no_parent: - return f"<{hex(id(self))[-3:]}>" + return f"{hex(id(self))[-3:]}" if not p: raise NodeNoParent(self, "Parent required for name") return p[1] @@ -552,7 +552,7 @@ def get_name(self, accept_no_parent: bool = False): def get_hierarchy(self) -> list[tuple["Node", str]]: parent = self.get_parent() if not parent: - return [(self, f"<{hex(id(self))[-3:]}>")] + return [(self, f"{hex(id(self))[-3:]}")] parent_obj, name = parent return parent_obj.get_hierarchy() + [(self, name)] diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 51a0bd99..870be3e2 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -22,7 +22,7 @@ # boolean: T == S == bool # enum: T == S == Enum # number: T == Number type, S == Range[Number] -class ParameterOperatable: +class ParameterOperatable(Node): type QuantityLike = Quantity | Unit | NotImplementedType type Number = int | float | QuantityLike @@ -319,13 +319,14 @@ def constrain(self): @abstract -class Expression(Node, ParameterOperatable): +class Expression(ParameterOperatable): operates_on: GraphInterface def __init__(self, *operands: ParameterOperatable.All): super().__init__() + self.operands = operands self.operatable_operands = { - op for op in operands if isinstance(op, (Parameter, Expression)) + op for op in operands if isinstance(op, ParameterOperatable) } def __preinit__(self): @@ -725,7 +726,7 @@ class Set(Namespace): SYMMETRIC_DIFFERENCE = SymmetricDifference -class Parameter(Node, ParameterOperatable, Constrainable): +class Parameter(ParameterOperatable, Constrainable): class TraitT(Trait): ... 
def __init__( @@ -736,20 +737,25 @@ def __init__( within: Ranges | Range | None = None, domain: Domain = Numbers(negative=False), # soft constraints - soft_set: Range | None = None, + soft_set: Ranges | Range | None = None, guess: Quantity | int | float | None = None, # TODO actually allowed to be anything from domain - tolerance_guess: Quantity | None = None, + tolerance_guess: float | None = None, # hints likely_constrained: bool = False, - cardinality: int | None = None, ): super().__init__() if within is not None and not within.units.is_compatible_with(units): raise ValueError("incompatible units") + if isinstance(within, Range): + within = Ranges(within) + + if isinstance(soft_set, Range): + soft_set = Ranges(soft_set) + if not isinstance(units, Unit): raise TypeError("units must be a Unit") self.units = units @@ -759,7 +765,6 @@ def __init__( self.guess = guess self.tolerance_guess = tolerance_guess self.likely_constrained = likely_constrained - self.cardinality = cardinality # Type forwards type All = ParameterOperatable.All diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index 6807a979..d572ec4f 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -1,15 +1,27 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + +import logging from dataclasses import dataclass from typing import Any, Protocol -from faebryk.core.graph import Graph -from faebryk.core.parameter import Expression, ParameterOperatable, Predicate +from faebryk.core.graphinterface import Graph +from faebryk.core.parameter import ( + Expression, + ParameterOperatable, + Predicate, +) + +logger = logging.getLogger(__name__) class Solver(Protocol): # TODO booleanlike is very permissive type PredicateWithInfo[ArgType] = tuple[ParameterOperatable.BooleanLike, ArgType] - class TimeoutError(Exception): ... + class SolverError(Exception): ... + + class TimeoutError(SolverError): ... @dataclass class SolveResult: @@ -21,17 +33,6 @@ class SolveResultAny[ArgType](SolveResult): false_predicates: list["Solver.PredicateWithInfo[ArgType]"] unknown_predicates: list["Solver.PredicateWithInfo[ArgType]"] - @dataclass - class SolveResultSingle(SolveResult): - # TODO thinkn about failure case - value: Any # TODO Any -> NumberLike? - # parameters_with_empty_solution_sets: list[Parameter] - - def get(self) -> Any: - if self.timed_out: - raise Solver.TimeoutError() - return self.value - @dataclass class SolveResultAll(SolveResult): has_solution: bool @@ -48,7 +49,7 @@ def get_any_single( lock: bool, suppose_constraint: Predicate | None = None, minimize: Expression | None = None, - ) -> SolveResultSingle: + ) -> Any: """ Solve for a single value for the given expression. @@ -94,28 +95,3 @@ def assert_any_predicate[ArgType]( # run deferred work def find_and_lock_solution(self, G: Graph) -> SolveResultAll: ... 
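# --- Editorial illustration, not part of the patch. A hedged sketch of how a
# --- caller might consume the Solver protocol defined above: it feeds a list
# --- of (predicate, info) pairs to assert_any_predicate() and inspects the
# --- SolveResultAny that comes back. `pick_first_satisfiable` and `candidates`
# --- are invented names; only the Solver API itself comes from this patch.
from faebryk.core.solver import Solver


def pick_first_satisfiable(solver: Solver, candidates):
    """candidates: list of (predicate_expression, caller_info) tuples."""
    result = solver.assert_any_predicate(list(candidates), lock=False)
    if result.timed_out:
        raise Solver.TimeoutError()
    if not result.true_predicates:
        return None  # none of the predicates can be made true
    _predicate, info = result.true_predicates[0]
    return info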
- - -class DefaultSolver(Solver): - timeout: int = 1000 - - def get_any_single( - self, - operatable: ParameterOperatable, - lock: bool, - suppose_constraint: Predicate | None = None, - minimize: Expression | None = None, - ) -> Solver.SolveResultSingle: - raise NotImplementedError() - - def assert_any_predicate[ArgType]( - self, - predicates: list["Solver.PredicateWithInfo[ArgType]"], - lock: bool, - suppose_constraint: Predicate | None = None, - minimize: Expression | None = None, - ) -> Solver.SolveResultAny[ArgType]: - raise NotImplementedError() - - def find_and_lock_solution(self, G: Graph) -> Solver.SolveResultAll: - raise NotImplementedError() diff --git a/src/faebryk/exporters/esphome/esphome.py b/src/faebryk/exporters/esphome/esphome.py index d218cf24..04afe799 100644 --- a/src/faebryk/exporters/esphome/esphome.py +++ b/src/faebryk/exporters/esphome/esphome.py @@ -25,9 +25,7 @@ def solve_parameter(v): if not isinstance(v, Parameter): return v - return str( - cast_assert(Quantity, solver.get_any_single(v, lock=True).value).get() - ) + return str(cast_assert(Quantity, solver.get_any_single(v, lock=True).value)) dict_value_visitor(esphome_config, lambda _, v: solve_parameter(v)) diff --git a/src/faebryk/exporters/parameters/parameters_to_file.py b/src/faebryk/exporters/parameters/parameters_to_file.py index 1bb68485..678883f0 100644 --- a/src/faebryk/exporters/parameters/parameters_to_file.py +++ b/src/faebryk/exporters/parameters/parameters_to_file.py @@ -32,7 +32,7 @@ def get_params_for_expr(expr: Expression) -> set[Parameter]: return param_ops | {op for e in expr_ops for op in get_params_for_expr(e)} -def parameter_equivalence_classes(G: Graph) -> list[set[Parameter]]: +def parameter_dependency_classes(G: Graph) -> list[set[Parameter]]: related = EquivalenceClasses[Parameter](G.nodes_of_type(Parameter)) eq_exprs = [e for e in G.nodes_of_type(Predicate) if e.constrained] @@ -50,7 +50,7 @@ def parameter_report(G: Graph, path: Path): predicates = {e for e in exprs if isinstance(e, Predicate)} exprs.difference_update(predicates) alias_classes = parameter_alias_classes(G) - eq_classes = parameter_equivalence_classes(G) + eq_classes = parameter_dependency_classes(G) unused = [ p for p in params diff --git a/src/faebryk/library/BH1750FVI_TR.py b/src/faebryk/library/BH1750FVI_TR.py index a9a235f7..3a151c04 100644 --- a/src/faebryk/library/BH1750FVI_TR.py +++ b/src/faebryk/library/BH1750FVI_TR.py @@ -17,11 +17,9 @@ class _bh1750_esphome_config(F.has_esphome_config.impl()): units=P.s, soft_set=L.Range(100 * P.ms, 1 * P.day), guess=1 * P.s, + tolerance_guess=0, ) - def __preinit__(self): - self.update_interval.constrain_cardinality(1) - def get_config(self) -> dict: obj = self.obj assert isinstance(obj, BH1750FVI_TR) diff --git a/src/faebryk/library/HLK_LD2410B_P.py b/src/faebryk/library/HLK_LD2410B_P.py index 7e95e035..500cd048 100644 --- a/src/faebryk/library/HLK_LD2410B_P.py +++ b/src/faebryk/library/HLK_LD2410B_P.py @@ -12,7 +12,7 @@ class _ld2410b_esphome_config(F.has_esphome_config.impl()): throttle = L.p_field( units=P.ms, soft_set=L.Range(10 * P.ms, 1000 * P.ms), - cardinality=1, + tolerance_guess=0, ) def get_config(self) -> dict: diff --git a/src/faebryk/library/PM1006.py b/src/faebryk/library/PM1006.py index c312d552..056882b2 100644 --- a/src/faebryk/library/PM1006.py +++ b/src/faebryk/library/PM1006.py @@ -26,7 +26,7 @@ class PM1006(Module): """ class _pm1006_esphome_config(F.has_esphome_config.impl()): - update_interval = L.p_field(units=P.s, cardinality=1) + update_interval = 
L.p_field(units=P.s, tolerance_guess=0) def get_config(self) -> dict: obj = self.obj diff --git a/src/faebryk/library/SCD40.py b/src/faebryk/library/SCD40.py index 0ccc4f3f..e3b4591a 100644 --- a/src/faebryk/library/SCD40.py +++ b/src/faebryk/library/SCD40.py @@ -14,7 +14,7 @@ class SCD40(Module): """ class _scd4x_esphome_config(F.has_esphome_config.impl()): - update_interval = L.p_field(units=P.s, cardinality=1) + update_interval = L.p_field(units=P.s, tolerance_guess=0) def get_config(self) -> dict: obj = self.get_obj(SCD40) diff --git a/src/faebryk/library/XL_3528RGBW_WS2812B.py b/src/faebryk/library/XL_3528RGBW_WS2812B.py index d48d91ea..c90c6379 100644 --- a/src/faebryk/library/XL_3528RGBW_WS2812B.py +++ b/src/faebryk/library/XL_3528RGBW_WS2812B.py @@ -9,7 +9,7 @@ class XL_3528RGBW_WS2812B(Module): class _ws2812b_esphome_config(F.has_esphome_config.impl()): - update_interval = L.p_field(units=P.s, cardinality=1) + update_interval = L.p_field(units=P.s, tolerance_guess=0) def get_config(self) -> dict: obj = self.get_obj(XL_3528RGBW_WS2812B) diff --git a/src/faebryk/libs/examples/buildutil.py b/src/faebryk/libs/examples/buildutil.py index a0ee276c..691d1ab3 100644 --- a/src/faebryk/libs/examples/buildutil.py +++ b/src/faebryk/libs/examples/buildutil.py @@ -7,8 +7,8 @@ from typing import Callable import faebryk.libs.picker.lcsc as lcsc +from faebryk.core.defaultsolver import DefaultSolver from faebryk.core.module import Module -from faebryk.core.solver import DefaultSolver from faebryk.exporters.pcb.kicad.transformer import PCB_Transformer from faebryk.libs.app.checks import run_checks from faebryk.libs.app.pcb import apply_design diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 2e907d84..d2e73036 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -626,12 +626,21 @@ def op_intersect_range(self, other: "Range[QuantityT]") -> "Ranges[QuantityT]": return Ranges._from_ranges(_range, self.units) def op_intersect_ranges( - self, other: "NonIterableRanges[QuantityT]" + self, *other: "NonIterableRanges[QuantityT]" ) -> "Ranges[QuantityT]": - if not self.units.is_compatible_with(other.units): - raise ValueError("incompatible units") - _range = self._ranges.op_intersect_ranges(other._ranges) - return Ranges._from_ranges(_range, self.units) + # TODO make pretty + def single(left, right): + if not left.units.is_compatible_with(right.units): + raise ValueError("incompatible units") + _range = left._ranges.op_intersect_ranges(right._ranges) + return Ranges._from_ranges(_range, left.units) + + out = Ranges(self) + + for o in other: + out = single(out, o) + + return out def op_union_ranges( self, other: "NonIterableRanges[QuantityT]" diff --git a/src/faebryk/libs/test/solver.py b/src/faebryk/libs/test/solver.py index 110b675b..56d4dd1e 100644 --- a/src/faebryk/libs/test/solver.py +++ b/src/faebryk/libs/test/solver.py @@ -1,9 +1,9 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +from faebryk.core.defaultsolver import DefaultSolver from faebryk.core.graphinterface import Graph from faebryk.core.node import Node from faebryk.core.parameter import ParameterOperatable -from faebryk.core.solver import DefaultSolver from faebryk.libs.sets import PlainSet diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index 1d6ae3a9..c871e52d 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -7,9 +7,12 @@ import pytest import faebryk.library._F as F +from faebryk.core.defaultsolver import DefaultSolver 
+from faebryk.core.module import Module from faebryk.core.node import Node from faebryk.core.parameter import Parameter from faebryk.libs.library import L +from faebryk.libs.logging import setup_basic_logging from faebryk.libs.sets import Range from faebryk.libs.units import P from faebryk.libs.util import times @@ -26,6 +29,31 @@ def test_new_definitions(): ) +def test_solve_phase_one(): + solver = DefaultSolver() + + def Voltage(): + return L.p_field(units=P.V, within=Range(0 * P.V, 10 * P.kV)) + + class App(Module): + voltage1 = Voltage() + voltage2 = Voltage() + voltage3 = Voltage() + + app = App() + voltage1 = app.voltage1 + voltage2 = app.voltage2 + voltage3 = app.voltage3 + + voltage1.alias_is(voltage2) + voltage3.alias_is(voltage1 + voltage2) + + voltage1.alias_is(Range(1 * P.V, 3 * P.V)) + voltage3.alias_is(Range(4 * P.V, 6 * P.V)) + + solver.phase_one_no_guess_solving(voltage1.get_graph()) + + def test_visualize(): """ Creates webserver that opens automatically if run in jupyter notebook @@ -82,9 +110,12 @@ def test_visualize_inspect_app(): # if run in jupyter notebook import sys + func = test_solve_phase_one + if "ipykernel" in sys.modules: - test_visualize() + func() else: import typer - typer.run(test_visualize) + setup_basic_logging() + typer.run(func) From 28f1f538fafc9c15681beaa63dedbfd1bbfa14f5 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Thu, 31 Oct 2024 16:28:17 +0100 Subject: [PATCH 72/80] check graph copy --- src/faebryk/core/defaultsolver.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/faebryk/core/defaultsolver.py b/src/faebryk/core/defaultsolver.py index afce30c7..98df9e81 100644 --- a/src/faebryk/core/defaultsolver.py +++ b/src/faebryk/core/defaultsolver.py @@ -219,6 +219,7 @@ def phase_one_no_guess_solving(self, G: Graph) -> None: repr_map = resolve_alias_classes(G) graphs = {p.get_graph() for p in repr_map.values()} + assert G not in graphs logger.info(f"{len(graphs)} new graphs") for s, d in repr_map.items(): From a8645ddb8400188d9af1a9b6e32eaa369e3180d5 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Thu, 31 Oct 2024 16:42:03 +0100 Subject: [PATCH 73/80] only solve used params --- src/faebryk/core/defaultsolver.py | 34 +++++++++++++++---------------- test/core/test_parameters.py | 8 +++++++- 2 files changed, 24 insertions(+), 18 deletions(-) diff --git a/src/faebryk/core/defaultsolver.py b/src/faebryk/core/defaultsolver.py index 98df9e81..c8e046b9 100644 --- a/src/faebryk/core/defaultsolver.py +++ b/src/faebryk/core/defaultsolver.py @@ -21,7 +21,13 @@ def parameter_alias_classes(G: Graph) -> list[set[Parameter]]: - full_eq = EquivalenceClasses[Parameter](G.nodes_of_type(Parameter)) + # TODO just get passed + params = [ + p + for p in G.nodes_of_type(Parameter) + if get_constrained_predicates_involved_in(p) + ] + full_eq = EquivalenceClasses[Parameter](params) is_exprs = [e for e in G.nodes_of_type(Is) if e.constrained] @@ -64,7 +70,14 @@ def get_constrained_predicates_involved_in( def parameter_dependency_classes(G: Graph) -> list[set[Parameter]]: - related = EquivalenceClasses[Parameter](G.nodes_of_type(Parameter)) + # TODO just get passed + params = [ + p + for p in G.nodes_of_type(Parameter) + if get_constrained_predicates_involved_in(p) + ] + + related = EquivalenceClasses[Parameter](params) eq_exprs = [e for e in G.nodes_of_type(Predicate) if e.constrained] @@ -99,11 +112,10 @@ def resolve_alias_classes(G: Graph) -> dict[ParameterOperatable, ParameterOperat repr_map: dict[ParameterOperatable, ParameterOperatable] = {} - 
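# --- Editorial illustration, not part of the patch. A small sketch of the
# --- union-find style grouping that parameter_alias_classes() relies on,
# --- assuming EquivalenceClasses from faebryk.libs.util is seeded with all
# --- elements, merges via add_eq() and returns the classes via get(). The
# --- string elements stand in for real Parameter nodes.
from faebryk.libs.util import EquivalenceClasses

params = ["v1", "v2", "v3", "v4"]
eq = EquivalenceClasses(params)
eq.add_eq("v1", "v2")  # e.g. a constrained Is(v1, v2)
eq.add_eq("v2", "v3")  # aliasing is transitive: v1, v2, v3 merge
classes = eq.get()     # roughly [{"v1", "v2", "v3"}, {"v4"}] (order unspecified)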
part_of_class_params = {p for c in p_alias_classes for p in c} - not_part_of_class_params = set(params).difference(part_of_class_params) - # Make new param repre for alias classes for alias_class in p_alias_classes: + # TODO short-cut if len() == 1 + # single unit unit_candidates = {p.units for p in alias_class} if len(unit_candidates) > 1: @@ -155,18 +167,6 @@ def resolve_alias_classes(G: Graph) -> dict[ParameterOperatable, ParameterOperat ) repr_map.update({p: representative for p in alias_class}) - # copy non alias params (that are in use) - for p in not_part_of_class_params: - repr_map[p] = Parameter( - units=p.units, - within=p.within, - domain=p.domain, - soft_set=p.soft_set, - guess=p.guess, - tolerance_guess=p.tolerance_guess, - likely_constrained=p.likely_constrained, - ) - # replace parameters in expressions and predicates for expr in exprs | predicates: diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index c871e52d..c3b2f256 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -54,6 +54,12 @@ class App(Module): solver.phase_one_no_guess_solving(voltage1.get_graph()) +def test_solve_realworld(): + app = F.RP2040() + solver = DefaultSolver() + solver.phase_one_no_guess_solving(app.get_graph()) + + def test_visualize(): """ Creates webserver that opens automatically if run in jupyter notebook @@ -110,7 +116,7 @@ def test_visualize_inspect_app(): # if run in jupyter notebook import sys - func = test_solve_phase_one + func = test_solve_realworld if "ipykernel" in sys.modules: func() From 21d041fc81c2329b788b6147282cd487d36bc585 Mon Sep 17 00:00:00 2001 From: iopapamanoglou Date: Thu, 31 Oct 2024 17:35:36 +0100 Subject: [PATCH 74/80] wip: compress assoc --- src/faebryk/core/defaultsolver.py | 79 ++++++++++++++++++++++++++++++- src/faebryk/core/parameter.py | 11 +++++ test/core/test_parameters.py | 19 +++++++- 3 files changed, 106 insertions(+), 3 deletions(-) diff --git a/src/faebryk/core/defaultsolver.py b/src/faebryk/core/defaultsolver.py index c8e046b9..d3b9345f 100644 --- a/src/faebryk/core/defaultsolver.py +++ b/src/faebryk/core/defaultsolver.py @@ -3,12 +3,16 @@ import logging from statistics import median -from typing import Any +from typing import Any, cast + +from more_itertools import partition from faebryk.core.graphinterface import Graph, GraphInterfaceSelf from faebryk.core.parameter import ( + Add, Expression, Is, + Multiply, Parameter, ParameterOperatable, Predicate, @@ -97,6 +101,7 @@ def resolve_alias_classes(G: Graph) -> dict[ParameterOperatable, ParameterOperat exprs = G.nodes_of_type(Expression) predicates = {e for e in exprs if isinstance(e, Predicate)} exprs.difference_update(predicates) + exprs = {e for e in exprs if get_constrained_predicates_involved_in(e)} p_alias_classes = parameter_alias_classes(G) dependency_classes = parameter_dependency_classes(G) @@ -191,6 +196,73 @@ def try_replace(o: ParameterOperatable.All): return repr_map +def copy_pop(o: ParameterOperatable) -> ParameterOperatable: + if isinstance(o, Expression): + return type(o)(*o.operands) + elif isinstance(o, Parameter): + return Parameter( + units=o.units, + within=o.within, + domain=o.domain, + soft_set=o.soft_set, + guess=o.guess, + tolerance_guess=o.tolerance_guess, + likely_constrained=o.likely_constrained, + ) + else: + raise Exception() + + +def compress_associative_expressions( + G: Graph, +) -> dict[ParameterOperatable, ParameterOperatable]: + exprs = cast(set[Add | Multiply], G.nodes_of_types((Add, Multiply))) + exprs = {e for e in 
exprs if get_constrained_predicates_involved_in(e)} + # get out deepest expr in compressable tree + exprs = {e for e in exprs if type(e) not in {type(n) for n in e.get_operations()}} + + repr_map: dict[ParameterOperatable, ParameterOperatable] = {} + + # (A + B) + C + # X -> Y + # compress(Y) + # compress(X) -> [A, B] + # -> [A, B, C] + + def get_operands_of_ops_with_same_type[T: Add | Multiply](e: T) -> list[T]: + operands = e.operands + noncomp, compressable = partition(lambda o: type(o) is type(e), operands) + out = [] + for c in compressable: + out += get_operands_of_ops_with_same_type(c) + return out + list(noncomp) + + for expr in exprs: + operands = get_operands_of_ops_with_same_type(expr) + # copy + for o in operands: + repr_map[o] = copy_pop(o) + + # make new compressed expr with (copied) operands + new_expr = type(expr)( + *( + repr_map[o] if isinstance(o, ParameterOperatable) else o + for o in operands + ) + ) + repr_map[expr] = new_expr + + # copy other param ops + other_param_op = { + p + for p in G.nodes_of_type(ParameterOperatable) + if p not in repr_map and p not in exprs + } + repr_map.update({p: copy_pop(p) for p in other_param_op}) + + return repr_map + + class DefaultSolver(Solver): timeout: int = 1000 @@ -217,10 +289,13 @@ def phase_one_no_guess_solving(self, G: Graph) -> None: # as long as progress iterate repr_map = resolve_alias_classes(G) - graphs = {p.get_graph() for p in repr_map.values()} assert G not in graphs + logger.info(f"{len(graphs)} new graphs") + repr_map = compress_associative_expressions(G) + graphs = {p.get_graph() for p in repr_map.values()} + assert G not in graphs logger.info(f"{len(graphs)} new graphs") for s, d in repr_map.items(): logger.info(f"{s} -> {d}") diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 870be3e2..04211c53 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -39,6 +39,9 @@ class ParameterOperatable(Node): operated_on: GraphInterface + def get_operations(self) -> set["Expression"]: + return self.operated_on.get_connected_nodes(types=Expression) + def operation_add(self, other: NumberLike): return Add(self, other) @@ -333,6 +336,14 @@ def __preinit__(self): for op in self.operatable_operands: self.operates_on.connect(op.operated_on) + def get_operatable_operands(self) -> set[ParameterOperatable]: + return self.operates_on.get_connected_nodes(types=ParameterOperatable) + + @staticmethod + def topo_sort(exprs: Iterable["Expression"]) -> list["Expression"]: + # TODO + raise NotImplementedError() + @abstract class ConstrainableExpression(Expression, Constrainable): diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index c3b2f256..37611cfb 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -54,6 +54,23 @@ class App(Module): solver.phase_one_no_guess_solving(voltage1.get_graph()) +def test_assoc_compress(): + class App(Module): + ops = L.list_field(10, lambda: Parameter(units=P.V)) + + app = App() + + sum_ = app.ops[0] + for p in app.ops[1:]: + sum_ += p + + (sum_ < 11 * P.V).constrain() + + G = sum_.get_graph() + solver = DefaultSolver() + solver.phase_one_no_guess_solving(G) + + def test_solve_realworld(): app = F.RP2040() solver = DefaultSolver() @@ -116,7 +133,7 @@ def test_visualize_inspect_app(): # if run in jupyter notebook import sys - func = test_solve_realworld + func = test_assoc_compress if "ipykernel" in sys.modules: func() From 4d13797f5892525aa8485ebcd782a83069a6d865 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r 
<20768237+NoR8quoh1r@users.noreply.github.com> Date: Fri, 1 Nov 2024 22:16:20 +0100 Subject: [PATCH 75/80] expression simplification --- src/faebryk/core/defaultsolver.py | 309 +++++++++++++++++++++++++----- src/faebryk/core/parameter.py | 26 ++- src/faebryk/libs/sets.py | 14 ++ test/core/test_parameters.py | 29 ++- 4 files changed, 311 insertions(+), 67 deletions(-) diff --git a/src/faebryk/core/defaultsolver.py b/src/faebryk/core/defaultsolver.py index d3b9345f..f2463e11 100644 --- a/src/faebryk/core/defaultsolver.py +++ b/src/faebryk/core/defaultsolver.py @@ -1,6 +1,7 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +from collections.abc import Iterable import logging from statistics import median from typing import Any, cast @@ -10,12 +11,16 @@ from faebryk.core.graphinterface import Graph, GraphInterfaceSelf from faebryk.core.parameter import ( Add, + Arithmetic, + Constrainable, Expression, Is, Multiply, Parameter, ParameterOperatable, + Power, Predicate, + Subtract, ) from faebryk.core.solver import Solver from faebryk.libs.sets import Ranges @@ -70,7 +75,8 @@ def get_constrained_predicates_involved_in( ) ) ) - return [p for p in dependants if isinstance(p, Predicate) and p.constrained] + res = [p for p in dependants if isinstance(p, Predicate) and p.constrained] + return res def parameter_dependency_classes(G: Graph) -> list[set[Parameter]]: @@ -92,7 +98,20 @@ def parameter_dependency_classes(G: Graph) -> list[set[Parameter]]: return related.get() -def resolve_alias_classes(G: Graph) -> dict[ParameterOperatable, ParameterOperatable]: +# TODO make part of Expression class +def create_new_expr( + old_expr: Expression, *operands: ParameterOperatable.All +) -> Expression: + new_expr = type(old_expr)(*operands) + if isinstance(old_expr, Constrainable): + cast(Constrainable, new_expr).constrained = old_expr.constrained + return new_expr + + +def resolve_alias_classes( + G: Graph, +) -> tuple[dict[ParameterOperatable, ParameterOperatable], bool]: + dirty = False params = [ p for p in G.nodes_of_type(Parameter) @@ -120,6 +139,8 @@ def resolve_alias_classes(G: Graph) -> dict[ParameterOperatable, ParameterOperat # Make new param repre for alias classes for alias_class in p_alias_classes: # TODO short-cut if len() == 1 + if len(alias_class) > 1: + dirty = True # single unit unit_candidates = {p.units for p in alias_class} @@ -173,14 +194,16 @@ def resolve_alias_classes(G: Graph) -> dict[ParameterOperatable, ParameterOperat repr_map.update({p: representative for p in alias_class}) # replace parameters in expressions and predicates - for expr in exprs | predicates: + for expr in cast( + Iterable[Expression], + ParameterOperatable.sort_by_depth(exprs | predicates, ascending=True), + ): def try_replace(o: ParameterOperatable.All): if not isinstance(o, ParameterOperatable): return o if o in repr_map: return repr_map[o] - # TODO raise Exception() # filter alias class Is @@ -189,37 +212,53 @@ def try_replace(o: ParameterOperatable.All): continue operands = [try_replace(o) for o in expr.operands] - new_expr = type(expr)(*operands) + new_expr = create_new_expr(expr, *operands) logger.info(f"{expr}[{expr.operands}] ->\n {new_expr}[{new_expr.operands}]") repr_map[expr] = new_expr - return repr_map + return repr_map, dirty -def copy_pop(o: ParameterOperatable) -> ParameterOperatable: +def copy_param(p: Parameter) -> Parameter: + return Parameter( + units=p.units, + within=p.within, + domain=p.domain, + soft_set=p.soft_set, + guess=p.guess, + tolerance_guess=p.tolerance_guess, 
+ likely_constrained=p.likely_constrained, + ) + + +def copy_pop( + o: ParameterOperatable.All, repr_map: dict[ParameterOperatable, ParameterOperatable] +) -> ParameterOperatable.All: + if o in repr_map: + return repr_map[o] if isinstance(o, Expression): - return type(o)(*o.operands) - elif isinstance(o, Parameter): - return Parameter( - units=o.units, - within=o.within, - domain=o.domain, - soft_set=o.soft_set, - guess=o.guess, - tolerance_guess=o.tolerance_guess, - likely_constrained=o.likely_constrained, + return create_new_expr( + o, + *( + repr_map[op] if op in repr_map else copy_pop(op, repr_map) + for op in o.operands + ), ) + elif isinstance(o, Parameter): + return copy_param(o) else: - raise Exception() + return o def compress_associative_expressions( G: Graph, -) -> dict[ParameterOperatable, ParameterOperatable]: - exprs = cast(set[Add | Multiply], G.nodes_of_types((Add, Multiply))) - exprs = {e for e in exprs if get_constrained_predicates_involved_in(e)} +) -> tuple[dict[ParameterOperatable, ParameterOperatable], bool]: + dirty = False + add_muls = cast(set[Add | Multiply], G.nodes_of_types((Add, Multiply))) # get out deepest expr in compressable tree - exprs = {e for e in exprs if type(e) not in {type(n) for n in e.get_operations()}} + parent_add_muls = { + e for e in add_muls if type(e) not in {type(n) for n in e.get_operations()} + } repr_map: dict[ParameterOperatable, ParameterOperatable] = {} @@ -229,44 +268,173 @@ def compress_associative_expressions( # compress(X) -> [A, B] # -> [A, B, C] - def get_operands_of_ops_with_same_type[T: Add | Multiply](e: T) -> list[T]: + def flatten_operands_of_ops_with_same_type[T: Add | Multiply]( + e: T, + ) -> tuple[list[T], bool]: + dirty = False operands = e.operands noncomp, compressable = partition(lambda o: type(o) is type(e), operands) out = [] for c in compressable: - out += get_operands_of_ops_with_same_type(c) - return out + list(noncomp) - - for expr in exprs: - operands = get_operands_of_ops_with_same_type(expr) + dirty = True + if c in repr_map: + out.append(repr_map[c]) + else: + sub_out, sub_dirty = flatten_operands_of_ops_with_same_type(c) + dirty |= sub_dirty + out += sub_out + return out + list(noncomp), dirty + + for expr in cast( + Iterable[Add | Multiply], + ParameterOperatable.sort_by_depth(parent_add_muls, ascending=True), + ): + operands, sub_dirty = flatten_operands_of_ops_with_same_type(expr) + dirty |= sub_dirty # copy for o in operands: - repr_map[o] = copy_pop(o) + if isinstance(o, ParameterOperatable): + repr_map[o] = copy_pop(o, repr_map) # make new compressed expr with (copied) operands - new_expr = type(expr)( + new_expr = create_new_expr( + expr, *( - repr_map[o] if isinstance(o, ParameterOperatable) else o + repr_map[o] if o in repr_map else copy_pop(o, repr_map) for o in operands - ) + ), ) repr_map[expr] = new_expr # copy other param ops - other_param_op = { - p - for p in G.nodes_of_type(ParameterOperatable) - if p not in repr_map and p not in exprs - } - repr_map.update({p: copy_pop(p) for p in other_param_op}) + other_param_op = ParameterOperatable.sort_by_depth( + ( + p + for p in G.nodes_of_type(ParameterOperatable) + if p not in repr_map and p not in add_muls + ), + ascending=True, + ) + remaining_param_op = {p: copy_pop(p, repr_map) for p in other_param_op} + repr_map.update(remaining_param_op) + + return repr_map, dirty + + +def compress_arithmetic_expressions( + G: Graph, +) -> tuple[dict[ParameterOperatable, ParameterOperatable], bool]: + dirty = False + arith_exprs = cast(set[Arithmetic], 
G.nodes_of_type(Arithmetic)) + + repr_map: dict[ParameterOperatable, ParameterOperatable] = {} + + for expr in cast( + Iterable[Arithmetic], + ParameterOperatable.sort_by_depth(arith_exprs, ascending=True), + ): + operands = expr.operands + const_ops, nonconst_ops = partition( + lambda o: isinstance(o, ParameterOperatable), operands + ) + multiplicity = {} + has_multiplicity = False + for n in nonconst_ops: + if n in multiplicity: + has_multiplicity = True + multiplicity[n] += 1 + else: + multiplicity[n] = 1 + + if isinstance(expr, Add): + try: + const_sum = [next(const_ops)] + for c in const_ops: + dirty = True + const_sum[0] += c + if const_sum[0] == 0 * expr.units: # TODO make work with all the types + dirty = True + const_sum = [] + except StopIteration: + const_sum = [] + nonconst_prod = { + n: Multiply(n, m) if m > 1 else copy_pop(n, repr_map) + for n, m in multiplicity.items() + } + new_operands = (*nonconst_prod.values(), *const_sum) + if len(new_operands) > 1: + new_expr = Add(*new_operands) + elif len(new_operands) == 1: + new_expr = new_operands[0] + else: + raise ValueError("No operands, should not happen") + repr_map.update(nonconst_prod) + repr_map[expr] = new_expr + + elif isinstance(expr, Multiply): + try: + const_prod = [next(const_ops)] + for c in const_ops: + dirty = True + const_prod[0] *= c + if const_prod[0] == 1 * expr.units: # TODO make work with all the types + dirty = True + const_prod = [] + except StopIteration: + const_prod = [] + if ( + len(const_prod) == 1 and const_prod[0] == 0 * expr.units + ): # TODO make work with all the types + dirty = True + repr_map[expr] = 0 * expr.units + else: + nonconst_prod = { + n: Power(n, m) if m > 1 else copy_pop(n, repr_map) + for n, m in multiplicity.items() + } + if has_multiplicity: + dirty = True + new_operands = (*nonconst_prod.values(), *const_prod) + if len(new_operands) > 1: + new_expr = Multiply(*new_operands) + elif len(new_operands) == 1: + new_expr = new_operands[0] + else: + raise ValueError("No operands, should not happen") + repr_map.update(nonconst_prod) + repr_map[expr] = new_expr + elif isinstance(expr, Subtract): + if expr.operands[0] is expr.operands[1]: + dirty = True + repr_map[expr] = 0 * expr.units + elif len(const_ops) == 2: + dirty = True + repr_map[expr] = expr.operands[0] - expr.operands[1] + else: + repr_map[expr] = copy_pop(expr, repr_map) + else: + repr_map[expr] = copy_pop(expr, repr_map) + + other_param_op = ParameterOperatable.sort_by_depth( + ( + p + for p in G.nodes_of_type(ParameterOperatable) + if p not in repr_map and p not in arith_exprs + ), + ascending=True, + ) + remaining_param_op = {p: copy_pop(p, repr_map) for p in other_param_op} + repr_map.update(remaining_param_op) - return repr_map + return { + k: v for k, v in repr_map.items() if isinstance(v, ParameterOperatable) + }, dirty class DefaultSolver(Solver): timeout: int = 1000 - def phase_one_no_guess_solving(self, G: Graph) -> None: + def phase_one_no_guess_solving(self, g: Graph) -> None: logger.info(f"Phase 1 Solving: No guesses {'-' * 80}") # strategies @@ -285,20 +453,59 @@ def phase_one_no_guess_solving(self, G: Graph) -> None: # x / x => 1 # x + x => 2*x # x - x => 0 + # x * x => x^2 + # k*x + l*x => (k+l)*x + # sqrt(x^2) => abs(x) + # sqrt(x) * sqrt(x) => x # as long as progress iterate - repr_map = resolve_alias_classes(G) - graphs = {p.get_graph() for p in repr_map.values()} - assert G not in graphs - logger.info(f"{len(graphs)} new graphs") - - repr_map = compress_associative_expressions(G) - graphs = {p.get_graph() 
for p in repr_map.values()} - assert G not in graphs - logger.info(f"{len(graphs)} new graphs") - for s, d in repr_map.items(): - logger.info(f"{s} -> {d}") + graphs = {g} + dirty = True + iter = 0 + + while dirty: + iter += 1 + logger.info(f"Iteration {iter}") + repr_map = {} + for g in graphs: + alias_repr_map, alias_dirty = resolve_alias_classes(g) + repr_map.update(alias_repr_map) + graphs = {p.get_graph() for p in repr_map.values()} + for g in graphs: + logger.info(f"G: {g}") + logger.info(f"{len(graphs)} new graphs") + # TODO assert all new graphs + + logger.info("Phase 2 Solving: Associative expressions") + repr_map = {} + for g in graphs: + assoc_repr_map, assoc_dirty = compress_associative_expressions(g) + repr_map.update(assoc_repr_map) + for s, d in repr_map.items(): + if isinstance(s, Expression): + logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}]") + else: + logger.info(f"{s} -> {d}") + graphs = {p.get_graph() for p in repr_map.values()} + logger.info(f"{len(graphs)} new graphs") + # TODO assert all new graphs + + logger.info("Phase 3 Solving: Arithmetic expressions") + repr_map = {} + for g in graphs: + arith_repr_map, arith_dirty = compress_arithmetic_expressions(g) + repr_map.update(arith_repr_map) + for s, d in repr_map.items(): + if isinstance(s, Expression): + logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}] | G: {id(g)}") + else: + logger.info(f"{s} -> {d} | G: {id(g)}") + graphs = {p.get_graph() for p in repr_map.values()} + logger.info(f"{len(graphs)} new graphs") + # TODO assert all new graphs + + dirty = alias_dirty or assoc_dirty or arith_dirty def get_any_single( self, diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 04211c53..b69d7572 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -42,6 +42,17 @@ class ParameterOperatable(Node): def get_operations(self) -> set["Expression"]: return self.operated_on.get_connected_nodes(types=Expression) + @staticmethod + def sort_by_depth( + exprs: Iterable["ParameterOperatable"], ascending: bool + ) -> list["ParameterOperatable"]: + def key(e: ParameterOperatable): + if isinstance(e, Expression): + return e.depth() + return 0 + + return sorted(exprs, key=key, reverse=not ascending) + def operation_add(self, other: NumberLike): return Add(self, other) @@ -339,10 +350,13 @@ def __preinit__(self): def get_operatable_operands(self) -> set[ParameterOperatable]: return self.operates_on.get_connected_nodes(types=ParameterOperatable) - @staticmethod - def topo_sort(exprs: Iterable["Expression"]) -> list["Expression"]: - # TODO - raise NotImplementedError() + def depth(self) -> int: + if hasattr(self, "_depth"): + return self._depth + self._depth = 1 + max( + op.depth() if isinstance(op, Expression) else 0 for op in self.operands + ) + return self._depth @abstract @@ -356,7 +370,7 @@ def __init__(self, *operands: ParameterOperatable.All): class Arithmetic(ConstrainableExpression, HasUnit): def __init__(self, *operands: ParameterOperatable.NumberLike): super().__init__(*operands) - types = int, float, Quantity, Unit, Parameter, Arithmetic + types = int, float, Quantity, Unit, Parameter, Arithmetic, Ranges if any(not isinstance(op, types) for op in operands): raise ValueError( "operands must be int, float, Quantity, Parameter, or Expression" @@ -755,7 +769,7 @@ def __init__( | None = None, # TODO actually allowed to be anything from domain tolerance_guess: float | None = None, # hints - likely_constrained: bool = False, + likely_constrained: bool = False, # TODO 
rename expect_constraits or similiar ): super().__init__() if within is not None and not within.units.is_compatible_with(units): diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index d2e73036..0f91c5f7 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -658,6 +658,20 @@ def op_add_ranges( _range = self._ranges.op_add_ranges(other._ranges) return Ranges._from_ranges(_range, self.units) + def __add__(self, other) -> "NonIterableRanges[QuantityT]": + if isinstance(other, NonIterableRanges): + return self.op_add_ranges(other) + elif isinstance(other, Range): + return self.op_add_ranges(Ranges(other)) + elif isinstance(other, Quantity): + return self.op_add_ranges(Singles(other)) + elif isinstance(other, int) or isinstance(other, float): + return self.op_add_ranges(Singles(other * dimensionless)) + return NotImplemented + + def __radd__(self, other) -> "NonIterableRanges[QuantityT]": + return self + other + def op_negate(self) -> "Ranges[QuantityT]": _range = self._ranges.op_negate() return Ranges._from_ranges(_range, self.units) diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index 37611cfb..dd7bca76 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -13,8 +13,8 @@ from faebryk.core.parameter import Parameter from faebryk.libs.library import L from faebryk.libs.logging import setup_basic_logging -from faebryk.libs.sets import Range -from faebryk.libs.units import P +from faebryk.libs.sets import Range, Ranges +from faebryk.libs.units import P, dimensionless from faebryk.libs.util import times logger = logging.getLogger(__name__) @@ -56,17 +56,26 @@ class App(Module): def test_assoc_compress(): class App(Module): - ops = L.list_field(10, lambda: Parameter(units=P.V)) + ops = L.list_field(10, lambda: Parameter(units=dimensionless)) app = App() - sum_ = app.ops[0] - for p in app.ops[1:]: - sum_ += p - - (sum_ < 11 * P.V).constrain() - - G = sum_.get_graph() + # (((((((((A + B + 1) + C + 2) * D * 3) * E * 4) * F * 5) * G * (A - A)) + H + 7) + I + 8) + J + 9) < 11 + # => (H + I + J + 24) < 11 + constants = [c * dimensionless for c in range(0, 10)] + constants[5] = app.ops[0] - app.ops[0] + # constants[9] = Ranges(Range(0 * dimensionless, 1 * dimensionless)) + acc = app.ops[0] + for i, p in enumerate(app.ops[1:3]): + acc += p + constants[i] + for i, p in enumerate(app.ops[3:7]): + acc *= p * constants[i + 3] + for i, p in enumerate(app.ops[7:]): + acc += p + constants[i + 7] + + (acc < 11).constrain() + + G = acc.get_graph() solver = DefaultSolver() solver.phase_one_no_guess_solving(G) From f0e34d5e03895bd5e20198c2f7436e028ab8b965 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Thu, 7 Nov 2024 14:38:12 +0100 Subject: [PATCH 76/80] expression simplification --- src/faebryk/core/defaultsolver.py | 384 +++++++++++++++++++++++------- src/faebryk/core/solver.py | 2 + src/faebryk/libs/sets.py | 4 +- test/core/test_parameters.py | 13 +- 4 files changed, 306 insertions(+), 97 deletions(-) diff --git a/src/faebryk/core/defaultsolver.py b/src/faebryk/core/defaultsolver.py index f2463e11..80e8c253 100644 --- a/src/faebryk/core/defaultsolver.py +++ b/src/faebryk/core/defaultsolver.py @@ -1,8 +1,8 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -from collections.abc import Iterable import logging +from collections.abc import Iterable from statistics import median from typing import Any, cast @@ -13,6 +13,7 @@ Add, Arithmetic, Constrainable, + 
Divide, Expression, Is, Multiply, @@ -24,6 +25,7 @@ ) from faebryk.core.solver import Solver from faebryk.libs.sets import Ranges +from faebryk.libs.units import dimensionless from faebryk.libs.util import EquivalenceClasses logger = logging.getLogger(__name__) @@ -131,7 +133,6 @@ def resolve_alias_classes( f"\n {len(dependency_classes)} dependency classes" "\n" ) - logger.info("Phase 1 Solving: Alias classes") logger.info(infostr) repr_map: dict[ParameterOperatable, ParameterOperatable] = {} @@ -213,7 +214,6 @@ def try_replace(o: ParameterOperatable.All): operands = [try_replace(o) for o in expr.operands] new_expr = create_new_expr(expr, *operands) - logger.info(f"{expr}[{expr.operands}] ->\n {new_expr}[{new_expr.operands}]") repr_map[expr] = new_expr return repr_map, dirty @@ -231,26 +231,42 @@ def copy_param(p: Parameter) -> Parameter: ) -def copy_pop( +def copy_operand_recursively( o: ParameterOperatable.All, repr_map: dict[ParameterOperatable, ParameterOperatable] ) -> ParameterOperatable.All: if o in repr_map: return repr_map[o] if isinstance(o, Expression): - return create_new_expr( - o, - *( - repr_map[op] if op in repr_map else copy_pop(op, repr_map) - for op in o.operands - ), - ) + new_ops = [] + for op in o.operands: + new_op = copy_operand_recursively(op, repr_map) + if isinstance(op, ParameterOperatable): + repr_map[op] = new_op + new_ops.append(new_op) + expr = create_new_expr(o, *new_ops) + repr_map[o] = expr + return expr elif isinstance(o, Parameter): - return copy_param(o) + param = copy_param(o) + repr_map[o] = param + return param else: return o -def compress_associative_expressions( +def is_replacable( + repr_map: dict[ParameterOperatable, ParameterOperatable], + e: Expression, + parent_expr: Expression, +) -> bool: + if e in repr_map: # overly restrictive: equivalent replacement would be ok + return False + if e.get_operations() != {parent_expr}: + return False + return True + + +def compress_associative_add_mul( G: Graph, ) -> tuple[dict[ParameterOperatable, ParameterOperatable], bool]: dirty = False @@ -261,6 +277,7 @@ def compress_associative_expressions( } repr_map: dict[ParameterOperatable, ParameterOperatable] = {} + removed = set() # (A + B) + C # X -> Y @@ -273,16 +290,18 @@ def flatten_operands_of_ops_with_same_type[T: Add | Multiply]( ) -> tuple[list[T], bool]: dirty = False operands = e.operands - noncomp, compressable = partition(lambda o: type(o) is type(e), operands) + noncomp, compressible = partition( + lambda o: type(o) is type(e) and is_replacable(repr_map, o, e), operands + ) out = [] - for c in compressable: + for c in compressible: dirty = True - if c in repr_map: - out.append(repr_map[c]) - else: - sub_out, sub_dirty = flatten_operands_of_ops_with_same_type(c) - dirty |= sub_dirty - out += sub_out + removed.add(c) + sub_out, sub_dirty = flatten_operands_of_ops_with_same_type(c) + dirty |= sub_dirty + out += sub_out + if len(out) > 0: + logger.info(f"FLATTENED {type(e).__name__} {e} -> {out}") return out + list(noncomp), dirty for expr in cast( @@ -290,33 +309,122 @@ def flatten_operands_of_ops_with_same_type[T: Add | Multiply]( ParameterOperatable.sort_by_depth(parent_add_muls, ascending=True), ): operands, sub_dirty = flatten_operands_of_ops_with_same_type(expr) - dirty |= sub_dirty - # copy - for o in operands: - if isinstance(o, ParameterOperatable): - repr_map[o] = copy_pop(o, repr_map) - - # make new compressed expr with (copied) operands - new_expr = create_new_expr( - expr, - *( - repr_map[o] if o in repr_map else copy_pop(o, repr_map) - 
for o in operands - ), + if sub_dirty: + dirty = True + copy_operands = [copy_operand_recursively(o, repr_map) for o in operands] + + new_expr = create_new_expr( + expr, + *copy_operands, + ) + repr_map[expr] = new_expr + + # copy other param ops + other_param_op = ParameterOperatable.sort_by_depth( + ( + p + for p in G.nodes_of_type(ParameterOperatable) + if p not in repr_map and p not in removed + ), + ascending=True, + ) + for o in other_param_op: + copy_operand_recursively(o, repr_map) + + return repr_map, dirty + + +def compress_associative_sub( + G: Graph, +) -> tuple[dict[ParameterOperatable, ParameterOperatable], bool]: + logger.info("Compressing Subtracts") + dirty = False + subs = cast(set[Subtract], G.nodes_of_type(Subtract)) + # get out deepest expr in compressable tree + parent_subs = { + e for e in subs if type(e) not in {type(n) for n in e.get_operations()} + } + + removed = set() + repr_map: dict[ParameterOperatable, ParameterOperatable] = {} + + def flatten_sub( + e: Subtract, + ) -> tuple[ + ParameterOperatable.All, + list[ParameterOperatable.All], + list[ParameterOperatable.All], + bool, + ]: + const_subtrahend = ( + [] if isinstance(e.operands[1], ParameterOperatable) else [e.operands[1]] ) - repr_map[expr] = new_expr + nonconst_subtrahend = [] if const_subtrahend else [e.operands[1]] + if isinstance(e.operands[0], Subtract) and is_replacable( + repr_map, e.operands[0], e + ): + removed.add(e.operands[0]) + minuend, const_subtrahends, nonconst_subtrahends, _ = flatten_sub( + e.operands[0] + ) + return ( + minuend, + const_subtrahends + const_subtrahend, + nonconst_subtrahends + nonconst_subtrahend, + True, + ) + else: + return e.operands[0], const_subtrahend, nonconst_subtrahend, False + + for expr in cast( + Iterable[Subtract], + ParameterOperatable.sort_by_depth(parent_subs, ascending=True), + ): + minuend, const_subtrahends, nonconst_subtrahends, sub_dirty = flatten_sub(expr) + if ( + isinstance(minuend, Add) + and is_replacable(repr_map, minuend, expr) + and len(const_subtrahends) > 0 + ): + copy_minuend = Add( + *(copy_operand_recursively(s, repr_map) for s in minuend.operands), + *(-1 * c for c in const_subtrahends), + ) + repr_map[expr] = copy_minuend + const_subtrahends = [] + sub_dirty = True + elif sub_dirty: + copy_minuend = copy_operand_recursively(minuend, repr_map) + if sub_dirty: + dirty = True + copy_subtrahends = [ + copy_operand_recursively(s, repr_map) + for s in nonconst_subtrahends + const_subtrahends + ] + if len(copy_subtrahends) > 0: + new_expr = Subtract( + copy_minuend, + Add(*copy_subtrahends), + ) + else: + new_expr = copy_minuend + removed.add(expr) + repr_map[expr] = new_expr + logger.info(f"REPRMAP {expr} -> {new_expr}") # copy other param ops other_param_op = ParameterOperatable.sort_by_depth( ( p for p in G.nodes_of_type(ParameterOperatable) - if p not in repr_map and p not in add_muls + if p not in repr_map and p not in removed ), ascending=True, ) - remaining_param_op = {p: copy_pop(p, repr_map) for p in other_param_op} - repr_map.update(remaining_param_op) + for o in other_param_op: + copy_o = copy_operand_recursively(o, repr_map) + logger.info(f"REMAINING {o} -> {copy_o}") + repr_map[o] = copy_o return repr_map, dirty @@ -328,20 +436,25 @@ def compress_arithmetic_expressions( arith_exprs = cast(set[Arithmetic], G.nodes_of_type(Arithmetic)) repr_map: dict[ParameterOperatable, ParameterOperatable] = {} + removed = set() for expr in cast( Iterable[Arithmetic], ParameterOperatable.sort_by_depth(arith_exprs, ascending=True), ): + if expr 
in repr_map or expr in removed: + continue + operands = expr.operands const_ops, nonconst_ops = partition( lambda o: isinstance(o, ParameterOperatable), operands ) + non_replacable_nonconst_ops, replacable_nonconst_ops = partition( + lambda o: o not in repr_map, nonconst_ops + ) multiplicity = {} - has_multiplicity = False - for n in nonconst_ops: + for n in replacable_nonconst_ops: if n in multiplicity: - has_multiplicity = True multiplicity[n] += 1 else: multiplicity[n] = 1 @@ -357,19 +470,32 @@ def compress_arithmetic_expressions( const_sum = [] except StopIteration: const_sum = [] - nonconst_prod = { - n: Multiply(n, m) if m > 1 else copy_pop(n, repr_map) - for n, m in multiplicity.items() - } - new_operands = (*nonconst_prod.values(), *const_sum) - if len(new_operands) > 1: - new_expr = Add(*new_operands) - elif len(new_operands) == 1: - new_expr = new_operands[0] - else: - raise ValueError("No operands, should not happen") - repr_map.update(nonconst_prod) - repr_map[expr] = new_expr + if any(m > 1 for m in multiplicity.values()): + dirty = True + if dirty: + copied = { + n: copy_operand_recursively(n, repr_map) for n in multiplicity + } + nonconst_prod = [ + Multiply(copied[n], m) if m > 1 else copied[n] + for n, m in multiplicity.items() + ] + new_operands = [ + *nonconst_prod, + *const_sum, + *( + copy_operand_recursively(o, repr_map) + for o in non_replacable_nonconst_ops + ), + ] + if len(new_operands) > 1: + new_expr = Add(*new_operands) + elif len(new_operands) == 1: + new_expr = new_operands[0] + removed.add(expr) + else: + raise ValueError("No operands, should not happen") + repr_map[expr] = new_expr elif isinstance(expr, Multiply): try: @@ -377,54 +503,100 @@ def compress_arithmetic_expressions( for c in const_ops: dirty = True const_prod[0] *= c - if const_prod[0] == 1 * expr.units: # TODO make work with all the types + if ( + const_prod[0] == 1 * dimensionless + ): # TODO make work with all the types dirty = True const_prod = [] except StopIteration: const_prod = [] if ( - len(const_prod) == 1 and const_prod[0] == 0 * expr.units + len(const_prod) == 1 and const_prod[0].magnitude == 0 ): # TODO make work with all the types dirty = True repr_map[expr] = 0 * expr.units else: - nonconst_prod = { - n: Power(n, m) if m > 1 else copy_pop(n, repr_map) - for n, m in multiplicity.items() - } - if has_multiplicity: + if any(m > 1 for m in multiplicity.values()): dirty = True - new_operands = (*nonconst_prod.values(), *const_prod) - if len(new_operands) > 1: - new_expr = Multiply(*new_operands) - elif len(new_operands) == 1: - new_expr = new_operands[0] - else: - raise ValueError("No operands, should not happen") - repr_map.update(nonconst_prod) - repr_map[expr] = new_expr + if dirty: + copied = { + n: copy_operand_recursively(n, repr_map) for n in multiplicity + } + nonconst_power = [ + Power(copied[n], m) if m > 1 else copied[n] + for n, m in multiplicity.items() + ] + new_operands = [ + *nonconst_power, + *const_prod, + *( + copy_operand_recursively(o, repr_map) + for o in non_replacable_nonconst_ops + ), + ] + if len(new_operands) > 1: + new_expr = Multiply(*new_operands) + elif len(new_operands) == 1: + new_expr = new_operands[0] + removed.add(expr) + else: + raise ValueError("No operands, should not happen") + repr_map[expr] = new_expr elif isinstance(expr, Subtract): - if expr.operands[0] is expr.operands[1]: + if sum(1 for _ in const_ops) == 2: + dirty = True + repr_map[expr] = expr.operands[0] - expr.operands[1] + removed.add(expr) + elif expr.operands[0] is expr.operands[1]: 
dirty = True repr_map[expr] = 0 * expr.units - elif len(const_ops) == 2: + removed.add(expr) + elif expr.operands[1] == 0 * expr.operands[1].units: dirty = True - repr_map[expr] = expr.operands[0] - expr.operands[1] + repr_map[expr.operands[0]] = repr_map.get( + expr.operands[0], + copy_operand_recursively(expr.operands[0], repr_map), + ) + repr_map[expr] = repr_map[expr.operands[0]] + removed.add(expr) else: - repr_map[expr] = copy_pop(expr, repr_map) + repr_map[expr] = copy_operand_recursively(expr, repr_map) + elif isinstance(expr, Divide): + if sum(1 for _ in const_ops) == 2: + if not expr.operands[1].magnitude == 0: + dirty = True + repr_map[expr] = expr.operands[0] / expr.operands[1] + removed.add(expr) + else: + # no valid solution but might not matter e.g. [phi(a,b,...) OR a/0 == b] + repr_map[expr] = copy_operand_recursively(expr, repr_map) + elif expr.operands[1] is expr.operands[0]: + dirty = True + repr_map[expr] = 1 * dimensionless + removed.add(expr) + elif expr.operands[1] == 1 * expr.operands[1].units: + dirty = True + repr_map[expr.operands[0]] = repr_map.get( + expr.operands[0], + copy_operand_recursively(expr.operands[0], repr_map), + ) + repr_map[expr] = repr_map[expr.operands[0]] + removed.add(expr) + else: + repr_map[expr] = copy_operand_recursively(expr, repr_map) else: - repr_map[expr] = copy_pop(expr, repr_map) + repr_map[expr] = copy_operand_recursively(expr, repr_map) other_param_op = ParameterOperatable.sort_by_depth( ( p for p in G.nodes_of_type(ParameterOperatable) - if p not in repr_map and p not in arith_exprs + if p not in repr_map and p not in removed ), ascending=True, ) - remaining_param_op = {p: copy_pop(p, repr_map) for p in other_param_op} - repr_map.update(remaining_param_op) + for o in other_param_op: + copy_operand_recursively(o, repr_map) return { k: v for k, v in repr_map.items() if isinstance(v, ParameterOperatable) @@ -467,45 +639,79 @@ def phase_one_no_guess_solving(self, g: Graph) -> None: while dirty: iter += 1 logger.info(f"Iteration {iter}") + logger.info("Phase 1 Solving: Alias classes") repr_map = {} for g in graphs: alias_repr_map, alias_dirty = resolve_alias_classes(g) repr_map.update(alias_repr_map) + for s, d in repr_map.items(): + if isinstance(d, Expression): + if isinstance(s, Expression): + logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}]") + else: + logger.info(f"{s} -> {d}[{d.operands}]") + else: + logger.info(f"{s} -> {d}") graphs = {p.get_graph() for p in repr_map.values()} - for g in graphs: - logger.info(f"G: {g}") logger.info(f"{len(graphs)} new graphs") # TODO assert all new graphs - logger.info("Phase 2 Solving: Associative expressions") + logger.info("Phase 2a Solving: Add/Mul associative expressions") repr_map = {} for g in graphs: - assoc_repr_map, assoc_dirty = compress_associative_expressions(g) - repr_map.update(assoc_repr_map) + assoc_add_mul_repr_map, assoc_add_mul_dirty = ( + compress_associative_add_mul(g) + ) + repr_map.update(assoc_add_mul_repr_map) for s, d in repr_map.items(): - if isinstance(s, Expression): - logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}]") + if isinstance(d, Expression): + if isinstance(s, Expression): + logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}]") + else: + logger.info(f"{s} -> {d}[{d.operands}]") else: logger.info(f"{s} -> {d}") graphs = {p.get_graph() for p in repr_map.values()} logger.info(f"{len(graphs)} new graphs") # TODO assert all new graphs + logger.info("Phase 2b Solving: Subtract associative expressions") + repr_map = {} + for g in graphs: + 
assoc_sub_repr_map, assoc_sub_dirty = compress_associative_sub(g) + repr_map.update(assoc_sub_repr_map) + for s, d in repr_map.items(): + if isinstance(d, Expression): + if isinstance(s, Expression): + logger.info( + f"{s}[{s.operands}] -> {d}[{d.operands} | G {d.get_graph()!r}]" + ) + else: + logger.info(f"{s} -> {d}[{d.operands} | G {d.get_graph()!r}]") + else: + logger.info(f"{s} -> {d} | G {d.get_graph()!r}") + graphs = {p.get_graph() for p in repr_map.values()} + logger.info(f"{len(graphs)} new graphs") + # TODO assert all new graphs + logger.info("Phase 3 Solving: Arithmetic expressions") repr_map = {} for g in graphs: arith_repr_map, arith_dirty = compress_arithmetic_expressions(g) repr_map.update(arith_repr_map) for s, d in repr_map.items(): - if isinstance(s, Expression): - logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}] | G: {id(g)}") + if isinstance(d, Expression): + if isinstance(s, Expression): + logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}]") + else: + logger.info(f"{s} -> {d}[{d.operands}]") else: - logger.info(f"{s} -> {d} | G: {id(g)}") + logger.info(f"{s} -> {d}") graphs = {p.get_graph() for p in repr_map.values()} logger.info(f"{len(graphs)} new graphs") # TODO assert all new graphs - dirty = alias_dirty or assoc_dirty or arith_dirty + dirty = alias_dirty or assoc_add_mul_dirty or assoc_sub_dirty or arith_dirty def get_any_single( self, diff --git a/src/faebryk/core/solver.py b/src/faebryk/core/solver.py index d572ec4f..9b616034 100644 --- a/src/faebryk/core/solver.py +++ b/src/faebryk/core/solver.py @@ -23,6 +23,8 @@ class SolverError(Exception): ... class TimeoutError(SolverError): ... + class DivisionByZeroError(SolverError): ... + @dataclass class SolveResult: timed_out: bool diff --git a/src/faebryk/libs/sets.py b/src/faebryk/libs/sets.py index 0f91c5f7..88c5c175 100644 --- a/src/faebryk/libs/sets.py +++ b/src/faebryk/libs/sets.py @@ -731,12 +731,12 @@ def __hash__(self) -> int: def __repr__(self) -> str: if self.units.is_compatible_with(dimensionless): inner = ", ".join(f"[{r._min}, {r._max}]" for r in self._ranges.ranges) - return f"_RangeUnion({inner})" + return f"Ranges({inner})" inner = ", ".join( f"[{self.base_to_units(r._min)}, {self.base_to_units(r._max)}]" for r in self._ranges.ranges ) - return f"_RangeUnion({inner} | {self.units})" + return f"Ranges({inner} | {self.units})" class Ranges(NonIterableRanges[QuantityT], Iterable[Range[QuantityT]]): diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index dd7bca76..636344ae 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -54,17 +54,17 @@ class App(Module): solver.phase_one_no_guess_solving(voltage1.get_graph()) -def test_assoc_compress(): +def test_simplify(): class App(Module): ops = L.list_field(10, lambda: Parameter(units=dimensionless)) app = App() - # (((((((((A + B + 1) + C + 2) * D * 3) * E * 4) * F * 5) * G * (A - A)) + H + 7) + I + 8) + J + 9) < 11 - # => (H + I + J + 24) < 11 + # (((((((((((A + B + 1) + C + 2) * D * 3) * E * 4) * F * 5) * G * (A - A)) + H + 7) + I + 8) + J + 9) - 3) - 4) < 11 + # => (H + I + J + 17) < 11 constants = [c * dimensionless for c in range(0, 10)] constants[5] = app.ops[0] - app.ops[0] - # constants[9] = Ranges(Range(0 * dimensionless, 1 * dimensionless)) + constants[9] = Ranges(Range(0 * dimensionless, 1 * dimensionless)) acc = app.ops[0] for i, p in enumerate(app.ops[1:3]): acc += p + constants[i] @@ -73,7 +73,8 @@ class App(Module): for i, p in enumerate(app.ops[7:]): acc += p + constants[i + 7] - (acc < 
11).constrain() + acc = (acc - 3 * dimensionless) - 4 * dimensionless + (acc < 11 * dimensionless).constrain() G = acc.get_graph() solver = DefaultSolver() @@ -142,7 +143,7 @@ def test_visualize_inspect_app(): # if run in jupyter notebook import sys - func = test_assoc_compress + func = test_simplify if "ipykernel" in sys.modules: func() From 865996ef2da4c24828d9d1c3184853a3423b02b4 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Thu, 7 Nov 2024 18:25:58 +0100 Subject: [PATCH 77/80] normalize params, remove some tautologies --- src/faebryk/core/defaultsolver.py | 270 +++++++++++++++++++++--------- test/core/test_parameters.py | 21 ++- 2 files changed, 207 insertions(+), 84 deletions(-) diff --git a/src/faebryk/core/defaultsolver.py b/src/faebryk/core/defaultsolver.py index 80e8c253..5baa1983 100644 --- a/src/faebryk/core/defaultsolver.py +++ b/src/faebryk/core/defaultsolver.py @@ -16,16 +16,18 @@ Divide, Expression, Is, + Log, Multiply, Parameter, ParameterOperatable, Power, Predicate, + Sqrt, Subtract, ) from faebryk.core.solver import Solver -from faebryk.libs.sets import Ranges -from faebryk.libs.units import dimensionless +from faebryk.libs.sets import Range, Ranges +from faebryk.libs.units import Quantity, dimensionless from faebryk.libs.util import EquivalenceClasses logger = logging.getLogger(__name__) @@ -58,7 +60,7 @@ def get_params_for_expr(expr: Expression) -> set[Parameter]: def get_constrained_predicates_involved_in( p: Parameter | Expression, -) -> list[Predicate]: +) -> set[Predicate]: # p.self -> p.operated_on -> e1.operates_on -> e1.self dependants = p.bfs_node( lambda path, _: isinstance(path[-1].node, ParameterOperatable) @@ -77,7 +79,7 @@ def get_constrained_predicates_involved_in( ) ) ) - res = [p for p in dependants if isinstance(p, Predicate) and p.constrained] + res = {p for p in dependants if isinstance(p, Predicate) and p.constrained} return res @@ -110,6 +112,95 @@ def create_new_expr( return new_expr +def copy_param(p: Parameter) -> Parameter: + return Parameter( + units=p.units, + within=p.within, + domain=p.domain, + soft_set=p.soft_set, + guess=p.guess, + tolerance_guess=p.tolerance_guess, + likely_constrained=p.likely_constrained, + ) + + +def copy_operand_recursively( + o: ParameterOperatable.All, repr_map: dict[ParameterOperatable, ParameterOperatable] +) -> ParameterOperatable.All: + if o in repr_map: + return repr_map[o] + if isinstance(o, Expression): + new_ops = [] + for op in o.operands: + new_op = copy_operand_recursively(op, repr_map) + if isinstance(op, ParameterOperatable): + repr_map[op] = new_op + new_ops.append(new_op) + expr = create_new_expr(o, *new_ops) + repr_map[o] = expr + return expr + elif isinstance(o, Parameter): + param = copy_param(o) + repr_map[o] = param + return param + else: + return o + + +# units -> base units (dimensionless) +# within -> constrain is subset +# scalar to single +def normalize_graph(G: Graph) -> dict[ParameterOperatable, ParameterOperatable]: + def set_to_base_units(s: Ranges | Range | None) -> Ranges | Range | None: + if s is None: + return None + if isinstance(s, Ranges): + return Ranges._from_ranges(s._ranges, dimensionless) + return Range._from_range(s._range, dimensionless) + + def scalar_to_base_units(q: int | float | Quantity | None) -> Quantity | None: + if q is None: + return None + if isinstance(q, Quantity): + return q.to_base_units().magnitude * dimensionless + return q * dimensionless + + param_ops = G.nodes_of_type(ParameterOperatable) + + repr_map: 
dict[ParameterOperatable, ParameterOperatable] = {} + + for po in cast( + Iterable[ParameterOperatable], + ParameterOperatable.sort_by_depth(param_ops, ascending=True), + ): + if isinstance(po, Parameter): + new_param = Parameter( + units=dimensionless, + within=None, + domain=po.domain, + soft_set=set_to_base_units(po.soft_set), + guess=scalar_to_base_units(po.guess), + tolerance_guess=po.tolerance_guess, + likely_constrained=po.likely_constrained, + ) + repr_map[po] = new_param + if po.within is not None: + new_param.constrain_subset(set_to_base_units(po.within)) + elif isinstance(po, Expression): + new_ops = [] + for op in po.operands: + if isinstance(op, ParameterOperatable): + assert op in repr_map + new_ops.append(repr_map[op]) + elif isinstance(op, int | float | Quantity): + new_ops.append(scalar_to_base_units(op)) + else: + new_ops.append(set_to_base_units(op)) + repr_map[po] = create_new_expr(po, *new_ops) + + return repr_map + + def resolve_alias_classes( G: Graph, ) -> tuple[dict[ParameterOperatable, ParameterOperatable], bool]: @@ -219,39 +310,8 @@ def try_replace(o: ParameterOperatable.All): return repr_map, dirty -def copy_param(p: Parameter) -> Parameter: - return Parameter( - units=p.units, - within=p.within, - domain=p.domain, - soft_set=p.soft_set, - guess=p.guess, - tolerance_guess=p.tolerance_guess, - likely_constrained=p.likely_constrained, - ) -def copy_operand_recursively( - o: ParameterOperatable.All, repr_map: dict[ParameterOperatable, ParameterOperatable] -) -> ParameterOperatable.All: - if o in repr_map: - return repr_map[o] - if isinstance(o, Expression): - new_ops = [] - for op in o.operands: - new_op = copy_operand_recursively(op, repr_map) - if isinstance(op, ParameterOperatable): - repr_map[op] = new_op - new_ops.append(new_op) - expr = create_new_expr(o, *new_ops) - repr_map[o] = expr - return expr - elif isinstance(o, Parameter): - param = copy_param(o) - repr_map[o] = param - return param - else: - return o def is_replacable( @@ -587,13 +647,15 @@ def compress_arithmetic_expressions( else: repr_map[expr] = copy_operand_recursively(expr, repr_map) - other_param_op = ParameterOperatable.sort_by_depth( - ( - p - for p in G.nodes_of_type(ParameterOperatable) - if p not in repr_map and p not in removed - ), - ascending=True, + other_param_op = ( + ParameterOperatable.sort_by_depth( # TODO, do we need the sort here? 
same above + ( + p + for p in G.nodes_of_type(ParameterOperatable) + if p not in repr_map and p not in removed + ), + ascending=True, + ) ) for o in other_param_op: copy_operand_recursively(o, repr_map) @@ -603,10 +665,68 @@ def compress_arithmetic_expressions( }, dirty +def has_implicit_constraint(po: ParameterOperatable) -> bool: + if isinstance(po, Parameter | Add | Subtract | Multiply | Power): # TODO others + return False + if isinstance(po, Divide): + return True # implicit constraint: divisor not zero + if isinstance(po, Sqrt | Log): + return True # implicit constraint: non-negative + return True + + +def remove_obvious_tautologies( + G: Graph, +) -> tuple[dict[ParameterOperatable, ParameterOperatable], bool]: + removed = set() + dirty = False + for pred_is in ParameterOperatable.sort_by_depth( + G.nodes_of_type(Is), ascending=True + ): + + def known_unconstrained(po: ParameterOperatable) -> bool: + no_other_constraints = ( + len(get_constrained_predicates_involved_in(po).difference({pred_is})) + == 0 + ) + return no_other_constraints and not has_implicit_constraint(po) + + pred_is = cast(Is, pred_is) + if pred_is.operands[0] is pred_is.operands[1] and not known_unconstrained( + pred_is.operands[0] + ): + removed.add(pred_is) + dirty = True + elif known_unconstrained(pred_is.operands[0]) or known_unconstrained( + pred_is.operands[1] + ): + removed.add(pred_is) + dirty = True + repr_map = {} + for p in G.nodes_of_type(ParameterOperatable): + if p not in removed: + repr_map[p] = copy_operand_recursively(p, repr_map) + return repr_map, dirty + + class DefaultSolver(Solver): timeout: int = 1000 def phase_one_no_guess_solving(self, g: Graph) -> None: + def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): + for s, d in repr_map.items(): + if isinstance(d, Expression): + if isinstance(s, Expression): + logger.info( + f"{s}[{s.operands}] -> {d}[{d.operands} | G {d.get_graph()!r}]" + ) + else: + logger.info(f"{s} -> {d}[{d.operands} | G {d.get_graph()!r}]") + else: + logger.info(f"{s} -> {d} | G {d.get_graph()!r}") + graphs = {p.get_graph() for p in repr_map.values()} + logger.info(f"{len(graphs)} graphs") + logger.info(f"Phase 1 Solving: No guesses {'-' * 80}") # strategies @@ -632,7 +752,12 @@ def phase_one_no_guess_solving(self, g: Graph) -> None: # as long as progress iterate - graphs = {g} + logger.info("Phase 0 Solving: normalize graph") + repr_map = normalize_graph(g) + debug_print(repr_map) + graphs = {p.get_graph() for p in repr_map.values()} + # TODO assert all new graphs + dirty = True iter = 0 @@ -644,16 +769,8 @@ def phase_one_no_guess_solving(self, g: Graph) -> None: for g in graphs: alias_repr_map, alias_dirty = resolve_alias_classes(g) repr_map.update(alias_repr_map) - for s, d in repr_map.items(): - if isinstance(d, Expression): - if isinstance(s, Expression): - logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}]") - else: - logger.info(f"{s} -> {d}[{d.operands}]") - else: - logger.info(f"{s} -> {d}") + debug_print(repr_map) graphs = {p.get_graph() for p in repr_map.values()} - logger.info(f"{len(graphs)} new graphs") # TODO assert all new graphs logger.info("Phase 2a Solving: Add/Mul associative expressions") @@ -663,16 +780,8 @@ def phase_one_no_guess_solving(self, g: Graph) -> None: compress_associative_add_mul(g) ) repr_map.update(assoc_add_mul_repr_map) - for s, d in repr_map.items(): - if isinstance(d, Expression): - if isinstance(s, Expression): - logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}]") - else: - logger.info(f"{s} -> 
{d}[{d.operands}]") - else: - logger.info(f"{s} -> {d}") + debug_print(repr_map) graphs = {p.get_graph() for p in repr_map.values()} - logger.info(f"{len(graphs)} new graphs") # TODO assert all new graphs logger.info("Phase 2b Solving: Subtract associative expressions") @@ -680,18 +789,8 @@ def phase_one_no_guess_solving(self, g: Graph) -> None: for g in graphs: assoc_sub_repr_map, assoc_sub_dirty = compress_associative_sub(g) repr_map.update(assoc_sub_repr_map) - for s, d in repr_map.items(): - if isinstance(d, Expression): - if isinstance(s, Expression): - logger.info( - f"{s}[{s.operands}] -> {d}[{d.operands} | G {d.get_graph()!r}]" - ) - else: - logger.info(f"{s} -> {d}[{d.operands} | G {d.get_graph()!r}]") - else: - logger.info(f"{s} -> {d} | G {d.get_graph()!r}") + debug_print(repr_map) graphs = {p.get_graph() for p in repr_map.values()} - logger.info(f"{len(graphs)} new graphs") # TODO assert all new graphs logger.info("Phase 3 Solving: Arithmetic expressions") @@ -699,19 +798,26 @@ def phase_one_no_guess_solving(self, g: Graph) -> None: for g in graphs: arith_repr_map, arith_dirty = compress_arithmetic_expressions(g) repr_map.update(arith_repr_map) - for s, d in repr_map.items(): - if isinstance(d, Expression): - if isinstance(s, Expression): - logger.info(f"{s}[{s.operands}] -> {d}[{d.operands}]") - else: - logger.info(f"{s} -> {d}[{d.operands}]") - else: - logger.info(f"{s} -> {d}") + debug_print(repr_map) graphs = {p.get_graph() for p in repr_map.values()} - logger.info(f"{len(graphs)} new graphs") # TODO assert all new graphs - dirty = alias_dirty or assoc_add_mul_dirty or assoc_sub_dirty or arith_dirty + logger.info("Phase 4 Solving: Remove obvious tautologies") + repr_map = {} + for g in graphs: + tautology_repr_map, tautology_dirty = remove_obvious_tautologies(g) + repr_map.update(tautology_repr_map) + debug_print(repr_map) + graphs = {p.get_graph() for p in repr_map.values()} + # TODO assert all new graphs + + dirty = ( + alias_dirty + or assoc_add_mul_dirty + or assoc_sub_dirty + or arith_dirty + or tautology_dirty + ) def get_any_single( self, diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index 636344ae..d60d7571 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -56,7 +56,12 @@ class App(Module): def test_simplify(): class App(Module): - ops = L.list_field(10, lambda: Parameter(units=dimensionless)) + ops = L.list_field( + 10, + lambda: Parameter( + units=dimensionless, within=Range(0, 1, units=dimensionless) + ), + ) app = App() @@ -81,6 +86,18 @@ class App(Module): solver.phase_one_no_guess_solving(G) +def test_remove_obvious_tautologies(): + p0, p1, p2 = (Parameter(units=dimensionless) for _ in range(3)) + p0.alias_is(p1 + p2) + p1.constrain_ge(0) + p2.constrain_ge(0) + p2.alias_is(p2) + + G = p0.get_graph() + solver = DefaultSolver() + solver.phase_one_no_guess_solving(G) + + def test_solve_realworld(): app = F.RP2040() solver = DefaultSolver() @@ -143,7 +160,7 @@ def test_visualize_inspect_app(): # if run in jupyter notebook import sys - func = test_simplify + func = test_remove_obvious_tautologies if "ipykernel" in sys.modules: func() From 93eeb78984a97b3c9f1330a60355b121bf7f18ce Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Thu, 7 Nov 2024 19:31:00 +0100 Subject: [PATCH 78/80] intersect literal subsets --- src/faebryk/core/defaultsolver.py | 58 +++++++++++++++++++++++++++++++ src/faebryk/core/parameter.py | 3 +- test/core/test_parameters.py | 15 +++++++- 3 
files changed, 74 insertions(+), 2 deletions(-) diff --git a/src/faebryk/core/defaultsolver.py b/src/faebryk/core/defaultsolver.py index 5baa1983..801faebe 100644 --- a/src/faebryk/core/defaultsolver.py +++ b/src/faebryk/core/defaultsolver.py @@ -16,6 +16,7 @@ Divide, Expression, Is, + IsSubset, Log, Multiply, Parameter, @@ -310,8 +311,55 @@ def try_replace(o: ParameterOperatable.All): return repr_map, dirty +def subset_of_literal( + G: Graph, +) -> tuple[dict[ParameterOperatable, ParameterOperatable], bool]: + dirty = False + params = G.nodes_of_type(Parameter) + removed = set() + repr_map: dict[ParameterOperatable, ParameterOperatable] = {} + for param in params: + + def other_set(e: Is) -> ParameterOperatable.All: + if e.operands[0] is param: + return e.operands[1] + return e.operands[0] + + is_subsets = [ + e + for e in param.get_operations() + if isinstance(e, IsSubset) + and len(e.get_operations()) == 0 + and not isinstance(other_set(e), ParameterOperatable) + ] + if len(is_subsets) > 1: + other_sets = [other_set(e) for e in is_subsets] + intersected = other_sets[0] + for s in other_sets[1:]: + intersected = intersected.op_intersect_ranges(Ranges(s)) + removed.update(is_subsets) + new_param = copy_param(param) + new_param.constrain_subset(intersected) + repr_map[param] = new_param + dirty = True + else: + repr_map[param] = copy_param(param) + exprs = ( + ParameterOperatable.sort_by_depth( # TODO, do we need the sort here? same above + ( + p + for p in G.nodes_of_type(Expression) + if p not in repr_map and p not in removed + ), + ascending=True, + ) + ) + for expr in exprs: + copy_operand_recursively(expr, repr_map) + + return repr_map, dirty def is_replacable( @@ -811,12 +859,22 @@ def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): graphs = {p.get_graph() for p in repr_map.values()} # TODO assert all new graphs + logger.info("Phase 5 Solving: Subset of literals") + repr_map = {} + for g in graphs: + subset_repr_map, subset_dirty = subset_of_literal(g) + repr_map.update(subset_repr_map) + debug_print(repr_map) + graphs = {p.get_graph() for p in repr_map.values()} + # TODO assert all new graphs + dirty = ( alias_dirty or assoc_add_mul_dirty or assoc_sub_dirty or arith_dirty or tautology_dirty + or subset_dirty ) def get_any_single( diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index b69d7572..3f0a9a0a 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -40,7 +40,8 @@ class ParameterOperatable(Node): operated_on: GraphInterface def get_operations(self) -> set["Expression"]: - return self.operated_on.get_connected_nodes(types=Expression) + res = self.operated_on.get_connected_nodes(types=Expression) + return res @staticmethod def sort_by_depth( diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index d60d7571..f8cd3f80 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -98,6 +98,19 @@ def test_remove_obvious_tautologies(): solver.phase_one_no_guess_solving(G) +def test_subset_of_literal(): + p0, p1, p2 = ( + Parameter(units=dimensionless, within=Range(0, i, units=dimensionless)) + for i in range(3) + ) + p0.alias_is(p1) + p1.alias_is(p2) + + G = p0.get_graph() + solver = DefaultSolver() + solver.phase_one_no_guess_solving(G) + + def test_solve_realworld(): app = F.RP2040() solver = DefaultSolver() @@ -160,7 +173,7 @@ def test_visualize_inspect_app(): # if run in jupyter notebook import sys - func = test_remove_obvious_tautologies + func = 
test_subset_of_literal if "ipykernel" in sys.modules: func() From 4533df52aac7d1d1eaa01a6d766d86d070547f32 Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Fri, 8 Nov 2024 22:09:03 +0100 Subject: [PATCH 79/80] fix hash/eq bug in graph core --- src/faebryk/core/graphinterface.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/faebryk/core/graphinterface.py b/src/faebryk/core/graphinterface.py index 08480948..6b1f4dc5 100644 --- a/src/faebryk/core/graphinterface.py +++ b/src/faebryk/core/graphinterface.py @@ -68,14 +68,6 @@ def nodes_of_type[T: "Node"](self, t: type[T]) -> set[T]: def nodes_of_types(self, t: tuple[type["Node"], ...]) -> set["Node"]: return {n for n in self.node_projection() if isinstance(n, t)} - def __hash__(self) -> int: - return id(self()) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Graph): - return False - return self() == other() - class GraphInterface(FaebrykLibObject): GT = Graph From e7db104fea8626bbf633f8923c096b0c3f9eb5aa Mon Sep 17 00:00:00 2001 From: NoR8quoh1r <20768237+NoR8quoh1r@users.noreply.github.com> Date: Fri, 8 Nov 2024 22:09:17 +0100 Subject: [PATCH 80/80] improve alias simplification --- src/faebryk/core/defaultsolver.py | 258 +++++++++++++++++++----------- src/faebryk/core/parameter.py | 102 +++++++++++- test/core/test_parameters.py | 17 +- 3 files changed, 281 insertions(+), 96 deletions(-) diff --git a/src/faebryk/core/defaultsolver.py b/src/faebryk/core/defaultsolver.py index 801faebe..b334aa61 100644 --- a/src/faebryk/core/defaultsolver.py +++ b/src/faebryk/core/defaultsolver.py @@ -1,6 +1,7 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +from collections import defaultdict import logging from collections.abc import Iterable from statistics import median @@ -29,27 +30,60 @@ from faebryk.core.solver import Solver from faebryk.libs.sets import Range, Ranges from faebryk.libs.units import Quantity, dimensionless -from faebryk.libs.util import EquivalenceClasses +from faebryk.libs.util import EquivalenceClasses, unique logger = logging.getLogger(__name__) -def parameter_alias_classes(G: Graph) -> list[set[Parameter]]: +def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): + import sys + + if getattr(sys, "gettrace", lambda: None)(): + log = print + else: + log = logger.info + for s, d in repr_map.items(): + if isinstance(d, Expression): + if isinstance(s, Expression): + log(f"{s}[{s.operands}] -> {d}[{d.operands} | G {d.get_graph()!r}]") + else: + log(f"{s} -> {d}[{d.operands} | G {d.get_graph()!r}]") + else: + log(f"{s} -> {d} | G {d.get_graph()!r}") + graphs = unique(map(lambda p: p.get_graph(), repr_map.values()), lambda g: g()) + log(f"{len(graphs)} graphs") + + +def parameter_ops_alias_classes( + G: Graph, +) -> dict[ParameterOperatable, set[ParameterOperatable]]: # TODO just get passed - params = [ + param_ops = { p - for p in G.nodes_of_type(Parameter) + for p in G.nodes_of_type(ParameterOperatable) if get_constrained_predicates_involved_in(p) - ] - full_eq = EquivalenceClasses[Parameter](params) + }.difference(G.nodes_of_type(Predicate)) + full_eq = EquivalenceClasses[ParameterOperatable](param_ops) is_exprs = [e for e in G.nodes_of_type(Is) if e.constrained] for is_expr in is_exprs: - params_ops = [op for op in is_expr.operands if isinstance(op, Parameter)] - full_eq.add_eq(*params_ops) + full_eq.add_eq(*is_expr.operands) + + obvious_eq = defaultdict(list) + for p in param_ops: + 
obvious_eq[p.obviously_eq_hash()].append(p) + logger.info(f"obvious eq: {obvious_eq}") - return full_eq.get() + for candidates in obvious_eq.values(): + if len(candidates) > 1: + logger.debug(f"#obvious eq candidates: {len(candidates)}") + for i, p in enumerate(candidates): + for q in candidates[:i]: + if p.obviously_eq(q): + full_eq.add_eq(p, q) + break + return full_eq.classes def get_params_for_expr(expr: Expression) -> set[Parameter]: @@ -60,7 +94,7 @@ def get_params_for_expr(expr: Expression) -> set[Parameter]: def get_constrained_predicates_involved_in( - p: Parameter | Expression, + p: ParameterOperatable, ) -> set[Predicate]: # p.self -> p.operated_on -> e1.operates_on -> e1.self dependants = p.bfs_node( @@ -108,6 +142,9 @@ def create_new_expr( old_expr: Expression, *operands: ParameterOperatable.All ) -> Expression: new_expr = type(old_expr)(*operands) + for op in operands: + if isinstance(op, ParameterOperatable): + assert op.get_graph() == new_expr.get_graph() if isinstance(old_expr, Constrainable): cast(Constrainable, new_expr).constrained = old_expr.constrained return new_expr @@ -206,9 +243,9 @@ def resolve_alias_classes( G: Graph, ) -> tuple[dict[ParameterOperatable, ParameterOperatable], bool]: dirty = False - params = [ + params_ops = [ p - for p in G.nodes_of_type(Parameter) + for p in G.nodes_of_type(ParameterOperatable) if get_constrained_predicates_involved_in(p) ] exprs = G.nodes_of_type(Expression) @@ -216,11 +253,11 @@ def resolve_alias_classes( exprs.difference_update(predicates) exprs = {e for e in exprs if get_constrained_predicates_involved_in(e)} - p_alias_classes = parameter_alias_classes(G) + p_alias_classes = parameter_ops_alias_classes(G) dependency_classes = parameter_dependency_classes(G) infostr = ( - f"{len(params)} parameters" + f"{len(params_ops)} parametersoperable" f"\n {len(p_alias_classes)} alias classes" f"\n {len(dependency_classes)} dependency classes" "\n" @@ -230,61 +267,88 @@ def resolve_alias_classes( repr_map: dict[ParameterOperatable, ParameterOperatable] = {} # Make new param repre for alias classes - for alias_class in p_alias_classes: - # TODO short-cut if len() == 1 - if len(alias_class) > 1: - dirty = True + for param_op in ParameterOperatable.sort_by_depth(params_ops, ascending=True): + if param_op in repr_map or param_op not in p_alias_classes: + continue + + alias_class = p_alias_classes[param_op] + + # TODO short-cut if len() == 1 ? + param_alias_class = [p for p in alias_class if isinstance(p, Parameter)] + expr_alias_class = [p for p in alias_class if isinstance(p, Expression)] + # TODO non unit/numeric params, i.e. 
enums, bools # single unit unit_candidates = {p.units for p in alias_class} if len(unit_candidates) > 1: raise ValueError("Incompatible units in alias class") + if len(param_alias_class) > 0: + dirty |= len(param_alias_class) > 1 + + # single domain + domain_candidates = {p.domain for p in param_alias_class} + if len(domain_candidates) > 1: + raise ValueError("Incompatible domains in alias class") + + # intersect ranges + within_ranges = { + p.within for p in param_alias_class if p.within is not None + } + within = None + if within_ranges: + within = Ranges.op_intersect_ranges(*within_ranges) + + # heuristic: + # intersect soft sets + soft_sets = { + p.soft_set for p in param_alias_class if p.soft_set is not None + } + soft_set = None + if soft_sets: + soft_set = Ranges.op_intersect_ranges(*soft_sets) + + # heuristic: + # get median + guesses = {p.guess for p in param_alias_class if p.guess is not None} + guess = None + if guesses: + guess = median(guesses) # type: ignore + + # heuristic: + # max tolerance guess + tolerance_guesses = { + p.tolerance_guess + for p in param_alias_class + if p.tolerance_guess is not None + } + tolerance_guess = None + if tolerance_guesses: + tolerance_guess = max(tolerance_guesses) + + likely_constrained = any(p.likely_constrained for p in param_alias_class) + + representative = Parameter( + units=unit_candidates.pop(), + within=within, + soft_set=soft_set, + guess=guess, + tolerance_guess=tolerance_guess, + likely_constrained=likely_constrained, + ) + repr_map.update({p: representative for p in param_alias_class}) + elif len(expr_alias_class) > 1: + dirty = True + representative = Parameter(units=unit_candidates.pop()) - # single domain - domain_candidates = {p.domain for p in alias_class} - if len(domain_candidates) > 1: - raise ValueError("Incompatible domains in alias class") - - # intersect ranges - within_ranges = {p.within for p in alias_class if p.within is not None} - within = None - if within_ranges: - within = Ranges.op_intersect_ranges(*within_ranges) - - # heuristic: - # intersect soft sets - soft_sets = {p.soft_set for p in alias_class if p.soft_set is not None} - soft_set = None - if soft_sets: - soft_set = Ranges.op_intersect_ranges(*soft_sets) - - # heuristic: - # get median - guesses = {p.guess for p in alias_class if p.guess is not None} - guess = None - if guesses: - guess = median(guesses) # type: ignore - - # heuristic: - # max tolerance guess - tolerance_guesses = { - p.tolerance_guess for p in alias_class if p.tolerance_guess is not None - } - tolerance_guess = None - if tolerance_guesses: - tolerance_guess = max(tolerance_guesses) - - likely_constrained = any(p.likely_constrained for p in alias_class) - - representative = Parameter( - units=unit_candidates.pop(), - within=within, - soft_set=soft_set, - guess=guess, - tolerance_guess=tolerance_guess, - likely_constrained=likely_constrained, - ) - repr_map.update({p: representative for p in alias_class}) + if len(expr_alias_class) > 0: + for e in expr_alias_class: + copy_expr = copy_operand_recursively(e, repr_map) + repr_map[e] = ( + representative # copy_expr TODO make sure this makes sense + ) + # TODO, if it doesn't have implicit constraints and it's operands don't aren't constraint, we can get rid of it + assert isinstance(copy_expr, Constrainable) + copy_expr.alias_is(representative) # replace parameters in expressions and predicates for expr in cast( @@ -301,12 +365,13 @@ def try_replace(o: ParameterOperatable.All): # filter alias class Is if isinstance(expr, Is): - if 
all(isinstance(o, Parameter) for o in expr.operands): - continue + continue - operands = [try_replace(o) for o in expr.operands] - new_expr = create_new_expr(expr, *operands) - repr_map[expr] = new_expr + assert all( + o in repr_map or not isinstance(o, ParameterOperatable) + for o in expr.operands + ) + repr_map[expr] = copy_operand_recursively(expr, repr_map) return repr_map, dirty @@ -713,6 +778,8 @@ def compress_arithmetic_expressions( }, dirty +# TODO move to expression? +# TODO recursive? def has_implicit_constraint(po: ParameterOperatable) -> bool: if isinstance(po, Parameter | Add | Subtract | Multiply | Power): # TODO others return False @@ -761,20 +828,6 @@ class DefaultSolver(Solver): timeout: int = 1000 def phase_one_no_guess_solving(self, g: Graph) -> None: - def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): - for s, d in repr_map.items(): - if isinstance(d, Expression): - if isinstance(s, Expression): - logger.info( - f"{s}[{s.operands}] -> {d}[{d.operands} | G {d.get_graph()!r}]" - ) - else: - logger.info(f"{s} -> {d}[{d.operands} | G {d.get_graph()!r}]") - else: - logger.info(f"{s} -> {d} | G {d.get_graph()!r}") - graphs = {p.get_graph() for p in repr_map.values()} - logger.info(f"{len(graphs)} graphs") - logger.info(f"Phase 1 Solving: No guesses {'-' * 80}") # strategies @@ -803,13 +856,13 @@ def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): logger.info("Phase 0 Solving: normalize graph") repr_map = normalize_graph(g) debug_print(repr_map) - graphs = {p.get_graph() for p in repr_map.values()} + graphs = unique(map(lambda p: p.get_graph(), repr_map.values()), lambda g: g()) # TODO assert all new graphs dirty = True iter = 0 - while dirty: + while dirty and len(graphs) > 0: iter += 1 logger.info(f"Iteration {iter}") logger.info("Phase 1 Solving: Alias classes") @@ -818,7 +871,9 @@ def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): alias_repr_map, alias_dirty = resolve_alias_classes(g) repr_map.update(alias_repr_map) debug_print(repr_map) - graphs = {p.get_graph() for p in repr_map.values()} + graphs = unique( + map(lambda p: p.get_graph(), repr_map.values()), lambda g: g() + ) # TODO assert all new graphs logger.info("Phase 2a Solving: Add/Mul associative expressions") @@ -829,7 +884,22 @@ def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): ) repr_map.update(assoc_add_mul_repr_map) debug_print(repr_map) - graphs = {p.get_graph() for p in repr_map.values()} + graphs = unique( + map(lambda p: p.get_graph(), repr_map.values()), lambda g: g() + ) + # TODO assert all new graphs + + logger.info("Phase 2a Solving: Add/Mul associative expressions") + repr_map = {} + for g in graphs: + assoc_add_mul_repr_map, assoc_add_mul_dirty = ( + compress_associative_add_mul(g) + ) + repr_map.update(assoc_add_mul_repr_map) + debug_print(repr_map) + graphs = unique( + map(lambda p: p.get_graph(), repr_map.values()), lambda g: g() + ) # TODO assert all new graphs logger.info("Phase 2b Solving: Subtract associative expressions") @@ -838,7 +908,9 @@ def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): assoc_sub_repr_map, assoc_sub_dirty = compress_associative_sub(g) repr_map.update(assoc_sub_repr_map) debug_print(repr_map) - graphs = {p.get_graph() for p in repr_map.values()} + graphs = unique( + map(lambda p: p.get_graph(), repr_map.values()), lambda g: g() + ) # TODO assert all new graphs logger.info("Phase 3 Solving: Arithmetic expressions") @@ -847,7 +919,9 @@ def 
debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): arith_repr_map, arith_dirty = compress_arithmetic_expressions(g) repr_map.update(arith_repr_map) debug_print(repr_map) - graphs = {p.get_graph() for p in repr_map.values()} + graphs = unique( + map(lambda p: p.get_graph(), repr_map.values()), lambda g: g() + ) # TODO assert all new graphs logger.info("Phase 4 Solving: Remove obvious tautologies") @@ -856,7 +930,9 @@ def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): tautology_repr_map, tautology_dirty = remove_obvious_tautologies(g) repr_map.update(tautology_repr_map) debug_print(repr_map) - graphs = {p.get_graph() for p in repr_map.values()} + graphs = unique( + map(lambda p: p.get_graph(), repr_map.values()), lambda g: g() + ) # TODO assert all new graphs logger.info("Phase 5 Solving: Subset of literals") @@ -865,7 +941,9 @@ def debug_print(repr_map: dict[ParameterOperatable, ParameterOperatable]): subset_repr_map, subset_dirty = subset_of_literal(g) repr_map.update(subset_repr_map) debug_print(repr_map) - graphs = {p.get_graph() for p in repr_map.values()} + graphs = unique( + map(lambda p: p.get_graph(), repr_map.values()), lambda g: g() + ) # TODO assert all new graphs dirty = ( diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 3f0a9a0a..f43c42ef 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -5,7 +5,7 @@ from collections.abc import Iterable from enum import Enum, auto from types import NotImplementedType -from typing import Any, Callable, Self +from typing import Any, Callable, Self, override from faebryk.core.core import Namespace from faebryk.core.graphinterface import GraphInterface @@ -54,6 +54,36 @@ def key(e: ParameterOperatable): return sorted(exprs, key=key, reverse=not ascending) + def _is_constrains(self) -> list["Is"]: + return [ + i for i in self.operated_on.get_connected_nodes(types=Is) if i.constrained + ] + + def obviously_eq(self, other: "ParameterOperatable.All") -> bool: + if self == other: + return True + if other in self._is_constrains(): + return True + return False + + def obviously_eq_hash(self) -> int: + if hasattr(self, "__hash"): + return self.__hash + + ises = [i for i in self._is_constrains() if not isinstance(i, Expression)] + + def keyfn(i: Is): + if isinstance(i, Parameter): + return 1 << 63 + return hash(i) % (1 << 63) + + sorted_ises = sorted(ises, key=keyfn) + if len(sorted_ises) > 0: + self.__hash = hash(sorted_ises[0]) + else: + self.__hash = id(self) + return self.__hash + def operation_add(self, other: NumberLike): return Add(self, other) @@ -283,6 +313,18 @@ def if_then_else( # ) -> None: ... +def obviously_eq(a: ParameterOperatable.All, b: ParameterOperatable.All) -> bool: + if a == b: + return True + if isinstance(a, ParameterOperatable): + return a.obviously_eq(b) + elif isinstance(b, ParameterOperatable): + return b.obviously_eq(a) + return False + + +# TODO mixes two things, those that a constraining predicate can be called on, +# and the predicate, which can have it's constrained be set?? 
class Constrainable: type All = ParameterOperatable.All type Sets = ParameterOperatable.Sets @@ -339,7 +381,7 @@ class Expression(ParameterOperatable): def __init__(self, *operands: ParameterOperatable.All): super().__init__() - self.operands = operands + self.operands = tuple(operands) self.operatable_operands = { op for op in operands if isinstance(op, ParameterOperatable) } @@ -359,7 +401,24 @@ def depth(self) -> int: ) return self._depth + # TODO caching + @override + def obviously_eq(self, other: ParameterOperatable.All) -> bool: + if super().obviously_eq(other): + return True + if type(self) is type(other): + for s, o in zip(self.operands, other.operands): + if not obviously_eq(s, o): + return False + return True + return False + def obviously_eq_hash(self) -> int: + return hash((type(self), self.operands)) + + +# TODO are any expressions not constrainable? +# parameters are contstrainable, too, so all parameter-operatables are constrainable? @abstract class ConstrainableExpression(Expression, Constrainable): def __init__(self, *operands: ParameterOperatable.All): @@ -382,7 +441,6 @@ def __init__(self, *operands: ParameterOperatable.NumberLike): if isinstance(param, Parameter) ): raise ValueError("parameters must have domain Numbers or ESeries") - self.operands = operands @abstract @@ -395,10 +453,33 @@ def __init__(self, *operands): raise ValueError("All operands must have compatible units") +def _associative_obviously_eq(self: Expression, other: Expression) -> bool: + remaining = list(other.operands) + for op in self.operands: + for r in remaining: + if obviously_eq(op, r): + remaining.remove(r) + break + return not remaining + + class Add(Additive): def __init__(self, *operands): super().__init__(*operands) + # TODO caching + @override + def obviously_eq(self, other: ParameterOperatable.All) -> bool: + if ParameterOperatable.obviously_eq(self, other): + return True + if isinstance(other, Add): + return _associative_obviously_eq(self, other) + return False + + def obviously_eq_hash(self) -> int: + op_hash = sum(hash(op) for op in self.operands) + return hash((type(self), op_hash)) + class Subtract(Additive): def __init__(self, minuend, subtrahend): @@ -413,6 +494,19 @@ def __init__(self, *operands): for u in units[1:]: self.units = cast_assert(Unit, self.units * u) + # TODO caching + @override + def obviously_eq(self, other: ParameterOperatable.All) -> bool: + if ParameterOperatable.obviously_eq(self, other): + return True + if isinstance(other, Add): + return _associative_obviously_eq(self, other) + return False + + def obviously_eq_hash(self) -> int: + op_hash = sum(hash(op) for op in self.operands) + return hash((type(self), op_hash)) + class Divide(Arithmetic): def __init__(self, numerator, denominator): @@ -498,7 +592,6 @@ def __init__(self, *operands): if isinstance(param, Parameter) ): raise ValueError("parameters must have domain Boolean without a unit") - self.operands = operands class And(Logic): @@ -607,7 +700,6 @@ def __init__(self, left, right): r_units = HasUnit.get_units_or_dimensionless(right) if not l_units.is_compatible_with(r_units): raise ValueError("operands must have compatible units") - self.operands = [left, right] def __bool__(self): raise ValueError("Predicate cannot be converted to bool") diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index f8cd3f80..a7de2aa2 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -111,6 +111,21 @@ def test_subset_of_literal(): solver.phase_one_no_guess_solving(G) +def 
test_alias_classes(): + p0, p1, p2, p3, p4 = ( + Parameter(units=dimensionless, within=Range(0, i)) for i in range(5) + ) + p0.alias_is(p1) + addition = p2 + p3 + p1.alias_is(addition) + addition2 = p3 + p2 + p4.alias_is(addition2) + + G = p0.get_graph() + solver = DefaultSolver() + solver.phase_one_no_guess_solving(G) + + def test_solve_realworld(): app = F.RP2040() solver = DefaultSolver() @@ -173,7 +188,7 @@ def test_visualize_inspect_app(): # if run in jupyter notebook import sys - func = test_subset_of_literal + func = test_solve_realworld if "ipykernel" in sys.modules: func()
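
Appended illustration (not part of the patch series): the last patch speeds up alias-class detection by bucketing candidates on an order-insensitive hash (obviously_eq_hash) and only running the pairwise structural check (obviously_eq) inside each bucket, mirroring _associative_obviously_eq for Add/Multiply. The minimal, self-contained Python sketch below shows that bucketing idea with hypothetical stand-in classes (Sym, Add2); it does not use faebryk's API and the names are illustrative only.

# Standalone sketch of the hash-bucketed alias search used by
# parameter_ops_alias_classes. Candidates are grouped by an
# order-insensitive hash first; the quadratic structural comparison
# then only runs inside each bucket. Sym and Add2 are hypothetical
# stand-ins for Parameter and Add.
from collections import defaultdict
from dataclasses import dataclass


@dataclass(frozen=True)
class Sym:
    name: str

    def obviously_eq(self, other) -> bool:
        return self == other


@dataclass(frozen=True)
class Add2:
    operands: tuple

    def obviously_eq_hash(self) -> int:
        # summing operand hashes keeps the key commutative, so
        # Add2((a, b)) and Add2((b, a)) land in the same bucket
        return hash((Add2, sum(hash(op) for op in self.operands)))

    def obviously_eq(self, other) -> bool:
        # order-insensitive structural comparison, analogous to
        # _associative_obviously_eq in the patch
        if not isinstance(other, Add2):
            return False
        remaining = list(other.operands)
        for op in self.operands:
            for r in remaining:
                if op.obviously_eq(r):
                    remaining.remove(r)
                    break
        return not remaining


a, b = Sym("a"), Sym("b")
exprs = [Add2((a, b)), Add2((b, a)), Add2((a, a))]

buckets: dict[int, list[Add2]] = defaultdict(list)
for e in exprs:
    buckets[e.obviously_eq_hash()].append(e)

for candidates in buckets.values():
    for i, p in enumerate(candidates):
        for q in candidates[:i]:
            if p.obviously_eq(q):
                print(f"alias candidates: {p} and {q}")

A brief note on the design choice assumed here: a commutative hash can collide for operand multisets that are not structurally equal, but a false bucket collision only costs the confirming obviously_eq call, while equal-but-reordered expressions are guaranteed to share a bucket.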