diff --git a/.gitmodules b/.gitmodules index e69de29b..ca82e7a7 100644 --- a/.gitmodules +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "src/faebryk/core/cpp/pybind11"] + path = src/faebryk/core/cpp/pybind11 + url = https://github.com/pybind/pybind11.git diff --git a/poetry.lock b/poetry.lock index 705ed03d..0044996b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -80,6 +80,17 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + [[package]] name = "certifi" version = "2024.8.30" @@ -325,6 +336,88 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "dash" +version = "2.18.1" +description = "A Python framework for building reactive web-apps. Developed by Plotly." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "dash-2.18.1-py3-none-any.whl", hash = "sha256:07c4513bb5f79a4b936847a0b49afc21dbd4b001ff77ea78d4d836043e211a07"}, + {file = "dash-2.18.1.tar.gz", hash = "sha256:ffdf89690d734f6851ef1cb344222826ffb11ad2214ab9172668bf8aadd75d12"}, +] + +[package.dependencies] +dash-core-components = "2.0.0" +dash-html-components = "2.0.0" +dash-table = "5.0.0" +Flask = ">=1.0.4,<3.1" +importlib-metadata = "*" +nest-asyncio = "*" +plotly = ">=5.0.0" +requests = "*" +retrying = "*" +setuptools = "*" +typing-extensions = ">=4.1.1" +Werkzeug = "<3.1" + +[package.extras] +celery = ["celery[redis] (>=5.1.2)", "redis (>=3.5.3)"] +ci = ["black (==22.3.0)", "dash-dangerously-set-inner-html", "dash-flow-example (==0.0.5)", "flake8 (==7.0.0)", "flaky (==3.8.1)", "flask-talisman (==1.0.0)", "jupyterlab (<4.0.0)", "mimesis (<=11.1.0)", "mock (==4.0.3)", "numpy (<=1.26.3)", "openpyxl", "orjson (==3.10.3)", "pandas (>=1.4.0)", "pyarrow", "pylint (==3.0.3)", "pytest-mock", "pytest-rerunfailures", "pytest-sugar (==0.9.6)", "pyzmq (==25.1.2)", "xlrd (>=2.0.1)"] +compress = ["flask-compress"] +dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"] +diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"] +testing = ["beautifulsoup4 (>=4.8.2)", "cryptography", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"] + +[[package]] +name = "dash-core-components" +version = "2.0.0" +description = "Core component suite for Dash" +optional = false +python-versions = "*" +files = [ + {file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"}, + {file = "dash_core_components-2.0.0.tar.gz", hash = 
"sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"}, +] + +[[package]] +name = "dash-cytoscape" +version = "1.0.2" +description = "A Component Library for Dash aimed at facilitating network visualization in Python, wrapped around Cytoscape.js" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dash_cytoscape-1.0.2.tar.gz", hash = "sha256:a61019d2184d63a2b3b5c06d056d3b867a04223a674cc3c7cf900a561a9a59aa"}, +] + +[package.dependencies] +dash = "*" + +[package.extras] +leaflet = ["dash-leaflet (>=1.0.16rc3)"] + +[[package]] +name = "dash-html-components" +version = "2.0.0" +description = "Vanilla HTML components for Dash" +optional = false +python-versions = "*" +files = [ + {file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"}, + {file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"}, +] + +[[package]] +name = "dash-table" +version = "5.0.0" +description = "Dash table" +optional = false +python-versions = "*" +files = [ + {file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"}, + {file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"}, +] + [[package]] name = "dataclasses-json" version = "0.6.7" @@ -402,6 +495,28 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2. testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] typing = ["typing-extensions (>=4.12.2)"] +[[package]] +name = "flask" +version = "3.0.3" +description = "A simple framework for building complex web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + [[package]] name = "flexcache" version = "0.3" @@ -553,6 +668,29 @@ files = [ {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -589,6 +727,34 @@ files = [ [package.extras] colors = ["colorama (>=0.4.6)"] +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + [[package]] name = "kicadcliwrapper" version = "1.0.1" @@ -751,6 +917,75 @@ profiling = ["gprof2dot"] rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + [[package]] name = "marshmallow" version = "3.22.0" @@ -866,6 +1101,17 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = 
"nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + [[package]] name = "networkx" version = "3.3" @@ -1143,6 +1389,21 @@ docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-a test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] +[[package]] +name = "plotly" +version = "5.24.1" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "plotly-5.24.1-py3-none-any.whl", hash = "sha256:f67073a1e637eb0dc3e46324d9d51e2fe76e9727c892dde64ddf1e1b51f29089"}, + {file = "plotly-5.24.1.tar.gz", hash = "sha256:dbc8ac8339d248a4bcc36e08a5659bacfe1b079390b8953533f4eb22169b4bae"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + [[package]] name = "pluggy" version = "1.5.0" @@ -1570,6 +1831,20 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "retrying" +version = "1.3.4" +description = "Retrying" +optional = false +python-versions = "*" +files = [ + {file = "retrying-1.3.4-py3-none-any.whl", hash = "sha256:8cc4d43cb8e1125e0ff3344e9de678fefd85db3b750b81b2240dc0183af37b35"}, + {file = "retrying-1.3.4.tar.gz", hash = "sha256:345da8c5765bd982b1d1915deb9102fd3d1f7ad16bd84a9700b85f64d24e8f3e"}, +] + +[package.dependencies] +six = ">=1.7.0" + [[package]] name = "rich" version = "13.8.1" @@ -1657,6 +1932,26 @@ dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodest doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design 
(>=0.4.0)"] test = ["Cython", "array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +[[package]] +name = "setuptools" +version = "75.1.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] + [[package]] name 
= "sexpdata" version = "1.0.2" @@ -1748,6 +2043,21 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "tenacity" +version = "9.0.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, + {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + [[package]] name = "tortoise-orm" version = "0.21.6" @@ -1903,6 +2213,23 @@ objprint = ">0.1.3" [package.extras] full = ["orjson"] +[[package]] +name = "werkzeug" +version = "3.0.4" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, + {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + [[package]] name = "wrapt" version = "1.16.0" @@ -1982,7 +2309,26 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] +[[package]] +name = "zipp" +version = "3.20.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, +] + +[package.extras] +check = 
["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] lock-version = "2.0" python-versions = "^3.12,<3.13" -content-hash = "36c2d4186a53f637873bf685e55c67d0ac689538136007c3d29f100e6f1ef5f6" +content-hash = "15a099ee5938e32a83080860c6a67ae73d597e614f4d72fc33b79b0967597562" diff --git a/pyproject.toml b/pyproject.toml index 00c948ed..6a8152f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,8 @@ pytest = ">=7.1.3,<9.0.0" viztracer = "^0.16.3" pyinstrument = "^4.7.1" gprof2dot = "^2024.6.6" +dash = "^2.18.1" +dash-cytoscape = "^1.0.2" [tool.pytest.ini_options] addopts = ["--import-mode=importlib"] diff --git a/src/faebryk/core/bfs.py b/src/faebryk/core/bfs.py new file mode 100644 index 00000000..e8c40c89 --- /dev/null +++ b/src/faebryk/core/bfs.py @@ -0,0 +1,166 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT +import itertools +import logging +from collections import defaultdict, deque +from typing import Any, Generator, Iterable, Self + +from faebryk.core.graphinterface import GraphInterface +from faebryk.core.link import Link +from faebryk.libs.util import DefaultFactoryDict + +logger = logging.getLogger(__name__) + + +class BFSPath: + link_cache: dict[tuple[GraphInterface, GraphInterface], Link] = DefaultFactoryDict( + lambda x: x[0].is_connected_to(x[1]) + ) + + def __init__( + self, + path: list[GraphInterface], + visited_ref: dict[GraphInterface, Any], + ) -> None: + self.path: list[GraphInterface] = path + self.visited_ref: dict[GraphInterface, Any] = visited_ref + self.confidence = 1.0 + self.filtered = False + self.path_data: dict[str, 
Any] = {} + self.stop = False + + @property + def last(self) -> GraphInterface: + return self.path[-1] + + def get_link(self, edge: tuple[GraphInterface, GraphInterface]) -> Link: + return self.link_cache[edge] + + @property + def last_edge(self) -> tuple[GraphInterface, GraphInterface] | None: + if len(self.path) < 2: + return None + return self.path[-2], self.path[-1] + + @property + def first(self) -> GraphInterface: + return self.path[0] + + @classmethod + def from_base(cls, base: "BFSPath", node: GraphInterface): + out = cls( + base.path + [node], + visited_ref=base.visited_ref, + ) + out.confidence = base.confidence + out.filtered = base.filtered + out.path_data = base.path_data + out.stop = base.stop + return out + + def __add__(self, node: GraphInterface) -> Self: + return self.from_base(self, node) + + def __contains__(self, node: GraphInterface) -> bool: + return node in self.path + + @property + def edges(self) -> Iterable[tuple[GraphInterface, GraphInterface]]: + return itertools.pairwise(self.path) + + def __len__(self) -> int: + return len(self.path) + + def __getitem__(self, idx: int) -> GraphInterface: + return self.path[idx] + + # The partial visit stuff is pretty weird, let me try to explain: + # If a node is not fully visited through a path, it means that there might still + # be paths that lead to this node that are more interesting. Thus we give the caller + # the chance to explore those other paths. + # If at a later point the caller discovers that the current path is fully visited + # after all, it can mark it. 
+ @property + def strong(self) -> bool: + return self.confidence == 1.0 + + def mark_visited(self): + # TODO + for node in self.path: + self.visited_ref[node] = [self] + # self.visited_ref.update(self.path) + + +def insert_sorted(target: deque | list, item, key, asc=True): + for i, p in enumerate(target): + if (asc and key(item) < key(p)) or (not asc and key(item) > key(p)): + target.insert(i, item) + return i + target.append(item) + return len(target) - 1 + + +def bfs_visit( + roots: Iterable[GraphInterface], +) -> Generator[BFSPath, None, None]: + """ + Generic BFS (not depending on Graph) + Returns all visited nodes. + """ + + visited: defaultdict[GraphInterface, list[BFSPath]] = defaultdict(list) + open_path_queue: deque[BFSPath] = deque() + + def handle_path(path: BFSPath): + if path.stop: + open_path_queue.clear() + return + + if path.filtered: + return + + # old_paths = visited[path.last] + + # promise_cnt = path.path_data.get("promise_depth", 0) + # for p in old_paths: + # p_cnt = p.path_data.get("promise_depth", 0) + # if promise_cnt > p_cnt: + # print("Pruned") + # return + + # def metric(x: BFSPath): + # # promise_cnt = x.path_data.get("promise_depth", 0) + # return (1 - x.confidence), len(x) + + # insert_sorted(old_paths, path, key=metric) + visited[path.last] + if path.strong: + path.mark_visited() + + # insert_sorted(open_path_queue, path, key=lambda x: (len(x), (1 - x.confidence))) + open_path_queue.append(path) + + # yield identity paths + for root in roots: + path = BFSPath([root], visited) + yield path + handle_path(path) + + while open_path_queue: + open_path = open_path_queue.popleft() + + edges = set(open_path.last.edges) + for neighbour in edges: + # visited + if neighbour in visited: + # complete path + if any(x.strong for x in visited[neighbour]): + continue + # visited in path (loop) + if neighbour in open_path: + continue + + new_path = open_path + neighbour + + yield new_path + handle_path(new_path) diff --git 
a/src/faebryk/core/cpp/.clang-format b/src/faebryk/core/cpp/.clang-format new file mode 100644 index 00000000..b50527d5 --- /dev/null +++ b/src/faebryk/core/cpp/.clang-format @@ -0,0 +1,8 @@ +Language: Cpp +IndentWidth: 4 +UseTab: Never +BreakConstructorInitializers: BeforeComma +ConstructorInitializerIndentWidth: 2 +AllowShortFunctionsOnASingleLine: None +AllowShortLambdasOnASingleLine: None +ColumnLimit: 89 diff --git a/src/faebryk/core/cpp/CMakeLists.txt b/src/faebryk/core/cpp/CMakeLists.txt new file mode 100644 index 00000000..045da3d5 --- /dev/null +++ b/src/faebryk/core/cpp/CMakeLists.txt @@ -0,0 +1,21 @@ +cmake_minimum_required(VERSION 3.4...3.18) +project(faebryk_core_cpp) + +# Specify the Python version you're using +set(PYBIND11_PYTHON_VERSION 3.12) + +set(CMAKE_CXX_STANDARD 20) +message(STATUS "C++ version: ${CMAKE_CXX_COMPILER_VERSION}") + +# turn on optimization +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O2") + +# TODO remove +# enable debug symbols +set(CMAKE_BUILD_TYPE Debug) + +add_subdirectory(pybind11) + +include_directories(${CMAKE_SOURCE_DIR}/include) + +pybind11_add_module(faebryk_core_cpp src/main.cpp) diff --git a/src/faebryk/core/cpp/__init__.py b/src/faebryk/core/cpp/__init__.py new file mode 100644 index 00000000..dec7e025 --- /dev/null +++ b/src/faebryk/core/cpp/__init__.py @@ -0,0 +1,30 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + +import pathlib +import subprocess +import sys + +print(f"Python version: {sys.version}") +print(f"Python executable: {sys.executable}") + +cpp_dir = pathlib.Path(__file__).parent +pybind11_dir = cpp_dir / "pybind11" +build_dir = cpp_dir / "build" + +# check if pybind11 is available +if not pybind11_dir.exists(): + raise RuntimeError("pybind11 not found") + +# recompile +# subprocess.run(["rm", "-rf", str(build_dir)], check=True) +subprocess.run(["cmake", "-S", str(cpp_dir), "-B", str(build_dir)], check=True) +subprocess.run(["cmake", "--build", str(build_dir)], check=True) + 
+if not build_dir.exists(): + raise RuntimeError("build directory not found") + +# Add the build directory to Python path +sys.path.append(str(cpp_dir / "build")) + +import faebryk_core_cpp # noqa: E402 diff --git a/src/faebryk/core/cpp/graph.py b/src/faebryk/core/cpp/graph.py new file mode 100644 index 00000000..4150912e --- /dev/null +++ b/src/faebryk/core/cpp/graph.py @@ -0,0 +1,214 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + + +import ctypes +import logging +from itertools import pairwise +from typing import Any, Callable, Iterable + +from faebryk.core.cpp import faebryk_core_cpp as cpp +from faebryk.core.graphinterface import ( + Graph, + GraphInterface, + GraphInterfaceHierarchical, + GraphInterfaceSelf, +) +from faebryk.core.link import ( + Link, + LinkDirect, + LinkDirectConditional, + LinkDirectDerived, + LinkNamedParent, + LinkParent, + LinkSibling, +) +from faebryk.core.module import Module +from faebryk.core.moduleinterface import ( + GraphInterfaceHierarchicalModuleSpecial, + GraphInterfaceModuleConnection, + ModuleInterface, +) +from faebryk.core.node import GraphInterfaceHierarchicalNode, Node +from faebryk.libs.util import DefaultFactoryDict, NotNone, cast_assert + +logger = logging.getLogger(__name__) + + +class CGraph: + _cache: "CGraph | None" = None + _cache_edge_cnt: int | None = None + _cache_ref: Graph | None = None + + # TODO use other path + class Path: + def __init__(self, cpath: cpp.Path): + self.path = [CGraph.gif_py(cgif) for cgif in cpath.gifs] + + @property + def last(self) -> GraphInterface: + return self.path[-1] + + @property + def edges(self) -> Iterable[tuple[GraphInterface, GraphInterface]]: + return pairwise(self.path) + + def __new__(cls, g: Graph): + if ( + cls._cache is None + or cls._cache_edge_cnt != g.edge_cnt + or cls._cache_ref is not g + ): + cls._cache = super().__new__(cls) + cls._cache.setup(g) + cls._cache_edge_cnt = g.edge_cnt + cls._cache_ref = g + return cls._cache + + def 
setup(self, g: Graph): + self.cg = cpp.Graph() + self._gif_c: dict[GraphInterface, cpp.GraphInterface] = DefaultFactoryDict( + self.create_cgif_from_gif + ) + self.link_c: dict[Link, cpp.Link] = DefaultFactoryDict( + self.create_clink_from_link + ) + self.node_c: dict[Node, cpp.Node] = DefaultFactoryDict( + self.create_cnode_from_node + ) + + edges = [ + (self.get_gif(src), self.get_gif(dst), self.link_c[link]) + for src, dst, link in g.edges + # TODO remove, preoptimization that only works for mifs + if isinstance(src.node, ModuleInterface) + and isinstance(dst.node, ModuleInterface) + and type(src) + in { + GraphInterfaceSelf, + GraphInterfaceHierarchicalModuleSpecial, + GraphInterfaceModuleConnection, + GraphInterfaceHierarchicalNode, + } + ] + + logger.info( + f"Converting {g}[{id(g):x}] -> V: {len(self._gif_c)} E: {len(edges)}" + ) + self.cg.add_edges(edges) + + def get_gif(self, gif: GraphInterface): + c_gif = self._gif_c[gif] + assert gif.node is not None + c_gif.set_node(self.node_c[gif.node]) + return c_gif + + @staticmethod + def get_obj[T: Any](typ: type[T], ptr: int) -> T: + return cast_assert(typ, ctypes.cast(ptr, ctypes.py_object).value) + + @staticmethod + def gif_py(cgif: cpp.GraphInterface) -> GraphInterface: + return CGraph.get_obj(GraphInterface, int(cgif.py_ptr)) + + def create_cnode_from_node(self, node: Node) -> cpp.Node: + if type(node) is Node: + node_type = cpp.NodeType.GENERIC + elif isinstance(node, Module): + node_type = cpp.NodeType.MODULE + elif isinstance(node, ModuleInterface): + node_type = cpp.NodeType.MODULEINTERFACE + else: + node_type = cpp.NodeType.OTHER + + cgif_self = self._gif_c[node.self_gif] + cgif_parent = self._gif_c[node.parent] + cgif_children = self._gif_c[node.children] + + cnode = cpp.Node( + node.get_name(accept_no_parent=True), + type(node).__name__, + node_type, + id(node), + cgif_self, + cgif_parent, + cgif_children, + ) + + cgif_self.set_node(cnode) + cgif_parent.set_node(cnode) + cgif_children.set_node(cnode) 
+ + return cnode + + def create_cgif_from_gif(self, gif: GraphInterface) -> cpp.GraphInterface: + cgif_type = { + GraphInterface: cpp.GraphInterfaceType.GENERIC, + GraphInterfaceHierarchical: cpp.GraphInterfaceType.HIERARCHICAL, + GraphInterfaceSelf: cpp.GraphInterfaceType.SELF, + GraphInterfaceHierarchicalNode: cpp.GraphInterfaceType.HIERARCHICAL_NODE, + GraphInterfaceHierarchicalModuleSpecial: cpp.GraphInterfaceType.HIERARCHICAL_MODULE_SPECIAL, + GraphInterfaceModuleConnection: cpp.GraphInterfaceType.MODULE_CONNECTION, + }.get(type(gif), cpp.GraphInterfaceType.OTHER) + + cgif = cpp.GraphInterface( + cgif_type, + id(gif), + self.cg, + ) + + if isinstance(gif, GraphInterfaceHierarchical): + # TODO this happens for unconnected hierarchical gifs + # e.g specializes of moduleinterfaces + # need a better way to handle, for now we just dont mark them hierarchical + # which is valid in the context of pathfinding + if not gif.is_parent and gif.get_parent() is None: + return cgif + cgif.make_hierarchical( + gif.is_parent, + NotNone(gif.get_parent())[1] if not gif.is_parent else "", + ) + + return cgif + + @staticmethod + def create_clink_from_link(link: Link) -> cpp.Link: + clink_type = { + Link: cpp.LinkType.GENERIC, + LinkSibling: cpp.LinkType.SIBLING, + LinkParent: cpp.LinkType.PARENT, + LinkNamedParent: cpp.LinkType.NAMED_PARENT, + LinkDirect: cpp.LinkType.DIRECT, + LinkDirectConditional: cpp.LinkType.DIRECT_CONDITIONAL, + ModuleInterface.LinkDirectShallow: cpp.LinkType.DIRECT_CONDITIONAL_SHALLOW, + LinkDirectDerived: cpp.LinkType.DIRECT_DERIVED, + }.get(type(link), cpp.LinkType.OTHER) + + filters = [] + filter_func: Callable[[list[GraphInterface]], bool] | None = None + if isinstance(link, ModuleInterface.LinkDirectShallow): + clink_type = cpp.LinkType.DIRECT_CONDITIONAL_SHALLOW + filters = [t.__name__ for t in link._children_types] + elif isinstance(link, LinkDirectConditional): + + def _filter_func(cpath: list[cpp.GraphInterface]) -> bool: + path = 
[CGraph.gif_py(cgif) for cgif in cpath] + return link.is_filtered(path) == LinkDirectConditional.FilterResult.PASS + + filter_func = _filter_func + + clink_type = cpp.LinkType.DIRECT_CONDITIONAL + + if isinstance(link, LinkNamedParent): + clink_type = cpp.LinkType.NAMED_PARENT + + if clink_type == cpp.LinkType.OTHER: + raise NotImplementedError(f"Link type not implemented in C++: {link}") + + return cpp.Link(clink_type, id(link), filters, filter_func) + + def find_paths(self, src: ModuleInterface, *dst: ModuleInterface): + cpaths, counters = cpp.find_paths( + self.cg, self.node_c[src], [self.node_c[d] for d in dst] + ) + return [self.Path(cpath) for cpath in cpaths], counters diff --git a/src/faebryk/core/cpp/include/graph.hpp b/src/faebryk/core/cpp/include/graph.hpp new file mode 100644 index 00000000..79e91c49 --- /dev/null +++ b/src/faebryk/core/cpp/include/graph.hpp @@ -0,0 +1,240 @@ +#pragma once + +#include "util.hpp" +#include +#include +#include + +enum NodeType { + N_GENERIC, + N_MODULE, + N_MODULEINTERFACE, + N_OTHER, +}; +enum GraphInterfaceType { + G_GENERIC, + G_HIERARCHICAL, + G_SELF, + G_HIERARCHICAL_NODE, + G_HIERARCHICAL_MODULE_SPECIAL, + G_MODULE_CONNECTION, + G_OTHER, +}; +enum LinkType { + L_GENERIC, + L_SIBLING, + L_PARENT, + L_NAMED_PARENT, + L_DIRECT, + L_DIRECT_CONDITIONAL, + L_DIRECT_CONDITIONAL_SHALLOW, + L_DIRECT_DERIVED, + L_OTHER, +}; + +using NodeGranularType = std::string; +struct GraphInterface; +class Graph; +class Node; + +struct Link { + LinkType type; + uint64_t py_ptr; + + // LinkDirectConditionalShallow + std::vector shallow_filter; + bool is_filtered(const Node &node) const; + + // LinkDirectConditional + std::optional &)>> + filter; +}; + +struct Node { + std::string name; + NodeGranularType granular_type; + NodeType type; + uint64_t py_ptr; + + const GraphInterface &self_gif; + const GraphInterface &parent; + const GraphInterface &children; + + bool is_instance(NodeType type) const { + return this->type == type; + } + + bool 
operator==(const Node &other) const { + return this->py_ptr == other.py_ptr; + } + + std::unordered_set get_children(NodeType type, + bool direct_only = false) const; +}; + +struct GraphInterface { + Node *node = nullptr; + GraphInterfaceType type; + uint64_t py_ptr; + Graph &graph; + size_t v_i = 0; + + GraphInterface(GraphInterfaceType type, uint64_t py_ptr, Graph &graph) + : type(type) + , py_ptr(py_ptr) + , graph(graph) { + } + + void set_node(Node *node) { + assert(node != nullptr); + this->node = node; + } + Node &get_node() const { + assert(node != nullptr); + return *node; + } + + bool is_instance(GraphInterfaceType type) const { + return this->type == type; + } + + const std::vector &edges() const; + + std::optional is_connected(const GraphInterface &to) const; + + // GraphInterfaceHierarchical stuff + bool is_parent = false; + std::optional parent_name = {}; + void make_hierarchical(bool is_parent, std::string parent_name) { + this->is_parent = is_parent; + if (!is_parent) { + this->parent_name = parent_name; + } + } + bool is_hierarchical() const { + return this->type == GraphInterfaceType::G_HIERARCHICAL || + this->type == GraphInterfaceType::G_HIERARCHICAL_NODE; + } + bool is_uplink(const GraphInterface &to) const { + return this->is_hierarchical() && to.type == this->type && !this->is_parent && + to.is_parent; + } + bool is_downlink(const GraphInterface &to) const { + return this->is_hierarchical() && to.type == this->type && this->is_parent && + !to.is_parent; + } + + // override equality + bool operator==(const GraphInterface &other) const { + return this->py_ptr == other.py_ptr; + } +}; + +class Graph { + size_t v_i = 0; + + public: + std::unordered_set v; + std::vector> e; + std::unordered_map> + e_cache = {}; + std::unordered_map> + e_cache_simple = {}; + + public: + const std::vector & + edges_simple(const GraphInterface *v) const { + // Never should reach a GIF that has no edges + auto edges = e_cache_simple.find(v); + assert(edges != 
e_cache_simple.end()); + return edges->second; + } + + void + add_edges(std::vector> &e) { + for (auto &edge : e) { + auto &from = std::get<0>(edge); + auto &to = std::get<1>(edge); + auto &link = std::get<2>(edge); + add_edge(from, to, link); + } + } + + void add_edge(GraphInterface &from, GraphInterface &to, Link &link) { + e.push_back(std::make_tuple(&from, &to, &link)); + e_cache[&from][&to] = &link; + e_cache[&to][&from] = &link; + e_cache_simple[&from].push_back(&to); + e_cache_simple[&to].push_back(&from); + + if (v.insert(&from).second) { + from.v_i = v_i++; + } + if (v.insert(&to).second) { + to.v_i = v_i++; + } + } + + std::optional is_connected(const GraphInterface &from, + const GraphInterface &to) const; +}; + +struct Path { + std::vector gifs; + + Path(std::vector gifs) + : gifs(gifs) { + } +}; + +inline std::optional +GraphInterface::is_connected(const GraphInterface &to) const { + return graph.is_connected(*this, to); +} + +inline std::optional Graph::is_connected(const GraphInterface &from, + const GraphInterface &to) const { + auto edges = e_cache.find(&from); + if (edges == e_cache.end()) { + return {}; + } + auto edge = edges->second.find(&to); + if (edge == edges->second.end()) { + return {}; + } + auto &link = edge->second; + return link; +} + +inline const std::vector &GraphInterface::edges() const { + return graph.edges_simple(this); +} + +inline bool Link::is_filtered(const Node &node) const { + return std::find(shallow_filter.begin(), shallow_filter.end(), node.granular_type) != + shallow_filter.end(); +} + +inline std::unordered_set Node::get_children(NodeType type, + bool direct_only) const { + if (!direct_only) { + throw std::runtime_error("Not implemented"); + } + + auto &children_and_self = this->children.edges(); + std::unordered_set children; + for (auto &gif : children_and_self) { + if (gif->type != GraphInterfaceType::G_HIERARCHICAL_NODE) { + continue; + } + + if (gif->get_node().type != type) { + continue; + } + + 
children.insert(&gif->get_node()); + } + + return children; +} diff --git a/src/faebryk/core/cpp/include/pathfinder.hpp b/src/faebryk/core/cpp/include/pathfinder.hpp new file mode 100644 index 00000000..3dc7ac5f --- /dev/null +++ b/src/faebryk/core/cpp/include/pathfinder.hpp @@ -0,0 +1,848 @@ +#pragma once + +#include "graph.hpp" +#include + +inline bool INDIV_MEASURE = true; +inline uint32_t MAX_PATHS = 1 << 31; + +struct Edge { + const GraphInterface &from; + const GraphInterface &to; +}; + +struct PathStackElement { + NodeGranularType parent_type; + NodeGranularType child_type; + const GraphInterface &parent_gif; + std::string name; + bool up; + + std::string str() const { + std::stringstream ss; + if (up) { + ss << child_type << "->" << parent_type << "." << name; + } else { + ss << parent_type << "." << name << "->" << child_type; + } + return ss.str(); + } +}; + +struct UnresolvedStackElement { + PathStackElement elem; + bool promise; + + bool match(PathStackElement &other) { + return elem.parent_type == other.parent_type && + elem.child_type == other.child_type && elem.name == other.name && + elem.up != other.up; + } + + std::string str() const { + std::stringstream ss; + ss << elem.str(); + if (promise) { + ss << " promise"; + } + return ss.str(); + } +}; + +using PathStack = std::vector; +using UnresolvedStack = std::vector; + +struct PathData { + UnresolvedStack unresolved_stack; + PathStack promise_stack; +}; + +class BFSPath { + std::vector path; + std::shared_ptr path_data; + + public: + double confidence = 1.0; + bool filtered = false; + bool stop = false; + + BFSPath(const GraphInterface &path_head) + : path(std::vector{&path_head}) + , path_data(std::make_shared()) { + } + + // copy constructor + BFSPath(const BFSPath &other) + : path(other.path) + // copy path_data + , path_data(std::make_shared(*other.path_data)) + , confidence(other.confidence) + , filtered(other.filtered) + , stop(other.stop) { + } + + BFSPath(const BFSPath &other, const 
GraphInterface &new_head) + : path(other.path) , path_data(other.path_data) , confidence(other.confidence) , filtered(other.filtered) , stop(other.stop) { path.push_back(&new_head); } + + PathData &get_path_data_mut() { + if (path_data.use_count() != 1) { + PathData new_data = *path_data; + path_data = std::make_shared(new_data); + } + return *path_data; + } + + PathData &get_path_data() const { + return *path_data; + } + + bool strong() const { + return confidence == 1.0; + } + const Link &get_link(Edge edge) const { + auto out = edge.from.is_connected(edge.to); + assert(out); + const Link &link = **out; + return link; + } + + std::optional last_edge() const { + if (path.size() < 2) { + return {}; + } + return Edge{*path[path.size() - 2], *path.back()}; + } + + std::optional> + last_tri_edge() const { + if (path.size() < 3) { + return {}; + } + return std::make_tuple(path[path.size() - 3], path[path.size() - 2], + path.back()); + } + + BFSPath operator+(const GraphInterface &gif) { + return BFSPath(*this, gif); + } + + // vector interface + const GraphInterface &last() const { + return *path.back(); + } + const GraphInterface &first() const { + return *path.front(); + } + const GraphInterface &operator[](int idx) const { + return *path[idx]; + } + + size_t size() const { + return path.size(); + } + bool contains(const GraphInterface &gif) const { + return std::find(path.begin(), path.end(), &gif) != path.end(); + } + + void iterate_edges(std::function visitor) const { + for (size_t i = 1; i < path.size(); i++) { + Edge edge{*path[i - 1], *path[i]}; + bool res = visitor(edge); + if (!res) { + return; + } + } + } + + const std::vector &get_path() const { + return path; + } + + size_t index(const GraphInterface *gif) const { + return std::distance(path.begin(), std::find(path.begin(), path.end(), gif)); + } +}; + +class PerfCounter { + std::chrono::high_resolution_clock::time_point start; + + public: + PerfCounter() { + start = 
std::chrono::high_resolution_clock::now(); + } + + int64_t ns() { + auto end = std::chrono::high_resolution_clock::now(); + auto duration = + std::chrono::duration_cast(end - start); + return duration.count(); + } + + double ms() { + return ns() / 1e6; + } + + double s() { + return ns() / 1e9; + } +}; + +class PerfCounterAccumulating { + std::chrono::high_resolution_clock::time_point start; + int64_t time_ns = 0; + bool paused = false; + + public: + PerfCounterAccumulating() { + start = std::chrono::high_resolution_clock::now(); + } + + void pause() { + if (paused) { + return; + } + auto end = std::chrono::high_resolution_clock::now(); + auto duration = + std::chrono::duration_cast(end - start); + this->time_ns += duration.count(); + paused = true; + } + + void resume() { + if (!paused) { + return; + } + start = std::chrono::high_resolution_clock::now(); + paused = false; + } + + int64_t ns() { + pause(); + return this->time_ns; + } + + double ms() { + return ns() / 1e6; + } + + double s() { + return ns() / 1e9; + } +}; + +void bfs_visit(const GraphInterface &root, std::function visitor) { + PerfCounterAccumulating pc, pc_search, pc_set_insert, pc_setup, pc_deque_insert, + pc_edges, pc_check_visited, pc_filter, pc_new_path; + pc_set_insert.pause(); + pc_search.pause(); + pc_deque_insert.pause(); + pc_edges.pause(); + pc_check_visited.pause(); + pc_filter.pause(); + pc_new_path.pause(); + + std::vector visited(root.graph.v.size(), false); + std::vector visited_weak(root.graph.v.size(), false); + std::deque open_path_queue; + + auto handle_path = [&](BFSPath &path) { + pc.pause(); + pc_filter.resume(); + visitor(path); + pc_filter.pause(); + pc.resume(); + + if (path.stop) { + open_path_queue.clear(); + return; + } + + if (path.filtered) { + return; + } + + pc_set_insert.resume(); + visited_weak[path.last().v_i] = true; + + if (path.strong()) { + visited[path.last().v_i] = true; + } + pc_set_insert.pause(); + + pc_deque_insert.resume(); + 
open_path_queue.push_back(std::move(path)); + pc_deque_insert.pause(); + }; + + pc_setup.pause(); + auto root_path = BFSPath(root); + handle_path(root_path); + + pc_search.resume(); + while (!open_path_queue.empty()) { + auto path = std::move(open_path_queue.front()); + open_path_queue.pop_front(); + + pc_edges.resume(); + auto edges = path.last().edges(); + pc_edges.pause(); + for (auto &neighbour : edges) { + pc_check_visited.resume(); + if (visited[neighbour->v_i]) { + pc_check_visited.pause(); + continue; + } + if (visited_weak[neighbour->v_i] && path.contains(*neighbour)) { + pc_check_visited.pause(); + continue; + } + pc_check_visited.pause(); + + pc_new_path.resume(); + auto new_path = path + *neighbour; + pc_new_path.pause(); + pc_search.pause(); + handle_path(new_path); + pc_search.resume(); + } + } + pc_set_insert.pause(); + pc_search.pause(); + pc.pause(); + + printf(" TIME: %3.2lf ms BFS Check Visited\n", pc_check_visited.ms()); + printf(" TIME: %3.2lf ms BFS Edges\n", pc_edges.ms()); + printf(" TIME: %3.2lf ms BFS New Path\n", pc_new_path.ms()); + printf(" TIME: %3.2lf ms BFS Search\n", pc_search.ms()); + printf(" TIME: %3.2lf ms BFS Setup\n", pc_setup.ms()); + printf(" TIME: %3.2lf ms BFS Set Insert\n", pc_set_insert.ms()); + printf(" TIME: %3.2lf ms BFS Deque Insert\n", pc_deque_insert.ms()); + printf(" TIME: %3.2lf ms BFS Non-filter total\n", pc.ms()); + printf(" TIME: %3.2lf ms BFS Filter total\n", pc_filter.ms()); +} + +class PathFinder; +struct Counter { + size_t in_cnt = 0; + size_t weak_in_cnt = 0; + size_t out_weaker = 0; + size_t out_stronger = 0; + size_t out_cnt = 0; + double time_spent_s = 0; + + bool hide = false; + const char *name = ""; + // TODO make template + bool multi = false; + bool total_counter = false; + + bool exec(PathFinder *pf, bool (PathFinder::*filter)(BFSPath &), BFSPath &p) { + if (!INDIV_MEASURE && !total_counter) { + return (pf->*filter)(p); + } + + // perf pre + in_cnt++; + auto confidence_pre = p.confidence; + if 
(confidence_pre < 1.0) { + weak_in_cnt++; + } + PerfCounter pc; + + // exec + bool res = (pf->*filter)(p); + + // perf post + int64_t duration_ns = pc.ns(); + time_spent_s += duration_ns * 1e-9; + + if (res) { + out_cnt++; + } + if (p.confidence < confidence_pre) { + out_weaker++; + } else if (p.confidence > confidence_pre) { + out_stronger++; + } + + return res; + } + + std::vector + exec_multi(PathFinder *pf, + std::vector (PathFinder::*filter)(std::vector &), + std::vector &p) { + if (!INDIV_MEASURE && !total_counter) { + return (pf->*filter)(p); + } + + in_cnt += p.size(); + // TODO weak + PerfCounter pc; + + // exec + auto res = (pf->*filter)(p); + + // perf post + int64_t duration_ns = pc.ns(); + time_spent_s += duration_ns * 1e-9; + + out_cnt += res.size(); + + return res; + } +}; + +struct Filter { + bool (PathFinder::*filter)(BFSPath &); + bool discovery = false; + + Counter counter; + + bool exec(PathFinder *pf, BFSPath &p) { + bool out = counter.exec(pf, filter, p); + if (!out && discovery) { + p.filtered = true; + } + return out; + } +}; + +class PathFinder { + Graph &g; + + // instance data + std::vector multi_paths; + size_t path_cnt = 0; + + bool _count(BFSPath &p); + bool _filter_path_by_node_type(BFSPath &p); + bool _filter_path_gif_type(BFSPath &p); + bool _filter_path_by_dead_end_split(BFSPath &p); + // bool _mark_path_with_promises_heuristic(BFSPath &p); + bool _build_path_stack(BFSPath &p); + // bool _filter_path_by_dead_end_split_full(BFSPath &p); + // bool _build_path_stack_full(BFSPath &p); + // bool _filter_path_by_dst(BFSPath &p, std::unordered_set + // &dst_self); + bool _filter_path_by_end_in_self_gif(BFSPath &p); + bool _filter_path_same_end_type(BFSPath &p); + bool _filter_path_by_stack(BFSPath &p); + bool _filter_shallow(BFSPath &p); + bool _filter_conditional_link(BFSPath &p); + std::vector _filter_paths_by_split_join(std::vector &paths); + + public: + PathFinder(Graph &g) + : g(g) { + } + + // Create a vector of function pointers to 
member functions + std::vector filters{ + Filter{ + .filter = &PathFinder::_count, + .discovery = true, + .counter = + Counter{ + .hide = true, + }, + }, + Filter{ + .filter = &PathFinder::_filter_path_by_node_type, + .discovery = true, + .counter = + Counter{ + .name = "node type", + }, + }, + Filter{ + .filter = &PathFinder::_filter_path_gif_type, + .discovery = true, + .counter = + Counter{ + .name = "gif type", + }, + }, + Filter{ + .filter = &PathFinder::_filter_path_by_dead_end_split, + .discovery = true, + .counter = + Counter{ + .name = "dead end split", + }, + }, + Filter{ + .filter = &PathFinder::_filter_shallow, + .discovery = true, + .counter = + Counter{ + .name = "shallow", + }, + }, + Filter{ + .filter = &PathFinder::_filter_conditional_link, + .discovery = true, + .counter = + Counter{ + .name = "conditional link", + }, + }, + Filter{ + .filter = &PathFinder::_build_path_stack, + .discovery = false, + .counter = + Counter{ + .name = "build stack", + }, + }, + Filter{ + .filter = &PathFinder::_filter_path_by_end_in_self_gif, + .discovery = false, + .counter = + Counter{ + .name = "end in self gif", + }, + }, + Filter{ + .filter = &PathFinder::_filter_path_same_end_type, + .discovery = false, + .counter = + Counter{ + .name = "same end type", + }, + }, + Filter{ + .filter = &PathFinder::_filter_path_by_stack, + .discovery = false, + .counter = + Counter{ + .name = "stack", + }, + }, + }; + bool run_filters(BFSPath &p) { + for (auto &filter : filters) { + bool res = filter.exec(this, p); + if (!res) { + return false; + } + } + return true; + } + + std::pair, std::vector> + find_paths(Node &src, std::vector &dst) { + if (!src.is_instance(NodeType::N_MODULEINTERFACE)) { + throw std::runtime_error("src type is not MODULEINTERFACE"); + } + std::unordered_set dst_py_ids; + for (auto &d : dst) { + if (!d.is_instance(NodeType::N_MODULEINTERFACE)) { + throw std::runtime_error("dst type is not MODULEINTERFACE"); + } + dst_py_ids.insert(d.self_gif.py_ptr); + } + 
+ std::vector paths; + + Counter total_counter{.name = "total", .total_counter = true}; + + PerfCounter pc_bfs; + + bfs_visit(src.self_gif, [&](BFSPath &p) { + bool res = total_counter.exec(this, &PathFinder::run_filters, p); + if (!res) { + return; + } + // shortcut if path to dst found + if (dst_py_ids.contains(p.last().py_ptr)) { + dst_py_ids.erase(p.last().py_ptr); + if (dst_py_ids.empty()) { + p.stop = true; + } + } + paths.push_back(p); + }); + + printf("TIME: %3.2lf ms BFS\n", pc_bfs.ms()); + + Counter counter_split_join{ + .name = "split join", + .multi = true, + }; + auto multi_paths = counter_split_join.exec_multi( + this, &PathFinder::_filter_paths_by_split_join, this->multi_paths); + + std::vector paths_out; + for (auto &p : paths) { + paths_out.push_back(Path(p.get_path())); + } + for (auto &p : multi_paths) { + paths_out.push_back(Path(p.get_path())); + } + + std::vector counters; + for (auto &f : filters) { + auto &counter = f.counter; + if (counter.hide) { + continue; + } + counters.push_back(counter); + } + counters.push_back(counter_split_join); + counters.push_back(total_counter); + + return std::make_pair(paths_out, counters); + } +}; + +bool PathFinder::_count(BFSPath &p) { + path_cnt++; + if (path_cnt % 50000 == 0) { + std::cout << "path_cnt: " << path_cnt << std::endl; + } + // if (p.path.size() > MAX_PATHS) { + // p.stop = true; + // } + if (path_cnt > MAX_PATHS) { + p.stop = true; + } + return true; +} + +bool PathFinder::_filter_path_by_node_type(BFSPath &p) { + return (p.last().get_node().is_instance(NodeType::N_MODULEINTERFACE)); +} + +bool PathFinder::_filter_path_gif_type(BFSPath &p) { + auto &type = p.last().type; + return (type == GraphInterfaceType::G_SELF || + type == GraphInterfaceType::G_HIERARCHICAL_NODE || + type == GraphInterfaceType::G_HIERARCHICAL_MODULE_SPECIAL || + type == GraphInterfaceType::G_MODULE_CONNECTION); +} + +bool PathFinder::_filter_path_by_end_in_self_gif(BFSPath &p) { + return p.last().type == 
GraphInterfaceType::G_SELF; +} + +bool PathFinder::_filter_path_same_end_type(BFSPath &p) { + return p.last().node->granular_type == p.first().node->granular_type; +} + +std::optional _extend_path_hierarchy_stack(Edge &edge) { + bool up = edge.from.is_uplink(edge.to); + if (!up && !edge.from.is_downlink(edge.to)) { + return {}; + } + auto &child_gif = up ? edge.from : edge.to; + auto &parent_gif = up ? edge.to : edge.from; + + assert(child_gif.parent_name); + auto name = *child_gif.parent_name; + return PathStackElement{parent_gif.get_node().granular_type, + child_gif.get_node().granular_type, parent_gif, name, up}; +} + +void _extend_fold_stack(PathStackElement &elem, UnresolvedStack &unresolved_stack, + PathStack &promise_stack) { + if (!unresolved_stack.empty() && unresolved_stack.back().match(elem)) { + auto promise = unresolved_stack.back().promise; + if (promise) { + promise_stack.push_back(elem); + } + unresolved_stack.pop_back(); + } else { + // TODO get children and count instead + bool multi_child = true; + // if down and multipath -> promise + bool promise = !elem.up and multi_child; + + unresolved_stack.push_back(UnresolvedStackElement{elem, promise}); + if (promise) { + promise_stack.push_back(elem); + } + } +} + +bool PathFinder::_build_path_stack(BFSPath &p) { + auto edge = p.last_edge(); + if (!edge) { + return true; + } + + auto elem = _extend_path_hierarchy_stack(*edge); + if (!elem) { + return true; + } + + auto &promises = p.get_path_data_mut(); + auto &unresolved_stack = promises.unresolved_stack; + auto &promise_stack = promises.promise_stack; + + size_t promise_cnt = promise_stack.size(); + _extend_fold_stack(elem.value(), unresolved_stack, promise_stack); + + int promise_growth = promise_stack.size() - promise_cnt; + p.confidence *= std::pow(0.5, promise_growth); + + return true; +} + +bool PathFinder::_filter_path_by_stack(BFSPath &p) { + const auto promises = p.get_path_data(); + auto &unresolved_stack = promises.unresolved_stack; + auto 
&promise_stack = promises.promise_stack; + + if (!unresolved_stack.empty()) { + return false; + } + + if (!promise_stack.empty()) { + this->multi_paths.push_back(p); + return false; + } + + return true; +} + +bool PathFinder::_filter_path_by_dead_end_split(BFSPath &p) { + auto last_tri_edge = p.last_tri_edge(); + if (!last_tri_edge) { + return true; + } + auto &[one, two, three] = *last_tri_edge; + + if (!one->is_hierarchical() || !two->is_hierarchical() || + !three->is_hierarchical()) { + return true; + } + + // check if child->parent->child + if (!one->is_parent && two->is_parent && !three->is_parent) { + return false; + } + + return true; +} + +bool PathFinder::_filter_shallow(BFSPath &p) { + bool ok = true; + auto edge = p.last_edge(); + if (!edge) { + return true; + } + const auto &linkobj = p.get_link(*edge); + + if (linkobj.type != LinkType::L_DIRECT_CONDITIONAL_SHALLOW) { + return true; + } + + return !linkobj.is_filtered(p.first().get_node()); +} + +bool PathFinder::_filter_conditional_link(BFSPath &p) { + auto edge = p.last_edge(); + if (!edge) { + return true; + } + const auto &linkobj = p.get_link(*edge); + + if (linkobj.type != LinkType::L_DIRECT_CONDITIONAL) { + return true; + } + + auto filter = linkobj.filter; + if (!filter) { + return true; + } + + return (*filter)(p.get_path()); +} + +template +std::unordered_map> groupby(const std::vector &vec, + std::function f) { + std::unordered_map> out; + for (auto &t : vec) { + out[f(t)].push_back(t); + } + return out; +} + +// TODO needs get children +std::vector +PathFinder::_filter_paths_by_split_join(std::vector &paths) { + // basically the only thing we need to do is + // - check whether for every promise descend all children have a path + // that joins again before the end + // - join again before end == ends in same node (self_gif) + + std::unordered_set filtered; + std::unordered_map> split; + + // build split map + for (auto &p : paths) { + auto &promises = p.get_path_data(); + auto 
&unresolved_stack = promises.unresolved_stack; + auto &promise_stack = promises.promise_stack; + + assert(unresolved_stack.empty()); + assert(!promise_stack.empty()); + + for (auto &elem : promise_stack) { + if (elem.up) { + // join + continue; + } + // split + split[&elem.parent_gif].push_back(&p); + } + } + + // check split map + for (auto &[start_gif, split_paths] : split) { + std::unordered_set children = + start_gif->get_node().get_children(NodeType::N_MODULEINTERFACE, true); + + assert(split_paths.size()); + // TODO this assumption is not correct (same in python) + auto index = split_paths[0]->index(start_gif); + + std::function f = + [index](const BFSPath *p) -> const GraphInterface * { + return &p->last(); + }; + auto grouped_by_end = groupby(split_paths, f); + + for (auto &[end_gif, grouped_paths] : grouped_by_end) { + std::unordered_set covered_children; + for (auto &p : grouped_paths) { + // TODO check if + 1 is valid + covered_children.insert(&(*p)[index + 1].get_node()); + } + + if (covered_children != children) { + filtered.insert(grouped_paths.begin(), grouped_paths.end()); + continue; + } + } + } + + std::vector paths_out; + for (BFSPath &p : paths) { + if (filtered.contains(&p)) { + continue; + } + p.confidence = 1.0; + paths_out.push_back(p); + } + return paths_out; +} \ No newline at end of file diff --git a/src/faebryk/core/cpp/include/util.hpp b/src/faebryk/core/cpp/include/util.hpp new file mode 100644 index 00000000..bf913fa4 --- /dev/null +++ b/src/faebryk/core/cpp/include/util.hpp @@ -0,0 +1,23 @@ +#pragma once + +#include +#include +#include + +template inline std::string str_vec(const std::vector &vec) { + std::stringstream ss; + ss << "["; + for (size_t i = 0; i < vec.size(); ++i) { + // if T is string just put it into stream directly + if constexpr (std::is_same_v) { + ss << '"' << vec[i] << '"'; + } else { + ss << vec[i].str(); + } + if (i < vec.size() - 1) { + ss << ", "; + } + } + ss << "]"; + return ss.str(); +} \ No newline at end 
of file diff --git a/src/faebryk/core/cpp/pybind11 b/src/faebryk/core/cpp/pybind11 new file mode 160000 index 00000000..f7e14e98 --- /dev/null +++ b/src/faebryk/core/cpp/pybind11 @@ -0,0 +1 @@ +Subproject commit f7e14e985be167ca158fd3ee2fe5d8a4f175fa87 diff --git a/src/faebryk/core/cpp/src/main.cpp b/src/faebryk/core/cpp/src/main.cpp new file mode 100644 index 00000000..72f9bc24 --- /dev/null +++ b/src/faebryk/core/cpp/src/main.cpp @@ -0,0 +1,103 @@ +#include +#include +#include +#include + +#include "pathfinder.hpp" + +// check if c++20 is used +#if __cplusplus < 202002L +#error "C++20 is required" +#endif + +std::pair, std::vector> find_paths(Graph &g, Node &src, + std::vector &dst) { + PerfCounter pc; + + PathFinder pf(g); + auto res = pf.find_paths(src, dst); + + printf("TIME: %3.2lf ms C++ find paths\n", pc.ms()); + return res; +} + +void configure(bool indv_measure, uint32_t max_paths) { + INDIV_MEASURE = indv_measure; + MAX_PATHS = max_paths; +} + +namespace py = pybind11; + +PYBIND11_MODULE(faebryk_core_cpp, m) { + m.doc() = "faebryk core cpp graph"; + m.def("find_paths", &find_paths, "Find paths between modules"); + m.def("configure", &configure, "Configure the pathfinder"); + + py::class_(m, "Counter") + .def_readonly("name", &Counter::name) + .def_readonly("in_cnt", &Counter::in_cnt) + .def_readonly("weak_in_cnt", &Counter::weak_in_cnt) + .def_readonly("out_cnt", &Counter::out_cnt) + .def_readonly("hide", &Counter::hide) + .def_readonly("out_weaker", &Counter::out_weaker) + .def_readonly("out_stronger", &Counter::out_stronger) + .def_readonly("multi", &Counter::multi) + .def_readonly("total_counter", &Counter::total_counter) + .def_readonly("time_spent_s", &Counter::time_spent_s); + + py::class_(m, "Node") + .def(py::init()) + .def_readonly("py_ptr", &Node::py_ptr); + + py::class_(m, "GraphInterface") + .def(py::init()) + //.def_property("node", &GraphInterface::get_node, + // &GraphInterface::set_node) + .def("set_node", &GraphInterface::set_node) + 
.def_readonly("py_ptr", &GraphInterface::py_ptr) + .def("make_hierarchical", &GraphInterface::make_hierarchical); + + py::class_(m, "Link") + .def(py::init, + std::optional &)>>>()) + .def_readonly("py_ptr", &Link::py_ptr); + ; + + py::class_(m, "Graph") + .def(py::init<>()) + .def("add_edge", &Graph::add_edge) + .def("add_edges", &Graph::add_edges); + + py::class_(m, "Path").def_readonly("gifs", &Path::gifs); + + // Type enums + // --------------------------------------------------------------- + py::enum_(m, "GraphInterfaceType") + .value("GENERIC", GraphInterfaceType::G_GENERIC) + .value("SELF", GraphInterfaceType::G_SELF) + .value("HIERARCHICAL", GraphInterfaceType::G_HIERARCHICAL) + .value("HIERARCHICAL_NODE", GraphInterfaceType::G_HIERARCHICAL_NODE) + .value("HIERARCHICAL_MODULE_SPECIAL", + GraphInterfaceType::G_HIERARCHICAL_MODULE_SPECIAL) + .value("MODULE_CONNECTION", GraphInterfaceType::G_MODULE_CONNECTION) + .value("OTHER", GraphInterfaceType::G_OTHER); + + py::enum_(m, "LinkType") + .value("GENERIC", LinkType::L_GENERIC) + .value("SIBLING", LinkType::L_SIBLING) + .value("PARENT", LinkType::L_PARENT) + .value("NAMED_PARENT", LinkType::L_NAMED_PARENT) + .value("DIRECT", LinkType::L_DIRECT) + .value("DIRECT_CONDITIONAL", LinkType::L_DIRECT_CONDITIONAL) + .value("DIRECT_CONDITIONAL_SHALLOW", LinkType::L_DIRECT_CONDITIONAL_SHALLOW) + .value("DIRECT_DERIVED", LinkType::L_DIRECT_DERIVED) + .value("OTHER", LinkType::L_OTHER); + + py::enum_(m, "NodeType") + .value("GENERIC", NodeType::N_GENERIC) + .value("MODULE", NodeType::N_MODULE) + .value("MODULEINTERFACE", NodeType::N_MODULEINTERFACE) + .value("OTHER", NodeType::N_OTHER); +} diff --git a/src/faebryk/core/graph.py b/src/faebryk/core/graph.py index 16d66a1c..a6edb835 100644 --- a/src/faebryk/core/graph.py +++ b/src/faebryk/core/graph.py @@ -7,11 +7,12 @@ from typing_extensions import deprecated +from faebryk.libs.bfs import BFSPath, bfs_visit from faebryk.libs.util import ( ConfigFlag, + DefaultFactoryDict, 
LazyMixin, SharedReference, - bfs_visit, lazy_construct, ) @@ -108,20 +109,27 @@ def get_edges(self, obj: T) -> Mapping[T, "Link"]: ... @abstractmethod def _union(rep: GT, old: GT) -> GT: ... + type Path = BFSPath[T] + type bfs_filter = Callable[[Path], bool] + def bfs_visit( self, - filter: Callable[[list[T], "Link"], bool], + filter: bfs_filter, start: Iterable[T], G: GT | None = None, ): G = G or self() - return bfs_visit( - lambda n: [ - o for o, link in self.get_edges(n[-1]).items() if filter(n + [o], link) - ], - start, - ) + def neighbours(path: "Graph.Path"): + # TODO use G instead of self + for o in set(self.get_edges(path.last).keys()): + new_path = path + o + if not filter(new_path): + continue + + yield new_path + + return bfs_visit(neighbours, start) def __str__(self) -> str: return f"{type(self).__name__}(V={self.node_cnt}, E={self.edge_cnt})" @@ -129,6 +137,10 @@ def __str__(self) -> str: @abstractmethod def __iter__(self) -> Iterator[T]: ... + @property + @abstractmethod + def edges(self) -> Iterable[tuple[T, T, "Link"]]: ... + # TODO subgraph should return a new GraphView @abstractmethod def subgraph(self, node_filter: Callable[[T], bool]) -> Iterable[T]: ... 
diff --git a/src/faebryk/core/graph_backends/graphpy.py b/src/faebryk/core/graph_backends/graphpy.py index 8eb45bd4..7cecd25a 100644 --- a/src/faebryk/core/graph_backends/graphpy.py +++ b/src/faebryk/core/graph_backends/graphpy.py @@ -58,6 +58,9 @@ def view(self, filter_node: Callable[[T], bool]) -> "PyGraph[T]": def edges(self, obj: T) -> Mapping[T, L]: return self._e_cache[obj] + def all_edges(self) -> Iterable[tuple[T, T, L]]: + return self._e + class PyGraphView[T](PyGraph[T]): def __init__(self, parent: PyGraph[T], filter: Callable[[T], bool]): @@ -134,3 +137,7 @@ def subgraph(self, node_filter: Callable[[T], bool]): def __iter__(self) -> Iterator[T]: return iter(self()) + + @property + def edges(self) -> Iterable[tuple[T, T, L]]: + return self().all_edges() diff --git a/src/faebryk/core/graphinterface.py b/src/faebryk/core/graphinterface.py index 9e836af8..a93b5698 100644 --- a/src/faebryk/core/graphinterface.py +++ b/src/faebryk/core/graphinterface.py @@ -7,7 +7,7 @@ from faebryk.core.core import ID_REPR, FaebrykLibObject from faebryk.core.graph_backends.default import GraphImpl -from faebryk.core.link import Link, LinkDirect, LinkNamedParent +from faebryk.core.link import Link, LinkDirect, LinkNamedParent, LinkParent from faebryk.libs.util import ( NotNone, exceptions_to_log, @@ -100,32 +100,41 @@ def connections(self): def get_direct_connections(self) -> set["GraphInterface"]: return set(self.edges.keys()) - def is_connected(self, other: "GraphInterface"): + def is_connected_to(self, other: "GraphInterface"): return self is other or self.G.is_connected(self, other) + def bfs_visit( + self, + filter: Graph.bfs_filter, + ): + return self.G.bfs_visit(filter, [self]) + # Less graph-specific stuff # TODO make link trait to initialize from list - def connect(self, other: Self, linkcls=None) -> Self: - assert other is not self + def connect(self, *others: Self, linkcls=None) -> Self: + assert self not in others if linkcls is None: linkcls = LinkDirect - link = 
linkcls([other, self]) - _, no_path = self.G.merge(other.G) + for other in others: + link = linkcls([other, self]) + + _, no_path = self.G.merge(other.G) - if not no_path: - dup = self.is_connected(other) - assert ( - not dup or type(dup) is linkcls - ), f"Already connected with different link type: {dup}" + if not no_path: + dup = self.is_connected_to(other) + # TODO resolve link + assert ( + not dup or type(dup) is linkcls + ), f"Already connected with different link type: {dup}" - self.G.add_edge(self, other, link=link) + self.G.add_edge(self, other, link=link) - if logger.isEnabledFor(logging.DEBUG): - with exceptions_to_log(): - logger.debug(f"GIF connection: {link}") + if logger.isEnabledFor(logging.DEBUG): + with exceptions_to_log(): + logger.debug(f"GIF connection: {link}") return self @@ -177,17 +186,45 @@ def get_children(self) -> list[tuple[str, "Node"]]: def get_parent(self) -> tuple["Node", str] | None: assert not self.is_parent - conns = self.get_links_by_type(LinkNamedParent) + conns = self.get_links_by_type(LinkParent) if not conns: return None assert len(conns) == 1 conn = conns[0] parent = conn.get_parent() - return parent.node, conn.name + return parent.node, conn.name if isinstance(conn, LinkNamedParent) else "" def disconnect_parent(self): self.G.remove_edge(self) + @classmethod + def is_uplink( + cls, + path: tuple["GraphInterface", "GraphInterface"], + ): + prev_node, next_node = path + + return ( + isinstance(prev_node, cls) + and type(next_node) is type(prev_node) + and not prev_node.is_parent + and next_node.is_parent + ) + + @classmethod + def is_downlink( + cls, + path: tuple["GraphInterface", "GraphInterface"], + ): + prev_node, next_node = path + + return ( + isinstance(prev_node, cls) + and type(next_node) is type(prev_node) + and prev_node.is_parent + and not next_node.is_parent + ) + class GraphInterfaceSelf(GraphInterface): ... 
diff --git a/src/faebryk/core/link.py b/src/faebryk/core/link.py index deee44a1..51e8d4e4 100644 --- a/src/faebryk/core/link.py +++ b/src/faebryk/core/link.py @@ -2,7 +2,9 @@ # SPDX-License-Identifier: MIT import inspect import logging -from typing import TYPE_CHECKING, Callable +from enum import Enum, auto +from itertools import pairwise +from typing import TYPE_CHECKING from faebryk.core.core import LINK_TB, FaebrykLibObject @@ -76,10 +78,11 @@ def __init__(self, name: str, interfaces: list["GraphInterface"]) -> None: @classmethod def curry(cls, name: str): - def curried(interfaces: list["GraphInterface"]): - return cls(name, interfaces) + class LinkNamedParentWithName(LinkNamedParent): + def __init__(self, interfaces: list["GraphInterface"]) -> None: + super().__init__(name, interfaces) - return curried + return LinkNamedParentWithName class LinkDirect(Link): @@ -95,22 +98,54 @@ def get_connections(self) -> list["GraphInterface"]: class LinkFilteredException(Exception): ... -class _TLinkDirectShallow(LinkDirect): - def __new__(cls, *args, **kwargs): - if cls is _TLinkDirectShallow: - raise TypeError( - "Can't instantiate abstract class _TLinkDirectShallow directly" - ) - return super().__new__(cls) +class LinkDirectConditional(LinkDirect): + class FilterResult(Enum): + PASS = auto() + FAIL_RECOVERABLE = auto() + FAIL_UNRECOVERABLE = auto() + + def __init__(self, interfaces: list["GraphInterface"]) -> None: + if self.is_filtered(interfaces) != LinkDirectConditional.FilterResult.PASS: + raise LinkFilteredException() + self.interfaces = interfaces + + def get_interfaces(self) -> list["GraphInterface"]: + return self.interfaces + + def is_filtered(self, path: list["GraphInterface"]) -> FilterResult: + return LinkDirectConditional.FilterResult.PASS -def LinkDirectShallow(if_filter: Callable[[LinkDirect, "GraphInterface"], bool]): - class _LinkDirectShallow(_TLinkDirectShallow): - i_filter = if_filter +class LinkDirectDerived(LinkDirectConditional): + def __init__( + 
self, interfaces: list["GraphInterface"], path: list["GraphInterface"] + ) -> None: + self.path = path - def __init__(self, interfaces: list["GraphInterface"]) -> None: - if not all(map(self.i_filter, interfaces)): - raise LinkFilteredException() - super().__init__(interfaces) + links = [e1.is_connected_to(e2) for e1, e2 in pairwise(path)] + self.filters = [ + link for link in links if isinstance(link, LinkDirectConditional) + ] + + super().__init__(interfaces) + + def is_filtered( + self, path: list["GraphInterface"] + ) -> LinkDirectConditional.FilterResult: + result = LinkDirectConditional.FilterResult.PASS + for f in self.filters: + match res := f.is_filtered(path): + case LinkDirectConditional.FilterResult.FAIL_UNRECOVERABLE: + return res + case LinkDirectConditional.FilterResult.FAIL_RECOVERABLE: + result = res + + return result + + @classmethod + def curry(cls, path: list["GraphInterface"]): + class LinkDirectDerivedWithPath(LinkDirectDerived): + def __init__(self, interfaces: list["GraphInterface"]) -> None: + super().__init__(interfaces, path) - return _LinkDirectShallow + return LinkDirectDerivedWithPath diff --git a/src/faebryk/core/module.py b/src/faebryk/core/module.py index a23bf857..d685a5a0 100644 --- a/src/faebryk/core/module.py +++ b/src/faebryk/core/module.py @@ -3,7 +3,8 @@ import logging from typing import TYPE_CHECKING, Callable, Iterable -from faebryk.core.moduleinterface import GraphInterfaceModuleSibling +from faebryk.core.link import LinkParent +from faebryk.core.moduleinterface import GraphInterfaceHierarchicalModuleSpecial from faebryk.core.node import Node, NodeException, f_field from faebryk.core.trait import Trait from faebryk.libs.util import unique_ref @@ -23,8 +24,8 @@ def __init__(self, module: "Module", *args: object) -> None: class Module(Node): class TraitT(Trait): ... 
- specializes = f_field(GraphInterfaceModuleSibling)(is_parent=False) - specialized = f_field(GraphInterfaceModuleSibling)(is_parent=True) + specializes = f_field(GraphInterfaceHierarchicalModuleSpecial)(is_parent=False) + specialized = f_field(GraphInterfaceHierarchicalModuleSpecial)(is_parent=True) def get_most_special(self) -> "Module": specialers = { @@ -111,14 +112,14 @@ def get_node_prop_matrix[N: Node](sub_type: type[N]): # continue # special.add(t) - self.specialized.connect(special.specializes) + self.specialized.connect(special.specializes, linkcls=LinkParent) # Attach to new parent has_parent = special.get_parent() is not None assert not has_parent or attach_to is None if not has_parent: if attach_to: - attach_to.add(special, container=attach_to.specialized) + attach_to.add(special, container=attach_to.specialized_nodes) else: gen_parent = self.get_parent() if gen_parent: diff --git a/src/faebryk/core/moduleinterface.py b/src/faebryk/core/moduleinterface.py index 74eb405b..f5645031 100644 --- a/src/faebryk/core/moduleinterface.py +++ b/src/faebryk/core/moduleinterface.py @@ -1,14 +1,10 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT import logging -from typing import ( - Iterable, - Sequence, -) +from typing import TYPE_CHECKING, Sequence, cast from typing_extensions import Self -from faebryk.core.core import LINK_TB from faebryk.core.graphinterface import ( GraphInterface, GraphInterfaceHierarchical, @@ -16,322 +12,129 @@ from faebryk.core.link import ( Link, LinkDirect, - LinkDirectShallow, - LinkFilteredException, - _TLinkDirectShallow, + LinkDirectConditional, + LinkDirectDerived, + LinkParent, +) +from faebryk.core.node import ( + Node, + NodeException, + f_field, ) -from faebryk.core.node import Node from faebryk.core.trait import Trait -from faebryk.libs.util import cast_assert, once, print_stack - -logger = logging.getLogger(__name__) - - -# The resolve functions are really weird -# You have to look into where they 
are called to make sense of what they are doing -# Chain resolve is for deciding what to do in a case like this -# if1 -> link1 -> if2 -> link2 -> if3 -# This will then decide with which link if1 and if3 are connected -def _resolve_link_transitive(links: Iterable[type[Link]]) -> type[Link]: - from faebryk.libs.util import is_type_set_subclasses - - uniq = set(links) - assert uniq - - if len(uniq) == 1: - return next(iter(uniq)) - - if is_type_set_subclasses(uniq, {_TLinkDirectShallow}): - # TODO this only works if the filter is identical - raise NotImplementedError() - - if is_type_set_subclasses(uniq, {LinkDirect, _TLinkDirectShallow}): - return [u for u in uniq if issubclass(u, _TLinkDirectShallow)][0] - - raise NotImplementedError() - - -# This one resolves the case if1 -> link1 -> if2; if1 -> link2 -> if2 -def _resolve_link_duplicate(links: Iterable[type[Link]]) -> type[Link]: - from faebryk.libs.util import is_type_set_subclasses - - uniq = set(links) - assert uniq - if len(uniq) == 1: - return next(iter(uniq)) +if TYPE_CHECKING: + from faebryk.core.pathfinder import Path - if is_type_set_subclasses(uniq, {LinkDirect, _TLinkDirectShallow}): - return [u for u in uniq if not issubclass(u, _TLinkDirectShallow)][0] - - raise NotImplementedError() - - -class _LEVEL: - """connect depth counter to debug connections in ModuleInterface""" - - def __init__(self) -> None: - self.value = 0 - - def inc(self): - self.value += 1 - return self.value - 1 - - def dec(self): - self.value -= 1 - - -_CONNECT_DEPTH = _LEVEL() +logger = logging.getLogger(__name__) -class GraphInterfaceModuleSibling(GraphInterfaceHierarchical): ... +class GraphInterfaceHierarchicalModuleSpecial(GraphInterfaceHierarchical): ... class GraphInterfaceModuleConnection(GraphInterface): ... 
-# CONNECT PROCEDURE -# connect -# connect_siblings -# - check not same ref -# - check not connected -# - connect_hierarchies -# - resolve link (if exists) -# - connect gifs -# - signal on_connect -# - connect_down -# - connect direct children by name -# - connect_up -# - check for each parent if all direct children by name connected -# - connect -# - check not filtered -# - cross connect_hierarchies transitive hull -# - cross connect_hierarchies siblings - - class ModuleInterface(Node): class TraitT(Trait): ... - specializes: GraphInterface - specialized: GraphInterface + specializes = f_field(GraphInterfaceHierarchicalModuleSpecial)(is_parent=False) + specialized = f_field(GraphInterfaceHierarchicalModuleSpecial)(is_parent=True) connected: GraphInterfaceModuleConnection - # TODO rename - @classmethod - @once - def LinkDirectShallow(cls): + class LinkDirectShallow(LinkDirectConditional): """ Make link that only connects up but not down """ - def test(node: Node): - return not any(isinstance(p[0], cls) for p in node.get_hierarchy()[:-1]) - - class _LinkDirectShallowMif( - LinkDirectShallow(lambda link, gif: test(gif.node)) - ): ... + def is_filtered(self, path: list[GraphInterface]): + # only beginning and end matter + # end is same type as beginning + + if isinstance(path[0].node, self._children_types): + return LinkDirectConditional.FilterResult.FAIL_UNRECOVERABLE + + return LinkDirectConditional.FilterResult.PASS + + def __init__( + self, + interfaces: list["GraphInterface"], + ) -> None: + self._children_types = tuple( + { + type(mif) + for mif in interfaces[0].node.get_children( + direct_only=False, types=ModuleInterface + ) + } + ) - return _LinkDirectShallowMif + super().__init__(interfaces) def __preinit__(self) -> None: ... 
- @staticmethod - def _get_connected(gif: GraphInterface): - assert isinstance(gif.node, ModuleInterface) - connections = gif.edges.items() - - # check if ambiguous links between mifs - assert len(connections) == len({c[0] for c in connections}) - - return { - cast_assert(ModuleInterface, s.node): link - for s, link in connections - if s.node is not gif.node - } - - def get_connected(self): - return self._get_connected(self.connected) - - def get_specialized(self): - return self._get_connected(self.specialized) - - def get_specializes(self): - return self._get_connected(self.specializes) - - def _connect_siblings_and_connections( - self, other: "ModuleInterface", linkcls: type[Link] - ) -> Self: - if other is self: - return self - - # Already connected - if self.is_connected_to(other): - return self - - # if link is filtered, cancel here - self._connect_across_hierarchies(other, linkcls) - if not self.is_connected_to(other): - return self - - if logger.isEnabledFor(logging.DEBUG): - logger.debug(f"MIF connection: {self} to {other}") - - def cross_connect( - s_group: dict[ModuleInterface, type[Link] | Link], - d_group: dict[ModuleInterface, type[Link] | Link], - hint=None, - ): - if logger.isEnabledFor(logging.DEBUG) and hint is not None: - logger.debug(f"Connect {hint} {s_group} -> {d_group}") - - for s, slink in s_group.items(): - if isinstance(slink, Link): - slink = type(slink) - for d, dlink in d_group.items(): - if isinstance(dlink, Link): - dlink = type(dlink) - # can happen while connection trees are resolving - if s is d: - continue - link = _resolve_link_transitive([slink, dlink, linkcls]) - - s._connect_across_hierarchies(d, linkcls=link) - - # Connect to all connections - s_con = self.get_connected() | {self: linkcls} - d_con = other.get_connected() | {other: linkcls} - cross_connect(s_con, d_con, "connections") - - # Connect to all siblings - s_sib = self.get_specialized() | self.get_specializes() | {self: linkcls} - d_sib = other.get_specialized() | 
other.get_specializes() | {other: linkcls} - cross_connect(s_sib, d_sib, "siblings") - - return self - - def _on_connect(self, other: "ModuleInterface"): - """override to handle custom connection logic""" - ... - - def _try_connect_down(self, other: "ModuleInterface", linkcls: type[Link]) -> None: - if not isinstance(other, type(self)): + # Graph ---------------------------------------------------------------------------- + def _connect_via_implied_paths(self, other: Self, paths: list["Path"]): + if self.connected.is_connected_to(other.connected): + # TODO link resolution return - for _, (src, dst) in self.zip_children_by_name_with( - other, ModuleInterface - ).items(): - if src is None or dst is None: - continue - src.connect(dst, linkcls=linkcls) - - def _try_connect_up(self, other: "ModuleInterface") -> None: - p1 = self.get_parent() - p2 = other.get_parent() - if not ( - p1 - and p2 - and p1[0] is not p2[0] - and isinstance(p1[0], type(p2[0])) - and isinstance(p1[0], ModuleInterface) - ): - return - - src_m = p1[0] - dst_m = p2[0] - assert isinstance(dst_m, ModuleInterface) - - def _is_connected(a, b): - assert isinstance(a, ModuleInterface) - assert isinstance(b, ModuleInterface) - return a.is_connected_to(b) - - connection_map = [ - (src_i, dst_i, _is_connected(src_i, dst_i)) - for src_i, dst_i in src_m.zip_children_by_name_with( - dst_m, sub_type=ModuleInterface - ).values() + # heuristic: choose path with fewest conditionals + paths_links = [ + (path, [e1.is_connected_to(e2) for e1, e2 in path.edges]) for path in paths ] - - assert connection_map - - if not all(connected for _, _, connected in connection_map): - return - - # decide which LinkType to use here - # depends on connections between src_i & dst_i - # e.g. 
if any Shallow, we need to choose shallow - link = _resolve_link_transitive( - [type(sublink) for _, _, sublink in connection_map if sublink] - ) - - if logger.isEnabledFor(logging.DEBUG): - logger.debug(f"Up connect {src_m} -> {dst_m}") - src_m.connect(dst_m, linkcls=link) - - def _connect_across_hierarchies( - self, other: "ModuleInterface", linkcls: type[Link] - ): - existing_link = self.is_connected_to(other) - if existing_link: - if isinstance(existing_link, linkcls): - return - resolved = _resolve_link_duplicate([type(existing_link), linkcls]) - if resolved is type(existing_link): - return - if LINK_TB: - print(print_stack(existing_link.tb)) - raise NotImplementedError( - "Overriding existing links not implemented, tried to override " - + f"{existing_link} with {resolved}" + paths_conditionals = [ + ( + path, + [link for link in links if isinstance(link, LinkDirectConditional)], ) + for path, links in paths_links + ] + path = min(paths_conditionals, key=lambda x: len(x[1]))[0] + # - # level 0 connect - try: - self.connected.connect(other.connected, linkcls=linkcls) - except LinkFilteredException: - return - - if logger.isEnabledFor(logging.DEBUG): - logger.debug(f"{' '*2*_CONNECT_DEPTH.inc()}Connect {self} to {other}") - self._on_connect(other) - - con_depth_one = _CONNECT_DEPTH.value == 1 - recursion_error = None - try: - # level +1 (down) connect - self._try_connect_down(other, linkcls=linkcls) + self.connect(other, linkcls=LinkDirectDerived.curry(path.path)) - # level -1 (up) connect - self._try_connect_up(other) + def get_connected(self): + from faebryk.core.pathfinder import PathFinder - except RecursionError as e: - recursion_error = e - if not con_depth_one: - raise + for path in PathFinder.find_paths(self): + node = cast(Self, path.last.node) + self._connect_via_implied_paths(node, [path]) + yield node - if recursion_error: - raise Exception(f"Recursion error while connecting {self} to {other}") + def is_connected_to(self, other: "ModuleInterface"): + 
return next(self.get_paths_to(other), None) - _CONNECT_DEPTH.dec() + def get_paths_to(self, *other: "ModuleInterface"): + from faebryk.core.pathfinder import PathFinder - def get_direct_connections(self) -> set["ModuleInterface"]: - return { - gif.node - for gif in self.connected.get_direct_connections() - if isinstance(gif.node, ModuleInterface) and gif.node is not self - } + for path in PathFinder.find_paths(self, *other): + if path.last.node in other: + yield path - def connect(self: Self, *other: Self, linkcls=None) -> Self: - # TODO consider some type of check at the end within the graph instead - # assert type(other) is type(self) + def connect(self: Self, *other: Self, linkcls: type[Link] | None = None) -> Self: if linkcls is None: linkcls = LinkDirect - for o in other: - self._connect_siblings_and_connections(o, linkcls=linkcls) + if not {type(o) for o in other}.issubset({type(self)}): + raise NodeException( + self, + f"Can only connect modules of same type: {{{type(self)}}}," + f" got {{{','.join(str(type(o)) for o in other)}}}", + ) + + self.connected.connect(*{o.connected for o in other}, linkcls=linkcls) + return other[-1] if other else self - def connect_via(self, bridge: Node | Sequence[Node], *other: Self, linkcls=None): + # Convenience functions ------------------------------------------------------------ + def connect_via( + self, + bridge: Node | Sequence[Node], + *other: Self, + linkcls: type[Link] | None = None, + ): from faebryk.library.can_bridge import can_bridge bridges = [bridge] if isinstance(bridge, Node) else bridge @@ -343,21 +146,17 @@ def connect_via(self, bridge: Node | Sequence[Node], *other: Self, linkcls=None) intf.connect(*other, linkcls=linkcls) - def connect_shallow(self, other: Self) -> Self: - return self.connect(other, linkcls=type(self).LinkDirectShallow()) - - def is_connected_to(self, other: "ModuleInterface"): - return self.connected.is_connected(other.connected) + def connect_shallow(self, *other: Self) -> Self: + 
return self.connect(*other, linkcls=type(self).LinkDirectShallow) def specialize[T: ModuleInterface](self, special: T) -> T: - logger.debug(f"Specializing MIF {self} with {special}") - assert isinstance(special, type(self)) + self.specialized.connect(special.specializes, linkcls=LinkParent) - # This is doing the heavy lifting - self.connect(special) - - # Establish sibling relationship - self.specialized.connect(special.specializes) + return cast(T, special) - return special + def get_general(self): + out = self.specializes.get_parent() + if out: + return out[0] + return None diff --git a/src/faebryk/core/node.py b/src/faebryk/core/node.py index 03438051..a9fb8c77 100644 --- a/src/faebryk/core/node.py +++ b/src/faebryk/core/node.py @@ -1,5 +1,6 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +import itertools import logging from itertools import chain from typing import ( @@ -18,11 +19,12 @@ from faebryk.core.core import ID_REPR, FaebrykLibObject from faebryk.core.graphinterface import ( + Graph, GraphInterface, GraphInterfaceHierarchical, GraphInterfaceSelf, ) -from faebryk.core.link import Link, LinkNamedParent, LinkSibling +from faebryk.core.link import LinkNamedParent, LinkSibling from faebryk.libs.exceptions import FaebrykException from faebryk.libs.util import ( KeyErrorNotFound, @@ -145,19 +147,23 @@ def __init__(self, node: "Node", other: "Node", *args: object) -> None: class NodeNoParent(NodeException): ... 
+class GraphInterfaceHierarchicalNode(GraphInterfaceHierarchical): + pass + + # ----------------------------------------------------------------------------- class Node(FaebrykLibObject, metaclass=PostInitCaller): runtime_anon: list["Node"] runtime: dict[str, "Node"] - specialized: list["Node"] + specialized_nodes: list["Node"] self_gif: GraphInterfaceSelf - children: GraphInterfaceHierarchical = f_field(GraphInterfaceHierarchical)( + children: GraphInterfaceHierarchicalNode = f_field(GraphInterfaceHierarchicalNode)( is_parent=True ) - parent: GraphInterfaceHierarchical = f_field(GraphInterfaceHierarchical)( + parent: GraphInterfaceHierarchicalNode = f_field(GraphInterfaceHierarchicalNode)( is_parent=False ) @@ -428,7 +434,7 @@ def _handle_add_node(self, name: str, node: "Node"): ): return - node.parent.connect(self.children, LinkNamedParent.curry(name)) + node.parent.connect(self.children, linkcls=LinkNamedParent.curry(name)) node._handle_added_to_parent() def _remove_child(self, node: "Node"): @@ -442,16 +448,18 @@ def get_graph(self): def get_parent(self): return self.parent.get_parent() - def get_name(self): + def get_name(self, accept_no_parent: bool = False): p = self.get_parent() if not p: + if accept_no_parent: + return f"<{hex(id(self))[-4:].upper()}>" raise NodeNoParent(self, "Parent required for name") return p[1] def get_hierarchy(self) -> list[tuple["Node", str]]: parent = self.get_parent() if not parent: - return [(self, "*")] + return [(self, f"<{hex(id(self)).upper()[-4:]}>")] parent_obj, name = parent return parent_obj.get_hierarchy() + [(self, name)] @@ -531,74 +539,78 @@ def get_trait[V: "Trait"](self, trait: Type[V]) -> V: # Graph stuff ---------------------------------------------------------------------- def _get_children_direct(self): - return { + return ( gif.node - for gif, link in self.get_graph().get_edges(self.children).items() - if isinstance(link, LinkNamedParent) - } + for gif in self.children.edges + if isinstance(gif, 
GraphInterfaceHierarchicalNode) + ) def _get_children_all(self, include_root: bool): # TODO looks like get_node_tree is 2x faster - def _filter(path, link): - next_node = path[-1] - prev_node = path[-2] if len(path) >= 2 else None + def _filter(path: Graph.Path): + next_node = path.last # Only look at hierarchy if not isinstance( - next_node, (GraphInterfaceSelf, GraphInterfaceHierarchical) + next_node, (GraphInterfaceSelf, GraphInterfaceHierarchicalNode) ): return False # Only children - if ( - isinstance(prev_node, GraphInterfaceHierarchical) - and isinstance(next_node, GraphInterfaceHierarchical) - and not prev_node.is_parent - and next_node.is_parent + if len(path.path) >= 2 and GraphInterfaceHierarchicalNode.is_uplink( + (path.path[-2], next_node) ): return False return True - out = self.bfs_node(_filter) + out = set(self.bfs_node(_filter)) if not include_root: out.remove(self) - return set(out) + return out - def get_children[T: Node]( + def get_children_gen[T: Node]( self, direct_only: bool, types: type[T] | tuple[type[T], ...], include_root: bool = False, f_filter: Callable[[T], bool] | None = None, - sort: bool = True, - ) -> set[T]: + ) -> Iterable[T]: if direct_only: out = self._get_children_direct() if include_root: - out.add(self) + out = itertools.chain(out, [self]) else: out = self._get_children_all(include_root=include_root) if types is not Node or f_filter: - out = { + out = ( n for n in out if isinstance(n, types) and (not f_filter or f_filter(n)) - } + ) + out = cast(Iterable[T], out) - out = cast(set[T], out) + return out + + def get_children[T: Node]( + self, + direct_only: bool, + types: type[T] | tuple[type[T], ...], + include_root: bool = False, + f_filter: Callable[[T], bool] | None = None, + sort: bool = True, + ): + out = self.get_children_gen(direct_only, types, include_root, f_filter) if sort: - out = set( - sorted( - out, - key=lambda n: try_or(n.get_name, default="", catch=NodeNoParent), - ) + out = sorted( + out, + key=lambda n: 
try_or(n.get_name, default="", catch=NodeNoParent), ) - return out + return set(out) def get_tree[T: Node]( self, @@ -633,11 +645,18 @@ def get_tree[T: Node]( return tree - def bfs_node(self, filter: Callable[[list[GraphInterface], Link], bool]): - return Node.get_nodes_from_gifs( - self.get_graph().bfs_visit(filter, [self.self_gif]) + def bfs_node(self, filter: Callable[[Graph.Path], bool]): + return Node.get_nodes_from_gifs_gen( + (path.last for path in self.self_gif.bfs_visit(filter)) ) + def bfs_visit(self, filter: Graph.bfs_filter): + return self.self_gif.bfs_visit(filter) + + @staticmethod + def get_nodes_from_gifs_gen(gifs: Iterable[GraphInterface]): + return (gif.node for gif in gifs if isinstance(gif, GraphInterfaceSelf)) + @staticmethod def get_nodes_from_gifs(gifs: Iterable[GraphInterface]): # TODO move this to gif? diff --git a/src/faebryk/core/parameter.py b/src/faebryk/core/parameter.py index 607075de..4c5b55ae 100644 --- a/src/faebryk/core/parameter.py +++ b/src/faebryk/core/parameter.py @@ -62,6 +62,9 @@ class MergeException(Exception): ... class SupportsSetOps: def __contains__(self, other: "Parameter[PV].LIT_OR_PARAM") -> bool: ... + class is_dynamic(TraitT): + def exec(self) -> None: ... 
+ def try_compress(self) -> "Parameter[PV]": return self @@ -127,7 +130,7 @@ def _narrowed(self, other: "Parameter[PV]"): if self is other: return - if self.narrowed_by.is_connected(other.narrows): + if self.narrowed_by.is_connected_to(other.narrows): return self.narrowed_by.connect(other.narrows) @@ -168,6 +171,9 @@ def is_either_instance(t: type["Parameter[PV]"]): @_resolved def merge(self: "Parameter[PV]", other: "Parameter[PV]") -> "Parameter[PV]": + if self is other: + return self + out = self._merge(other) self._narrowed(out) @@ -367,6 +373,10 @@ def __rand__(self: "Parameter[PV]", other: "Parameter[PV]") -> "Parameter[PV]": return self.intersect(other, self) def get_most_narrow(self) -> "Parameter[PV]": + # TODO remove + # if self.has_trait(self.is_dynamic): + # self.get_trait(self.is_dynamic).exec() + out = self.get_narrowing_chain()[-1] com = out.try_compress() diff --git a/src/faebryk/core/pathfinder.py b/src/faebryk/core/pathfinder.py new file mode 100644 index 00000000..d591106d --- /dev/null +++ b/src/faebryk/core/pathfinder.py @@ -0,0 +1,721 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT +import io +import logging +import time +from collections import defaultdict +from dataclasses import dataclass +from itertools import pairwise +from typing import Any, Callable, cast + +from more_itertools import partition +from rich.console import Console +from rich.table import Table + +from faebryk.core.bfs import BFSPath, bfs_visit +from faebryk.core.graphinterface import ( + GraphInterface, + GraphInterfaceHierarchical, + GraphInterfaceSelf, +) +from faebryk.core.link import LinkDirectConditional +from faebryk.core.moduleinterface import ( + GraphInterfaceHierarchicalModuleSpecial, + GraphInterfaceModuleConnection, + ModuleInterface, +) +from faebryk.core.node import ( + GraphInterfaceHierarchicalNode, + Node, +) +from faebryk.libs.exceptions import FaebrykException +from faebryk.libs.util import ConfigFlag, ConfigFlagInt, 
consume, groupby + +logger = logging.getLogger(__name__) + +type Path = BFSPath + +CPP = ConfigFlag( + "CORE_MIFS_CPP", default=False, descr="Use C++ implementation of PathFinder" +) +INDIV_MEASURE = ConfigFlag( + "CORE_MIFS_INDIV_MEASURE", default=False, descr="Measure individual paths" +) +MAX_PATHS = ConfigFlagInt( + "CORE_MIFS_MAX_PATHS", default=10000, descr="Max number of paths to find" +) + + +@dataclass +class Counter: + in_cnt: int = 0 + weak_in_cnt: int = 0 + out_weaker: int = 0 + out_stronger: int = 0 + out_cnt: int = 0 + time_spent_s: float = 0 + multi: bool = False + total_counter: bool = False + + def reset(self): + self.in_cnt = 0 + self.weak_in_cnt = 0 + self.out_weaker = 0 + self.out_stronger = 0 + self.out_cnt = 0 + self.time_spent_s = 0 + + +class Counters: + def __init__(self): + self.counters: dict[str, Counter] = {} + + def reset(self): + for v in self.counters.values(): + v.reset() + + def __repr__(self): + table = Table(title="Filter Counters") + table.add_column("func", style="cyan", width=20) + table.add_column("in", style="green", justify="right") + table.add_column("weak in", style="green", justify="right") + table.add_column("out", style="green", justify="right") + # table.add_column("drop", style="cyan", justify="center") + table.add_column("filt", style="magenta", justify="right") + table.add_column("weaker", style="green", justify="right") + table.add_column("stronger", style="green", justify="right") + table.add_column("time", style="yellow", justify="right") + table.add_column("time/in", style="yellow", justify="right") + + individual, total = partition( + lambda x: x[1].total_counter, self.counters.items() + ) + individual = list(individual) + for section in partition(lambda x: x[1].multi, individual): + for k, v in sorted( + section, + key=lambda x: (x[1].out_cnt, x[1].in_cnt), + reverse=True, + ): + k_clean = ( + k.split("path_")[-1] + .replace("_", " ") + .removeprefix("by ") + .removeprefix("with ") + ) + if v.in_cnt == 0: + 
continue + table.add_row( + k_clean, + str(v.in_cnt), + str(v.weak_in_cnt), + str(v.out_cnt), + # "x" if getattr(k, "discovery_filter", False) else "", + f"{(1-v.out_cnt/v.in_cnt)*100:.1f} %" if v.in_cnt else "-", + str(v.out_weaker), + str(v.out_stronger), + f"{v.time_spent_s*1000:.2f} ms", + f"{v.time_spent_s/v.in_cnt*1000*1000:.2f} us" if v.in_cnt else "-", + ) + table.add_section() + + table.add_section() + for k, v in total: + if v.in_cnt == 0: + continue + table.add_row( + k, + str(v.in_cnt), + str(v.weak_in_cnt), + str(v.out_cnt), + # "x" if getattr(k, "discovery_filter", False) else "", + f"{(1-v.out_cnt/v.in_cnt)*100:.1f} %" if v.in_cnt else "-", + str(v.out_weaker), + str(v.out_stronger), + f"{v.time_spent_s*1000:.2f} ms", + f"{v.time_spent_s/v.in_cnt*1000*1000:.2f} us" if v.in_cnt else "-", + ) + if INDIV_MEASURE: + table.add_row( + "Total", + "", + "", + "", + # "", + "", + "", + "", + f"{sum(v.time_spent_s for _,v in individual)*1000:.2f} ms", + f"{sum(v.time_spent_s/v.in_cnt for _,v in individual if v.in_cnt)*1000*1000:.2f} us", + ) + + console = Console(record=True, width=120, file=io.StringIO()) + console.print(table) + return console.export_text(styles=True) + + +def perf_counter(*args, **kwargs): + multi = kwargs.get("multi", False) + total = kwargs.get("total", False) + + if multi: + + def perf_counter_multi[F: Callable[[list[Path]], Any]](f: F) -> F: + counter = Counter(multi=True, total_counter=total) + perf_counter.counters.counters[f.__name__] = counter + + if not INDIV_MEASURE and not total: + return f + + def wrapper(paths: list[Path], *args, **kwargs): + counter.in_cnt += len(paths) + counter.weak_in_cnt += sum(1 for p in paths if not p.strong) + confidence = [p.confidence for p in paths] + + start = time.perf_counter() + res = f(paths, *args, **kwargs) + counter.time_spent_s += time.perf_counter() - start + + counter.out_cnt += len(res) + counter.out_stronger += sum( + 1 for p, c in zip(paths, confidence) if p.confidence > c + ) + 
counter.out_weaker += sum( + 1 for p, c in zip(paths, confidence) if p.confidence < c + ) + + return res + + return wrapper + + w = perf_counter_multi + + else: + + def perf_counter_[F: Callable[[Path], Any]](f: F) -> F: + counter = Counter(total_counter=total) + perf_counter.counters.counters[f.__name__] = counter + + if not INDIV_MEASURE and not total: + return f + + def wrapper(path: Path, *args, **kwargs): + counter.in_cnt += 1 + if not path.strong: + counter.weak_in_cnt += 1 + + confidence = path.confidence + + start = time.perf_counter() + res = f(path, *args, **kwargs) + counter.time_spent_s += time.perf_counter() - start + + if res: + counter.out_cnt += 1 + if path.confidence > confidence: + counter.out_stronger += 1 + elif path.confidence < confidence: + counter.out_weaker += 1 + + return res + + return wrapper + + w = perf_counter_ + + if not hasattr(perf_counter, "counters"): + perf_counter.counters = Counters() + if len(args) == 1 and callable(args[0]): + f = args[0] + return w(f) + return w + + +def discovery[F: Callable[[Path], Any]](f: F) -> F: + def wrapper(path: Path, *args, **kwargs): + res = f(path, *args, **kwargs) + if not res: + path.filtered = True + return res + + wrapper.discovery_filter = True + wrapper.__name__ = f.__name__ + + return wrapper + + +class PathFinder: + @dataclass + class PathStackElement: + parent_type: type[Node] + child_type: type[Node] + parent_gif: GraphInterfaceHierarchical + name: str + up: bool + + @dataclass + class UnresolvedStackElement: + elem: "PathFinder.PathStackElement" + promise: bool + + def match(self, elem: "PathFinder.PathStackElement"): + return ( + self.elem.parent_type == elem.parent_type + and self.elem.child_type == elem.child_type + and self.elem.name == elem.name + and self.elem.up != elem.up + ) + + type PathStack = list[PathStackElement] + + @staticmethod + def _extend_path_hierarchy_stack(edge: tuple[GraphInterface, GraphInterface]): + up = GraphInterfaceHierarchicalNode.is_uplink(edge) + if 
not up and not GraphInterfaceHierarchicalNode.is_downlink(edge): + return + edge = cast(tuple[GraphInterfaceHierarchical, GraphInterfaceHierarchical], edge) + child_gif = edge[0 if up else 1] + parent_gif = edge[1 if up else 0] + + p = child_gif.get_parent() + assert p + name = p[1] + return PathFinder.PathStackElement( + parent_type=type(parent_gif.node), + child_type=type(child_gif.node), + parent_gif=parent_gif, + name=name, + up=up, + ) + + @staticmethod + def _get_path_hierarchy_stack( + path_: Path, + stack_cache: dict[tuple[GraphInterface, ...], "PathStack"] | None = None, + ) -> PathStack: + out: PathFinder.PathStack = [] + + path = path_.path + if ( + stack_cache + and (cached_path := stack_cache.get(tuple(path[:-1]))) is not None + ): + out = cached_path + path = path[-2:] + + for edge in pairwise(path): + elem = PathFinder._extend_path_hierarchy_stack(edge) + if elem: + out.append(elem) + + if stack_cache is not None: + stack_cache[tuple(path)] = out + return out + + @staticmethod + def _extend_fold_stack( + elem: "PathFinder.PathStackElement", + unresolved_stack: list["PathFinder.UnresolvedStackElement"], + promise_stack: list["PathFinder.PathStackElement"], + ): + if unresolved_stack and unresolved_stack[-1].match(elem): + promise = unresolved_stack.pop().promise + if promise: + promise_stack.append(elem) + + else: + # if down & multipath -> promise + promise = not elem.up and bool( + consume( + elem.parent_gif.node.get_children_gen( + direct_only=True, + types=ModuleInterface, + ), + 2, + ) + ) + + unresolved_stack.append(PathFinder.UnresolvedStackElement(elem, promise)) + + if promise: + promise_stack.append(elem) + + @staticmethod + def _fold_stack(stack: PathStack): + unresolved_stack: list[PathFinder.UnresolvedStackElement] = [] + promise_stack: list[PathFinder.PathStackElement] = [] + for elem in stack: + PathFinder._extend_fold_stack(elem, unresolved_stack, promise_stack) + + return unresolved_stack, promise_stack + + # Path filters 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + @perf_counter + @discovery + @staticmethod + def _filter_path_gif_type(path: Path): + return isinstance( + path.last, + ( + GraphInterfaceSelf, + GraphInterfaceHierarchicalNode, + GraphInterfaceHierarchicalModuleSpecial, + GraphInterfaceModuleConnection, + ), + ) + + @perf_counter + @discovery + @staticmethod + def _filter_path_by_node_type(path: Path): + # TODO for module specialization also modules will be allowed + return isinstance(path.last.node, ModuleInterface) + + @staticmethod + def _check_tri_edge(tri_edge_in: list[GraphInterface]): + if not len(tri_edge_in) == 3: + return True + + if not all( + isinstance(gif, GraphInterfaceHierarchicalNode) for gif in tri_edge_in + ): + return True + + tri_edge = cast( + tuple[ + GraphInterfaceHierarchicalNode, + GraphInterfaceHierarchicalNode, + GraphInterfaceHierarchicalNode, + ], + tri_edge_in, + ) + + # check if child->parent->child + if ( + not tri_edge[0].is_parent + and tri_edge[1].is_parent + and not tri_edge[2].is_parent + ): + return False + + return True + + @perf_counter + @discovery + @staticmethod + def _filter_path_by_dead_end_split(path: Path): + return PathFinder._check_tri_edge(path.path[-3:]) + + @perf_counter + @discovery + @staticmethod + def _filter_path_by_dead_end_split_full(path: Path): + for i in range(len(path.path) - 2): + if not PathFinder._check_tri_edge(path.path[i : i + 3]): + return False + return True + + @perf_counter + @staticmethod + def _filter_path_by_dst(path: Path, dst_self: set[int]): + return id(path.last) in dst_self + + @perf_counter + @staticmethod + def _filter_path_by_end_in_self_gif(path: Path): + return isinstance(path.last, GraphInterfaceSelf) + + @perf_counter + @staticmethod + def _filter_path_same_end_type(path: Path): + return type(path.last.node) is type(path.first.node) + + @perf_counter + @staticmethod + def _mark_path_with_promises_heuristic(path: Path): + """ + Marks paths that have 
promises in-case they get filtered down the line + """ + # inline version + edge = path.last_edge + if not edge: + return True + + if GraphInterfaceHierarchicalNode.is_downlink(edge): + path.confidence *= 0.9 + + return True + + @perf_counter + @staticmethod + def _build_path_stack(path: Path): + """ + Marks paths that have promises in-case they get filtered down the line + """ + # inline version + edge = path.last_edge + if not edge: + return True + + elem = PathFinder._extend_path_hierarchy_stack(edge) + if not elem: + return True + + unresolved_stack, promise_stack = map( + list, path.path_data.get("promises", ([], [])) + ) + + promise_cnt = len(promise_stack) + PathFinder._extend_fold_stack(elem, unresolved_stack, promise_stack) + path.path_data = path.path_data | { + "promises": (unresolved_stack, promise_stack), + "promise_depth": len(promise_stack), # convenience + } + promise_growth = len(promise_stack) - promise_cnt + path.confidence *= 0.5**promise_growth + + return True + + @perf_counter + @staticmethod + def _build_path_stack_full(path: Path): + stack = PathFinder._get_path_hierarchy_stack(path) + unresolved_stack, promise_stack = PathFinder._fold_stack(stack) + path.path_data = path.path_data | { + "promises": (unresolved_stack, promise_stack), + "promise_depth": len(promise_stack), # convenience + } + path.confidence *= 0.5 ** len(promise_stack) + return True + + @perf_counter + @staticmethod + def _filter_path_by_stack(path: Path, multi_paths_out: list[Path]): + unresolved_stack, promise_stack = path.path_data.get("promises", ([], [])) + if unresolved_stack: + return False + + if promise_stack: + multi_paths_out.append(path) + return False + + return True + + @perf_counter + @discovery + @staticmethod + def _filter_and_mark_path_by_link_filter(path: Path, inline: bool = True): + for edge in path.edges: + linkobj = path.get_link(edge) + + if not isinstance(linkobj, LinkDirectConditional): + continue + + # perf boost + if inline: + if 
isinstance(linkobj, ModuleInterface.LinkDirectShallow): + # don't need to recheck shallows + if len(path) > 2 and edge[1] is not path.last: + continue + + match linkobj.is_filtered(path.path): + case LinkDirectConditional.FilterResult.FAIL_UNRECOVERABLE: + return False + case LinkDirectConditional.FilterResult.FAIL_RECOVERABLE: + path.confidence *= 0.8 + + return True + + @perf_counter(multi=True) + @staticmethod + def _filter_paths_by_split_join( + paths: list[Path], + ) -> list[Path]: + # basically the only thing we need to do is + # - check whether for every promise descend all children have a path + # that joins again before the end + # - join again before end == ends in same node (self_gif) + + path_filtered = {id(p): False for p in paths} + split: dict[GraphInterfaceHierarchical, list[Path]] = defaultdict(list) + + # build split map + for path in paths: + unresolved_stack, promise_stack = path.path_data.get("promises", ([], [])) + + if unresolved_stack or not promise_stack: + continue + + for elem in promise_stack: + if elem.up: + # join + continue + # split + split[elem.parent_gif].append(path) + + for start_gif, split_paths in split.items(): + all_children = [ + n.parent + for n in start_gif.node.get_children( + direct_only=True, types=ModuleInterface + ) + ] + # TODO this is not correct + index = split_paths[0].path.index(start_gif) + + grouped_by_end = groupby(split_paths, lambda p: p.path[-1]) + for end_gif, grouped_paths in grouped_by_end.items(): + path_suffixes = {id(p): p.path[index:] for p in grouped_paths} + + # not full coverage + if set(all_children) != set(p[1] for p in path_suffixes.values()): + for path in grouped_paths: + path_filtered[id(path)] = True + # TODO don't we have to filter out the paths + continue + + out = [p for p in paths if not path_filtered[id(p)]] + for p in out: + p.confidence = 1.0 + return out + + # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + @staticmethod + @discovery + def 
_count(path: Path): + if not hasattr(PathFinder._count, "paths"): + PathFinder._count.paths = 0 + if not hasattr(PathFinder._count, "max_path"): + PathFinder._count.max_path = 0 + + if len(path) > PathFinder._count.max_path: + PathFinder._count.max_path = len(path) + # logger.info(f"Max path length: {PathFinder._count.max_path}") + + PathFinder._count.paths += 1 + if PathFinder._count.paths % 50000 == 0: + logger.info(f"{PathFinder._count.paths}") + + # if len(path) > int(MAX_PATHS): + # path.stop = True + if PathFinder._count.paths > int(MAX_PATHS): + path.stop = True + + return True + + @staticmethod + def find_paths_py(src: "ModuleInterface", *dst: "ModuleInterface"): + start_time = time.perf_counter() + + PathFinder._count.paths = 0 + PathFinder._count.max_path = 0 + perf_counter.counters.reset() + + if dst: + dst = tuple(d for d in dst if type(d) is type(src)) + if not dst: + return + dst_self = {id(dst.self_gif) for dst in dst} + # TODO apparently not really faster to have single dst + dst_filters = [lambda path: PathFinder._filter_path_by_dst(path, dst_self)] + else: + dst_filters = [ + PathFinder._filter_path_by_end_in_self_gif, + PathFinder._filter_path_same_end_type, + ] + if src is dst: + raise FaebrykException("src and dst are the same") + + multi_paths: list[Path] = [] + + # Stage filters ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + filters_discovery = [ + PathFinder._count, + PathFinder._filter_path_by_node_type, + PathFinder._filter_path_gif_type, + # TODO pretty slow + PathFinder._filter_path_by_dead_end_split, + PathFinder._build_path_stack, + # PathFinder._mark_path_with_promises_heuristic, + ] + + filters_single = [ + *filters_discovery, + # --------------------- + *dst_filters, + # PathFinder._filter_path_by_dead_end_split_full, + # PathFinder._build_path_stack_full, + lambda path: PathFinder._filter_path_by_stack(path, multi_paths), + lambda path: PathFinder._filter_and_mark_path_by_link_filter( + path, inline=False + ), + 
] + + filters_multiple = [ + PathFinder._filter_paths_by_split_join, + ] + + # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + @perf_counter(total=True) + def run_filters(p: Path): + for f in filters_single: + if not f(p): + return False + return True + + # yield path & discovery filter + for p in bfs_visit([src.self_gif]): + if not run_filters(p): + continue + yield p + + # yield multi path filter + yielded_multi = set() + + def try_multi_filter(): + for f in filters_multiple: + for p in f(multi_paths): + if id(p) in yielded_multi: + continue + yield p + + yield from try_multi_filter() + + end_time = time.perf_counter() + if logger.isEnabledFor(logging.INFO): + logger.info(f"Searched {PathFinder._count.paths} paths") + logger.info(f"\n\t\t{perf_counter.counters}") + logger.info(f"Time: {(end_time - start_time)*1000:.2f}ms") + + @staticmethod + def find_paths_cpp(src: "ModuleInterface", *dst: "ModuleInterface"): + from faebryk.core.cpp import faebryk_core_cpp + from faebryk.core.cpp.graph import CGraph + + faebryk_core_cpp.configure(bool(INDIV_MEASURE), int(MAX_PATHS)) + + time_start = time.perf_counter() + Gpp = CGraph(src.get_graph()) + time_construct = time.perf_counter() - time_start + + paths, counters = Gpp.find_paths(src, *dst) + time_find = time.perf_counter() - time_construct - time_start + + perf = Counters() + for c in counters: + perf.counters[c.name] = c + + if logger.isEnabledFor(logging.INFO): + logger.info(f"Time construct: {time_construct*1000:.2f}ms") + logger.info(f"Time find: {time_find*1000:.2f}ms") + logger.info(f"\n\t\t{perf}") + + return iter(paths) + + @staticmethod + def find_paths(src: "ModuleInterface", *dst: "ModuleInterface"): + if CPP: + return PathFinder.find_paths_cpp(src, *dst) + else: + return PathFinder.find_paths_py(src, *dst) diff --git a/src/faebryk/exporters/netlist/graph.py b/src/faebryk/exporters/netlist/graph.py index a67c8e18..dcd3785e 100644 --- a/src/faebryk/exporters/netlist/graph.py 
+++ b/src/faebryk/exporters/netlist/graph.py @@ -4,12 +4,11 @@ import logging from abc import abstractmethod -import networkx as nx - import faebryk.library._F as F from faebryk.core.graphinterface import Graph from faebryk.core.module import Module from faebryk.exporters.netlist.netlist import T2Netlist +from faebryk.libs.util import KeyErrorAmbiguous logger = logging.getLogger(__name__) @@ -53,7 +52,7 @@ def get_or_set_name_and_value_of_node(c: Module): class can_represent_kicad_footprint_via_attached_component( can_represent_kicad_footprint.impl() ): - def __init__(self, component: Module, graph: nx.Graph) -> None: + def __init__(self, component: Module, graph: Graph) -> None: """ graph has to be electrically closed """ @@ -101,21 +100,17 @@ def add_or_get_net(interface: F.Electrical): net.part_of.connect(interface) return net if len(nets) > 1: - raise Exception(f"Multiple nets interconnected: {nets}") + raise KeyErrorAmbiguous(list(nets), "Multiple nets interconnected") return next(iter(nets)) def attach_nets_and_kicad_info(g: Graph): - # g has to be closed - - Gclosed = g - # group comps & fps node_fps = { n: t.get_footprint() # TODO maybe nicer to just look for footprints # and get their respective components instead - for n, t in Gclosed.nodes_with_trait(F.has_footprint) + for n, t in g.nodes_with_trait(F.has_footprint) if isinstance(n, Module) } @@ -127,9 +122,8 @@ def attach_nets_and_kicad_info(g: Graph): for n, fp in node_fps.items(): if fp.has_trait(can_represent_kicad_footprint): continue - fp.add(can_represent_kicad_footprint_via_attached_component(n, Gclosed)) + fp.add(can_represent_kicad_footprint_via_attached_component(n, g)) for fp in node_fps.values(): - # TODO use graph for mif in fp.get_children(direct_only=True, types=F.Pad): add_or_get_net(mif.net) diff --git a/src/faebryk/exporters/pcb/kicad/transformer.py b/src/faebryk/exporters/pcb/kicad/transformer.py index 3c7a8051..4246e001 100644 --- a/src/faebryk/exporters/pcb/kicad/transformer.py +++ 
b/src/faebryk/exporters/pcb/kicad/transformer.py @@ -488,7 +488,7 @@ def _get_pad(ffp: FFootprint, intf: F.Electrical): pin_map = ffp.get_trait(F.has_kicad_footprint).get_pin_names() pin_name = find( pin_map.items(), - lambda pad_and_name: intf.is_connected_to(pad_and_name[0].net) is not None, + lambda pad_and_name: intf.is_connected_to(pad_and_name[0].net), )[1] fp = PCB_Transformer.get_fp(ffp) diff --git a/src/faebryk/exporters/pcb/layout/heuristic_decoupling.py b/src/faebryk/exporters/pcb/layout/heuristic_decoupling.py index f0e8745c..8eae2ac9 100644 --- a/src/faebryk/exporters/pcb/layout/heuristic_decoupling.py +++ b/src/faebryk/exporters/pcb/layout/heuristic_decoupling.py @@ -199,7 +199,7 @@ def place_next_to_pad( nfp = module.get_trait(F.has_footprint).get_footprint() npad = find( nfp.get_children(direct_only=True, types=F.Pad), - lambda p: p.net.is_connected_to(pad.net) is not None, + lambda p: p.net.is_connected_to(pad.net), ) nkfp, nkpad = npad.get_trait(PCB_Transformer.has_linked_kicad_pad).get_pad() if len(nkpad) != 1: @@ -249,7 +249,7 @@ def place_next_to( for parent_pad, child in zip(pads_intf, children): intf = find( child.get_children(direct_only=True, types=F.Electrical), - lambda x: x.is_connected_to(parent_intf) is not None, + lambda x: x.is_connected_to(parent_intf), ) logger.debug(f"Placing {intf} next to {parent_pad}") diff --git a/src/faebryk/exporters/pcb/routing/util.py b/src/faebryk/exporters/pcb/routing/util.py index 809cc333..03356bdc 100644 --- a/src/faebryk/exporters/pcb/routing/util.py +++ b/src/faebryk/exporters/pcb/routing/util.py @@ -219,6 +219,6 @@ def group_pads_that_are_connected_already( def get_routes_of_pad(pad: F.Pad): return { route - for mif in pad.pcb.get_direct_connections() + for mif in pad.pcb.get_connected() if (route := mif.get_parent_of_type(Route)) } diff --git a/src/faebryk/exporters/visualize/interactive_graph.py b/src/faebryk/exporters/visualize/interactive_graph.py index 5b78d9ee..e274472f 100644 --- 
a/src/faebryk/exporters/visualize/interactive_graph.py +++ b/src/faebryk/exporters/visualize/interactive_graph.py @@ -1,87 +1,86 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -import rich -import rich.text +from typing import Collection, Iterable -from faebryk.core.graph import Graph -from faebryk.core.graphinterface import GraphInterface +import dash_cytoscape as cyto +from dash import Dash, html +from rich.console import Console +from rich.table import Table + +import faebryk.library._F as F +from faebryk.core.graphinterface import Graph, GraphInterface from faebryk.core.link import Link +from faebryk.core.module import Module +from faebryk.core.moduleinterface import ModuleInterface from faebryk.core.node import Node -from faebryk.exporters.visualize.util import IDSet, generate_pastel_palette +from faebryk.core.parameter import Parameter +from faebryk.core.trait import Trait +from faebryk.exporters.visualize.util import generate_pastel_palette +from faebryk.libs.util import KeyErrorAmbiguous, find_or, typename -def interactive_graph(G: Graph): - import dash_cytoscape as cyto - from dash import Dash, html +# Transformers ------------------------------------------------------------------------- +def _gif(gif: GraphInterface): + return { + "data": { + "id": id(gif), + "label": gif.name, + "type": typename(gif), + "parent": id(gif.node), + } + } - # Register the fcose layout - cyto.load_extra_layouts() - app = Dash(__name__) +def _link(source, target, link: Link): + return { + "data": { + "source": id(source), + "target": id(target), + "type": typename(link), + } + } - node_types: set[str] = set() - groups = {} - - def _group(gif: GraphInterface) -> str: - node = gif.node - my_node_id = str(id(node)) - if my_node_id not in groups: - label = f"{node.get_full_name()} ({type(node).__name__})" - groups[my_node_id] = { - "data": { - "id": my_node_id, - "label": label, - "type": "group", - } - } - return my_node_id - - def _node(node: Node): 
- full_name = node.get_full_name() - type_name = type(node).__name__ - node_types.add(type_name) - data = {"id": str(id(node)), "label": full_name, "type": type_name} - if isinstance(node, GraphInterface): - data["parent"] = _group(node) - return {"data": data} - - link_types: set[str] = set() - links_touched = IDSet[Link]() - - def _link(link: Link): - if link in links_touched: - return None - links_touched.add(link) - - try: - source, target = tuple(str(id(n)) for n in link.get_connections()) - except ValueError: - return None - - type_name = type(link).__name__ - link_types.add(type_name) - - return {"data": {"source": source, "target": target, "type": type_name}} - - def _not_none(x): - return x is not None - - elements = [ - *(filter(_not_none, (_node(gif) for gif in G))), - *( - filter( - _not_none, - (_link(link) for gif in G for link in gif.get_links()), - ) - ), - *( - groups.values() - ), # must go after nodes because the node iteration creates the groups - ] +_GROUP_TYPES = { + Parameter: "#FFD9DE", # Very light pink + Module: "#E0F0FF", # Very light blue + Trait: "#FCFCFF", # Almost white + F.Electrical: "#D1F2EB", # Very soft turquoise + F.ElectricPower: "#FCF3CF", # Very light goldenrod + F.ElectricLogic: "#EBE1F1", # Very soft lavender + # Defaults + ModuleInterface: "#DFFFE4", # Very light green + Node: "#FCFCFF", # Almost white +} + + +def _group(node: Node): + try: + subtype = find_or(_GROUP_TYPES, lambda t: isinstance(node, t), default=Node) + except KeyErrorAmbiguous as e: + subtype = e.duplicates[0] + + return { + "data": { + "id": id(node), + "label": f"{node.get_name(accept_no_parent=True)}\n({typename(node)})", + "type": "group", + "subtype": typename(subtype), + "parent": id(node.get_parent()[0]) if node.get_parent() else None, + } + } + + +# Style -------------------------------------------------------------------------------- + + +def _with_pastels[T](iterable: Collection[T]): + return zip(sorted(iterable), 
generate_pastel_palette(len(iterable))) - stylesheet = [ + +class _Stylesheet: + _BASE = [ { "selector": "node", "style": { @@ -89,7 +88,13 @@ def _not_none(x): "text-opacity": 0.8, "text-valign": "center", "text-halign": "center", + "font-size": "0.5em", "background-color": "#BFD7B5", + "text-outline-color": "#FFFFFF", + "text-outline-width": 0.5, + "border-width": 1, + "border-color": "#888888", + "border-opacity": 0.5, }, }, { @@ -101,22 +106,10 @@ def _not_none(x): "target-arrow-shape": "triangle", "arrow-scale": 1, "target-arrow-color": "#A3C4BC", + "text-outline-color": "#FFFFFF", + "text-outline-width": 2, }, }, - ] - - def _pastels(iterable): - return zip(iterable, generate_pastel_palette(len(iterable))) - - for node_type, color in _pastels(node_types): - stylesheet.append( - { - "selector": f'node[type = "{node_type}"]', - "style": {"background-color": color}, - } - ) - - stylesheet.append( { "selector": 'node[type = "group"]', "style": { @@ -124,88 +117,184 @@ def _pastels(iterable): "font-weight": "bold", "font-size": "1.5em", "text-valign": "top", + "text-outline-color": "#FFFFFF", + "text-outline-width": 1.5, + "text-wrap": "wrap", + "border-width": 4, }, - } - ) + }, + ] + + def __init__(self): + self.stylesheet = list(self._BASE) + + def add_node_type(self, node_type: str, color: str): + self.stylesheet.append( + { + "selector": f'node[type = "{node_type}"]', + "style": {"background-color": color}, + } + ) - for link_type, color in _pastels(link_types): - stylesheet.append( + def add_link_type(self, link_type: str, color: str): + self.stylesheet.append( { "selector": f'edge[type = "{link_type}"]', "style": {"line-color": color, "target-arrow-color": color}, } ) - container_style = { - "position": "fixed", - "display": "flex", - "flex-direction": "column", - "height": "100%", - "width": "100%", - } + def add_group_type(self, group_type: str, color: str): + self.stylesheet.append( + { + "selector": f'node[subtype = "{group_type}"]', + "style": 
{"background-color": color}, + } + ) - graph_view_style = { - "position": "absolute", - "width": "100%", - "height": "100%", - "zIndex": 999, - } - _cyto = cyto.Cytoscape( - id="graph-view", - stylesheet=stylesheet, - style=graph_view_style, - elements=elements, - layout={ - "name": "fcose", - "quality": "proof", - "animate": False, - "randomize": False, - "fit": True, - "padding": 50, - "nodeDimensionsIncludeLabels": True, - "uniformNodeDimensions": False, - "packComponents": True, - "nodeRepulsion": 8000, - "idealEdgeLength": 50, - "edgeElasticity": 0.45, - "nestingFactor": 0.1, - "gravity": 0.25, - "numIter": 2500, - "tile": True, - "tilingPaddingVertical": 10, - "tilingPaddingHorizontal": 10, - "gravityRangeCompound": 1.5, - "gravityCompound": 1.0, - "gravityRange": 3.8, - "initialEnergyOnIncremental": 0.5, +def _Layout(stylesheet: _Stylesheet, elements: list[dict[str, dict]]): + return html.Div( + style={ + "position": "fixed", + "display": "flex", + "flex-direction": "column", + "height": "100%", + "width": "100%", }, - ) - - app.layout = html.Div( - style=container_style, children=[ html.Div( className="cy-container", style={"flex": "1", "position": "relative"}, - children=[_cyto], + children=[ + cyto.Cytoscape( + id="graph-view", + stylesheet=stylesheet.stylesheet, + style={ + "position": "absolute", + "width": "100%", + "height": "100%", + "zIndex": 999, + }, + elements=elements, + layout={ + "name": "fcose", + "quality": "proof", + "animate": False, + "randomize": False, + "fit": True, + "padding": 50, + "nodeDimensionsIncludeLabels": True, + "uniformNodeDimensions": False, + "packComponents": True, + "nodeRepulsion": 1000, + "idealEdgeLength": 50, + "edgeElasticity": 0.45, + "nestingFactor": 0.1, + "gravity": 0.25, + "numIter": 2500, + "tile": True, + "tilingPaddingVertical": 10, + "tilingPaddingHorizontal": 10, + "gravityRangeCompound": 1.5, + "gravityCompound": 1.5, + "gravityRange": 3.8, + "initialEnergyOnIncremental": 0.5, + "componentSpacing": 40, + 
}, + ) + ], ), ], ) - # print the color palette - print("Node types:") - for node_type, color in _pastels(node_types): - colored_text = rich.text.Text(f"{node_type}: {color}") - colored_text.stylize(f"on {color}") - rich.print(colored_text) - print("\n") - - print("Link types:") - for link_type, color in _pastels(link_types): - colored_text = rich.text.Text(f"{link_type}: {color}") - colored_text.stylize(f"on {color}") - rich.print(colored_text) - print("\n") - - app.run() + +# -------------------------------------------------------------------------------------- + + +def interactive_subgraph( + edges: Iterable[tuple[GraphInterface, GraphInterface, Link]], + gifs: list[GraphInterface], + nodes: Iterable[Node], + height: int | None = None, +): + links = [link for _, _, link in edges] + link_types = {typename(link) for link in links} + gif_types = {typename(gif) for gif in gifs} + + elements = ( + [_gif(gif) for gif in gifs] + + [_link(*edge) for edge in edges] + + [_group(node) for node in nodes] + ) + + # Build stylesheet + stylesheet = _Stylesheet() + + gif_type_colors = list(_with_pastels(gif_types)) + link_type_colors = list(_with_pastels(link_types)) + group_types_colors = [ + (typename(group_type), color) for group_type, color in _GROUP_TYPES.items() + ] + + for gif_type, color in gif_type_colors: + stylesheet.add_node_type(gif_type, color) + + for link_type, color in link_type_colors: + stylesheet.add_link_type(link_type, color) + + for group_type, color in group_types_colors: + stylesheet.add_group_type(group_type, color) + + # Register the fcose layout + cyto.load_extra_layouts() + app = Dash(__name__) + app.layout = _Layout(stylesheet, elements) + + # Print legend + console = Console() + + for typegroup, colors in [ + ("GIF", gif_type_colors), + ("Link", link_type_colors), + ("Node", group_types_colors), + ]: + table = Table(title="Legend") + table.add_column("Type", style="cyan") + table.add_column("Color", style="green") + table.add_column("Name") + + 
for text, color in colors: + table.add_row(typegroup, f"[on {color}] [/]", text) + + console.print(table) + + # + app.run(jupyter_height=height or 1000) + + +def interactive_graph( + G: Graph, + node_types: tuple[type[Node], ...] | None = None, + depth: int = 0, + filter_unconnected: bool = True, + height: int | None = None, +): + if node_types is None: + node_types = (Node,) + + # Build elements + nodes = G.nodes_of_types(node_types) + if depth > 0: + nodes = [node for node in nodes if len(node.get_hierarchy()) <= depth] + + gifs = [gif for gif in G if gif.node in nodes] + if filter_unconnected: + gifs = [gif for gif in gifs if len(gif.edges) > 1] + + edges = [ + (edge[0], edge[1], edge[2]) + for edge in G.edges + if edge[0] in gifs and edge[1] in gifs + ] + return interactive_subgraph(edges, gifs, nodes, height=height) diff --git a/src/faebryk/library/CH340x.py b/src/faebryk/library/CH340x.py index d159ae4a..a44c0e7e 100644 --- a/src/faebryk/library/CH340x.py +++ b/src/faebryk/library/CH340x.py @@ -14,7 +14,7 @@ class CH340x(Module): usb: F.USB2_0 uart: F.UART - tnow: F.Electrical + tnow: F.ElectricLogic gpio_power: F.ElectricPower designator = L.f_field(F.has_designator_prefix_defined)("U") diff --git a/src/faebryk/library/CH344Q_ReferenceDesign.py b/src/faebryk/library/CH344Q_ReferenceDesign.py index c9ed22a4..9a83758b 100644 --- a/src/faebryk/library/CH344Q_ReferenceDesign.py +++ b/src/faebryk/library/CH344Q_ReferenceDesign.py @@ -66,7 +66,8 @@ def __preinit__(self): self.usb_uart_converter.osc[0].connect(self.oscillator.xtal_if.xout) self.oscillator.gnd.connect(pwr_3v3.lv) - self.reset_lowpass.out.connect(self.usb_uart_converter.reset) + # FIXME + # self.reset_lowpass.out.connect(self.usb_uart_converter.reset) self.usb_uart_converter.reset.pulled.pull(up=True) # ------------------------------------ diff --git a/src/faebryk/library/ElectricLogic.py b/src/faebryk/library/ElectricLogic.py index a0733cf2..a2310996 100644 --- a/src/faebryk/library/ElectricLogic.py 
+++ b/src/faebryk/library/ElectricLogic.py @@ -1,7 +1,6 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -import sys from abc import abstractmethod from enum import Enum, auto from typing import Self @@ -136,10 +135,3 @@ def connect_shallow( self.reference.lv.connect(other.reference.lv) return super().connect_shallow(other) - - def connect(self, *other: Self, linkcls=None): - recursion_depth = sys.getrecursionlimit() - sys.setrecursionlimit(10000) - ret = super().connect(*other, linkcls=linkcls) - sys.setrecursionlimit(recursion_depth) - return ret diff --git a/src/faebryk/library/ElectricPower.py b/src/faebryk/library/ElectricPower.py index d8b4dfad..8597d801 100644 --- a/src/faebryk/library/ElectricPower.py +++ b/src/faebryk/library/ElectricPower.py @@ -3,14 +3,11 @@ import math -from typing import Self import faebryk.library._F as F -from faebryk.core.moduleinterface import ModuleInterface from faebryk.core.node import Node from faebryk.libs.library import L from faebryk.libs.units import P, Quantity -from faebryk.libs.util import RecursionGuard class ElectricPower(F.Power): @@ -51,6 +48,7 @@ def protect(self): lv: F.Electrical voltage: F.TBD[Quantity] + max_current: F.TBD[Quantity] """ Only for this particular power interface @@ -82,20 +80,8 @@ def fused(self, attach_to: Node | None = None): return fused_power def __preinit__(self) -> None: - ... 
+ self.voltage.add(F.is_dynamic_by_connections(lambda mif: mif.voltage)) + # self.voltage.merge( # self.hv.potential - self.lv.potential # ) - - def _on_connect(self, other: ModuleInterface) -> None: - super()._on_connect(other) - - if not isinstance(other, ElectricPower): - return - - self.voltage.merge(other.voltage) - - # TODO remove with lazy mifs - def connect(self: Self, *other: Self, linkcls=None) -> Self: - with RecursionGuard(): - return super().connect(*other, linkcls=linkcls) diff --git a/src/faebryk/library/HLK_LD2410B_P.py b/src/faebryk/library/HLK_LD2410B_P.py index 7fb8c522..7923f2f3 100644 --- a/src/faebryk/library/HLK_LD2410B_P.py +++ b/src/faebryk/library/HLK_LD2410B_P.py @@ -20,7 +20,7 @@ def get_config(self) -> dict: uart_candidates = { mif - for mif in obj.uart.get_direct_connections() + for mif in obj.uart.get_connected() if mif.has_trait(F.is_esphome_bus) and mif.has_trait(F.has_esphome_config) } diff --git a/src/faebryk/library/I2C.py b/src/faebryk/library/I2C.py index 7db966d9..1068cc52 100644 --- a/src/faebryk/library/I2C.py +++ b/src/faebryk/library/I2C.py @@ -29,11 +29,6 @@ def terminate(self): self.sda.pulled.pull(up=True) self.scl.pulled.pull(up=True) - def _on_connect(self, other: "I2C"): - super()._on_connect(other) - - self.frequency.merge(other.frequency) - class SpeedMode(Enum): low_speed = 10 * P.khertz standard_speed = 100 * P.khertz @@ -43,3 +38,6 @@ class SpeedMode(Enum): @staticmethod def define_max_frequency_capability(mode: SpeedMode): return F.Range(I2C.SpeedMode.low_speed, mode) + + def __preinit__(self) -> None: + self.frequency.add(F.is_dynamic_by_connections(lambda mif: mif.frequency)) diff --git a/src/faebryk/library/Pad.py b/src/faebryk/library/Pad.py index 84e83d5b..212e8f2f 100644 --- a/src/faebryk/library/Pad.py +++ b/src/faebryk/library/Pad.py @@ -38,7 +38,7 @@ def find_pad_for_intf_with_parent_that_has_footprint( pads = [ pad for pad in footprint.get_children(direct_only=True, types=Pad) - if 
pad.net.is_connected_to(intf) is not None + if pad.net.is_connected_to(intf) ] return pads diff --git a/src/faebryk/library/Power.py b/src/faebryk/library/Power.py index 594a3c39..56e7646a 100644 --- a/src/faebryk/library/Power.py +++ b/src/faebryk/library/Power.py @@ -5,26 +5,14 @@ class Power(ModuleInterface): - class PowerSourcesShortedError(Exception): ... - class is_power_source(ModuleInterface.TraitT): ... - class is_power_source_defined(is_power_source.impl()): ... - class is_power_sink(ModuleInterface.TraitT): ... - class is_power_sink_defined(is_power_sink.impl()): ... - def make_source(self): - self.add(self.is_power_source_defined()) + self.add(self.is_power_source.impl()()) return self def make_sink(self): - self.add(self.is_power_sink_defined()) + self.add(self.is_power_sink.impl()()) return self - - def _on_connect(self, other: "Power"): - if self.has_trait(self.is_power_source) and other.has_trait( - self.is_power_source - ): - raise self.PowerSourcesShortedError(self, other) diff --git a/src/faebryk/library/SignalElectrical.py b/src/faebryk/library/SignalElectrical.py index 780fa2bd..a011831b 100644 --- a/src/faebryk/library/SignalElectrical.py +++ b/src/faebryk/library/SignalElectrical.py @@ -5,7 +5,7 @@ import faebryk.library._F as F from faebryk.core.graphinterface import GraphInterface -from faebryk.core.link import LinkFilteredException, _TLinkDirectShallow +from faebryk.core.link import LinkDirectConditional from faebryk.core.module import Module from faebryk.core.moduleinterface import ModuleInterface from faebryk.core.node import Node @@ -13,11 +13,12 @@ class SignalElectrical(F.Signal): - class LinkIsolatedReference(_TLinkDirectShallow): - def __init__(self, interfaces: list[GraphInterface]) -> None: - if any(isinstance(gif.node, F.ElectricPower) for gif in interfaces): - raise LinkFilteredException("All nodes are ElectricPower") - super().__init__(interfaces) + class LinkIsolatedReference(LinkDirectConditional): + def is_filtered(self, 
path: list[GraphInterface]): + # TODO needs to be more specific powers of SignalElectrical + if any(isinstance(gif.node, F.ElectricPower) for gif in path): + return LinkDirectConditional.FilterResult.FAIL_UNRECOVERABLE + return LinkDirectConditional.FilterResult.PASS # ---------------------------------------- # modules, interfaces, parameters diff --git a/src/faebryk/library/TBD.py b/src/faebryk/library/TBD.py index 0f52ec82..da812ef9 100644 --- a/src/faebryk/library/TBD.py +++ b/src/faebryk/library/TBD.py @@ -3,11 +3,12 @@ from textwrap import indent -from faebryk.core.parameter import Parameter +from faebryk.core.parameter import Parameter, _resolved from faebryk.libs.units import UnitsContainer class TBD[PV](Parameter[PV]): + @_resolved def __eq__(self, __value: object) -> bool: if isinstance(__value, TBD): return True diff --git a/src/faebryk/library/UART_Base.py b/src/faebryk/library/UART_Base.py index 025fe017..216223e9 100644 --- a/src/faebryk/library/UART_Base.py +++ b/src/faebryk/library/UART_Base.py @@ -19,7 +19,5 @@ def single_electric_reference(self): F.ElectricLogic.connect_all_module_references(self) ) - def _on_connect(self, other: "UART_Base"): - super()._on_connect(other) - - self.baud.merge(other.baud) + def __preinit__(self) -> None: + self.baud.add(F.is_dynamic_by_connections(lambda mif: mif.baud)) diff --git a/src/faebryk/library/_F.py b/src/faebryk/library/_F.py index e5b77714..1c96f4f5 100644 --- a/src/faebryk/library/_F.py +++ b/src/faebryk/library/_F.py @@ -24,6 +24,7 @@ from faebryk.library.has_pcb_position import has_pcb_position from faebryk.library.has_single_electric_reference import has_single_electric_reference from faebryk.library.Power import Power +from faebryk.library.is_dynamic_by_connections import is_dynamic_by_connections from faebryk.library.Signal import Signal from faebryk.library.has_construction_dependency import has_construction_dependency from faebryk.library.has_footprint import has_footprint diff --git 
a/src/faebryk/library/is_dynamic_by_connections.py b/src/faebryk/library/is_dynamic_by_connections.py new file mode 100644 index 00000000..c7e54ebb --- /dev/null +++ b/src/faebryk/library/is_dynamic_by_connections.py @@ -0,0 +1,68 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + +import logging +from typing import Callable + +from faebryk.core.moduleinterface import ModuleInterface +from faebryk.core.node import NodeException +from faebryk.core.parameter import Parameter +from faebryk.libs.util import NotNone, cast_assert, once + +logger = logging.getLogger(__name__) + + +class is_dynamic_by_connections(Parameter.is_dynamic.impl()): + def __init__(self, key: Callable[[ModuleInterface], Parameter]) -> None: + super().__init__() + self._key = key + self._guard = False + self._merged: set[int] = set() + + @once + def mif_parent(self) -> ModuleInterface: + return cast_assert(ModuleInterface, NotNone(self.obj.get_parent())[0]) + + def exec_for_mifs(self, mifs: set[ModuleInterface]): + if self._guard: + return + + mif_parent = self.mif_parent() + self_param = self.get_obj(Parameter) + if self._key(mif_parent) is not self_param: + raise NodeException(self, "Key not mapping to parameter") + + # only self + if len(mifs) == 1: + return + + params = [self._key(mif) for mif in mifs] + params_with_guard = [ + ( + param, + cast_assert( + is_dynamic_by_connections, param.get_trait(Parameter.is_dynamic) + ), + ) + for param in params + ] + + # Disable guards to prevent infinite recursion + for param, guard in params_with_guard: + guard._guard = True + guard._merged.add(id(self_param)) + + # Merge parameters + for param in params: + if id(param) in self._merged: + continue + self._merged.add(id(param)) + self_param.merge(param) + + # Enable guards again + for _, guard in params_with_guard: + guard._guard = False + + def exec(self): + mif_parent = self.mif_parent() + self.exec_for_mifs(set(mif_parent.get_connected())) diff --git 
a/src/faebryk/library/is_esphome_bus.py b/src/faebryk/library/is_esphome_bus.py index d42104a2..48212e80 100644 --- a/src/faebryk/library/is_esphome_bus.py +++ b/src/faebryk/library/is_esphome_bus.py @@ -15,7 +15,7 @@ def get_bus_id(self) -> str: ... @staticmethod def find_connected_bus(bus: ModuleInterface): - connected_mifs = bus.get_direct_connections() + connected_mifs = list(bus.get_connected()) try: return find(connected_mifs, lambda mif: mif.has_trait(is_esphome_bus)) except ValueError: diff --git a/src/faebryk/libs/app/checks.py b/src/faebryk/libs/app/checks.py index 44439aaf..c130f8c9 100644 --- a/src/faebryk/libs/app/checks.py +++ b/src/faebryk/libs/app/checks.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: MIT -from faebryk.core.graph import Graph +from faebryk.core.graphinterface import Graph from faebryk.core.module import Module from faebryk.libs.app.erc import simple_erc diff --git a/src/faebryk/libs/app/erc.py b/src/faebryk/libs/app/erc.py index 78b65aed..0957bfb8 100644 --- a/src/faebryk/libs/app/erc.py +++ b/src/faebryk/libs/app/erc.py @@ -11,7 +11,7 @@ from faebryk.core.moduleinterface import ModuleInterface from faebryk.library.Operation import Operation from faebryk.libs.picker.picker import has_part_picked -from faebryk.libs.util import groupby, print_stack +from faebryk.libs.util import groupby logger = logging.getLogger(__name__) @@ -24,16 +24,7 @@ def __init__(self, faulting_ifs: Sequence[ModuleInterface], *args: object) -> No class ERCFaultShort(ERCFault): def __init__(self, faulting_ifs: Sequence[ModuleInterface], *args: object) -> None: - link = faulting_ifs[0].is_connected_to(faulting_ifs[1]) - assert link - from faebryk.core.core import LINK_TB - - stack = "" - if LINK_TB: - stack = print_stack(link.tb) - super().__init__(faulting_ifs, *args) - print(stack) class ERCFaultElectricPowerUndefinedVoltage(ERCFault): @@ -45,6 +36,12 @@ def __init__(self, faulting_EP: list[F.ElectricPower], *args: object) -> None: super().__init__(faulting_EP, 
msg, *args) +class ERCPowerSourcesShortedError(ERCFault): + """ + Multiple power sources shorted together + """ + + def simple_erc(G: Graph): """Simple ERC check. @@ -70,6 +67,15 @@ def simple_erc(G: Graph): for ep in electricpower: if ep.lv.is_connected_to(ep.hv): raise ERCFaultShort([ep], "shorted power") + if ep.has_trait(F.Power.is_power_source): + other_sources = [ + other + for other in ep.get_connected() + if isinstance(other, F.ElectricPower) + and other.has_trait(F.Power.is_power_source) + ] + if other_sources: + raise ERCPowerSourcesShortedError([ep] + other_sources) unresolved_voltage = [ ep @@ -82,11 +88,11 @@ def simple_erc(G: Graph): # shorted nets nets = G.nodes_of_type(F.Net) - logger.info(f"Checking {len(nets)} nets") + logger.info(f"Checking {len(nets)} explicit nets") for net in nets: collisions = { p[0] - for mif in net.part_of.get_direct_connections() + for mif in net.part_of.get_connected() if (p := mif.get_parent()) and isinstance(p[0], F.Net) } @@ -123,6 +129,7 @@ def simple_erc(G: Graph): # if any(mif.is_connected_to(other) for other in (mifs - checked)): # raise ERCFault([mif], "shorted symmetric footprint") comps = G.nodes_of_types((F.Resistor, F.Capacitor, F.Fuse)) + logger.info(f"Checking {len(comps)} passives") for comp in comps: assert isinstance(comp, (F.Resistor, F.Capacitor, F.Fuse)) # TODO make prettier diff --git a/src/faebryk/libs/app/parameters.py b/src/faebryk/libs/app/parameters.py index 1e19e0b8..8003d944 100644 --- a/src/faebryk/libs/app/parameters.py +++ b/src/faebryk/libs/app/parameters.py @@ -4,8 +4,12 @@ import logging import faebryk.library._F as F +from faebryk.core.graphinterface import Graph from faebryk.core.module import Module +from faebryk.core.moduleinterface import ModuleInterface from faebryk.core.parameter import Parameter +from faebryk.libs.test.times import Times +from faebryk.libs.util import find, groupby logger = logging.getLogger(__name__) @@ -33,3 +37,60 @@ def replace_tbd_with_any(module: Module, 
recursive: bool, loglvl: int | None = N if recursive: for m in module.get_children_modules(types=Module): replace_tbd_with_any(m, recursive=False, loglvl=loglvl) + + +# TODO this is an ugly hack until we have better params +def resolve_dynamic_parameters_alt(graph: Graph): + params = [ + (param, trait) + for param, trait in graph.nodes_with_trait(Parameter.is_dynamic) + if isinstance(trait, F.is_dynamic_by_connections) + ] + + mifs = {trait.mif_parent() for _, trait in params} + print("MIFS", len(mifs)) + mifs_grouped: list[set[ModuleInterface]] = [] + while mifs: + mif = mifs.pop() + connections = {other for other in mifs if mif.is_connected_to(other)} + connections.add(mif) + mifs_grouped.append(connections) + mifs.difference_update(connections) + + for _, trait in params: + mif = trait.mif_parent() + p_mifs = find(mifs_grouped, lambda mifs: mif in mifs) + trait.exec_for_mifs(p_mifs) + + +def resolve_dynamic_parameters(graph: Graph): + mifs: list[set[ModuleInterface]] = [] + + params = [ + (param, trait) + for param, trait in graph.nodes_with_trait(Parameter.is_dynamic) + if isinstance(trait, F.is_dynamic_by_connections) + ] + + times = Times() + mifs_ = groupby(params, lambda p: p[1].mif_parent()) + params_: set[tuple[Parameter, F.is_dynamic_by_connections]] = set() + while mifs_: + mif, mif_params = mifs_.popitem() + connections = set(mif.get_connected()) + mifs.append(connections) + + for m in connections: + if m in mifs_: + del mifs_[m] + params_.update(mif_params) + + times.add("get parameter connections") + + for _, trait in params_: + mif = trait.mif_parent() + p_mifs = find(mifs, lambda mifs: mif in mifs) + trait.exec_for_mifs(p_mifs) + + times.add("merge parameters") + logger.info(times) diff --git a/src/faebryk/libs/app/pcb.py b/src/faebryk/libs/app/pcb.py index aca2f587..6ff489bd 100644 --- a/src/faebryk/libs/app/pcb.py +++ b/src/faebryk/libs/app/pcb.py @@ -16,6 +16,7 @@ from faebryk.exporters.pcb.kicad.transformer import PCB_Transformer from 
faebryk.exporters.pcb.routing.util import apply_route_in_pcb from faebryk.libs.app.kicad_netlist import write_netlist +from faebryk.libs.app.parameters import resolve_dynamic_parameters from faebryk.libs.kicad.fileformats import ( C_kicad_fp_lib_table_file, C_kicad_pcb_file, @@ -74,6 +75,8 @@ def apply_design( app: Module, transform: Callable[[PCB_Transformer], Any] | None = None, ): + resolve_dynamic_parameters(G) + logger.info(f"Writing netlist to {netlist_path}") changed = write_netlist(G, netlist_path, use_kicad_designators=True) apply_netlist(pcb_path, netlist_path, changed) diff --git a/src/faebryk/libs/bfs.py b/src/faebryk/libs/bfs.py new file mode 100644 index 00000000..a494b85b --- /dev/null +++ b/src/faebryk/libs/bfs.py @@ -0,0 +1,114 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT +import itertools +import logging +from collections import deque +from typing import Callable, Generator, Iterable, Self + +logger = logging.getLogger(__name__) + + +class BFSPath[T]: + def __init__(self, path: list[T], visited_ref: set[T]) -> None: + self.path: list[T] = path + self.visited_ref: set[T] = visited_ref + self._fully_visited = True + + @property + def last(self) -> T: + return self.path[-1] + + @property + def first(self) -> T: + return self.path[0] + + @classmethod + def from_base(cls, base: "BFSPath[T]", node: T): + return cls(base.path + [node], base.visited_ref) + + def __add__(self, node: T) -> Self: + return self.from_base(self, node) + + def __contains__(self, node: T) -> bool: + return node in self.path + + @property + def edges(self) -> Iterable[tuple[T, T]]: + return itertools.pairwise(self.path) + + def __len__(self) -> int: + return len(self.path) + + def __getitem__(self, idx: int) -> T: + return self.path[idx] + + # The partial visit stuff is pretty weird, let me try to explain: + # If a node is not fully visited through a path, it means that there might still + # be paths that lead to this node that are more 
interesting. Thus we give the caller + # the chance to explore those other paths. + # If at a later point the caller discovers that the current path is fully visited + # after all, it can mark it. + @property + def fully_visited(self) -> bool: + return self._fully_visited + + @fully_visited.setter + def fully_visited(self, value: bool): + self._fully_visited = value + if value: + self.mark_visited() + + def mark_visited(self): + self.visited_ref.update(self.path) + + +def bfs_visit[T]( + neighbours: Callable[[BFSPath[T]], Iterable[BFSPath[T]]], + roots: Iterable[T], +) -> Generator[BFSPath[T], None, None]: + """ + Generic BFS (not depending on Graph) + Returns all visited nodes. + """ + visited: set[T] = set(roots) + visited_partially: set[T] = set(roots) + open_path_queue: deque[BFSPath[T]] = deque( + [BFSPath([root], visited) for root in roots] + ) + + # TODO remove + paths = [] + + def handle_path(path: BFSPath[T]): + if path.fully_visited: + path.mark_visited() + + for path in open_path_queue: + yield path + handle_path(path) + + while open_path_queue: + open_path = open_path_queue.popleft() + + for new_path in neighbours(open_path): + neighbour = new_path.last + # visited + if neighbour in visited: + continue + # visited in path (loop) + if neighbour in visited_partially and neighbour in open_path: + continue + + visited_partially.add(neighbour) + open_path_queue.append(new_path) + + # # TODO remove + paths.append(new_path) + if len(paths) % 50000 == 0: + logger.info(f"{len(visited)} {len(visited_partially)} {len(paths)}") + + yield new_path + handle_path(new_path) + + # TODO remove + logger.info(f"Searched {len(paths)} paths") diff --git a/src/faebryk/libs/examples/buildutil.py b/src/faebryk/libs/examples/buildutil.py index c8a22eef..00e75151 100644 --- a/src/faebryk/libs/examples/buildutil.py +++ b/src/faebryk/libs/examples/buildutil.py @@ -8,7 +8,7 @@ import faebryk.libs.picker.lcsc as lcsc from faebryk.core.module import Module from faebryk.libs.app.checks 
import run_checks -from faebryk.libs.app.parameters import replace_tbd_with_any +from faebryk.libs.app.parameters import replace_tbd_with_any, resolve_dynamic_parameters from faebryk.libs.app.pcb import apply_design from faebryk.libs.examples.pickers import add_example_pickers from faebryk.libs.picker.jlcpcb.jlcpcb import JLCPCB_DB @@ -50,6 +50,7 @@ def apply_design_to_pcb(m: Module): ) G = m.get_graph() + resolve_dynamic_parameters(G) run_checks(m, G) # TODO this can be prettier diff --git a/src/faebryk/libs/test/times.py b/src/faebryk/libs/test/times.py index 16b1a78f..0cb9808c 100644 --- a/src/faebryk/libs/test/times.py +++ b/src/faebryk/libs/test/times.py @@ -1,35 +1,50 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +import io import time -from textwrap import indent + +from rich.console import Console +from rich.table import Table + +from faebryk.libs.units import P class Times: - def __init__(self) -> None: - self.times = {} - self.last_time = time.time() + def __init__(self, cnt: int = 1, unit: str = "ms") -> None: + self.times: dict[str, float] = {} + self.last_time = time.perf_counter() + + self.unit = unit + self.cnt = cnt def add(self, name: str): - now = time.time() + now = time.perf_counter() if name not in self.times: self.times[name] = now - self.last_time self.last_time = now def _format_val(self, val: float): - return f"{val * 1000:.2f}ms" + return f"{((val / self.cnt)*P.s).to(self.unit).m:.2f}", self.unit def __repr__(self): - formatted = { - k: self._format_val(v) - for k, v in self.times.items() - if not k.startswith("_") - } - longest_name = max(len(k) for k in formatted) - return "Timings: \n" + indent( - "\n".join(f"{k:>{longest_name}}: {v:<10}" for k, v in formatted.items()), - " " * 4, - ) + table = Table(title="Timings") + table.add_column("Category", style="cyan") + table.add_column("Subcategory", style="magenta") + table.add_column("Value", justify="right", style="green") + table.add_column("Unit", 
style="yellow") + + for k, v in self.times.items(): + if not k.startswith("_"): + value, unit = self._format_val(v) + categories = k.split(":", 1) + if len(categories) == 1: + categories.append("") + table.add_row(categories[0].strip(), categories[1].strip(), value, unit) + + console = Console(record=True, file=io.StringIO()) + console.print(table) + return console.export_text(styles=True) class Context: def __init__(self, name: str, times: "Times"): @@ -38,10 +53,10 @@ def __init__(self, name: str, times: "Times"): def __enter__(self): self.times.add("_" + self.name) - self.start = time.time() + self.start = time.perf_counter() def __exit__(self, exc_type, exc_value, traceback): - self.times.times[self.name] = time.time() - self.start + self.times.times[self.name] = time.perf_counter() - self.start def context(self, name: str): return Times.Context(name, self) diff --git a/src/faebryk/libs/util.py b/src/faebryk/libs/util.py index 425f2b3c..2fb5bbce 100644 --- a/src/faebryk/libs/util.py +++ b/src/faebryk/libs/util.py @@ -3,10 +3,11 @@ import asyncio import inspect +import itertools import logging import sys from abc import abstractmethod -from collections import defaultdict +from collections import defaultdict, deque from contextlib import contextmanager from dataclasses import dataclass, fields from enum import StrEnum @@ -16,6 +17,7 @@ Any, Callable, Concatenate, + Generator, Iterable, Iterator, List, @@ -63,9 +65,9 @@ def __eq__(self, other): return hash(self) == hash(other) -def unique(it, key): +def unique[T](it: Iterable[T], key: Callable[[T], Any]) -> list[T]: seen = [] - out = [] + out: list[T] = [] for i in it: v = key(i) if v in seen: @@ -595,28 +597,6 @@ def __repr__(self) -> str: return f"{type(self).__name__}({self.object})" -def bfs_visit[T]( - neighbours: Callable[[list[T]], list[T]], roots: Iterable[T] -) -> set[T]: - """ - Generic BFS (not depending on Graph) - Returns all visited nodes. 
- """ - open_path_queue: list[list[T]] = [[root] for root in roots] - visited: set[T] = set(roots) - - while open_path_queue: - open_path = open_path_queue.pop(0) - - for neighbour in neighbours(open_path): - if neighbour not in visited: - new_path = open_path + [neighbour] - visited.add(neighbour) - open_path_queue.append(new_path) - - return visited - - class TwistArgs: def __init__(self, op: Callable) -> None: self.op = op @@ -737,6 +717,32 @@ def __bool__(self): return res +class ConfigFlagInt: + def __init__(self, name: str, default: int, descr: str = "") -> None: + self.name = name + self.default = default + self.descr = descr + + @cache + def get(self): + import os + + key = f"FBRK_{self.name}" + + if key not in os.environ: + return self.default + + val = int(os.environ[key]) + + if val != self.default: + logger.warning(f"Config flag |{self.name}={val}|") + + return val + + def __int__(self): + return self.get() + + class ConfigFlagEnum[E: StrEnum]: def __init__(self, enum: type[E], name: str, default: E, descr: str = "") -> None: self.enum = enum @@ -940,3 +946,26 @@ def exceptions_to_log( ) if not mute: raise + + +def typename(x: object | type) -> str: + if not isinstance(x, type): + x = type(x) + return x.__name__ + + +def consume(iter: Iterable, n: int) -> list: + assert n >= 0 + out = list(itertools.islice(iter, n)) + return out if len(out) == n else [] + + +class DefaultFactoryDict[T, U](dict[T, U]): + def __init__(self, factory: Callable[[T], U], *args, **kwargs): + self.factory = factory + super().__init__(*args, **kwargs) + + def __missing__(self, key: T) -> U: + res = self.factory(key) + self[key] = res + return res diff --git a/test/conftest.py b/test/conftest.py new file mode 100644 index 00000000..35349ceb --- /dev/null +++ b/test/conftest.py @@ -0,0 +1,17 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + + +import logging + +import pytest + +from faebryk.libs.logging import setup_basic_logging + + +# TODO does not 
work +@pytest.fixture(scope="package", autouse=True) +def setup_logging(): + setup_basic_logging() + logging.info("Setup logging") + yield diff --git a/test/core/cpp/test_cpp.py b/test/core/cpp/test_cpp.py new file mode 100644 index 00000000..f8f489f6 --- /dev/null +++ b/test/core/cpp/test_cpp.py @@ -0,0 +1,58 @@ +# This file is part of the faebryk project +# SPDX-License-Identifier: MIT + +import logging + +import faebryk.library._F as F +from faebryk.core.cpp.graph import CGraph +from faebryk.core.module import Module +from faebryk.core.moduleinterface import ModuleInterface +from faebryk.libs.test.times import Times + +logger = logging.getLogger(__name__) + + +def test_graph_build(): + # python + class App(Module): + a: ModuleInterface + b: ModuleInterface + + def __preinit__(self): + self.a.connect(self.b) + + app = App() + G = app.get_graph() + + # cpp + Gpp = CGraph(G) + + assert Gpp.gif_py(Gpp._gif_c[app.a.self_gif]).node is app.a + + paths, _ = Gpp.find_paths(app.a, app.b) + assert len(paths) == 2 + self_path, path = paths + assert len(self_path.path) == 1 + assert self_path.path[0] is app.a.self_gif + assert len(path.path) == 4 + assert path.path[0] is app.a.self_gif + assert path.path[1] is app.a.connected + assert path.path[2] is app.b.connected + assert path.path[3] is app.b.self_gif + + +def test_graph_convert_performance(): + times = Times() + + app = F.USB2514B() + G = app.get_graph() + times.add("construct") + + CGraph(G) + times.add("convert") + + logger.info(f"\n{times}") + + +if __name__ == "__main__": + test_graph_build() diff --git a/test/core/test_core.py b/test/core/test_core.py index 238a8a5f..9906f6f2 100644 --- a/test/core/test_core.py +++ b/test/core/test_core.py @@ -173,19 +173,19 @@ def test_gifs(self): gif1.connect(gif2) self.assertIn(gif2, gif1.edges) - self.assertTrue(gif1.is_connected(gif2) is not None) + self.assertTrue(gif1.is_connected_to(gif2) is not None) gif3 = GIF() class linkcls(LinkDirect): pass - gif1.connect(gif3, 
linkcls) - self.assertIsInstance(gif1.is_connected(gif3), linkcls) - self.assertEqual(gif1.is_connected(gif3), gif3.is_connected(gif1)) + gif1.connect(gif3, linkcls=linkcls) + self.assertIsInstance(gif1.is_connected_to(gif3), linkcls) + self.assertEqual(gif1.is_connected_to(gif3), gif3.is_connected_to(gif1)) self.assertRaises(AssertionError, lambda: gif1.connect(gif3)) - gif1.connect(gif3, linkcls) + gif1.connect(gif3, linkcls=linkcls) self.assertEqual(gif1.G, gif2.G) @@ -194,13 +194,13 @@ def test_node_gifs(self): n1 = Node() - self.assertIsInstance(n1.self_gif.is_connected(n1.parent), LinkSibling) - self.assertIsInstance(n1.self_gif.is_connected(n1.children), LinkSibling) + self.assertIsInstance(n1.self_gif.is_connected_to(n1.parent), LinkSibling) + self.assertIsInstance(n1.self_gif.is_connected_to(n1.children), LinkSibling) n2 = Node() n1.add(n2, name="n2") - self.assertIsInstance(n1.children.is_connected(n2.parent), LinkParent) + self.assertIsInstance(n1.children.is_connected_to(n2.parent), LinkParent) print(n1.get_graph()) @@ -234,7 +234,9 @@ def test_fab_ll_chain_names(self): x.add(y, f"i{i}") x = y - self.assertEqual(x.get_full_name(), "*.i0.i1.i2.i3.i4.i5.i6.i7.i8.i9") + self.assertRegex( + x.get_full_name(), "<[0-9A-F]{4}>.i0.i1.i2.i3.i4.i5.i6.i7.i8.i9" + ) def test_fab_ll_chain_tree(self): root = Node() @@ -246,7 +248,9 @@ def test_fab_ll_chain_tree(self): x.add(z, f"j{i}") x = y - self.assertEqual(x.get_full_name(), "*.i0.i1.i2.i3.i4.i5.i6.i7.i8.i9") + self.assertRegex( + x.get_full_name(), "<[0-9A-F]{4}>.i0.i1.i2.i3.i4.i5.i6.i7.i8.i9" + ) if __name__ == "__main__": diff --git a/test/core/test_hierarchy_connect.py b/test/core/test_hierarchy_connect.py index fbdf2b59..71d4d895 100644 --- a/test/core/test_hierarchy_connect.py +++ b/test/core/test_hierarchy_connect.py @@ -2,285 +2,376 @@ # SPDX-License-Identifier: MIT import logging -import unittest from itertools import chain +import pytest + import faebryk.library._F as F -from faebryk.core.core import 
logger as core_logger -from faebryk.core.link import LinkDirect, LinkDirectShallow, _TLinkDirectShallow +from faebryk.core.graphinterface import GraphInterface +from faebryk.core.link import LinkDirectConditional, LinkDirectDerived from faebryk.core.module import Module from faebryk.core.moduleinterface import ModuleInterface +from faebryk.libs.app.erc import ERCPowerSourcesShortedError, simple_erc from faebryk.libs.library import L -from faebryk.libs.util import print_stack, times +from faebryk.libs.util import times logger = logging.getLogger(__name__) -core_logger.setLevel(logger.getEffectiveLevel()) - - -class TestHierarchy(unittest.TestCase): - def test_up_connect(self): - class UARTBuffer(Module): - bus_in: F.UART_Base - bus_out: F.UART_Base - - def __preinit__(self) -> None: - bus_in = self.bus_in - bus_out = self.bus_out - - bus_in.rx.signal.connect(bus_out.rx.signal) - bus_in.tx.signal.connect(bus_out.tx.signal) - bus_in.rx.reference.connect(bus_out.rx.reference) - - app = UARTBuffer() - - self.assertTrue(app.bus_in.rx.is_connected_to(app.bus_out.rx)) - self.assertTrue(app.bus_in.tx.is_connected_to(app.bus_out.tx)) - self.assertTrue(app.bus_in.is_connected_to(app.bus_out)) - - def test_chains(self): - mifs = times(3, ModuleInterface) - mifs[0].connect(mifs[1]) - mifs[1].connect(mifs[2]) - self.assertTrue(mifs[0].is_connected_to(mifs[2])) - - mifs = times(3, ModuleInterface) - mifs[0].connect_shallow(mifs[1]) - mifs[1].connect_shallow(mifs[2]) - self.assertTrue(mifs[0].is_connected_to(mifs[2])) - self.assertIsInstance(mifs[0].is_connected_to(mifs[2]), _TLinkDirectShallow) - - mifs = times(3, ModuleInterface) - mifs[0].connect_shallow(mifs[1]) - mifs[1].connect(mifs[2]) - self.assertTrue(mifs[0].is_connected_to(mifs[2])) - self.assertIsInstance(mifs[0].is_connected_to(mifs[2]), _TLinkDirectShallow) - - # Test hierarchy down filter & chain resolution - mifs = times(3, F.ElectricLogic) - mifs[0].connect_shallow(mifs[1]) - mifs[1].connect(mifs[2]) - 
self.assertTrue(mifs[0].is_connected_to(mifs[2])) - self.assertIsInstance(mifs[0].is_connected_to(mifs[2]), _TLinkDirectShallow) - - self.assertTrue(mifs[1].signal.is_connected_to(mifs[2].signal)) - self.assertTrue(mifs[1].reference.is_connected_to(mifs[2].reference)) - self.assertFalse(mifs[0].signal.is_connected_to(mifs[1].signal)) - self.assertFalse(mifs[0].reference.is_connected_to(mifs[1].reference)) - self.assertFalse(mifs[0].signal.is_connected_to(mifs[2].signal)) - self.assertFalse(mifs[0].reference.is_connected_to(mifs[2].reference)) - - # Test duplicate resolution - mifs[0].signal.connect(mifs[1].signal) - mifs[0].reference.connect(mifs[1].reference) - self.assertIsInstance(mifs[0].is_connected_to(mifs[1]), LinkDirect) - self.assertIsInstance(mifs[0].is_connected_to(mifs[2]), LinkDirect) - - def test_bridge(self): - self_ = self - - # U1 ---> _________B________ ---> U2 - # TX IL ===> OL TX - # S --> I -> S S -> O --> S - # R -------- R ----- R -------- R - - class Buffer(Module): - ins = L.list_field(2, F.Electrical) - outs = L.list_field(2, F.Electrical) - - ins_l = L.list_field(2, F.ElectricLogic) - outs_l = L.list_field(2, F.ElectricLogic) - - def __preinit__(self) -> None: - self_.assertIs( - self.ins_l[0].reference, - self.ins_l[0].single_electric_reference.get_reference(), - ) - - for el, lo in chain( - zip(self.ins, self.ins_l), - zip(self.outs, self.outs_l), - ): - lo.signal.connect(el) - - for l1, l2 in zip(self.ins_l, self.outs_l): - l1.connect_shallow(l2) - - @L.rt_field - def single_electric_reference(self): - return F.has_single_electric_reference_defined( - F.ElectricLogic.connect_all_module_references(self) - ) - - class UARTBuffer(Module): - buf: Buffer - bus_in: F.UART_Base - bus_out: F.UART_Base - - def __preinit__(self) -> None: - bus1 = self.bus_in - bus2 = self.bus_out - buf = self.buf - - bus1.tx.signal.connect(buf.ins[0]) - bus1.rx.signal.connect(buf.ins[1]) - bus2.tx.signal.connect(buf.outs[0]) - bus2.rx.signal.connect(buf.outs[1]) 
- - @L.rt_field - def single_electric_reference(self): - return F.has_single_electric_reference_defined( - F.ElectricLogic.connect_all_module_references(self) - ) - - import faebryk.core.core as c - - # Enable to see the stack trace of invalid connections - # c.LINK_TB = True - app = UARTBuffer() - - def _assert_no_link(mif1, mif2): - link = mif1.is_connected_to(mif2) - err = "" - if link and c.LINK_TB: - err = "\n" + print_stack(link.tb) - self.assertFalse(link, err) - - def _assert_link(mif1: ModuleInterface, mif2: ModuleInterface, link=None): - out = mif1.is_connected_to(mif2) - if link: - self.assertIsInstance(out, link) - return - self.assertIsNotNone(out) - - bus1 = app.bus_in - bus2 = app.bus_out - buf = app.buf - - # Check that the two buffer sides are not connected electrically - _assert_no_link(buf.ins[0], buf.outs[0]) - _assert_no_link(buf.ins[1], buf.outs[1]) - _assert_no_link(bus1.rx.signal, bus2.rx.signal) - _assert_no_link(bus1.tx.signal, bus2.tx.signal) - - # direct connect - _assert_link(bus1.tx.signal, buf.ins[0]) - _assert_link(bus1.rx.signal, buf.ins[1]) - _assert_link(bus2.tx.signal, buf.outs[0]) - _assert_link(bus2.rx.signal, buf.outs[1]) - - # connect through trait - self.assertIs( - buf.ins_l[0].single_electric_reference.get_reference(), - buf.ins_l[0].reference, - ) - _assert_link(buf.ins_l[0].reference, buf.outs_l[0].reference) - _assert_link(buf.outs_l[1].reference, buf.ins_l[0].reference) - _assert_link(bus1.rx.reference, bus2.rx.reference, LinkDirect) - - # connect through up - _assert_link(bus1.tx, buf.ins_l[0], LinkDirect) - _assert_link(bus2.tx, buf.outs_l[0], LinkDirect) - - # connect shallow - _assert_link(buf.ins_l[0], buf.outs_l[0], _TLinkDirectShallow) - - # Check that the two buffer sides are connected logically - _assert_link(bus1.tx, bus2.tx) - _assert_link(bus1.rx, bus2.rx) - _assert_link(bus1, bus2) - - def test_specialize(self): - class Specialized(ModuleInterface): ... 
- - # general connection -> specialized connection - mifs = times(3, ModuleInterface) - mifs_special = times(3, Specialized) - - mifs[0].connect(mifs[1]) - mifs[1].connect(mifs[2]) - - mifs[0].specialize(mifs_special[0]) - mifs[2].specialize(mifs_special[2]) - self.assertTrue(mifs_special[0].is_connected_to(mifs_special[2])) - - # specialized connection -> general connection - mifs = times(3, ModuleInterface) - mifs_special = times(3, Specialized) - - mifs_special[0].connect(mifs_special[1]) - mifs_special[1].connect(mifs_special[2]) - mifs[0].specialize(mifs_special[0]) - mifs[2].specialize(mifs_special[2]) +def test_up_connect(): + class UARTBuffer(Module): + bus_in: F.UART_Base + bus_out: F.UART_Base + + def __preinit__(self) -> None: + self.bus_in.rx.signal.connect(self.bus_out.rx.signal) + self.bus_in.tx.signal.connect(self.bus_out.tx.signal) + self.bus_in.rx.reference.connect(self.bus_out.rx.reference) + + app = UARTBuffer() + + assert app.bus_in.rx.signal.is_connected_to(app.bus_out.rx.signal) + assert app.bus_in.rx.reference.is_connected_to(app.bus_out.rx.reference) + assert app.bus_in.rx.is_connected_to(app.bus_out.rx) + assert app.bus_in.tx.is_connected_to(app.bus_out.tx) + assert app.bus_in.is_connected_to(app.bus_out) + + +def test_down_connect(): + ep = times(2, F.ElectricPower) + ep[0].connect(ep[1]) + + assert ep[0].is_connected_to(ep[1]) + assert ep[0].hv.is_connected_to(ep[1].hv) + assert ep[0].lv.is_connected_to(ep[1].lv) + + +def test_chains_direct(): + mifs = times(3, ModuleInterface) + mifs[0].connect(mifs[1]) + mifs[1].connect(mifs[2]) + assert mifs[0].is_connected_to(mifs[2]) + + +def test_chains_double_shallow_flat(): + mifs = times(3, ModuleInterface) + mifs[0].connect_shallow(mifs[1]) + mifs[1].connect_shallow(mifs[2]) + assert mifs[0].is_connected_to(mifs[2]) + + +def test_chains_mixed_shallow_flat(): + mifs = times(3, ModuleInterface) + mifs[0].connect_shallow(mifs[1]) + mifs[1].connect(mifs[2]) + assert mifs[0].is_connected_to(mifs[2]) 
+ + +def test_chains_mixed_shallow_nested(): + # Test hierarchy down filter & chain resolution + el = times(3, F.ElectricLogic) + el[0].connect_shallow(el[1]) + el[1].connect(el[2]) + assert el[0].is_connected_to(el[2]) + + assert el[1].signal.is_connected_to(el[2].signal) + assert el[1].reference.is_connected_to(el[2].reference) + assert not el[0].signal.is_connected_to(el[1].signal) + assert not el[0].reference.is_connected_to(el[1].reference) + assert not el[0].signal.is_connected_to(el[2].signal) + assert not el[0].reference.is_connected_to(el[2].reference) + + # Test duplicate resolution + el[0].signal.connect(el[1].signal) + el[0].reference.connect(el[1].reference) + assert el[0].is_connected_to(el[1]) + assert el[0].is_connected_to(el[2]) + + +def test_bridge(): + """ + Test the bridge connection between two UART interfaces through a buffer: + + ``` + U1 ---> _________B________ ---> U2 + TX IL ===> OL TX + S --> I -> S S -> O --> S + R -------- R ----- R -------- R + ``` + + Where: + - U1, U2: UART interfaces + - B: Buffer + - TX: Transmit + - S: Signal + - R: Reference + - I: Input + - O: Output + - IL: Input Logic + - OL: Output Logic + """ + + class Buffer(Module): + ins = L.list_field(2, F.Electrical) + outs = L.list_field(2, F.Electrical) + + ins_l = L.list_field(2, F.ElectricLogic) + outs_l = L.list_field(2, F.ElectricLogic) + + def __preinit__(self) -> None: + assert ( + self.ins_l[0].reference + is self.ins_l[0].single_electric_reference.get_reference() + ) + + for el, lo in chain( + zip(self.ins, self.ins_l), + zip(self.outs, self.outs_l), + ): + lo.signal.connect(el) + + for l1, l2 in zip(self.ins_l, self.outs_l): + l1.connect_shallow(l2) + + @L.rt_field + def single_electric_reference(self): + return F.has_single_electric_reference_defined( + F.ElectricLogic.connect_all_module_references(self) + ) + + class UARTBuffer(Module): + buf: Buffer + bus_in: F.UART_Base + bus_out: F.UART_Base + + def __preinit__(self) -> None: + bus_i = self.bus_in + 
bus_o = self.bus_out + buf = self.buf + + bus_i.tx.signal.connect(buf.ins[0]) + bus_i.rx.signal.connect(buf.ins[1]) + bus_o.tx.signal.connect(buf.outs[0]) + bus_o.rx.signal.connect(buf.outs[1]) + + @L.rt_field + def single_electric_reference(self): + return F.has_single_electric_reference_defined( + F.ElectricLogic.connect_all_module_references(self) + ) + + app = UARTBuffer() + + bus_i = app.bus_in + bus_o = app.bus_out + buf = app.buf + + # Check that the two buffer sides are not connected electrically + assert not list(buf.ins[0].get_paths_to(buf.outs[0])) + assert not buf.ins[1].is_connected_to(buf.outs[1]) + assert not bus_i.rx.signal.is_connected_to(bus_o.rx.signal) + assert not bus_i.tx.signal.is_connected_to(bus_o.tx.signal) + + # direct connect + assert bus_i.tx.signal.is_connected_to(buf.ins[0]) + assert bus_i.rx.signal.is_connected_to(buf.ins[1]) + assert bus_o.tx.signal.is_connected_to(buf.outs[0]) + assert bus_o.rx.signal.is_connected_to(buf.outs[1]) + + # connect through trait + assert ( + buf.ins_l[0].single_electric_reference.get_reference() is buf.ins_l[0].reference + ) + assert buf.ins_l[0].reference.is_connected_to(buf.outs_l[0].reference) + assert buf.outs_l[1].reference.is_connected_to(buf.ins_l[0].reference) + assert bus_i.rx.reference.is_connected_to(bus_o.rx.reference) + + # connect through up + assert bus_i.tx.is_connected_to(buf.ins_l[0]) + assert bus_o.tx.is_connected_to(buf.outs_l[0]) + + # connect shallow + assert buf.ins_l[0].is_connected_to(buf.outs_l[0]) + + # Check that the two buffer sides are connected logically + assert bus_i.tx.is_connected_to(bus_o.tx) + assert bus_i.rx.is_connected_to(bus_o.rx) + assert bus_i.is_connected_to(bus_o) + + +class Specialized(ModuleInterface): ... + + +class DoubleSpecialized(Specialized): ... 
+ + +def test_specialize_general_to_special(): + # general connection -> specialized connection + mifs = times(3, ModuleInterface) + mifs_special = times(3, Specialized) + + mifs[0].connect(mifs[1]) + mifs[1].connect(mifs[2]) + + mifs[0].specialize(mifs_special[0]) + mifs[2].specialize(mifs_special[2]) + + assert mifs_special[0].is_connected_to(mifs_special[2]) + + +def test_specialize_special_to_general(): + # specialized connection -> general connection + mifs = times(3, ModuleInterface) + mifs_special = times(3, Specialized) + + mifs_special[0].connect(mifs_special[1]) + mifs_special[1].connect(mifs_special[2]) + + mifs[0].specialize(mifs_special[0]) + mifs[2].specialize(mifs_special[2]) + + assert mifs[0].is_connected_to(mifs[2]) + + +def test_specialize_link(): + # test special link + class _Link(LinkDirectConditional): + def is_filtered(self, path: list[GraphInterface]): + return LinkDirectConditional.FilterResult.PASS + + mifs = times(3, ModuleInterface) + mifs_special = times(3, Specialized) + + mifs[0].connect(mifs[1], linkcls=_Link) + mifs[1].connect(mifs[2]) + + mifs[0].specialize(mifs_special[0]) + mifs[2].specialize(mifs_special[2]) + + assert mifs_special[0].is_connected_to(mifs_special[2]) + + +def test_specialize_double_with_gap(): + # double specialization with gap + mifs = times(2, ModuleInterface) + mifs_special = times(1, Specialized) + mifs_double_special = times(2, DoubleSpecialized) + + mifs[0].connect(mifs[1]) + mifs[0].specialize(mifs_special[0]) + mifs_special[0].specialize(mifs_double_special[0]) + mifs[1].specialize(mifs_double_special[1]) + + assert mifs_double_special[0].is_connected_to(mifs_double_special[1]) + + +def test_specialize_double_with_gap_2(): + mifs = times(2, ModuleInterface) + mifs_special = times(1, Specialized) + mifs_double_special = times(2, DoubleSpecialized) + + mifs_double_special[0].connect(mifs_double_special[1]) + mifs[0].specialize(mifs_special[0]) + mifs_special[0].specialize(mifs_double_special[0]) + 
mifs[1].specialize(mifs_double_special[1]) + + assert mifs[0].is_connected_to(mifs[1]) + + +def test_specialize_module(): + battery = F.Battery() + power = F.ElectricPower() + + battery.power.connect(power) + buttoncell = battery.specialize(F.ButtonCell()) + + assert buttoncell.power.is_connected_to(battery.power) + assert power.is_connected_to(buttoncell.power) + + +def test_isolated_connect_simple(): + x1 = F.ElectricLogic() + x2 = F.ElectricLogic() + x1.connect(x2, linkcls=F.ElectricLogic.LinkIsolatedReference) + + assert x1.is_connected_to(x2) + assert x1.signal.is_connected_to(x2.signal) + + assert not x1.reference.is_connected_to(x2.reference) + assert not x1.reference.hv.is_connected_to(x2.reference.hv) + + +def test_isolated_connect_erc(): + y1 = F.ElectricPower() + y2 = F.ElectricPower() + + y1.make_source() + y2.make_source() + + with pytest.raises(ERCPowerSourcesShortedError): + y1.connect(y2) + simple_erc(y1.get_graph()) - self.assertTrue(mifs[0].is_connected_to(mifs[2])) + ldo1 = F.LDO() + ldo2 = F.LDO() - # test special link - class _Link(LinkDirectShallow(lambda link, gif: True)): ... 
+ with pytest.raises(ERCPowerSourcesShortedError): + ldo1.power_out.connect(ldo2.power_out) + simple_erc(ldo1.get_graph()) - mifs = times(3, ModuleInterface) - mifs_special = times(3, Specialized) + a1 = F.I2C() + b1 = F.I2C() - mifs[0].connect(mifs[1], linkcls=_Link) - mifs[1].connect(mifs[2]) + a1.connect(b1, linkcls=F.ElectricLogic.LinkIsolatedReference) + assert a1.is_connected_to(b1) + assert a1.scl.signal.is_connected_to(b1.scl.signal) + assert a1.sda.signal.is_connected_to(b1.sda.signal) - mifs[0].specialize(mifs_special[0]) - mifs[2].specialize(mifs_special[2]) + assert not a1.scl.reference.is_connected_to(b1.scl.reference) + assert not a1.sda.reference.is_connected_to(b1.sda.reference) - self.assertIsInstance(mifs_special[0].is_connected_to(mifs_special[2]), _Link) - def test_isolated_connect(self): - x1 = F.ElectricLogic() - x2 = F.ElectricLogic() - x1.connect(x2, linkcls=F.ElectricLogic.LinkIsolatedReference) - self.assertIsInstance( - x1.is_connected_to(x2), F.ElectricLogic.LinkIsolatedReference - ) +def test_direct_implied_paths(): + powers = times(2, F.ElectricPower) - self.assertIsInstance( - x1.signal.is_connected_to(x2.signal), - F.ElectricLogic.LinkIsolatedReference, - ) + # direct implied + powers[0].connect(powers[1]) - self.assertIsNone(x1.reference.is_connected_to(x2.reference)) + assert powers[1].hv in powers[0].hv.get_connected() - self.assertIsNone(x1.reference.hv.is_connected_to(x2.reference.hv)) + path = powers[0].hv.is_connected_to(powers[1].hv) + assert path + assert len(path.path) == 4 + assert isinstance(path.path[1].is_connected_to(path.path[2]), LinkDirectDerived) - y1 = F.ElectricPower() - y2 = F.ElectricPower() - y1.make_source() - y2.make_source() +def test_children_implied_paths(): + powers = times(3, F.ElectricPower) - with self.assertRaises(F.Power.PowerSourcesShortedError): - y1.connect(y2) + # children implied + powers[0].connect(powers[1]) + powers[1].hv.connect(powers[2].hv) + powers[1].lv.connect(powers[2].lv) - ldo1 = 
F.LDO() - ldo2 = F.LDO() + assert powers[2] in powers[0].get_connected() - with self.assertRaises(F.Power.PowerSourcesShortedError): - ldo1.power_out.connect(ldo2.power_out) + paths = list(powers[0].get_paths_to(powers[2])) + assert paths + assert len(paths[0].path) == 4 + assert isinstance( + paths[0].path[1].is_connected_to(paths[0].path[2]), LinkDirectDerived + ) - a1 = F.I2C() - b1 = F.I2C() - a1.connect(b1, linkcls=F.ElectricLogic.LinkIsolatedReference) - self.assertIsInstance( - a1.is_connected_to(b1), F.ElectricLogic.LinkIsolatedReference - ) - self.assertIsInstance( - a1.scl.signal.is_connected_to(b1.scl.signal), - F.ElectricLogic.LinkIsolatedReference, - ) - self.assertIsInstance( - a1.sda.signal.is_connected_to(b1.sda.signal), - F.ElectricLogic.LinkIsolatedReference, - ) +def test_shallow_implied_paths(): + powers = times(4, F.ElectricPower) - self.assertIsNone(a1.scl.reference.is_connected_to(b1.scl.reference)) - self.assertIsNone(a1.sda.reference.is_connected_to(b1.sda.reference)) + # shallow implied + powers[0].connect(powers[1]) + powers[1].hv.connect(powers[2].hv) + powers[1].lv.connect(powers[2].lv) + powers[2].connect_shallow(powers[3]) + assert powers[3] in powers[0].get_connected() -if __name__ == "__main__": - unittest.main() + assert not powers[0].hv.is_connected_to(powers[3].hv) diff --git a/test/core/test_parameters.py b/test/core/test_parameters.py index 9db7050d..101ad41e 100644 --- a/test/core/test_parameters.py +++ b/test/core/test_parameters.py @@ -9,6 +9,7 @@ from faebryk.core.core import logger as core_logger from faebryk.core.module import Module from faebryk.core.parameter import Parameter +from faebryk.libs.app.parameters import resolve_dynamic_parameters from faebryk.libs.units import P logger = logging.getLogger(__name__) @@ -247,11 +248,6 @@ def test_comp( test_comp(F.Constant(F.Set([F.Range(F.Range(1))])), 1) def test_modules(self): - def assertIsInstance[T](obj, cls: type[T]) -> T: - self.assertIsInstance(obj, cls) - assert 
isinstance(obj, cls) - return obj - class Modules(Module): UART_A: F.UART_Base UART_B: F.UART_Base @@ -267,24 +263,21 @@ class Modules(Module): UART_A.baud.merge(F.Constant(9600 * P.baud)) + resolve_dynamic_parameters(m.get_graph()) + for uart in [UART_A, UART_B]: - self.assertEqual( - assertIsInstance(uart.baud.get_most_narrow(), F.Constant).value, - 9600 * P.baud, - ) + self.assertEqual(uart.baud.get_most_narrow(), 9600 * P.baud) UART_C.baud.merge(F.Range(1200 * P.baud, 115200 * P.baud)) UART_A.connect(UART_C) + resolve_dynamic_parameters(m.get_graph()) for uart in [UART_A, UART_B, UART_C]: - self.assertEqual( - assertIsInstance(uart.baud.get_most_narrow(), F.Constant).value, - 9600 * P.baud, - ) + self.assertEqual(uart.baud.get_most_narrow(), 9600 * P.baud) resistor = F.Resistor() - assertIsInstance( + self.assertIsInstance( resistor.get_current_flow_by_voltage_resistance(F.Constant(0.5)), F.Operation, ) @@ -324,43 +317,43 @@ def test_specialize(self): import faebryk.library._F as F from faebryk.libs.brightness import TypicalLuminousIntensity - for i in range(10): - - class App(Module): - led: F.PoweredLED - battery: F.Battery - - def __preinit__(self) -> None: - self.led.power.connect(self.battery.power) - - # Parametrize - self.led.led.color.merge(F.LED.Color.YELLOW) - self.led.led.brightness.merge( - TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value - ) - - app = App() - - bcell = app.battery.specialize(F.ButtonCell()) - bcell.voltage.merge(3 * P.V) - bcell.capacity.merge(F.Range.from_center(225 * P.mAh, 50 * P.mAh)) - bcell.material.merge(F.ButtonCell.Material.Lithium) - bcell.size.merge(F.ButtonCell.Size.N_2032) - bcell.shape.merge(F.ButtonCell.Shape.Round) - - app.led.led.color.merge(F.LED.Color.YELLOW) - app.led.led.max_brightness.merge(500 * P.millicandela) - app.led.led.forward_voltage.merge(1.2 * P.V) - app.led.led.max_current.merge(20 * P.mA) - - v = app.battery.voltage - # vbcell = bcell.voltage - # print(pretty_param_tree_top(v)) 
- # print(pretty_param_tree_top(vbcell)) - self.assertEqual(v.get_most_narrow(), 3 * P.V) - r = app.led.current_limiting_resistor.resistance - r = r.get_most_narrow() - self.assertIsInstance(r, F.Range, f"{type(r)}") + class App(Module): + led: F.PoweredLED + battery: F.Battery + + def __preinit__(self) -> None: + self.led.power.connect(self.battery.power) + + # Parametrize + self.led.led.color.merge(F.LED.Color.YELLOW) + self.led.led.brightness.merge( + TypicalLuminousIntensity.APPLICATION_LED_INDICATOR_INSIDE.value.value + ) + + app = App() + + bcell = app.battery.specialize(F.ButtonCell()) + bcell.voltage.merge(3 * P.V) + bcell.capacity.merge(F.Range.from_center(225 * P.mAh, 50 * P.mAh)) + bcell.material.merge(F.ButtonCell.Material.Lithium) + bcell.size.merge(F.ButtonCell.Size.N_2032) + bcell.shape.merge(F.ButtonCell.Shape.Round) + + app.led.led.color.merge(F.LED.Color.YELLOW) + app.led.led.max_brightness.merge(500 * P.millicandela) + app.led.led.forward_voltage.merge(1.2 * P.V) + app.led.led.max_current.merge(20 * P.mA) + + resolve_dynamic_parameters(app.get_graph()) + + v = app.battery.voltage + # vbcell = bcell.voltage + # print(pretty_param_tree_top(v)) + # print(pretty_param_tree_top(vbcell)) + self.assertEqual(v.get_most_narrow(), 3 * P.V) + r = app.led.current_limiting_resistor.resistance + r = r.get_most_narrow() + self.assertIsInstance(r, F.Range, f"{type(r)}") def test_units(self): self.assertEqual(F.Constant(1e-9 * P.F), 1 * P.nF) diff --git a/test/core/test_performance.py b/test/core/test_performance.py index 090f461c..2a5b7bd0 100644 --- a/test/core/test_performance.py +++ b/test/core/test_performance.py @@ -1,6 +1,7 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +import logging import time import unittest from itertools import pairwise @@ -11,10 +12,13 @@ from faebryk.core.module import Module from faebryk.core.moduleinterface import ModuleInterface from faebryk.core.node import Node +from faebryk.libs.app.parameters 
import resolve_dynamic_parameters from faebryk.libs.library import L from faebryk.libs.test.times import Times from faebryk.libs.util import times +logger = logging.getLogger(__name__) + class TestPerformance(unittest.TestCase): def test_get_all(self): @@ -84,9 +88,9 @@ def _common_timings( n.get_children(direct_only=True, types=ModuleInterface) timings.add(f"get_mifs {name}") - print(f"{test_name:-<80}") - print(f"{timings!r}") - print(str(G)) + logger.info(f"{test_name:-<80}") + logger.info(f"{timings!r}") + logger.info(f"{G}") return timings # _common_timings(lambda: _factory_simple_resistors(100), "simple") @@ -98,9 +102,9 @@ def _common_timings( lambda: _factory_simple_resistors(count), f"Simple resistors: {count}" ) per_resistor = timings.times["instance"] / count - print(f"----> Avg/resistor: {per_resistor*1e3:.2f} ms") + logger.info(f"----> Avg/resistor: {per_resistor*1e3:.2f} ms") - print("=" * 80) + logger.info("=" * 80) for i in range(2, 5): count = 10 * 2**i timings = _common_timings( @@ -108,12 +112,12 @@ def _common_timings( f"Connected resistors: {count}", ) per_resistor = timings.times["instance"] / count - print(f"----> Avg/resistor: {per_resistor*1e3:.2f} ms") + logger.info(f"----> Avg/resistor: {per_resistor*1e3:.2f} ms") def test_graph_merge_rec(self): timings = Times() count = 2**14 - print(f"Count: {count}") + logger.info(f"Count: {count}") gs = times(count, GraphInterface) timings.add("instance") @@ -146,16 +150,16 @@ def rec_connect(gs_sub: list[GraphInterface]): # self.assertLess(timings.times["split 1024"], 50e-3) # self.assertLess(timings.times["instance"], 300e-3) # self.assertLess(timings.times["connect"], 1200e-3) - print(timings) - print(f"----> Avg/connect: {per_connect*1e6:.2f} us") + logger.info(f"\n{timings}") + logger.info(f"----> Avg/connect: {per_connect*1e6:.2f} us") from faebryk.core.graphinterface import GraphImpl - print("Counter", GraphImpl.counter, GraphImpl.counter - count) + logger.info(f"Counter {GraphImpl.counter} 
{GraphImpl.counter - count}") def test_graph_merge_it(self): timings = Times() count = 2**14 - print(f"Count: {count}") + logger.info(f"Count: {count}") gs = times(count, GraphInterface) timings.add("instance") @@ -171,12 +175,134 @@ def test_graph_merge_it(self): # self.assertLess(timings.times["connect"], 500e-3) # self.assertLess(timings.times["instance"], 200e-3) # self.assertLess(per_connect, 25e-6) - print(timings) - print(f"----> Avg/connect: {per_connect*1e6:.2f} us") + logger.info(f"\n{timings}") + logger.info(f"----> Avg/connect: {per_connect*1e6:.2f} us") from faebryk.core.graphinterface import GraphImpl - print("Counter", GraphImpl.counter, GraphImpl.counter - count) + logger.info(f"Counter {GraphImpl.counter} {GraphImpl.counter - count}") + + def test_mif_connect_check(self): + cnt = 100 + + timings = Times(cnt=cnt, unit="us") + + for t in [ + GraphInterface, + ModuleInterface, + F.Electrical, + F.ElectricPower, + F.ElectricLogic, + F.I2C, + ]: + instances = [(t(), t()) for _ in range(cnt)] + timings.add(f"{t.__name__}: construct") + + for inst1, inst2 in instances: + inst1.connect(inst2) + timings.add(f"{t.__name__}: connect") + + for inst1, inst2 in instances: + self.assertTrue(inst1.is_connected_to(inst2)) + timings.add(f"{t.__name__}: is_connected") + + logger.info(f"\n{timings}") + + def test_mif_connect_hull(self): + cnt = 30 + + timings = Times(cnt=1, unit="ms") + + for t in [ + GraphInterface, + ModuleInterface, + F.Electrical, + F.ElectricPower, + F.ElectricLogic, + F.I2C, + ]: + instances = [t() for _ in range(cnt)] + timings.add(f"{t.__name__}: construct") + + instances[0].connect(*instances[1:]) + timings.add(f"{t.__name__}: connect") + + self.assertTrue(instances[0].is_connected_to(instances[-1])) + timings.add(f"{t.__name__}: is_connected") + + if issubclass(t, ModuleInterface): + list(instances[0].get_connected()) + else: + instances[0].edges + timings.add(f"{t.__name__}: get_connected") + + 
self.assertTrue(instances[0].is_connected_to(instances[-1])) + timings.add(f"{t.__name__}: is_connected cached") + + logger.info(f"\n{timings}") + + def test_complex_module(self): + from faebryk.core.pathfinder import CPP + from faebryk.core.pathfinder import logger as pathfinder_logger + + pathfinder_logger.setLevel(logging.ERROR) + + timings = Times() + + modules = [F.USB2514B, F.RP2040] + + for t in modules: + app = t() + timings.add(f"{t.__name__}: construct") + + if CPP: + from faebryk.core.cpp.graph import CGraph + + logger.info("Prebuild graph") + CGraph(app.get_graph()) + timings.add(f"{t.__name__}: cpp graph") + + resolve_dynamic_parameters(app.get_graph()) + timings.add(f"{t.__name__}: resolve") + + logger.info(f"\n{timings}") + + def test_no_connect(self): + from faebryk.core.pathfinder import CPP + from faebryk.core.pathfinder import logger as pathfinder_logger + + pathfinder_logger.setLevel(logging.ERROR) + + CNT = 30 + + timings = Times() + + app = F.RP2040_ReferenceDesign() + timings.add("construct") + + if CPP: + from faebryk.core.cpp.graph import CGraph + + logger.info("Prebuild graph") + CGraph(app.get_graph()) + timings.add("cpp graph") + + for i in range(CNT): + list(app.rp2040.power_core.get_connected()) + timings.add(f"_get_connected {i}") + + all_times = [ + timings.times[k] for k in timings.times if k.startswith("_get_connected") + ] + + timings.times["min"] = min(all_times) + timings.times["max"] = max(all_times) + timings.times["avg"] = sum(all_times) / len(all_times) + timings.times["median"] = sorted(all_times)[len(all_times) // 2] + timings.times["80%"] = sorted(all_times)[int(0.8 * len(all_times))] + timings.times["total"] = sum(all_times) + + logger.info(f"\n{timings}") if __name__ == "__main__": diff --git a/test/library/nodes/test_electricpower.py b/test/library/nodes/test_electricpower.py index 94402774..fefb8df8 100644 --- a/test/library/nodes/test_electricpower.py +++ b/test/library/nodes/test_electricpower.py @@ -1,31 +1,48 @@ # 
This file is part of the faebryk project # SPDX-License-Identifier: MIT -import unittest +from itertools import pairwise +import faebryk.library._F as F +from faebryk.libs.app.parameters import resolve_dynamic_parameters +from faebryk.libs.units import P +from faebryk.libs.util import times -class TestFusedPower(unittest.TestCase): - def test_fused_power(self): - import faebryk.library._F as F - from faebryk.libs.units import P - power_in = F.ElectricPower() - power_out = F.ElectricPower() +def test_fused_power(): + power_in = F.ElectricPower() + power_out = F.ElectricPower() - power_in.voltage.merge(10 * P.V) - power_in.max_current.merge(500 * P.mA) + power_in.voltage.merge(10 * P.V) + power_in.max_current.merge(500 * P.mA) - power_in_fused = power_in.fused() + power_in_fused = power_in.fused() - power_in_fused.connect(power_out) + power_in_fused.connect(power_out) - fuse = next(iter(power_in_fused.get_children(direct_only=False, types=F.Fuse))) + fuse = next(iter(power_in_fused.get_children(direct_only=False, types=F.Fuse))) - self.assertEqual( - fuse.trip_current.get_most_narrow(), F.Range(0 * P.A, 500 * P.mA) - ) - self.assertEqual(power_out.voltage.get_most_narrow(), 10 * P.V) - # self.assertEqual( - # power_in_fused.max_current.get_most_narrow(), F.Range(0 * P.A, 500 * P.mA) - # ) - self.assertEqual(power_out.max_current.get_most_narrow(), F.TBD()) + resolve_dynamic_parameters(fuse.get_graph()) + + assert fuse.trip_current.get_most_narrow() == F.Range(0 * P.A, 500 * P.mA) + assert power_out.voltage.get_most_narrow() == 10 * P.V + # self.assertEqual( + # power_in_fused.max_current.get_most_narrow(), F.Range(0 * P.A, 500 * P.mA) + # ) + assert power_out.max_current.get_most_narrow() == F.TBD() + + +def test_voltage_propagation(): + powers = times(4, F.ElectricPower) + + powers[0].voltage.merge(F.Range(10 * P.V, 15 * P.V)) + + for p1, p2 in pairwise(powers): + p1.connect(p2) + + resolve_dynamic_parameters(powers[0].get_graph()) + + assert 
powers[-1].voltage.get_most_narrow() == F.Range(10 * P.V, 15 * P.V) + + powers[3].voltage.merge(10 * P.V) + assert powers[0].voltage.get_most_narrow() == 10 * P.V diff --git a/test/library/test_basic.py b/test/library/test_basic.py index aa08e031..bf557f74 100644 --- a/test/library/test_basic.py +++ b/test/library/test_basic.py @@ -1,12 +1,15 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT +import logging import unittest from faebryk.core.core import Namespace from faebryk.core.node import Node from faebryk.libs.library import L +logger = logging.getLogger(__name__) + class TestBasicLibrary(unittest.TestCase): def test_load_library(self): @@ -44,14 +47,28 @@ def test_imports(self): # no trait base and (not issubclass(v, Trait) or issubclass(v, TraitImpl)) } + import signal + + TIMEOUT = 5 # Set timeout to 5 seconds + + def timeout_handler(signum, frame): + raise TimeoutError("Function call timed out") for k, v in symbols.items(): + signal.signal(signal.SIGALRM, timeout_handler) + signal.alarm(TIMEOUT) + try: + logger.info(f"TESTING {k} {'-' * 50}") v() + except TimeoutError: + self.fail(f"Execution of {k} timed out after {TIMEOUT} seconds") except L.AbstractclassError: pass except Exception as e: self.fail(f"Failed to instantiate {k}: {e}") + finally: + signal.alarm(0) # Disable the alarm if __name__ == "__main__": diff --git a/test/library/test_footprints.py b/test/library/test_footprints.py index 297d2f7f..eb5251a5 100644 --- a/test/library/test_footprints.py +++ b/test/library/test_footprints.py @@ -1,77 +1,68 @@ # This file is part of the faebryk project # SPDX-License-Identifier: MIT -import unittest +import pytest import faebryk.library._F as F from faebryk.libs.units import P -class TestFootprints(unittest.TestCase): - def test_qfn_kicad(self): - test_cases = { - "Package_DFN_QFN:QFN-16-1EP_4x4mm_P0.5mm_EP2.45x2.45mm_ThermalVias": F.QFN( # noqa: E501 - pin_cnt=16, - exposed_thermal_pad_cnt=1, - size_xy=(4 * P.mm, 4 * P.mm), - 
pitch=0.5 * P.mm, - exposed_thermal_pad_dimensions=(2.45 * P.mm, 2.45 * P.mm), - has_thermal_vias=True, - ), - "Package_DFN_QFN:QFN-12-1EP_3x3mm_P0.5mm_EP1.6x1.6mm": F.QFN( - pin_cnt=12, - exposed_thermal_pad_cnt=1, - size_xy=(3 * P.mm, 3 * P.mm), - pitch=0.5 * P.mm, - exposed_thermal_pad_dimensions=(1.6 * P.mm, 1.6 * P.mm), - has_thermal_vias=False, - ), - "Package_DFN_QFN:QFN-20-1EP_3.5x3.5mm_P0.5mm_EP2x2mm": F.QFN( - pin_cnt=20, - exposed_thermal_pad_cnt=1, - size_xy=(3.5 * P.mm, 3.5 * P.mm), - pitch=0.5 * P.mm, - exposed_thermal_pad_dimensions=(2 * P.mm, 2 * P.mm), - has_thermal_vias=False, - ), - } +def test_qfn_kicad(): + test_cases = { + "Package_DFN_QFN:QFN-16-1EP_4x4mm_P0.5mm_EP2.45x2.45mm_ThermalVias": F.QFN( + pin_cnt=16, + exposed_thermal_pad_cnt=1, + size_xy=(4 * P.mm, 4 * P.mm), + pitch=0.5 * P.mm, + exposed_thermal_pad_dimensions=(2.45 * P.mm, 2.45 * P.mm), + has_thermal_vias=True, + ), + "Package_DFN_QFN:QFN-12-1EP_3x3mm_P0.5mm_EP1.6x1.6mm": F.QFN( + pin_cnt=12, + exposed_thermal_pad_cnt=1, + size_xy=(3 * P.mm, 3 * P.mm), + pitch=0.5 * P.mm, + exposed_thermal_pad_dimensions=(1.6 * P.mm, 1.6 * P.mm), + has_thermal_vias=False, + ), + "Package_DFN_QFN:QFN-20-1EP_3.5x3.5mm_P0.5mm_EP2x2mm": F.QFN( + pin_cnt=20, + exposed_thermal_pad_cnt=1, + size_xy=(3.5 * P.mm, 3.5 * P.mm), + pitch=0.5 * P.mm, + exposed_thermal_pad_dimensions=(2 * P.mm, 2 * P.mm), + has_thermal_vias=False, + ), + } - for solution, footprint in test_cases.items(): - self.assertEqual( - solution, - footprint.get_trait(F.has_kicad_footprint).get_kicad_footprint(), - ) + for solution, footprint in test_cases.items(): + assert ( + solution == footprint.get_trait(F.has_kicad_footprint).get_kicad_footprint() + ) - def test_qfn_constraints(self): - # test good done in qfn kicad - # test bad - # only thermal via if thermal pad cnt > 0 - self.assertRaises( - AssertionError, - lambda: F.QFN( - pin_cnt=20, - exposed_thermal_pad_cnt=0, - size_xy=(3.5 * P.mm, 3.5 * P.mm), - pitch=0.5 * P.mm, - 
exposed_thermal_pad_dimensions=(2.5 * P.mm, 2.5 * P.mm), - has_thermal_vias=True, - ), +def test_qfn_constraints(): + # Test thermal vias constraint + with pytest.raises(AssertionError): + F.QFN( + pin_cnt=20, + exposed_thermal_pad_cnt=0, + size_xy=(3.5 * P.mm, 3.5 * P.mm), + pitch=0.5 * P.mm, + exposed_thermal_pad_dimensions=(2.5 * P.mm, 2.5 * P.mm), + has_thermal_vias=True, ) - # pad has to be smaller than package - self.assertRaises( - AssertionError, - lambda: F.QFN( - pin_cnt=20, - exposed_thermal_pad_cnt=1, - size_xy=(3.5 * P.mm, 3.5 * P.mm), - pitch=0.5 * P.mm, - exposed_thermal_pad_dimensions=(4.5 * P.mm, 2.5 * P.mm), - has_thermal_vias=False, - ), + # Test pad size constraint + with pytest.raises(AssertionError): + F.QFN( + pin_cnt=20, + exposed_thermal_pad_cnt=1, + size_xy=(3.5 * P.mm, 3.5 * P.mm), + pitch=0.5 * P.mm, + exposed_thermal_pad_dimensions=(4.5 * P.mm, 2.5 * P.mm), + has_thermal_vias=False, ) -if __name__ == "__main__": - unittest.main() +# Remove the unittest.main() call