diff --git a/Makefile b/Makefile
index a6c38c9b..9d9dd79a 100644
--- a/Makefile
+++ b/Makefile
@@ -36,9 +36,15 @@ requirements.txt:
## Tests
.PHONY: test
test:
- coverage run --source=hypernetx -m pytest
+ coverage run --source=hypernetx -m pytest tests
coverage report -m
+.PHONY: test-core
+test-core:
+ coverage run --source=hypernetx/classes -m pytest tests/classes --verbose
+ coverage report -m
+
+
## Tests using Tox
## Includes linting, running tests on jupyter notebooks
.PHONY: test-tox
diff --git a/hypernetx/classes/incidence_store_testing.ipynb b/hypernetx/classes/incidence_store_testing.ipynb
deleted file mode 100644
index 272b69bc..00000000
--- a/hypernetx/classes/incidence_store_testing.ipynb
+++ /dev/null
@@ -1,226 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "id": "27fc8a9b",
- "metadata": {
- "toc": true
- },
- "source": [
- "
Table of Contents
\n",
- ""
- ]
- },
- {
- "cell_type": "markdown",
- "id": "3c67cfb1",
- "metadata": {},
- "source": [
- "# Notes\n",
- "\n",
- "going with regular dataframe for a few reasons:\n",
- "* Can have column names to rely on that instead of position.\n",
- "* using multi-indexing requires an additional row of \"data\" which is not needed.\n",
- "* Code is more interpretable.\n",
- "* it seems that it is more difficult to add new multi-indexes."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 1,
- "id": "6804bc42",
- "metadata": {},
- "outputs": [],
- "source": [
- "#!pytest incidence_store_tests.py"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "id": "adb60be4",
- "metadata": {
- "code_folding": [
- 15,
- 21,
- 30,
- 43,
- 49,
- 62
- ]
- },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "True\n",
- "True\n",
- "True\n",
- "True\n",
- "True\n",
- "True\n"
- ]
- }
- ],
- "source": [
- "import pandas as pd\n",
- "from incidence_store import IncidenceStore\n",
- "\n",
- "def test_iter():\n",
- " # Test iteration over incidence pairs\n",
- " data = pd.DataFrame({'nodes': [1, 2, 2, 3], 'edges': [1, 2, 3, 4]})\n",
- " store = IncidenceStore(data)\n",
- " expected_pairs = [(1, 1), (2, 2), (2, 3), (3, 4)]\n",
- " actual_pairs = list(store)\n",
- "\n",
- " print(len(actual_pairs) == len(expected_pairs))\n",
- " for i in range(len(actual_pairs)):\n",
- " print(actual_pairs[i] == expected_pairs[i])\n",
- "test_iter()\n",
- "\n",
- "def test_len():\n",
- " # Test length of incidence store\n",
- " data = pd.DataFrame({'nodes': [1, 2, 2, 3], 'edges': [1, 2, 3, 4]})\n",
- " store = IncidenceStore(data)\n",
- " print(len(store) == 4) # Should match the number of unique pairs\n",
- "\n",
- "def test_contains():\n",
- " # Test if incidence pair exists\n",
- " data = pd.DataFrame({'nodes': [1, 2, 2, 3], 'edges': [1, 2, 3, 4]})\n",
- " store = IncidenceStore(data)\n",
- "\n",
- " assert (1, 1) in store\n",
- " assert (2, 3) in store\n",
- " assert (3, 5) not in store # Non-existent pair\n",
- "\n",
- "def test_neighbors():\n",
- " # Test getting elements or memberships based on level and key\n",
- " data = pd.DataFrame({'nodes': [1, 2, 2, 3], 'edges': [1, 2, 3, 4]})\n",
- " store = IncidenceStore(data)\n",
- "\n",
- " assert store.neighbors(0, 1) == [1, 2] # Nodes in edge 1\n",
- " assert store.neighbors(1, 2) == [2, 3] # Edges containing node 2\n",
- " assert store.neighbors(0, 5) == [] # Non-existent edge\n",
- " assert store.neighbors(2, 9) == [] # Non-existent node\n",
- "\n",
- " with pytest.raises(ValueError):\n",
- " store.neighbors(3, 1) # Invalid level\n",
- "\n",
- "def test_edges():\n",
- " # Test getting all edges\n",
- " data = pd.DataFrame({'nodes': [1, 2, 2, 3], 'edges': [1, 2, 3, 4]})\n",
- " store = IncidenceStore(data)\n",
- " assert store.edges() == [1, 2, 3, 4]\n",
- "\n",
- "def test_nodes():\n",
- " # Test getting all nodes\n",
- " data = pd.DataFrame({'nodes': [1, 2, 2, 3], 'edges': [1, 2, 3, 4]})\n",
- " store = IncidenceStore(data)\n",
- " assert store.nodes() == [1, 2, 3]\n",
- "\n",
- "def test_dimensions():\n",
- " # Test getting number of nodes and edges\n",
- " data = pd.DataFrame({'nodes': [1, 2, 2, 3], 'edges': [1, 2, 3, 4]})\n",
- " store = IncidenceStore(data)\n",
- " print(store.dimensions() == (3, 4)) # (3 unique nodes, 4 unique edges)\n",
- "test_dimensions()\n",
- "\n",
- "def test_restrict_to():\n",
- " # Test restricting to a subset based on level and items\n",
- " data = pd.DataFrame({'nodes': [1, 2, 2, 3], 'edges': [1, 2, 3, 4]})\n",
- " store = IncidenceStore(data)\n",
- "\n",
- " # Inplace restriction\n",
- " store.restrict_to(0, [1, 2], inplace=True)\n",
- " assert store._data.equals(pd.DataFrame({'nodes': [1, 2], 'edges': [1, 2]}))\n",
- " store = IncidenceStore(data) # Recreate initial store\n",
- "\n",
- " # Non-inplace restriction (returns new dataframe)\n",
- " restricted_df = store.restrict_to(1, [2, 3], inplace=False)\n",
- " assert not restricted_df.equals(store._data) # Should be a new dataframe\n",
- " assert restricted_df.equals(pd.DataFrame({'nodes': [2, 3], 'edges': [2, 3]}))\n",
- "\n",
- " # Invalid level\n",
- " with pytest.raises(ValueError):\n",
- " store.restrict_to(3, [1]) # Invalid level should raise error\n",
- "\n",
- " # Non-existent items\n",
- " store = IncidenceStore(data)\n",
- " restricted_df = store.restrict_to(0, [5], inplace=False)\n",
- " assert restricted_df.empty # Empty dataframe as no pairs with item 5\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "c4b333b3",
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.11.5"
- },
- "toc": {
- "base_numbering": 1,
- "nav_menu": {},
- "number_sections": true,
- "sideBar": false,
- "skip_h1_title": false,
- "title_cell": "Table of Contents",
- "title_sidebar": "Contents",
- "toc_cell": true,
- "toc_position": {},
- "toc_section_display": true,
- "toc_window_display": true
- },
- "varInspector": {
- "cols": {
- "lenName": 16,
- "lenType": 16,
- "lenVar": 40
- },
- "kernels_config": {
- "python": {
- "delete_cmd_postfix": "",
- "delete_cmd_prefix": "del ",
- "library": "var_list.py",
- "varRefreshCmd": "print(var_dic_list())"
- },
- "r": {
- "delete_cmd_postfix": ") ",
- "delete_cmd_prefix": "rm(",
- "library": "var_list.r",
- "varRefreshCmd": "cat(var_dic_list()) "
- }
- },
- "types_to_exclude": [
- "module",
- "function",
- "builtin_function_or_method",
- "instance",
- "_Feature"
- ],
- "window_display": false
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
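The trade-off described in the deleted notebook's "Notes" cell (regular dataframe vs. MultiIndex) is easy to see in a few lines of pandas. This is a minimal sketch, not taken from the notebook; the column names are illustrative:

```python
import pandas as pd

# Plain two-column frame: named columns, and appending a pair is one row.
pairs = pd.DataFrame({"nodes": [1, 2, 2, 3], "edges": [1, 2, 3, 4]})
pairs.loc[len(pairs)] = [3, 5]  # adding a new incidence pair

# MultiIndex alternative: the pairs live in the index, so the frame needs a
# throwaway data column, and extending the index means rebuilding it.
idx = pd.MultiIndex.from_tuples(
    [(1, 1), (2, 2), (2, 3), (3, 4)], names=["nodes", "edges"]
)
mi = pd.DataFrame({"placeholder": [0] * len(idx)}, index=idx)
mi = mi.reindex(mi.index.append(pd.MultiIndex.from_tuples([(3, 5)])))
```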
diff --git a/hypernetx/classes/property_stores.ipynb b/hypernetx/classes/property_stores.ipynb
deleted file mode 100644
index 221000c0..00000000
--- a/hypernetx/classes/property_stores.ipynb
+++ /dev/null
@@ -1,969 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "code",
- "execution_count": 1,
- "id": "4f19fd90-bf5b-4669-972b-ebc1c7c18cde",
- "metadata": {
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:02.971128Z",
- "start_time": "2024-03-12T23:45:02.393051Z"
- }
- },
- "outputs": [],
- "source": [
- "from property_store import PropertyStore\n",
- "import pandas as pd"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 2,
- "id": "b0c4f4b20bfd82a8",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:02.975967Z",
- "start_time": "2024-03-12T23:45:02.972113Z"
- }
- },
- "outputs": [],
- "source": [
- "LEVEL = 'level'\n",
- "ID = 'id'\n",
- "WEIGHT = 'weight'\n",
- "PROPERTIES = 'misc_properties'\n",
- "PROPERTIES_COLUMNS = [WEIGHT, PROPERTIES]\n",
- "STRENGTH = 'strength'\n",
- "HAIR_COLOR = 'hair_color'\n",
- "INCIDENCES_PROPERTIES_COLUMNS = [WEIGHT, PROPERTIES, STRENGTH, HAIR_COLOR]\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 3,
- "id": "8c21366f6624756b",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:02.976761Z",
- "start_time": "2024-03-12T23:45:02.974479Z"
- }
- },
- "outputs": [],
- "source": [
- "edges = ['I', 'L', 'O', 'P', 'R', 'S']\n",
- "\n",
- "nodes = ['A', 'C', 'E', 'K', 'T1', 'T2', 'V']\n",
- "\n",
- "incidences = [('I', 'K'), ('I', 'T2'), ('L', 'C'), ('L', 'E'), ('O', 'T1'), ('O', 'T2'), ('P', 'A'), ('P', 'C'), ('P', 'K'), ('R', 'A'), ('R', 'E'), ('S', 'A'), ('S', 'K'), ('S', 'T2'), ('S', 'V')]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 4,
- "id": "73ac534aa8865bd8",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:02.978411Z",
- "start_time": "2024-03-12T23:45:02.976429Z"
- }
- },
- "outputs": [],
- "source": [
- "ip_index = pd.MultiIndex.from_tuples(incidences, names=[LEVEL, ID])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 5,
- "id": "7073a872bd5fcf69",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:02.982736Z",
- "start_time": "2024-03-12T23:45:02.980366Z"
- }
- },
- "outputs": [],
- "source": [
- "# corresponding property store data\n",
- "# uid, weight, properties\n",
- "import uuid\n",
- "edges_data = [(1, {}) for i in edges]\n",
- "nodes_data = [(1, {}) for i in nodes]\n",
- "inc_data = [(1, {}, 42, 'red') for _ in range(len(ip_index))]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 6,
- "id": "4d89507e27fb39e1",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:02.986955Z",
- "start_time": "2024-03-12T23:45:02.983145Z"
- }
- },
- "outputs": [],
- "source": [
- "# columns should map corresponding fields to the data\n",
- "edges_index = pd.Index(edges, name=ID)\n",
- "edges_df = pd.DataFrame(data=edges_data, index=edges_index, columns=[WEIGHT, PROPERTIES])\n",
- "\n",
- "nodes_index = pd.Index(nodes, name=ID)\n",
- "nodes_df = pd.DataFrame(data=nodes_data, index=nodes_index, columns=PROPERTIES_COLUMNS)\n",
- "\n",
- "incidence_df = pd.DataFrame(data=inc_data, index=ip_index, columns=INCIDENCES_PROPERTIES_COLUMNS)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 7,
- "id": "efa09bcdb6fbdba6",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:02.991710Z",
- "start_time": "2024-03-12T23:45:02.986233Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": " weight misc_properties\nid \nI 1 {}\nL 1 {}\nO 1 {}\nP 1 {}\nR 1 {}\nS 1 {}",
- "text/html": "\n\n
\n \n \n | \n weight | \n misc_properties | \n
\n \n id | \n | \n | \n
\n \n \n \n I | \n 1 | \n {} | \n
\n \n L | \n 1 | \n {} | \n
\n \n O | \n 1 | \n {} | \n
\n \n P | \n 1 | \n {} | \n
\n \n R | \n 1 | \n {} | \n
\n \n S | \n 1 | \n {} | \n
\n \n
\n
"
- },
- "execution_count": 7,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_df"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 8,
- "id": "3f74ad48d04d04ed",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:02.999636Z",
- "start_time": "2024-03-12T23:45:02.992887Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": " weight misc_properties\nid \nA 1 {}\nC 1 {}\nE 1 {}\nK 1 {}\nT1 1 {}\nT2 1 {}\nV 1 {}",
- "text/html": "\n\n
\n \n \n | \n weight | \n misc_properties | \n
\n \n id | \n | \n | \n
\n \n \n \n A | \n 1 | \n {} | \n
\n \n C | \n 1 | \n {} | \n
\n \n E | \n 1 | \n {} | \n
\n \n K | \n 1 | \n {} | \n
\n \n T1 | \n 1 | \n {} | \n
\n \n T2 | \n 1 | \n {} | \n
\n \n V | \n 1 | \n {} | \n
\n \n
\n
"
- },
- "execution_count": 8,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "nodes_df"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 9,
- "id": "89ce63565f417efe",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.010032Z",
- "start_time": "2024-03-12T23:45:02.998330Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": " weight misc_properties strength hair_color\nlevel id \nI K 1 {} 42 red\n T2 1 {} 42 red\nL C 1 {} 42 red\n E 1 {} 42 red\nO T1 1 {} 42 red\n T2 1 {} 42 red\nP A 1 {} 42 red\n C 1 {} 42 red\n K 1 {} 42 red\nR A 1 {} 42 red\n E 1 {} 42 red\nS A 1 {} 42 red\n K 1 {} 42 red\n T2 1 {} 42 red\n V 1 {} 42 red",
- "text/html": "\n\n
\n \n \n | \n | \n weight | \n misc_properties | \n strength | \n hair_color | \n
\n \n level | \n id | \n | \n | \n | \n | \n
\n \n \n \n I | \n K | \n 1 | \n {} | \n 42 | \n red | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n
\n \n L | \n C | \n 1 | \n {} | \n 42 | \n red | \n
\n \n E | \n 1 | \n {} | \n 42 | \n red | \n
\n \n O | \n T1 | \n 1 | \n {} | \n 42 | \n red | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n
\n \n P | \n A | \n 1 | \n {} | \n 42 | \n red | \n
\n \n C | \n 1 | \n {} | \n 42 | \n red | \n
\n \n K | \n 1 | \n {} | \n 42 | \n red | \n
\n \n R | \n A | \n 1 | \n {} | \n 42 | \n red | \n
\n \n E | \n 1 | \n {} | \n 42 | \n red | \n
\n \n S | \n A | \n 1 | \n {} | \n 42 | \n red | \n
\n \n K | \n 1 | \n {} | \n 42 | \n red | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n
\n \n V | \n 1 | \n {} | \n 42 | \n red | \n
\n \n
\n
"
- },
- "execution_count": 9,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "incidence_df"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 10,
- "id": "27feff2ba146a0e7",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.044743Z",
- "start_time": "2024-03-12T23:45:03.000826Z"
- }
- },
- "outputs": [],
- "source": [
- "edges_ps = PropertyStore(edges_df)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 11,
- "id": "a4f9648a93ef2f16",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.094887Z",
- "start_time": "2024-03-12T23:45:03.005073Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": " weight misc_properties uid\nid \nI 1 {} d5ef0e6e-0ff0-4592-990e-31e4797b6336\nL 1 {} b78b3b2a-519f-4aa6-9544-e8dd2ee0859a\nO 1 {} 0c2ed6e1-660c-4a69-bb1d-6f9ef9c54f37\nP 1 {} 327b9cf3-72fa-4123-80d7-be48ba3cce7c\nR 1 {} 17bbfa31-31bd-441e-b48b-8fa3fe234fc7\nS 1 {} 2e2321e0-acf8-4640-a0a2-d767536a56b1",
- "text/html": "\n\n
\n \n \n | \n weight | \n misc_properties | \n uid | \n
\n \n id | \n | \n | \n | \n
\n \n \n \n I | \n 1 | \n {} | \n d5ef0e6e-0ff0-4592-990e-31e4797b6336 | \n
\n \n L | \n 1 | \n {} | \n b78b3b2a-519f-4aa6-9544-e8dd2ee0859a | \n
\n \n O | \n 1 | \n {} | \n 0c2ed6e1-660c-4a69-bb1d-6f9ef9c54f37 | \n
\n \n P | \n 1 | \n {} | \n 327b9cf3-72fa-4123-80d7-be48ba3cce7c | \n
\n \n R | \n 1 | \n {} | \n 17bbfa31-31bd-441e-b48b-8fa3fe234fc7 | \n
\n \n S | \n 1 | \n {} | \n 2e2321e0-acf8-4640-a0a2-d767536a56b1 | \n
\n \n
\n
"
- },
- "execution_count": 11,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_ps.properties"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 12,
- "id": "a28414bb5852fcf5",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.096051Z",
- "start_time": "2024-03-12T23:45:03.007914Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "{'weight': 1,\n 'misc_properties': {},\n 'uid': '17bbfa31-31bd-441e-b48b-8fa3fe234fc7'}"
- },
- "execution_count": 12,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_ps.get_properties('R')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 13,
- "id": "c3998036009d26a1",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.112544Z",
- "start_time": "2024-03-12T23:45:03.029039Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "1"
- },
- "execution_count": 13,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_ps.get_property('R', 'weight')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 14,
- "id": "8d95683893a3940",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.113570Z",
- "start_time": "2024-03-12T23:45:03.029258Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "42"
- },
- "execution_count": 14,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_ps.set_property('R', 'weight', 42)\n",
- "edges_ps.get_property('R', 'weight')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 15,
- "id": "947ef0930d92dab0",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.113826Z",
- "start_time": "2024-03-12T23:45:03.029338Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": " weight misc_properties uid\nid \nI 1 {} d5ef0e6e-0ff0-4592-990e-31e4797b6336\nL 1 {} b78b3b2a-519f-4aa6-9544-e8dd2ee0859a\nO 1 {} 0c2ed6e1-660c-4a69-bb1d-6f9ef9c54f37\nP 1 {} 327b9cf3-72fa-4123-80d7-be48ba3cce7c\nR 42 {} 17bbfa31-31bd-441e-b48b-8fa3fe234fc7\nS 1 {} 2e2321e0-acf8-4640-a0a2-d767536a56b1",
- "text/html": "\n\n
\n \n \n | \n weight | \n misc_properties | \n uid | \n
\n \n id | \n | \n | \n | \n
\n \n \n \n I | \n 1 | \n {} | \n d5ef0e6e-0ff0-4592-990e-31e4797b6336 | \n
\n \n L | \n 1 | \n {} | \n b78b3b2a-519f-4aa6-9544-e8dd2ee0859a | \n
\n \n O | \n 1 | \n {} | \n 0c2ed6e1-660c-4a69-bb1d-6f9ef9c54f37 | \n
\n \n P | \n 1 | \n {} | \n 327b9cf3-72fa-4123-80d7-be48ba3cce7c | \n
\n \n R | \n 42 | \n {} | \n 17bbfa31-31bd-441e-b48b-8fa3fe234fc7 | \n
\n \n S | \n 1 | \n {} | \n 2e2321e0-acf8-4640-a0a2-d767536a56b1 | \n
\n \n
\n
"
- },
- "execution_count": 15,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_ps.properties"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 16,
- "id": "428824f774b5df6b",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.114323Z",
- "start_time": "2024-03-12T23:45:03.032003Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "6"
- },
- "execution_count": 16,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "len(edges_ps)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 17,
- "id": "2109d60d87d65a9e",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.114810Z",
- "start_time": "2024-03-12T23:45:03.035056Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "{'weight': 42,\n 'misc_properties': {},\n 'uid': '17bbfa31-31bd-441e-b48b-8fa3fe234fc7'}"
- },
- "execution_count": 17,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_ps['R']"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 18,
- "id": "f5bb9e8b27e513bd",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.115299Z",
- "start_time": "2024-03-12T23:45:03.038025Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "{'weight': 42,\n 'misc_properties': {},\n 'uid': '17bbfa31-31bd-441e-b48b-8fa3fe234fc7'}"
- },
- "execution_count": 18,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_ps.get_properties('R')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 19,
- "id": "62ff0373c1ad71b7",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.115763Z",
- "start_time": "2024-03-12T23:45:03.040737Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "True"
- },
- "execution_count": 19,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "'R' in edges_ps\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 20,
- "id": "214a822a5d456a9d",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.116699Z",
- "start_time": "2024-03-12T23:45:03.043347Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "False"
- },
- "execution_count": 20,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "'NEMO' in edges_ps"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 21,
- "id": "f6af320922fd4226",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.131598Z",
- "start_time": "2024-03-12T23:45:03.045583Z"
- }
- },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "I\n",
- "L\n",
- "O\n",
- "P\n",
- "R\n",
- "S\n"
- ]
- }
- ],
- "source": [
- "for uid_property in edges_ps:\n",
- " print(uid_property)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 22,
- "id": "5d5124e2ffcb9c42",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.132397Z",
- "start_time": "2024-03-12T23:45:03.047960Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "Index(['I', 'L', 'O', 'P', 'R', 'S'], dtype='object', name='id')"
- },
- "execution_count": 22,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_ps.properties.index"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 23,
- "id": "4a7c2bbe59a618ba",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.133215Z",
- "start_time": "2024-03-12T23:45:03.052967Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": " weight misc_properties strength hair_color\nlevel id \nI K 1 {} 42 red\n T2 1 {} 42 red\nL C 1 {} 42 red\n E 1 {} 42 red\nO T1 1 {} 42 red\n T2 1 {} 42 red\nP A 1 {} 42 red\n C 1 {} 42 red\n K 1 {} 42 red\nR A 1 {} 42 red\n E 1 {} 42 red\nS A 1 {} 42 red\n K 1 {} 42 red\n T2 1 {} 42 red\n V 1 {} 42 red",
- "text/html": "\n\n
\n \n \n | \n | \n weight | \n misc_properties | \n strength | \n hair_color | \n
\n \n level | \n id | \n | \n | \n | \n | \n
\n \n \n \n I | \n K | \n 1 | \n {} | \n 42 | \n red | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n
\n \n L | \n C | \n 1 | \n {} | \n 42 | \n red | \n
\n \n E | \n 1 | \n {} | \n 42 | \n red | \n
\n \n O | \n T1 | \n 1 | \n {} | \n 42 | \n red | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n
\n \n P | \n A | \n 1 | \n {} | \n 42 | \n red | \n
\n \n C | \n 1 | \n {} | \n 42 | \n red | \n
\n \n K | \n 1 | \n {} | \n 42 | \n red | \n
\n \n R | \n A | \n 1 | \n {} | \n 42 | \n red | \n
\n \n E | \n 1 | \n {} | \n 42 | \n red | \n
\n \n S | \n A | \n 1 | \n {} | \n 42 | \n red | \n
\n \n K | \n 1 | \n {} | \n 42 | \n red | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n
\n \n V | \n 1 | \n {} | \n 42 | \n red | \n
\n \n
\n
"
- },
- "execution_count": 23,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "incidence_df"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 24,
- "id": "a048092e4b628b1c",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.133724Z",
- "start_time": "2024-03-12T23:45:03.091619Z"
- }
- },
- "outputs": [],
- "source": [
- "inc_ps = PropertyStore(incidence_df)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 25,
- "id": "d94d9e5664364c0c",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.134407Z",
- "start_time": "2024-03-12T23:45:03.091958Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": " weight misc_properties strength hair_color \\\nlevel id \nI K 1 {} 42 red \n T2 1 {} 42 red \nL C 1 {} 42 red \n E 1 {} 42 red \nO T1 1 {} 42 red \n T2 1 {} 42 red \nP A 1 {} 42 red \n C 1 {} 42 red \n K 1 {} 42 red \nR A 1 {} 42 red \n E 1 {} 42 red \nS A 1 {} 42 red \n K 1 {} 42 red \n T2 1 {} 42 red \n V 1 {} 42 red \n\n uid \nlevel id \nI K a9480e6d-b35d-4e85-aecf-281be8b4416d \n T2 746c4626-4bf8-4cac-9175-87609f4b9695 \nL C 954f8ab7-7b29-42d6-9faf-8076edb46fde \n E 56e812d7-5b78-45ba-a71a-5e5a3d14d57d \nO T1 ff55771d-872a-4c31-ad1a-a3429f38fdf4 \n T2 8eba7627-33c1-4ed6-b307-c845733975fc \nP A 20d7fc56-b4fe-44db-926e-bb3c15432878 \n C e484a40e-92f5-438b-b7fc-69c9a2b3e88e \n K 6b579df7-d440-41ab-908b-25e94f880b93 \nR A 3c057d33-9cae-427e-bfcf-3134e85b3219 \n E e110416d-db4b-4a62-9bb6-de9e80221da2 \nS A e410791f-3724-41cf-b5d6-10063741d916 \n K 72c7683b-2022-4360-bb35-0a6fef1ac2a3 \n T2 f3dab40a-5bf2-4ed7-8be8-d62f4b5da7a5 \n V 783873d6-f616-4a2d-bfc4-43c7c186dd05 ",
- "text/html": "\n\n
\n \n \n | \n | \n weight | \n misc_properties | \n strength | \n hair_color | \n uid | \n
\n \n level | \n id | \n | \n | \n | \n | \n | \n
\n \n \n \n I | \n K | \n 1 | \n {} | \n 42 | \n red | \n a9480e6d-b35d-4e85-aecf-281be8b4416d | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n 746c4626-4bf8-4cac-9175-87609f4b9695 | \n
\n \n L | \n C | \n 1 | \n {} | \n 42 | \n red | \n 954f8ab7-7b29-42d6-9faf-8076edb46fde | \n
\n \n E | \n 1 | \n {} | \n 42 | \n red | \n 56e812d7-5b78-45ba-a71a-5e5a3d14d57d | \n
\n \n O | \n T1 | \n 1 | \n {} | \n 42 | \n red | \n ff55771d-872a-4c31-ad1a-a3429f38fdf4 | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n 8eba7627-33c1-4ed6-b307-c845733975fc | \n
\n \n P | \n A | \n 1 | \n {} | \n 42 | \n red | \n 20d7fc56-b4fe-44db-926e-bb3c15432878 | \n
\n \n C | \n 1 | \n {} | \n 42 | \n red | \n e484a40e-92f5-438b-b7fc-69c9a2b3e88e | \n
\n \n K | \n 1 | \n {} | \n 42 | \n red | \n 6b579df7-d440-41ab-908b-25e94f880b93 | \n
\n \n R | \n A | \n 1 | \n {} | \n 42 | \n red | \n 3c057d33-9cae-427e-bfcf-3134e85b3219 | \n
\n \n E | \n 1 | \n {} | \n 42 | \n red | \n e110416d-db4b-4a62-9bb6-de9e80221da2 | \n
\n \n S | \n A | \n 1 | \n {} | \n 42 | \n red | \n e410791f-3724-41cf-b5d6-10063741d916 | \n
\n \n K | \n 1 | \n {} | \n 42 | \n red | \n 72c7683b-2022-4360-bb35-0a6fef1ac2a3 | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n f3dab40a-5bf2-4ed7-8be8-d62f4b5da7a5 | \n
\n \n V | \n 1 | \n {} | \n 42 | \n red | \n 783873d6-f616-4a2d-bfc4-43c7c186dd05 | \n
\n \n
\n
"
- },
- "execution_count": 25,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "inc_ps.properties"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 26,
- "id": "2c04bc5b7745ed6",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.134901Z",
- "start_time": "2024-03-12T23:45:03.092034Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "{'weight': 1,\n 'misc_properties': {},\n 'strength': 42,\n 'hair_color': 'red',\n 'uid': '954f8ab7-7b29-42d6-9faf-8076edb46fde'}"
- },
- "execution_count": 26,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "inc_ps.get_properties(('L', 'C'))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 27,
- "id": "6ecfec1df0bc900c",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.135350Z",
- "start_time": "2024-03-12T23:45:03.092079Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "'red'"
- },
- "execution_count": 27,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "inc_ps.get_property(('L', 'C'), 'hair_color')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 28,
- "id": "ae093f997d2818ec",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.135829Z",
- "start_time": "2024-03-12T23:45:03.092245Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "'green'"
- },
- "execution_count": 28,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "inc_ps.set_property(('L', 'C'), 'hair_color', 'green')\n",
- "inc_ps.get_property(('L', 'C'), 'hair_color')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 29,
- "id": "e97f3c0b1cc74cf5",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.139929Z",
- "start_time": "2024-03-12T23:45:03.092292Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": " weight misc_properties strength hair_color \\\nlevel id \nI K 1 {} 42 red \n T2 1 {} 42 red \nL C 1 {} 42 green \n E 1 {} 42 red \nO T1 1 {} 42 red \n T2 1 {} 42 red \nP A 1 {} 42 red \n C 1 {} 42 red \n K 1 {} 42 red \nR A 1 {} 42 red \n E 1 {} 42 red \nS A 1 {} 42 red \n K 1 {} 42 red \n T2 1 {} 42 red \n V 1 {} 42 red \n\n uid \nlevel id \nI K a9480e6d-b35d-4e85-aecf-281be8b4416d \n T2 746c4626-4bf8-4cac-9175-87609f4b9695 \nL C 954f8ab7-7b29-42d6-9faf-8076edb46fde \n E 56e812d7-5b78-45ba-a71a-5e5a3d14d57d \nO T1 ff55771d-872a-4c31-ad1a-a3429f38fdf4 \n T2 8eba7627-33c1-4ed6-b307-c845733975fc \nP A 20d7fc56-b4fe-44db-926e-bb3c15432878 \n C e484a40e-92f5-438b-b7fc-69c9a2b3e88e \n K 6b579df7-d440-41ab-908b-25e94f880b93 \nR A 3c057d33-9cae-427e-bfcf-3134e85b3219 \n E e110416d-db4b-4a62-9bb6-de9e80221da2 \nS A e410791f-3724-41cf-b5d6-10063741d916 \n K 72c7683b-2022-4360-bb35-0a6fef1ac2a3 \n T2 f3dab40a-5bf2-4ed7-8be8-d62f4b5da7a5 \n V 783873d6-f616-4a2d-bfc4-43c7c186dd05 ",
- "text/html": "\n\n
\n \n \n | \n | \n weight | \n misc_properties | \n strength | \n hair_color | \n uid | \n
\n \n level | \n id | \n | \n | \n | \n | \n | \n
\n \n \n \n I | \n K | \n 1 | \n {} | \n 42 | \n red | \n a9480e6d-b35d-4e85-aecf-281be8b4416d | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n 746c4626-4bf8-4cac-9175-87609f4b9695 | \n
\n \n L | \n C | \n 1 | \n {} | \n 42 | \n green | \n 954f8ab7-7b29-42d6-9faf-8076edb46fde | \n
\n \n E | \n 1 | \n {} | \n 42 | \n red | \n 56e812d7-5b78-45ba-a71a-5e5a3d14d57d | \n
\n \n O | \n T1 | \n 1 | \n {} | \n 42 | \n red | \n ff55771d-872a-4c31-ad1a-a3429f38fdf4 | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n 8eba7627-33c1-4ed6-b307-c845733975fc | \n
\n \n P | \n A | \n 1 | \n {} | \n 42 | \n red | \n 20d7fc56-b4fe-44db-926e-bb3c15432878 | \n
\n \n C | \n 1 | \n {} | \n 42 | \n red | \n e484a40e-92f5-438b-b7fc-69c9a2b3e88e | \n
\n \n K | \n 1 | \n {} | \n 42 | \n red | \n 6b579df7-d440-41ab-908b-25e94f880b93 | \n
\n \n R | \n A | \n 1 | \n {} | \n 42 | \n red | \n 3c057d33-9cae-427e-bfcf-3134e85b3219 | \n
\n \n E | \n 1 | \n {} | \n 42 | \n red | \n e110416d-db4b-4a62-9bb6-de9e80221da2 | \n
\n \n S | \n A | \n 1 | \n {} | \n 42 | \n red | \n e410791f-3724-41cf-b5d6-10063741d916 | \n
\n \n K | \n 1 | \n {} | \n 42 | \n red | \n 72c7683b-2022-4360-bb35-0a6fef1ac2a3 | \n
\n \n T2 | \n 1 | \n {} | \n 42 | \n red | \n f3dab40a-5bf2-4ed7-8be8-d62f4b5da7a5 | \n
\n \n V | \n 1 | \n {} | \n 42 | \n red | \n 783873d6-f616-4a2d-bfc4-43c7c186dd05 | \n
\n \n
\n
"
- },
- "execution_count": 29,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "inc_ps.properties"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 30,
- "id": "46be788a07e18776",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.140565Z",
- "start_time": "2024-03-12T23:45:03.092350Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "15"
- },
- "execution_count": 30,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "len(inc_ps)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 31,
- "id": "aabe6182c6f74b94",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.141084Z",
- "start_time": "2024-03-12T23:45:03.092433Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "{'weight': 1,\n 'misc_properties': {},\n 'strength': 42,\n 'hair_color': 'green',\n 'uid': '954f8ab7-7b29-42d6-9faf-8076edb46fde'}"
- },
- "execution_count": 31,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "inc_ps[('L', 'C')]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 32,
- "id": "58ee1c61f5180d25",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.142188Z",
- "start_time": "2024-03-12T23:45:03.093355Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "{'weight': 1,\n 'misc_properties': {},\n 'strength': 42,\n 'hair_color': 'green',\n 'uid': '954f8ab7-7b29-42d6-9faf-8076edb46fde'}"
- },
- "execution_count": 32,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "inc_ps.get_properties(('L', 'C'))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 33,
- "id": "c23420ac85d15fe0",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.143691Z",
- "start_time": "2024-03-12T23:45:03.095808Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "True"
- },
- "execution_count": 33,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "('L', 'C') in inc_ps"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 34,
- "id": "7a1f14c1620e2809",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.150148Z",
- "start_time": "2024-03-12T23:45:03.098092Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "False"
- },
- "execution_count": 34,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "('NE', 'MO') in inc_ps"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 35,
- "id": "7570b20087101e83",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.174304Z",
- "start_time": "2024-03-12T23:45:03.100495Z"
- }
- },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "('I', 'K')\n",
- "('I', 'T2')\n",
- "('L', 'C')\n",
- "('L', 'E')\n",
- "('O', 'T1')\n",
- "('O', 'T2')\n",
- "('P', 'A')\n",
- "('P', 'C')\n",
- "('P', 'K')\n",
- "('R', 'A')\n",
- "('R', 'E')\n",
- "('S', 'A')\n",
- "('S', 'K')\n",
- "('S', 'T2')\n",
- "('S', 'V')\n"
- ]
- }
- ],
- "source": [
- "for uid_property in inc_ps:\n",
- " print(uid_property)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 36,
- "id": "90a3b2ffd651fd91",
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.175722Z",
- "start_time": "2024-03-12T23:45:03.103236Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "MultiIndex([('I', 'K'),\n ('I', 'T2'),\n ('L', 'C'),\n ('L', 'E'),\n ('O', 'T1'),\n ('O', 'T2'),\n ('P', 'A'),\n ('P', 'C'),\n ('P', 'K'),\n ('R', 'A'),\n ('R', 'E'),\n ('S', 'A'),\n ('S', 'K'),\n ('S', 'T2'),\n ('S', 'V')],\n names=['level', 'id'])"
- },
- "execution_count": 36,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "inc_ps.properties.index"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 37,
- "id": "c4a53079-eb4f-426d-8d7f-2ec6e3de61d9",
- "metadata": {
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.176448Z",
- "start_time": "2024-03-12T23:45:03.105878Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": "Index(['weight', 'misc_properties', 'uid'], dtype='object')"
- },
- "execution_count": 37,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "edges_ps.properties.columns"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 38,
- "id": "3e035c08-9639-4339-a0f2-6eade48cfef3",
- "metadata": {
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.177675Z",
- "start_time": "2024-03-12T23:45:03.110044Z"
- }
- },
- "outputs": [
- {
- "data": {
- "text/plain": " weight misc_properties uid\nid \nA 1 {} A\nC 1 {} C\nE 1 {} E\nK 1 {} K\nT1 1 {} T1\nT2 1 {} T2\nV 1 {} V",
- "text/html": "\n\n
\n \n \n | \n weight | \n misc_properties | \n uid | \n
\n \n id | \n | \n | \n | \n
\n \n \n \n A | \n 1 | \n {} | \n A | \n
\n \n C | \n 1 | \n {} | \n C | \n
\n \n E | \n 1 | \n {} | \n E | \n
\n \n K | \n 1 | \n {} | \n K | \n
\n \n T1 | \n 1 | \n {} | \n T1 | \n
\n \n T2 | \n 1 | \n {} | \n T2 | \n
\n \n V | \n 1 | \n {} | \n V | \n
\n \n
\n
"
- },
- "execution_count": 38,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "nodes_ps = PropertyStore(nodes_df, index=True)\n",
- "nodes_ps.properties"
- ]
- },
- {
- "cell_type": "code",
- "outputs": [],
- "source": [],
- "metadata": {
- "collapsed": false,
- "ExecuteTime": {
- "end_time": "2024-03-12T23:45:03.179151Z",
- "start_time": "2024-03-12T23:45:03.112166Z"
- }
- },
- "id": "e32fbfed693fdf87",
- "execution_count": 38
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "venv-hnx",
- "language": "python",
- "name": "venv-hnx"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.11.3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/hypernetx/classes/tests/conftest.py b/hypernetx/classes/tests/conftest.py
deleted file mode 100644
index 2316f40b..00000000
--- a/hypernetx/classes/tests/conftest.py
+++ /dev/null
@@ -1,435 +0,0 @@
-import pytest
-import os
-import itertools as it
-import networkx as nx
-import pandas as pd
-import numpy as np
-
-from hypernetx import Hypergraph, HarryPotter, LesMis as LM
-from hypernetx.classes.helpers import create_dataframe
-from collections import OrderedDict, defaultdict
-
-
-class SevenBySix:
- """Example hypergraph with 7 nodes and 6 edges."""
-
- def __init__(self, static=False):
- a, c, e, k, t1, t2, v = nd = ("A", "C", "E", "K", "T1", "T2", "V")
- i, l, o, p, r, s = ("I", "L", "O", "P", "R", "S")
- self.edges = [{a, c, k}, {a, e}, {a, k, t2, v}, {c, e}, {t1, t2}, {k, t2}]
- self.nodes = set(nd)
- self.edgedict = OrderedDict(
- [
- (p, {a, c, k}),
- (r, {a, e}),
- (s, {a, k, t2, v}),
- (l, {c, e}),
- (o, {t1, t2}),
- (i, {k, t2}),
- ]
- )
-
- self.arr = np.array(
- [
- [0, 0, 0, 1, 0, 1, 0],
- [0, 1, 1, 0, 0, 0, 0],
- [0, 0, 0, 0, 1, 1, 0],
- [1, 1, 0, 1, 0, 0, 0],
- [1, 0, 1, 0, 0, 0, 0],
- [1, 0, 0, 1, 0, 1, 1],
- ]
- )
- self.labels = OrderedDict(
- [
- ("edges", [i, l, o, p, r, s]),
- ("nodes", [a, c, e, k, t1, t2, v]),
- ]
- )
-
- self.data = np.array(
- [
- [3, 0],
- [3, 1],
- [3, 3],
- [4, 0],
- [4, 2],
- [5, 0],
- [5, 3],
- [5, 5],
- [5, 6],
- [1, 1],
- [1, 2],
- [2, 4],
- [2, 5],
- [0, 3],
- [0, 5],
- ]
- )
-
- self.dataframe = create_dataframe(self.edgedict)
-
-
-class TriLoop:
- """Example hypergraph with 2 two 1-cells and 1 2-cell forming a loop"""
-
- def __init__(self):
- A, B, C, D = "A", "B", "C", "D"
- AB, BC, ACD = "AB", "BC", "ACD"
- self.edgedict = {AB: {A, B}, BC: {B, C}, ACD: {A, C, D}}
- self.hypergraph = Hypergraph(self.edgedict, name="TriLoop")
-
-
-class TriLoop2:
- """Triloop example with redundant node and edge"""
-
- def __init__(self):
- A, B, C, D, E = "A", "B", "C", "D", "E"
- AB, BC, ACD, ACD2 = "AB", "BC", "ACD", "ACD2"
- self.edgedict = {AB: {A, B}, BC: {B, C}, ACD: {A, C, D, E}, ACD2: {A, C, D, E}}
- self.hypergraph = Hypergraph(self.edgedict, name="TriLoop2")
-
-
-class SBSDupes:
- def __init__(self):
- self.edgedict = OrderedDict(
- [
- ("I", {"K", "T2"}),
- ("L", {"C", "E", "F"}),
- ("M", {"C", "E", "F"}),
- ("O", {"T1", "T2"}),
- ("P", {"A", "C", "K"}),
- ("R", {"A", "E", "F"}),
- ("S", {"A", "K", "T2", "V"}),
- ]
- )
-
- self.dataframe = create_dataframe(self.edgedict)
-
-
-class LesMis:
- def __init__(self):
- self.edgedict = OrderedDict(
- [
- (1, {"CL", "CV", "GE", "GG", "MB", "MC", "ME", "MY", "NP", "SN"}),
- (2, {"IS", "JL", "JV", "MB", "ME", "MR", "MT", "MY", "PG"}),
- (3, {"BL", "DA", "FA", "FN", "FT", "FV", "LI", "ZE"}),
- (4, {"CO", "FN", "TH", "TM"}),
- (5, {"BM", "FF", "FN", "JA", "JV", "MT", "MY", "VI"}),
- (6, {"FN", "JA", "JV"}),
- (
- 7,
- {
- "BM",
- "BR",
- "CC",
- "CH",
- "CN",
- "FN",
- "JU",
- "JV",
- "PO",
- "SC",
- "SP",
- "SS",
- },
- ),
- (8, {"FN", "JA", "JV", "PO", "SP", "SS"}),
- ]
- )
- self.hypergraph = Hypergraph(self.edgedict)
-
-
-class Dataframe:
- def __init__(self):
- fname = os.path.join(os.path.dirname(__file__), "sample.csv")
- self.df = pd.read_csv(fname, index_col=0)
-
-
-class CompleteBipartite:
- def __init__(self, n1, n2):
- self.g = nx.complete_bipartite_graph(n1, n2)
- self.left, self.right = nx.bipartite.sets(self.g)
-
-
-@pytest.fixture
-def props_dataframe():
- multi_index = pd.MultiIndex.from_tuples([(0, "P")], names=["level", "id"])
- data = {
- "properties": [{"prop1": "propval1", "prop2": "propval2"}],
- }
- return pd.DataFrame(data, index=multi_index)
-
-
-@pytest.fixture
-def cell_props_dataframe_multidx():
- multi_index = pd.MultiIndex.from_tuples([("P", "A"), ("P", "C")], names=[0, 1])
- data = {
- "cell_properties": [
- {"prop1": "propval1", "prop2": "propval2"},
- {"prop1": "propval1", "prop2": "propval2"},
- ]
- }
-
- return pd.DataFrame(data, index=multi_index)
-
-
-@pytest.fixture
-def cell_props_dataframe():
- data = {
- 0: ["P", "P"],
- 1: ["A", "C"],
- "cell_properties": [
- {"prop1": "propval1", "prop2": "propval2"},
- {"prop1": "propval1", "prop2": "propval2"},
- ],
- }
- return pd.DataFrame(data)
-
-
-@pytest.fixture
-def sbs():
- return SevenBySix()
-
-
-@pytest.fixture
-def sbs_dataframe(sbs):
- return sbs.dataframe
-
-
-@pytest.fixture
-def sbs_dict(sbs):
- return sbs.edgedict
-
-
-@pytest.fixture
-def sbs_data(sbs):
- return np.asarray(sbs.data)
-
-
-@pytest.fixture
-def sbs_labels(sbs):
- return sbs.labels
-
-
-@pytest.fixture
-def triloop():
- return TriLoop()
-
-
-@pytest.fixture
-def triloop2():
- return TriLoop2()
-
-
-@pytest.fixture
-def sbs_hypergraph(sbs):
- return Hypergraph(sbs.edgedict, name="sbsh", edge_col="edges", node_col="nodes")
-
-
-@pytest.fixture
-def sbs_graph(sbs):
- edges = set()
- for _, e in sbs.edgedict.items():
- edges.update(it.combinations(e, 2))
- G = nx.Graph(name="sbsg")
- G.add_edges_from(edges)
- return G
-
-
-@pytest.fixture
-def sbsd():
- return SBSDupes()
-
-
-@pytest.fixture
-def sbsd_hypergraph():
- sbsd = SBSDupes()
- return Hypergraph(sbsd.edgedict)
-
-
-@pytest.fixture
-def lesmis():
- return LesMis()
-
-
-@pytest.fixture
-def G():
- return nx.karate_club_graph()
-
-
-@pytest.fixture
-def H():
- G = nx.karate_club_graph()
- return Hypergraph({f"e{i}": e for i, e in enumerate(G.edges())})
-
-
-@pytest.fixture
-def bipartite_example():
- from networkx.algorithms import bipartite
-
- return bipartite.random_graph(10, 5, 0.4, 0)
-
-
-@pytest.fixture
-def complete_bipartite_example():
- return CompleteBipartite(2, 3).g
-
-
-@pytest.fixture
-def dataframe():
- return Dataframe()
-
-
-@pytest.fixture
-def dataframe_example():
- M = np.array([[1, 1, 0, 0], [0, 1, 1, 0], [1, 0, 1, 0]])
- index = ["A", "B", "C"]
- columns = ["a", "b", "c", "d"]
- return pd.DataFrame(M, index=index, columns=columns)
-
-
-@pytest.fixture
-def harry_potter():
- return HarryPotter()
-
-
-@pytest.fixture
-def array_example():
- return np.array(
- [[0, 1, 1, 0, 1], [1, 1, 1, 1, 1], [1, 0, 0, 1, 0], [0, 0, 0, 0, 1]]
- )
-
-
-####################Fixtures suite for test_hypergraph.py####################
-####################These fixtures are modular and thus have inter-dependencies####################
-@pytest.fixture
-def les_mis():
- return LM()
-
-
-@pytest.fixture
-def scenes():
- return {
- "0": ("FN", "TH"),
- "1": ("TH", "JV"),
- "2": ("BM", "FN", "JA"),
- "3": ("JV", "JU", "CH", "BM"),
- "4": ("JU", "CH", "BR", "CN", "CC", "JV", "BM"),
- "5": ("TH", "GP"),
- "6": ("GP", "MP"),
- "7": ("MA", "GP"),
- }
-
-
-@pytest.fixture
-def edges(scenes):
- return list(set(list(scenes.keys())))
-
-
-@pytest.fixture
-def nodes(scenes):
- return list(set(list(np.concatenate([v for v in scenes.values()]))))
-
-
-@pytest.fixture
-def edge_properties(edges):
- edge_properties = defaultdict(dict)
- edge_properties.update(
- {str(ed): {"weight": np.random.randint(2, 10)} for ed in range(0, 8, 2)}
- )
- for ed in edges:
- edge_properties[ed].update({"color": np.random.choice(["red", "green"])})
- return edge_properties
-
-
-@pytest.fixture
-def node_properties(les_mis, nodes):
- return {
- ch: {
- "FullName": les_mis.dnames.loc[ch].FullName,
- "Description": les_mis.dnames.loc[ch].Description,
- "color": np.random.choice(["pink", "blue"]),
- }
- for ch in nodes
- }
-
-
-@pytest.fixture
-def scenes_dataframe(scenes):
- scenes_dataframe = (
- pd.DataFrame(pd.Series(scenes).explode())
- .reset_index()
- .rename(columns={"index": "Scenes", 0: "Characters"})
- )
- scenes_dataframe["color"] = np.random.choice(
- ["red", "green"], len(scenes_dataframe)
- )
- scenes_dataframe["heaviness"] = np.random.rand(len(scenes_dataframe))
-
- return scenes_dataframe
-
-
-@pytest.fixture
-def hyp_no_props():
- return Hypergraph(
- pd.DataFrame(
- np.array(
- [
- np.random.choice(list("ABCD"), 50),
- np.random.choice(list("abcdefghijklmnopqrstuvwxyz"), 50),
- ]
- ).T, # creates a transposed ndarray
- columns=["Club", "Member"],
- )
- )
-
-
-@pytest.fixture
-def hyp_df_with_props(scenes_dataframe, node_properties, edge_properties):
- return Hypergraph(
- scenes_dataframe,
- # cell_properties=["color"],
- cell_weight_col="heaviness",
- node_properties=node_properties,
- edge_properties=edge_properties,
- )
-
-
-@pytest.fixture
-def hyp_dict_with_props(scenes):
- scenes_with_cellprops = {
- ed: {
- ch: {
- "color": np.random.choice(["red", "green"]),
- "cell_weight": np.random.rand(),
- }
- for ch in v
- }
- for ed, v in scenes.items()
- }
-
- return Hypergraph(
- scenes_with_cellprops,
- edge_col="Scenes",
- node_col="Characters",
- cell_weight_col="cell_weight",
- cell_properties=scenes_with_cellprops,
- )
-
-
-@pytest.fixture
-def hyp_props_on_edges_nodes(scenes_dataframe, edge_properties, node_properties):
- return Hypergraph(
- setsystem=scenes_dataframe,
- edge_col="Scenes",
- node_col="Characters",
- cell_weight_col="cell_weight",
- cell_properties=["color"],
- edge_properties=edge_properties,
- node_properties=node_properties,
- default_edge_weight=2.5,
- default_node_weight=6,
- )
-
-
-####################Fixtures suite for test_hypergraph.py####################
diff --git a/hypernetx/classes/tests/test_hypergraph.py b/hypernetx/classes/tests/test_hypergraph.py
deleted file mode 100644
index 851d660f..00000000
--- a/hypernetx/classes/tests/test_hypergraph.py
+++ /dev/null
@@ -1,438 +0,0 @@
-from collections import OrderedDict
-
-import pytest
-import numpy as np
-import pandas as pd
-from hypernetx.classes.hypergraph import Hypergraph
-
-from networkx.algorithms import bipartite
-
-from hypernetx.classes.property_store import PropertyStore
-
-
-def test_hypergraph_from_iterable_of_sets(sbs):
- H = Hypergraph(sbs.edges)
- assert len(H.edges) == 6
- assert len(H.nodes) == 7
- assert H.degree("A") == 3
- assert H.number_of_edges() == 6
- assert H.number_of_nodes() == 7
-
-
-def test_hypergraph_from_dict(sbs):
- H = Hypergraph(sbs.edgedict)
- assert len(H.edges) == 6
- assert len(H.nodes) == 7
- assert H.degree("A") == 3
- assert H.size("R") == 2
- assert H.order() == 7
-
-
-def test_hypergraph_custom_attributes(sbs):
- H = Hypergraph(sbs.edges)
- assert isinstance(H.__str__(), str)
- assert isinstance(H.__repr__(), str)
- assert H.__contains__("A")
- assert H.__len__() == 7
- nodes = [key for key in H.__iter__()]
- assert sorted(nodes) == ["A", "C", "E", "K", "T1", "T2", "V"]
- assert sorted(H.__getitem__("C")) == ["A", "E", "K"]
-
-
-def test_get_linegraph(sbs):
- H = Hypergraph(sbs.edges)
- assert len(H.edges) == 6
- assert len(H.nodes) == 7
- assert len(set(H.get_linegraph(s=1)).difference(set([0, 1, 2, 3, 4, 5]))) == 0
-
-
-def test_hypergraph_from_incidence_dataframe(lesmis):
- df = lesmis.hypergraph.incidence_dataframe()
- H = Hypergraph.from_incidence_dataframe(df)
- assert H.shape == (40, 8)
- assert H.size(3) == 8
- assert H.degree("JA") == 3
-
-
-def test_hypergraph_from_numpy_array(sbs):
- H = Hypergraph.from_numpy_array(sbs.arr)
- assert len(H.nodes) == 6
- assert len(H.edges) == 7
- assert H.dim("e5") == 2
- assert set(H.neighbors("v2")) == {"v0", "v5"}
-
-
-def test_hypergraph_from_bipartite(sbsd_hypergraph):
- H = sbsd_hypergraph
- HB = Hypergraph.from_bipartite(H.bipartite())
- assert len(HB.edges) == 7
- assert len(HB.nodes) == 8
-
-
-def test_add_edge_inplace(sbs):
- h = Hypergraph(sbs.edgedict)
- assert h.shape == (7, 6)
-
- # add a new edge in place; i.e. the current hypergraph should be mutated
- new_edge = "X"
- h.add_edge(new_edge)
-
-    # the Hypergraph should not increase its number of edges and incidences because the current behavior of adding
-    # an edge does not connect two or more nodes.
-    # In other words, adding an edge with no nodes does not change the shape
- assert h.shape == (7, 6)
- assert new_edge not in h.edges.elements
-
- # the new edge has no user-defined property data, so it should not be listed in the PropertyStore
- assert new_edge not in h.edges.properties
-
-    # However, the new_edge will be listed in the complete list of all user and non-user-defined properties for all edges
-    assert new_edge in h.edges.to_dataframe.index.tolist()
-
-
-def test_add_edge_not_inplace(sbs):
- h = Hypergraph(sbs.edgedict)
- assert h.shape == (7, 6)
-
-    # add a new edge not in place; the current hypergraph should be different from the new hypergraph
- # created from add_edge
- new_edge = "X"
- new_hg = h.add_edge(new_edge, inplace=False)
-
- assert new_hg.shape == (7, 6)
- assert new_edge not in new_hg.edges.elements
-
- assert new_edge not in new_hg.edges.properties
- assert new_edge in new_hg.edges.to_dataframe.index.tolist()
-
- # verify that the new edge is not in the old HyperGraph
- assert new_edge not in h.edges.to_dataframe.index.tolist()
-
-
-def test_remove_edges(sbs):
- H = Hypergraph(sbs.edgedict)
- assert H.shape == (7, 6)
- # remove an edge without removing any nodes
- H = H.remove_edges("P")
- assert H.shape == (7, 5)
- # remove an edge containing a singleton ear
- H = H.remove_edges("O")
- assert H.shape == (6, 4)
-
-
-@pytest.mark.skip("reason=remove has a new signature")
-def test_remove(triloop2):
- H = triloop2.hypergraph
- k = "ACD2"
- assert H.shape == (5, 4)
- newH = H.remove(k, level=0)
- assert newH.shape == (5, 3)
- newH = H.remove("E", level=1)
- assert newH.shape == (4, 4)
- newH = H.remove("ACD", level=0)
- assert newH.shape == (5, 3)
- newH = H.remove(["ACD", "E"])
- assert newH.shape == (4, 3)
- # with pytest.raises(TypeError):
- # H.remove({"ACD": "edge"})
-
-
-def test_remove_nodes():
- a, b, c, d = "a", "b", "c", "d"
- hbug = Hypergraph({0: [a, b], 1: [a, c], 2: [a, d]})
- assert a in hbug.nodes
- assert a in hbug.edges[0]
- assert a in hbug.edges[1]
- assert a in hbug.edges[2]
- hbug = hbug.remove_nodes(a)
- assert a not in hbug.nodes
- assert a not in hbug.edges[0]
- assert a not in hbug.edges[1]
- assert a not in hbug.edges[2]
-
-
-def test_matrix(sbs_hypergraph):
- H = sbs_hypergraph
- assert H.incidence_matrix().todense().shape == (7, 6)
- assert H.adjacency_matrix(s=2).todense().shape == (7, 7)
- assert H.edge_adjacency_matrix().todense().shape == (6, 6)
- aux_matrix = H.auxiliary_matrix(node=False)
- assert aux_matrix.todense().shape == (6, 6)
-
-
-def test_collapse_edges(sbsd_hypergraph):
- H = sbsd_hypergraph
- assert len(H.edges) == 7
- HC = H.collapse_edges()
- assert len(HC.edges) == 6
-
-
-def test_collapse_nodes(sbsd_hypergraph):
- H = sbsd_hypergraph
- assert len(H.nodes) == 8
- HC = H.collapse_nodes()
- assert len(HC.nodes) == 7
-
-
-def test_collapse_nodes_and_edges(sbsd_hypergraph):
- H = sbsd_hypergraph
- HC2 = H.collapse_nodes_and_edges()
- assert len(H.edges) == 7
- assert len(HC2.edges) == 6
- assert len(H.nodes) == 8
- assert len(HC2.nodes) == 7
-
-
-def test_restrict_to_edges(sbs_hypergraph):
- H = sbs_hypergraph
- HS = H.restrict_to_edges(["P", "O"])
- assert len(H.edges) == 6
- assert len(HS.edges) == 2
-
-
-def test_restrict_to_nodes(sbs_hypergraph):
- H = sbs_hypergraph
- assert len(H.nodes) == 7
- H1 = H.restrict_to_nodes(["A", "E", "K"])
- assert len(H.nodes) == 7
- assert len(H1.nodes) == 3
- assert len(H1.edges) == 5
- assert "C" in H.edges["P"]
- assert "C" not in H1.edges["P"]
-
-
-# @pytest.mark.skip("reason=Deprecated method")
-def test_remove_from_restriction(triloop):
- h = triloop.hypergraph
- h1 = h.restrict_to_nodes(h.neighbors("A")).remove_nodes(
- "A"
- ) # Hypergraph does not have a remove_node method
- assert "A" not in h1
- assert "A" not in h1.edges["ACD"]
-
-
-def test_toplexes(sbsd_hypergraph):
- H = sbsd_hypergraph
- T = H.toplexes(return_hyp=True)
- assert len(T.nodes) == 8
- assert len(T.edges) == 5
- T = T.collapse_nodes()
- assert len(T.nodes) == 7
-
-
-def test_is_connected():
- setsystem = [{1, 2, 3, 4}, {3, 4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
- h = Hypergraph(setsystem)
- assert h.is_connected() is True
- assert h.is_connected(s=2) is False
- assert h.is_connected(s=2, edges=True) is True
- # test case below will raise nx.NetworkXPointlessConcept
- assert h.is_connected(s=3, edges=True) is False
-
-
-# @pytest.mark.skip("Deprecated methods")
-def test_singletons():
- E = {1: {2, 3, 4, 5}, 6: {2, 5, 7, 8, 9}, 10: {11}, 12: {13}, 14: {7}}
- h = Hypergraph(E)
- assert h.shape == (9, 5)
- singles = h.singletons()
- assert len(singles) == 2
- h = h.remove_edges(singles)
- assert h.shape == (7, 3)
-
-
-def test_remove_singletons():
- E = {1: {2, 3, 4, 5}, 6: {2, 5, 7, 8, 9}, 10: {11}, 12: {13}, 14: {7}}
- h = Hypergraph(E)
- assert h.shape == (9, 5)
- h1 = h.remove_singletons()
- assert h1.shape == (7, 3)
- assert h.shape == (9, 5)
-
-
-def test_components():
- setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
- h = Hypergraph(setsystem)
- # h.components() causes an error
- assert [len(g) for g in h.component_subgraphs()] == [8]
-
-
-def test_connected_components():
- setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
- h = Hypergraph(setsystem)
- assert len(list(h.connected_components())) == 1
- assert list(h.connected_components(edges=True)) == [{0, 1, 2, 3}]
- assert [len(g) for g in h.connected_component_subgraphs()] == [8]
-
-
-def test_s_components():
- setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
- h = Hypergraph(setsystem)
- assert len(list(h.s_components())) == 1
- assert len(list(h.s_components(s=2))) == 2
- assert len(list(h.s_components(s=3))) == 4
- assert len(list(h.s_components(s=3, edges=False))) == 7
- assert len(list(h.s_components(s=4, edges=False))) == 8
-
-
-def test_s_connected_components():
- setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
- h = Hypergraph(setsystem)
- assert list(h.s_connected_components()) == [{0, 1, 2, 3}]
- assert list(h.s_connected_components(s=2)) == [{1, 2, 3}]
- assert list(h.s_connected_components(s=2, edges=False)) == [{5, 6}]
-
-
-def test_s_component_subgraphs():
- setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
- h = Hypergraph(setsystem)
- assert {5, 4}.issubset(
- [len(g) for g in h.s_component_subgraphs(s=2, return_singletons=True)]
- )
- assert {3, 4}.issubset(
- [len(g) for g in h.s_component_subgraphs(s=3, return_singletons=True)]
- )
-
-
-def test_size(sbs):
- h = Hypergraph(sbs.edgedict)
- assert h.size("S") == 4
- assert h.size("S", {"T2", "V"}) == 2
- assert h.size("S", {"T1", "T2"}) == 1
- assert h.size("S", {"T2"}) == 1
- assert h.size("S", {"T1"}) == 0
- assert h.size("S", {}) == 0
-
-
-def test_diameter(sbs):
- h = Hypergraph(sbs.edgedict)
- assert h.diameter() == 3
- with pytest.raises(Exception) as excinfo:
- h.diameter(s=2)
- assert "Hypergraph is not s-connected." in str(excinfo.value)
-
-
-def test_node_diameters(sbs):
- h = Hypergraph(sbs.edgedict)
- assert h.node_diameters()[0] == 3
- assert h.node_diameters()[2] == [{"A", "C", "E", "K", "T1", "T2", "V"}]
-
-
-def test_edge_diameter(sbs):
- h = Hypergraph(sbs.edgedict)
- assert h.edge_diameter() == 3
- assert h.edge_diameters()[2] == [{"I", "L", "O", "P", "R", "S"}]
- with pytest.raises(Exception) as excinfo:
- h.edge_diameter(s=2)
- assert "Hypergraph is not s-connected." in str(excinfo.value)
-
-
-def test_bipartite(sbs_hypergraph):
- assert bipartite.is_bipartite(sbs_hypergraph.bipartite())
-
-
-def test_dual(sbs_hypergraph):
- H = sbs_hypergraph
- HD = H.dual()
- assert isinstance(HD.nodes.property_store, PropertyStore)
- assert isinstance(HD.edges.property_store, PropertyStore)
- assert set(H.nodes) == set(HD.edges)
- assert set(H.edges) == set(HD.nodes)
- assert list(H.dataframe.columns) == list(HD.dataframe.columns)
-
-
-@pytest.mark.filterwarnings("ignore:No 3-path between ME and FN")
-def test_distance(lesmis):
- h = lesmis.hypergraph
- assert h.distance("ME", "FN") == 2
- assert h.distance("ME", "FN", s=2) == 3
- assert h.distance("ME", "FN", s=3) == np.inf
-
-
-def test_edge_distance(lesmis):
- h = lesmis.hypergraph
- assert h.edge_distance(1, 4) == 2
- h2 = h.remove([5], 0)
- assert h2.edge_distance(1, 4) == 3
- assert h2.edge_distance(1, 4, s=2) == np.inf
-
-
-def test_dataframe(lesmis):
- h = lesmis.hypergraph
- df = h.incidence_dataframe()
- assert np.allclose(np.array(np.sum(df)), np.array([10, 9, 8, 4, 8, 3, 12, 6]))
-
-
-def test_construct_empty_hypergraph():
- h = Hypergraph()
- assert h.shape == (0, 0)
- assert h.edges.is_empty()
- assert h.nodes.is_empty()
- assert isinstance(h.dataframe, pd.DataFrame)
-
-
-def test_construct_hypergraph_from_empty_dict():
- h = Hypergraph({})
- assert h.shape == (0, 0)
- assert h.edges.is_empty()
- assert h.nodes.is_empty()
-
-
-def test_construct_hypergraph_empty_dict():
- h = Hypergraph(dict())
- assert h.shape == (0, 0)
- assert h.edges.is_empty()
- assert h.nodes.is_empty()
-
-
-def test_static_hypergraph_s_connected_components(lesmis):
- H = Hypergraph(lesmis.edgedict)
- assert {7, 8} in list(H.s_connected_components(edges=True, s=4))
-
-
-def test_difference_on_same_hypergraph(lesmis):
- hg = Hypergraph(lesmis.edgedict)
- hg_copy = Hypergraph(lesmis.edgedict)
-
- hg_diff = hg - hg_copy
-
- assert len(hg_diff) == 0
- assert len(hg_diff.nodes) == 0
- assert len(hg_diff.edges) == 0
- assert hg_diff.shape == (0, 0)
- assert hg_diff.incidence_dict == {}
-
-
-def test_difference_on_empty_hypergraph(sbs_hypergraph):
- hg_empty = Hypergraph()
-
- hg_diff = sbs_hypergraph - hg_empty
-
- assert len(hg_diff) == 7
- assert len(hg_diff.nodes) == 7
- assert len(hg_diff.edges) == 6
- assert hg_diff.shape == (7, 6)
-
- edges = ["I", "L", "O", "P", "R", "S"]
- assert all(uid in edges for uid in hg_diff.incidence_dict.keys())
-
-
-def test_difference_on_similar_hypergraph(sbs_hypergraph):
- a, c, e, k, t1, t2, v = ("A", "C", "E", "K", "T1", "T2", "V")
- l, o, p, r, s = ("L", "O", "P", "R", "S")
- data = OrderedDict(
- [(p, {a, c, k}), (r, {a, e}), (s, {a, k, t2, v}), (l, {c, e}), (o, {t1, t2})]
- )
- hg_similar = Hypergraph(data, edge_col="edges", node_col="nodes")
-
- hg_diff = sbs_hypergraph - hg_similar
-
- assert len(hg_diff) == 2
- assert len(hg_diff.nodes) == 2
- assert len(hg_diff.edges) == 1
- assert hg_diff.shape == (2, 1)
- print(hg_diff.incidence_dict.keys())
- edges = ["I"]
- assert all(uid in edges for uid in hg_diff.incidence_dict.keys())
diff --git a/pyproject.toml b/pyproject.toml
index 27c4cfb8..de924abf 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -81,11 +81,9 @@ commitizen = {version = "^3.20.0", optional = true}
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
-
[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-ra"
testpaths = [
- "hypernetx/classes/tests",
- "hypernetx/algorithms/tests",
+ "tests"
]
diff --git a/hypernetx/algorithms/tests/conftest.py b/tests/algorithms/conftest.py
similarity index 100%
rename from hypernetx/algorithms/tests/conftest.py
rename to tests/algorithms/conftest.py
diff --git a/hypernetx/algorithms/tests/test_contagion.py b/tests/algorithms/test_contagion.py
similarity index 100%
rename from hypernetx/algorithms/tests/test_contagion.py
rename to tests/algorithms/test_contagion.py
diff --git a/hypernetx/algorithms/tests/test_generative.py b/tests/algorithms/test_generative.py
similarity index 100%
rename from hypernetx/algorithms/tests/test_generative.py
rename to tests/algorithms/test_generative.py
diff --git a/hypernetx/algorithms/tests/test_homology_mod_2.py b/tests/algorithms/test_homology_mod_2.py
similarity index 100%
rename from hypernetx/algorithms/tests/test_homology_mod_2.py
rename to tests/algorithms/test_homology_mod_2.py
diff --git a/hypernetx/algorithms/tests/test_laplacians_clustering.py b/tests/algorithms/test_laplacians_clustering.py
similarity index 100%
rename from hypernetx/algorithms/tests/test_laplacians_clustering.py
rename to tests/algorithms/test_laplacians_clustering.py
diff --git a/hypernetx/algorithms/tests/test_modularity.py b/tests/algorithms/test_modularity.py
similarity index 100%
rename from hypernetx/algorithms/tests/test_modularity.py
rename to tests/algorithms/test_modularity.py
diff --git a/hypernetx/algorithms/tests/test_s_centrality_measures.py b/tests/algorithms/test_s_centrality_measures.py
similarity index 100%
rename from hypernetx/algorithms/tests/test_s_centrality_measures.py
rename to tests/algorithms/test_s_centrality_measures.py
diff --git a/tests/classes/conftest.py b/tests/classes/conftest.py
new file mode 100644
index 00000000..02cf9931
--- /dev/null
+++ b/tests/classes/conftest.py
@@ -0,0 +1,249 @@
+import pytest
+import os
+import itertools as it
+import networkx as nx
+import pandas as pd
+import numpy as np
+
+from hypernetx import Hypergraph, HarryPotter, LesMis as LM
+from hypernetx.classes.helpers import create_dataframe
+from collections import OrderedDict, defaultdict, namedtuple
+
+
+class SevenBySix:
+ """Example hypergraph with 7 nodes and 6 edges."""
+
+ def __init__(self):
+ # Nodes
+ SBS_Nodes = namedtuple("SBS_Nodes", "A C E K T1 T2 V")
+ a, c, e, k, t1, t2, v = ("A", "C", "E", "K", "T1", "T2", "V")
+ self.nodes = SBS_Nodes(a, c, e, k, t1, t2, v)
+
+ # Edges
+ SBS_Edges = namedtuple("SBS_Edges", "I L O P R S")
+ i, l, o, p, r, s = ("I", "L", "O", "P", "R", "S")
+ self.edges = SBS_Edges(i, l, o, p, r, s)
+
+ # Labels
+ self.labels = OrderedDict(
+ [
+ ("edges", list(self.edges)),
+ ("nodes", list(self.nodes)),
+ ]
+ )
+
+ # define edges
+ self.edges_list = [{a, c, k}, {a, e}, {a, k, t2, v}, {c, e}, {t1, t2}, {k, t2}]
+ self.edgedict = OrderedDict(
+ [
+ (p, {a, c, k}),
+ (r, {a, e}),
+ (s, {a, k, t2, v}),
+ (l, {c, e}),
+ (o, {t1, t2}),
+ (i, {k, t2}),
+ ]
+ )
+ self.dataframe = create_dataframe(self.edgedict)
+
+ # standalone 6x7 incidence array used by test_from_numpy_array:
+ # rows = 6 nodes, columns = 7 edges (not the incidence matrix of the 7-node, 6-edge example above)
+ self.arr = np.array(
+ [
+ [0, 0, 0, 1, 0, 1, 0],
+ [0, 1, 1, 0, 0, 0, 0],
+ [0, 0, 0, 0, 1, 1, 0],
+ [1, 1, 0, 1, 0, 0, 0],
+ [1, 0, 1, 0, 0, 0, 0],
+ [1, 0, 0, 1, 0, 1, 1],
+ ]
+ )
+
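+ # the same incidences encoded as (edge index, node index) pairs into self.labels;
+ # e.g. [3, 0] records that edge "P" (edges[3]) contains node "A" (nodes[0])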
+ self.data = np.array(
+ [
+ [3, 0],
+ [3, 1],
+ [3, 3],
+ [4, 0],
+ [4, 2],
+ [5, 0],
+ [5, 3],
+ [5, 5],
+ [5, 6],
+ [1, 1],
+ [1, 2],
+ [2, 4],
+ [2, 5],
+ [0, 3],
+ [0, 5],
+ ]
+ )
+
+
+@pytest.fixture
+def sbs() -> SevenBySix:
+ return SevenBySix()
+
+
+class TriLoop:
+ """Example hypergraph with 2 two 1-cells and 1 2-cell forming a loop"""
+
+ def __init__(self):
+ A, B, C, D = "A", "B", "C", "D"
+ AB, BC, ACD = "AB", "BC", "ACD"
+ self.edgedict = {AB: {A, B}, BC: {B, C}, ACD: {A, C, D}}
+
+
+@pytest.fixture
+def triloop():
+ return TriLoop()
+
+
+class TriLoop2:
+ """Triloop example with redundant node and edge"""
+
+ def __init__(self):
+ # Nodes
+ A, B, C, D, E = "A", "B", "C", "D", "E"
+ # Edges
+ AB, BC, ACD, ACD2 = "AB", "BC", "ACD", "ACD2"
+ self.nodes = set([A, B, C, D, E])
+
+ self.edgedict = {AB: {A, B}, BC: {B, C}, ACD: {A, C, D, E}, ACD2: {A, C, D, E}}
+ self.name = "TriLoop2"
+
+
+@pytest.fixture
+def triloop2():
+ return TriLoop2()
+
+
+class SBSDupes:
+ def __init__(self):
+ # Nodes
+ a, c, e, f, k, t1, t2, v = ("A", "C", "E", "F", "K", "T1", "T2", "V")
+ self.nodes = (a, c, e, f, k, t1, t2, v)
+
+ # Edges
+ i, l, m, o, p, r, s = ("I", "L", "M", "O", "P", "R", "S")
+ self.edges = (i, l, m, o, p, r, s)
+
+ self.edgedict = OrderedDict(
+ [
+ (i, {k, t2}),
+ (l, {c, e, f}),
+ (m, {c, e, f}),
+ (o, {t1, t2}),
+ (p, {a, c, k}),
+ (r, {a, e, f}),
+ (s, {a, k, t2, v}),
+ ]
+ )
+
+ self.dataframe = create_dataframe(self.edgedict)
+
+
+@pytest.fixture
+def sbs_dupes():
+ return SBSDupes()
+
+
+class LesMis:
+ def __init__(self):
+ self.edgedict = OrderedDict(
+ [
+ (1, {"CL", "CV", "GE", "GG", "MB", "MC", "ME", "MY", "NP", "SN"}),
+ (2, {"IS", "JL", "JV", "MB", "ME", "MR", "MT", "MY", "PG"}),
+ (3, {"BL", "DA", "FA", "FN", "FT", "FV", "LI", "ZE"}),
+ (4, {"CO", "FN", "TH", "TM"}),
+ (5, {"BM", "FF", "FN", "JA", "JV", "MT", "MY", "VI"}),
+ (6, {"FN", "JA", "JV"}),
+ (
+ 7,
+ {
+ "BM",
+ "BR",
+ "CC",
+ "CH",
+ "CN",
+ "FN",
+ "JU",
+ "JV",
+ "PO",
+ "SC",
+ "SP",
+ "SS",
+ },
+ ),
+ (8, {"FN", "JA", "JV", "PO", "SP", "SS"}),
+ ]
+ )
+
+
+@pytest.fixture
+def lesmis():
+ return LesMis()
+
+
+@pytest.fixture
+def sample_df():
+ fname = os.path.join(os.path.dirname(__file__), "sample.csv")
+ return pd.read_csv(fname, index_col=0)
+
+
+#### Old fixtures not in use
+
+
+class CompleteBipartite:
+ def __init__(self, n1, n2):
+ self.g = nx.complete_bipartite_graph(n1, n2)
+ self.left, self.right = nx.bipartite.sets(self.g)
+
+
+@pytest.fixture
+def sbs_graph():
+ sbs = SevenBySix()
+ edges = set()
+ for _, e in sbs.edgedict.items():
+ edges.update(it.combinations(e, 2))
+ G = nx.Graph(name="sbsg")
+ G.add_edges_from(edges)
+ return G
+
+
+@pytest.fixture
+def G():
+ return nx.karate_club_graph()
+
+
+@pytest.fixture
+def H():
+ G = nx.karate_club_graph()
+ return Hypergraph({f"e{i}": e for i, e in enumerate(G.edges())})
+
+
+@pytest.fixture
+def bipartite_example():
+ from networkx.algorithms import bipartite
+
+ return bipartite.random_graph(10, 5, 0.4, 0)
+
+
+@pytest.fixture
+def complete_bipartite_example():
+ return CompleteBipartite(2, 3).g
+
+
+@pytest.fixture
+def dataframe_example():
+ M = np.array([[1, 1, 0, 0], [0, 1, 1, 0], [1, 0, 1, 0]])
+ index = ["A", "B", "C"]
+ columns = ["a", "b", "c", "d"]
+ return pd.DataFrame(M, index=index, columns=columns)
+
+
+@pytest.fixture
+def array_example():
+ return np.array(
+ [[0, 1, 1, 0, 1], [1, 1, 1, 1, 1], [1, 0, 0, 1, 0], [0, 0, 0, 0, 1]]
+ )
diff --git a/hypernetx/classes/tests/sample.csv b/tests/classes/sample.csv
similarity index 100%
rename from hypernetx/classes/tests/sample.csv
rename to tests/classes/sample.csv
diff --git a/tests/classes/test_hypergraph.py b/tests/classes/test_hypergraph.py
new file mode 100644
index 00000000..ed3f1d01
--- /dev/null
+++ b/tests/classes/test_hypergraph.py
@@ -0,0 +1,497 @@
+from collections import OrderedDict
+
+import pytest
+import numpy as np
+import pandas as pd
+from hypernetx.classes.hypergraph import Hypergraph
+
+from networkx.algorithms import bipartite
+
+from hypernetx.classes.property_store import PropertyStore
+from tests.classes.conftest import SevenBySix
+
+
+################# Tests on constructors and from_* factory methods ################
+
+
+@pytest.mark.parametrize(
+ "hg",
+ [
+ Hypergraph(SevenBySix().edges_list), # list of edges
+ Hypergraph(SevenBySix().edgedict), # dictionary of edges to nodes
+ Hypergraph(SevenBySix().dataframe), # dataframe of edges to nodes
+ ],
+)
+def test_constructor_on_various_set_systems(hg):
+ sbs = SevenBySix()
+ assert len(hg.edges) == len(sbs.edges)
+ assert len(hg.nodes) == len(sbs.nodes)
+ assert hg.number_of_edges() == len(sbs.edges)
+ assert hg.number_of_nodes() == len(sbs.nodes)
+
+ assert hg.degree(sbs.nodes.A) == 3
+ assert hg.order() == len(sbs.nodes)
+
+
+@pytest.mark.parametrize("h", [Hypergraph(), Hypergraph({})])
+def test_construct_empty_hypergraph(h):
+ assert h.shape == (0, 0)
+ assert h.edges.is_empty()
+ assert h.nodes.is_empty()
+
+
+def test_from_incidence_dataframe(lesmis):
+ h = Hypergraph(lesmis.edgedict)
+ df = h.incidence_dataframe()
+ hg = Hypergraph.from_incidence_dataframe(df)
+ assert hg.shape == (40, 8)
+ assert hg.size(3) == 8
+ assert hg.degree("JA") == 3
+
+
+def test_from_numpy_array(sbs):
+ hg = Hypergraph.from_numpy_array(sbs.arr)
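+ # from_numpy_array autogenerates default labels: v0..v5 for the 6 rows (nodes), e0..e6 for the 7 columns (edges)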
+ assert len(hg.nodes) == len(sbs.arr)
+ assert len(hg.edges) == len(sbs.arr[0])
+ assert hg.dim("e5") == 2
+ assert set(hg.neighbors("v2")) == {"v0", "v5"}
+
+
+def test_from_bipartite(sbs):
+ hg = Hypergraph(sbs.edgedict)
+ hg_b = Hypergraph.from_bipartite(hg.bipartite())
+ assert len(hg_b.edges) == len(sbs.edges)
+ assert len(hg_b.nodes) == 7
+
+
+################# Tests on methods ################################
+def test_len(sbs):
+ hg = Hypergraph(sbs.edgedict)
+ assert len(hg) == len(sbs.nodes)
+
+
+def test_contains(sbs):
+ hg = Hypergraph(sbs.edgedict)
+ assert sbs.nodes.A in hg
+
+
+def test_iterator(sbs):
+ hg = Hypergraph(sbs.edgedict)
+ nodes = [key for key in hg]
+ assert sorted(nodes) == list(sbs.nodes)
+
+
+def test_getitem(sbs):
+ hg = Hypergraph(sbs.edgedict)
+ nodes = hg[sbs.nodes.C]
+ assert sorted(nodes) == [sbs.nodes.A, sbs.nodes.E, sbs.nodes.K]
+
+
+def test_get_linegraph(sbs):
+ hg = Hypergraph(sbs.edgedict)
+ assert len(hg.edges) == len(sbs.edges)
+ assert len(hg.nodes) == len(sbs.nodes)
+
+ lg = hg.get_linegraph(s=1)
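+ # the vertices of the s=1 linegraph are the hypergraph's edge names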
+
+ diff = set(lg).difference(set(sbs.edges))
+ assert len(diff) == 0
+
+
+def test_add_edge_inplace(sbs):
+ h = Hypergraph(sbs.edgedict)
+ assert h.shape == (7, 6)
+
+ # add a new edge in place; i.e. the current hypergraph should be mutated
+ new_edge = "X"
+ h.add_edge(new_edge)
+
+ # the Hypergraph should not increase its number of edges and incidences because the current behavior of
+ # add_edge does not connect two or more nodes; an edge added without nodes creates no incidences
+ assert h.shape == (7, 6)
+ assert new_edge not in h.edges.elements
+
+ # the new edge has no user-defined property data, so it should not be listed in the PropertyStore
+ assert new_edge not in h.edges.properties
+
+ # However, the new edge will be listed in the complete dataframe of user- and non-user-defined
+ # properties for all edges
+ assert new_edge in h.edges.to_dataframe.index.tolist()
+
+
+def test_add_edge_not_inplace(sbs):
+ h = Hypergraph(sbs.edgedict)
+ assert h.shape == (7, 6)
+
+ # add a new edge not in place; the current hypergraph should differ from the new hypergraph
+ # created by add_edge
+ new_edge = "X"
+ new_hg = h.add_edge(new_edge, inplace=False)
+
+ assert new_hg.shape == (7, 6)
+ assert new_edge not in new_hg.edges.elements
+
+ assert new_edge not in new_hg.edges.properties
+ assert new_edge in new_hg.edges.to_dataframe.index.tolist()
+
+ # verify that the new edge is not in the old HyperGraph
+ assert new_edge not in h.edges.to_dataframe.index.tolist()
+
+
+def test_remove_edges(sbs):
+ hg = Hypergraph(sbs.edgedict)
+ # shape returns (#nodes, #edges)
+ assert hg.shape == (len(sbs.nodes), len(sbs.edges))
+
+ # remove an edge containing nodes that are in other edges
+ # the number of nodes should not decrease
+ hg = hg.remove_edges(sbs.edges.P)
+ assert hg.shape == (len(sbs.nodes), len(sbs.edges) - 1)
+
+ # remove an edge containing a singleton ear (i.e. a node not present in other edges)
+ # the number of nodes should decrease by exactly one
+ hg = hg.remove_edges(sbs.edges.O)
+ assert hg.shape == (len(sbs.nodes) - 1, len(sbs.edges) - 2)
+
+
+def test_remove_nodes(sbs):
+ hg = Hypergraph(sbs.edgedict)
+
+ assert sbs.nodes.A in hg.nodes
+ assert sbs.nodes.A in hg.edges[sbs.edges.P]
+ assert sbs.nodes.A in hg.edges[sbs.edges.R]
+ assert sbs.nodes.A in hg.edges[sbs.edges.S]
+
+ hg_new = hg.remove_nodes(sbs.nodes.A)
+
+ assert sbs.nodes.A not in hg_new.nodes
+ assert sbs.nodes.A not in hg_new.edges[sbs.edges.P]
+ assert sbs.nodes.A not in hg_new.edges[sbs.edges.R]
+ assert sbs.nodes.A not in hg_new.edges[sbs.edges.S]
+
+
+def test_remove(triloop2):
+ H = Hypergraph(triloop2.edgedict, name=triloop2.name)
+
+ assert H.shape == (5, 4)
+ duplicate_edge = ["ACD2"]
+ newH = H.remove(duplicate_edge, level=0)
+ assert newH.shape == (5, 3)
+
+ assert H.shape == (5, 4)
+ newH = H.remove(["E"], level=1)
+ assert newH.shape == (4, 4)
+
+ assert H.shape == (5, 4)
+ newH = H.remove(["ACD"], level=0)
+ assert newH.shape == (5, 3)
+
+ # remove incidence in which the node is associated with other edges
+ assert H.shape == (5, 4)
+ newH = H.remove([("ACD", "E")])
+ assert newH.shape == (5, 4)
+
+ # edge cases:
+ # level defaults to 2, which handles incidence pairs; incidence pairs must be given as a list of tuples.
+ # if no tuples are given, the list is treated as a list of edges to remove,
+ # and any edge in the list that doesn't exist is ignored
+
+ # case 1: level defaults to 2, list of uids is a list of edges and nodes
+ assert H.shape == (5, 4)
+ newH = H.remove(["ACD", "E"])
+ assert newH.shape == (5, 3)
+
+ # case 2: level defaults to 2, list of uids is a list of edges
+ # removing 2 edges that have the node B. Node B is not associated with any other edge
+ # new hypergraph should have 4 nodes and 2 edges
+ assert H.shape == (5, 4)
+ newH = H.remove(["AB", "BC"])
+ assert newH.shape == (4, 2)
+
+ # case 3: level defaults to 2, list of uids is a list of nodes
+ # no change
+ assert H.shape == (5, 4)
+ newH = H.remove(list(triloop2.nodes))
+ assert newH.shape == (5, 4)
+
+
+def test_matrix(sbs):
+ hg = Hypergraph(sbs.edgedict)
+
+ assert hg.incidence_matrix().todense().shape == (len(sbs.nodes), len(sbs.edges))
+ assert hg.adjacency_matrix(s=2).todense().shape == (7, 7)
+ assert hg.edge_adjacency_matrix().todense().shape == (6, 6)
+
+ aux_matrix = hg.auxiliary_matrix(node=False)
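+ # node=False returns the auxiliary matrix on edges, hence shape (6, 6)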
+ assert aux_matrix.todense().shape == (6, 6)
+
+
+def test_collapse_edges(sbs_dupes):
+ hg = Hypergraph(sbs_dupes.edgedict)
+ assert len(hg.edges) == len(sbs_dupes.edges)
+
+ hc = hg.collapse_edges()
+ assert len(hc.edges) == len(sbs_dupes.edges) - 1
+
+
+def test_collapse_nodes(sbs_dupes):
+ hg = Hypergraph(sbs_dupes.edgedict)
+ assert len(hg.nodes) == len(sbs_dupes.nodes)
+
+ hc = hg.collapse_nodes()
+ assert len(hc.nodes) == len(sbs_dupes.nodes) - 1
+
+
+def test_collapse_nodes_and_edges(sbs_dupes):
+ hg = Hypergraph(sbs_dupes.edgedict)
+ hc2 = hg.collapse_nodes_and_edges()
+
+ assert len(hg.edges) == len(sbs_dupes.edges)
+ assert len(hc2.edges) == len(sbs_dupes.edges) - 1
+ assert len(hg.nodes) == len(sbs_dupes.nodes)
+ assert len(hc2.nodes) == len(sbs_dupes.nodes) - 1
+
+
+def test_restrict_to_edges(sbs):
+ H = Hypergraph(sbs.edgedict)
+ HS = H.restrict_to_edges(["P", "O"])
+ assert len(H.edges) == 6
+ assert len(HS.edges) == 2
+
+
+def test_restrict_to_nodes(sbs):
+ H = Hypergraph(sbs.edgedict)
+ assert len(H.nodes) == 7
+ H1 = H.restrict_to_nodes(["A", "E", "K"])
+ assert len(H.nodes) == 7
+ assert len(H1.nodes) == 3
+ assert len(H1.edges) == 5
+ assert "C" in H.edges["P"]
+ assert "C" not in H1.edges["P"]
+
+
+def test_remove_from_restriction(triloop):
+ h = Hypergraph(triloop.edgedict)
+ h1 = h.restrict_to_nodes(h.neighbors("A")).remove_nodes(
+ "A"
+ ) # Hypergraph does not have a remove_node method
+ assert "A" not in h1
+ assert "A" not in h1.edges["ACD"]
+
+
+def test_toplexes(sbs_dupes):
+ h = Hypergraph(sbs_dupes.edgedict)
+ T = h.toplexes(return_hyp=True)
+ assert len(T.nodes) == 8
+ assert len(T.edges) == 5
+ T = T.collapse_nodes()
+ assert len(T.nodes) == 7
+
+
+def test_is_connected():
+ setsystem = [{1, 2, 3, 4}, {3, 4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
+ h = Hypergraph(setsystem)
+ assert h.is_connected() is True
+ assert h.is_connected(s=2) is False
+ assert h.is_connected(s=2, edges=True) is True
+ # at s=3 no two edges share three nodes; is_connected returns False rather than
+ # propagating the underlying nx.NetworkXPointlessConcept
+ assert h.is_connected(s=3, edges=True) is False
+
+
+def test_singletons():
+ E = {1: {2, 3, 4, 5}, 6: {2, 5, 7, 8, 9}, 10: {11}, 12: {13}, 14: {7}}
+ h = Hypergraph(E)
+ assert h.shape == (9, 5)
+ singles = h.singletons()
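+ # edges 10 and 12 are singletons: size-1 edges whose only node appears nowhere else;
+ # edge 14 is not a singleton because node 7 also belongs to edge 6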
+ assert len(singles) == 2
+ h = h.remove_edges(singles)
+ assert h.shape == (7, 3)
+
+
+def test_remove_singletons():
+ E = {1: {2, 3, 4, 5}, 6: {2, 5, 7, 8, 9}, 10: {11}, 12: {13}, 14: {7}}
+ h = Hypergraph(E)
+ assert h.shape == (9, 5)
+ h1 = h.remove_singletons()
+ assert h1.shape == (7, 3)
+ assert h.shape == (9, 5)
+
+
+def test_components():
+ setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
+ h = Hypergraph(setsystem)
+ # h.components() causes an error
+ assert [len(g) for g in h.component_subgraphs()] == [8]
+
+
+def test_connected_components():
+ setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
+ h = Hypergraph(setsystem)
+ assert len(list(h.connected_components())) == 1
+ assert list(h.connected_components(edges=True)) == [{0, 1, 2, 3}]
+ assert [len(g) for g in h.connected_component_subgraphs()] == [8]
+
+
+def test_s_components():
+ setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
+ h = Hypergraph(setsystem)
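+ # edges 1, 2, 3 pairwise share at least two nodes, while edge 0 shares only node 4 with edge 1;
+ # raising s therefore splits off more components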
+ assert len(list(h.s_components())) == 1
+ assert len(list(h.s_components(s=2))) == 2
+ assert len(list(h.s_components(s=3))) == 4
+ assert len(list(h.s_components(s=3, edges=False))) == 7
+ assert len(list(h.s_components(s=4, edges=False))) == 8
+
+
+def test_s_connected_components():
+ setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
+ h = Hypergraph(setsystem)
+ assert list(h.s_connected_components()) == [{0, 1, 2, 3}]
+ assert list(h.s_connected_components(s=2)) == [{1, 2, 3}]
+ assert list(h.s_connected_components(s=2, edges=False)) == [{5, 6}]
+
+
+def test_s_component_subgraphs():
+ setsystem = [{1, 2, 3, 4}, {4, 5, 6}, {5, 6, 7}, {5, 6, 8}]
+ h = Hypergraph(setsystem)
+ assert {5, 4}.issubset(
+ [len(g) for g in h.s_component_subgraphs(s=2, return_singletons=True)]
+ )
+ assert {3, 4}.issubset(
+ [len(g) for g in h.s_component_subgraphs(s=3, return_singletons=True)]
+ )
+
+
+def test_size(sbs):
+ h = Hypergraph(sbs.edgedict)
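+ # size(edge) counts the nodes in an edge; given a node subset, only the edge's nodes in that subset count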
+ assert h.size(sbs.edges.S) == len(sbs.edgedict[sbs.edges.S])
+ assert h.size("S", {"T2", "V"}) == 2
+ assert h.size("S", {"T1", "T2"}) == 1
+ assert h.size("S", {"T2"}) == 1
+ assert h.size("S", {"T1"}) == 0
+ assert h.size("S", {}) == 0
+
+
+def test_diameter(sbs):
+ h = Hypergraph(sbs.edgedict)
+ assert h.diameter() == 3
+
+
+def test_diameter_should_raise_error(sbs):
+ h = Hypergraph(sbs.edgedict)
+ with pytest.raises(Exception) as excinfo:
+ h.diameter(s=2)
+ assert "Hypergraph is not s-connected." in str(excinfo.value)
+
+
+def test_node_diameters(sbs):
+ h = Hypergraph(sbs.edgedict)
+ assert h.node_diameters()[0] == 3
+ assert h.node_diameters()[2] == [set(sbs.nodes)]
+
+
+def test_edge_diameter(sbs):
+ h = Hypergraph(sbs.edgedict)
+ assert h.edge_diameter() == 3
+ assert h.edge_diameters()[2] == [{"I", "L", "O", "P", "R", "S"}]
+ with pytest.raises(Exception) as excinfo:
+ h.edge_diameter(s=2)
+ assert "Hypergraph is not s-connected." in str(excinfo.value)
+
+
+def test_bipartite(sbs):
+ hg = Hypergraph(sbs.edgedict)
+ assert bipartite.is_bipartite(hg.bipartite())
+
+
+def test_dual(sbs):
+ h = Hypergraph(sbs.edgedict)
+ hd = h.dual()
+ assert isinstance(hd.nodes.property_store, PropertyStore)
+ assert isinstance(hd.edges.property_store, PropertyStore)
+ assert set(h.nodes) == set(hd.edges)
+ assert set(h.edges) == set(hd.nodes)
+ assert list(h.dataframe.columns) == list(hd.dataframe.columns)
+
+
+@pytest.mark.filterwarnings("ignore:No 3-path between ME and FN")
+def test_distance(lesmis):
+ h = Hypergraph(lesmis.edgedict)
+ assert h.distance("ME", "FN") == 2
+ assert h.distance("ME", "FN", s=2) == 3
+ assert h.distance("ME", "FN", s=3) == np.inf
+
+
+def test_edge_distance(lesmis):
+ h = Hypergraph(lesmis.edgedict)
+ assert h.edge_distance(1, 4) == 2
+ h2 = h.remove([5], 0)
+ assert h2.edge_distance(1, 4) == 3
+ assert h2.edge_distance(1, 4, s=2) == np.inf
+
+
+def test_incidence_dataframe(lesmis):
+ h = Hypergraph(lesmis.edgedict)
+ df = h.incidence_dataframe()
+ assert np.allclose(np.array(np.sum(df)), np.array([10, 9, 8, 4, 8, 3, 12, 6]))
+
+
+def test_static_hypergraph_s_connected_components(lesmis):
+ h = Hypergraph(lesmis.edgedict)
+ assert {7, 8} in list(h.s_connected_components(edges=True, s=4))
+
+
+def test_difference_on_same_hypergraph(lesmis):
+ hg = Hypergraph(lesmis.edgedict)
+ hg_copy = Hypergraph(lesmis.edgedict)
+
+ hg_diff = hg - hg_copy
+
+ assert len(hg_diff) == 0
+ assert len(hg_diff.nodes) == 0
+ assert len(hg_diff.edges) == 0
+ assert hg_diff.shape == (0, 0)
+ assert hg_diff.incidence_dict == {}
+
+
+def test_difference_on_empty_hypergraph(sbs):
+ hg_empty = Hypergraph()
+ hg = Hypergraph(sbs.edgedict)
+ hg_diff = hg - hg_empty
+
+ assert len(hg_diff) == len(sbs.nodes)
+ assert len(hg_diff.nodes) == len(sbs.nodes)
+ assert len(hg_diff.edges) == len(sbs.edges)
+ assert hg_diff.shape == (len(sbs.nodes), len(sbs.edges))
+
+ assert all(e in sbs.edges for e in hg_diff.edges)
+ assert all(n in sbs.nodes for n in hg_diff.nodes)
+
+
+def test_difference_on_similar_hypergraph(sbs):
+ hg = Hypergraph(sbs.edgedict)
+
+ # create a hypergraph based on hg, but remove the 'I' edge
+ a, c, e, k, t1, t2, v = ("A", "C", "E", "K", "T1", "T2", "V")
+ l, o, p, r, s = ("L", "O", "P", "R", "S")
+ data = OrderedDict(
+ [(p, {a, c, k}), (r, {a, e}), (s, {a, k, t2, v}), (l, {c, e}), (o, {t1, t2})]
+ )
+ hg_similar = Hypergraph(data, edge_col="edges", node_col="nodes")
+
+ # returns a hypergraph with one edge and two nodes
+ hg_diff = hg - hg_similar
+
+ assert len(hg_diff) == 2
+ assert len(hg_diff.nodes) == 2
+ assert len(hg_diff.edges) == 1
+ assert hg_diff.shape == (2, 1)
+
+ edges_diff = ["I"]
+ assert all(edge in edges_diff for edge in hg_diff.edges)
+
+ nodes_diff = ["K", "T2"]
+ assert all(node in nodes_diff for node in hg_diff.nodes)
diff --git a/hypernetx/classes/tests/test_hypergraph_factory_methods.py b/tests/classes/test_hypergraph_factory_methods.py
similarity index 94%
rename from hypernetx/classes/tests/test_hypergraph_factory_methods.py
rename to tests/classes/test_hypergraph_factory_methods.py
index 72ccea8d..316d8c1e 100644
--- a/hypernetx/classes/tests/test_hypergraph_factory_methods.py
+++ b/tests/classes/test_hypergraph_factory_methods.py
@@ -68,9 +68,7 @@ def test_from_dataframe_with_key():
assert "C" not in h.edges["a"]
-def test_from_dataframe_with_transforms_and_fillna(dataframe):
- df = dataframe.df
-
+def test_from_dataframe_with_transforms_and_fillna(sample_df):
# @pytest.mark.skip()
# def keymark.1(x):
# return x**2
@@ -81,10 +79,10 @@ def test_from_dataframe_with_transforms_and_fillna(dataframe):
# def keymark.3(x):
# return (x > 0) * x
- h = Hypergraph.from_incidence_dataframe(df)
+ h = Hypergraph.from_incidence_dataframe(sample_df)
assert "A" in h.edges["a"]
assert "A" not in h.edges["b"]
- h = Hypergraph.from_incidence_dataframe(df, fillna=1)
+ h = Hypergraph.from_incidence_dataframe(sample_df, fillna=1)
assert "A" in h.edges["b"]
# h = Hypergraph.from_incidence_dataframe(df, transforms=[key1, key2])
# assert "A" in h.edges["c"]
diff --git a/hypernetx/classes/tests/test_hypergraph_view.py b/tests/classes/test_hypergraph_view.py
similarity index 100%
rename from hypernetx/classes/tests/test_hypergraph_view.py
rename to tests/classes/test_hypergraph_view.py
diff --git a/hypernetx/classes/tests/test_incidence_store.py b/tests/classes/test_incidence_store.py
similarity index 100%
rename from hypernetx/classes/tests/test_incidence_store.py
rename to tests/classes/test_incidence_store.py
diff --git a/hypernetx/classes/tests/test_nx_hnx_agreement.py b/tests/classes/test_nx_hnx_agreement.py
similarity index 100%
rename from hypernetx/classes/tests/test_nx_hnx_agreement.py
rename to tests/classes/test_nx_hnx_agreement.py
diff --git a/hypernetx/classes/tests/test_property_store.py b/tests/classes/test_property_store.py
similarity index 100%
rename from hypernetx/classes/tests/test_property_store.py
rename to tests/classes/test_property_store.py
diff --git a/tox.ini b/tox.ini
index 1b8cfb70..27b3354f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -29,7 +29,7 @@ commands_pre =
poetry install --with test
commands =
env
- poetry run coverage run --source=hypernetx -m pytest --junitxml=pytest.xml
+ poetry run coverage run --source=hypernetx -m pytest tests --junitxml=pytest.xml
poetry run coverage report -m
[testenv:notebooks-tests]