diff --git a/datasets/doc/source/conf.py b/datasets/doc/source/conf.py
index 11285c375f96..c287ec318b5d 100644
--- a/datasets/doc/source/conf.py
+++ b/datasets/doc/source/conf.py
@@ -162,7 +162,7 @@ def find_test_modules(package_path):
.. raw:: html
-
+
"""
diff --git a/datasets/doc/source/how-to-visualize-label-distribution.ipynb b/datasets/doc/source/how-to-visualize-label-distribution.ipynb
new file mode 100644
index 000000000000..26db72047cff
--- /dev/null
+++ b/datasets/doc/source/how-to-visualize-label-distribution.ipynb
@@ -0,0 +1,1122 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "fb7e89caa9e6d772",
+ "metadata": {},
+ "source": [
+ "# Visualize Label Distribution"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "67c54a8d7c872547",
+ "metadata": {},
+ "source": [
+ "If you partition datasets to simulate heterogeneity through label skew and/or size skew, you can now effortlessly visualize the partitioned dataset using `flwr-datasets`.\n",
+ "\n",
+ "In this how-to guide, you'll learn how to visualize and compare partitioned datasets when applying different methods or parameters.\n",
+ "\n",
+ "All the described visualization functions are compatible with every ``Partitioner`` you can find in\n",
+ "[flwr_datasets.partitioner](https://flower.ai/docs/datasets/ref-api/flwr_datasets.partitioner.html#module-flwr_datasets.partitioner).\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "7220467f2c6ba432",
+ "metadata": {},
+ "source": [
+ "## Common Setup"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4e2ad2f0a0f7174d",
+ "metadata": {},
+ "source": [
+ "Install Flower Datasets library:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c46514b679f394ce",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "! pip install -q \"flwr-datasets[vision]\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d7ffd5b6836a5ee0",
+ "metadata": {},
+ "source": [
+ "## Plot Label Distribution"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "38fbbdfe6b930916",
+ "metadata": {},
+ "source": [
+ "### Bar plot"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "a5778edf97a7ee04",
+ "metadata": {},
+ "source": [
+ "Let's visualize the result of `DirichletPartitioner`.\n",
+ "We will create a `FederatedDataset` and assign `DirichletPartitioner` to the `train` split:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "42397afaaf50529e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from flwr_datasets import FederatedDataset\n",
+ "from flwr_datasets.partitioner import DirichletPartitioner\n",
+ "from flwr_datasets.visualization import plot_label_distributions\n",
+ "\n",
+ "\n",
+ "fds = FederatedDataset(\n",
+ " dataset=\"cifar10\",\n",
+ " partitioners={\n",
+ " \"train\": DirichletPartitioner(\n",
+ " num_partitions=10,\n",
+ " partition_by=\"label\",\n",
+ " alpha=0.3,\n",
+ " seed=42,\n",
+ " min_partition_size=0,\n",
+ " ),\n",
+ " },\n",
+ ")\n",
+ "\n",
+ "partitioner = fds.partitioners[\"train\"]"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "c4d5855ee8a605d3",
+ "metadata": {},
+ "source": [
+ "Once we have the partitioner with the dataset assigned, we are ready to pass it to the plotting function:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f75b48256ed68897",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAuYAAAHHCAYAAADzgZ1dAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy80BEi2AAAACXBIWXMAAA9hAAAPYQGoP6dpAABmH0lEQVR4nO3deVzN2f8H8Ndt31etpJKk7GTJ2oxGtvnaZgZjRhGGKcRYx6CQxr4TxiSGwRjb2BMxTJLImixTw3dQBpVC2/38/vDt83MV03a7n/J6Ph738ehzPudzzvvce+l9T+eej0wQBAFERERERKRSaqoOgIiIiIiImJgTEREREUkCE3MiIiIiIglgYk5EREREJAFMzImIiIiIJICJORERERGRBDAxJyIiIiKSACbmREREREQSwMSciIiIiEgCmJgT0b/y9fWFg4NDieoGBQVBJpMpN6BK4OnpiYYNG1Zomw4ODvD19a3QNktq48aNkMlkSElJUXpfb75fUlJSIJPJsHDhQqX3DVSf9yARvX+YmBP9i8KEpvCho6ODevXqISAgAKmpqUrvvzDJKHzo6enBzc0N3333HTIzMyusn/v37yMoKAgJCQn/Wvf58+cICgpCdHR0hfVfEWQyGQICAlQdhtJFR0crvCe0tbVhZWUFT09PzJ07F48ePaqQfqT6OgPSjo2IqKyYmBOV0KxZs7B582asXLkSbdu2xZo1a+Dh4YHnz59XSv9r1qzB5s2bsXjxYtSvXx8hISHo2rUrBEGokPbv37+P4ODgYhPz9evXIykpSTx+/vw5goODi02KvvvuO7x48aJCYqJ3GzNmDDZv3ox169Zh4sSJMDMzw8yZM+Hq6orjx48r1P3yyy/x4sUL2Nvbl7j9d73O7/Lm+0UZ+B4koupIQ9UBEFUV3bp1g7u7OwBg2LBhMDc3x+LFi7F3714MHDiwXG0/f/4cenp676zzySefoEaNGgCAkSNHol+/fti1axfOnj0LDw+PMvedn58PuVz+zjqampolbk9DQwMaGvyvpTJ06NABn3zyiULZpUuX0KVLF/Tr1w/Xr1+HjY0NAEBdXR3q6upKjSc7Oxv6+vqler8oA9+DRFRVccacqIw+/PBDAEBycrJY9tNPP6FFixbQ1dWFmZkZBgwYgHv37ilcV7h2OT4+Hh07doSenh6+/fbbcvWfm5uLGTNmoEWLFjA2Noa+vj46dOiAEydOKFzz+lrfpUuXwsnJCdra2li9ejVatmwJABgyZIi4RGLjxo0AFNcMp6SkwMLCAgAQHBws1g0KCgJQ/Pre/Px8zJ49W+zPwcEB3377LXJychTqOTg4oGfPnjh9+jRatWoFHR0d1KlTB5s2bSr18/M2e/fuRY8ePWBrawttbW04OTlh9uzZKCgoKLZ+fHw82rZtC11dXTg6OiIsLKxInZycHMycORN169aFtrY27OzsMGnSpCLje1NeXh6Cg4Ph7OwMHR0dmJubo3379oiMjCzz+Jo0aYKlS5ciPT0dK1euFMuLW2N+/vx5eHt7o0aNGuL4hg4dCuDfX2dfX18YGBjgzp076N69OwwNDTFo0CDx3Nu+k7BkyRLY29tDV1cXnTp1wtWrVxXOe3p6wtPTs8h11ek9SET0NpxSICqjO3fuAADMzc0BACEhIZg+fTo+++wzDBs2DI8ePcKKFSvQsWNHXLx4ESYmJuK1jx8/Rrdu3TBgwAB88cUXsLKyKlf/mZmZ+OGHHzBw4EAMHz4cz549w4YNG+Dt7Y1z586hadOmCteGh4fj5cuXGDFiBLS1tdGnTx88e/YMM2bMwIgRI9ChQwcAQNu2bYv0a2FhgTVr1mDUqFHo06cP+vbtCwBo3LjxW2MdNmwYIiIi8Mknn+Cbb75BbGwsQkNDkZiYiN27dyvUvX37Nj755BP4+fnBx8cHP/74I3x9fdGiRQs0aNCg1M/TmzZu3AgDAwOMHz8eBgYGOH78OGbMmIHMzEws
WLBAoe7Tp0/RvXt3fPbZZxg4cCB27NiBUaNGQUtLS0xg5XI5/vOf/+D06dMYMWIEXF1dceXKFSxZsgQ3b97Enj173hpLUFAQQkNDMWzYMLRq1QqZmZk4f/48Lly4gI8++qjMYyx8/o4ePYqQkJBi66SlpaFLly6wsLDAlClTYGJigpSUFOzatQtAyV7n/Px8eHt7o3379li4cOG//tVn06ZNePbsGfz9/fHy5UssW7YMH374Ia5cuVKqfwNV/T1IRPRWAhG9U3h4uABAOHbsmPDo0SPh3r17wrZt2wRzc3NBV1dX+O9//yukpKQI6urqQkhIiMK1V65cETQ0NBTKO3XqJAAQwsLCStT/zJkzBQBCUlKS8OjRIyE5OVlYu3atoK2tLVhZWQnZ2dlCfn6+kJOTo3Dd06dPBSsrK2Ho0KFiWXJysgBAMDIyEtLS0hTqx8XFCQCE8PDwIjH4+PgI9vb24vGjR48EAMLMmTPfGm+hhIQEAYAwbNgwhXoTJkwQAAjHjx8Xy+zt7QUAwqlTp8SytLQ0QVtbW/jmm2/e+TwJgiAAEPz9/d9Z5/nz50XKvvrqK0FPT094+fKlWFb4Oi1atEgsy8nJEZo2bSpYWloKubm5giAIwubNmwU1NTXh999/V2gzLCxMACCcOXNGYXw+Pj7icZMmTYQePXr867jedOLECQGA8Msvv7y1TpMmTQRTU1PxuPB9nJycLAiCIOzevVsAIMTFxb21jXe9zj4+PgIAYcqUKcWee/39Uvi+K/z3Uig2NlYAIIwbN04s69Spk9CpU6d/bVOq70EiovLgUhaiEvLy8oKFhQXs7OwwYMAAGBgYYPfu3ahZsyZ27doFuVyOzz77DP/884/4sLa2hrOzc5ElJdra2hgyZEip+ndxcYGFhQUcHR3x1VdfoW7dujhw4AD09PSgrq4OLS0tAK9mcJ88eYL8/Hy4u7vjwoULRdrq16+fuBRA2Q4ePAgAGD9+vEL5N998AwA4cOCAQrmbm5s4Yw+8mh11cXHBn3/+WSHx6Orqij8/e/YM//zzDzp06IDnz5/jxo0bCnU1NDTw1VdficdaWlr46quvkJaWhvj4eADAL7/8AldXV9SvX1/htS9cavTma/86ExMTXLt2Dbdu3aqQsb3OwMAAz549e2ffALB//37k5eWVuZ9Ro0aVuG7v3r1Rs2ZN8bhVq1Zo3bq1+B5RFqm9B4mI3oZLWYhKaNWqVahXrx40NDRgZWUFFxcXqKm9+mx769YtCIIAZ2fnYq9988twNWvWFBPpkvr1119hZGQETU1N1KpVC05OTgrnIyIisGjRIty4cUMh0XJ0dCzSVnFlyvLXX39BTU0NdevWVSi3traGiYkJ/vrrL4Xy2rVrF2nD1NQUT58+rZB4rl27hu+++w7Hjx8vst1kRkaGwrGtrS309fUVyurVqwfg1TrnNm3a4NatW0hMTHzrB520tLS3xjJr1iz06tUL9erVQ8OGDdG1a1d8+eWX71ySUVJZWVkwNDR86/lOnTqhX79+CA4OxpIlS+Dp6YnevXvj888/h7a2don60NDQQK1atUocU3H/PurVq4cdO3aUuI2ykNp7kIjobZiYE5VQq1atxF1Z3iSXyyGTyXDo0KFid74wMDBQOH591rakOnbsKO7K8qaffvoJvr6+6N27NyZOnAhLS0uoq6sjNDRUXIte3v7Lq6Q3fHnbziFCBWwLmZ6ejk6dOsHIyAizZs2Ck5MTdHR0cOHCBUyePPlfd6cpjlwuR6NGjbB48eJiz9vZ2b312o4dO+LOnTvYu3cvjh49ih9++AFLlixBWFgYhg0bVupYCuXl5eHmzZvvvEGSTCbDzp07cfbsWfz22284cuQIhg4dikWLFuHs2bNF3rPF0dbWFj+cVhSZTFbsa/22L+eWtu2SUOZ7kIjoXZiYE1UAJycnCIIAR0dHcUa1Mu3cuRN16tTBrl27FJKPmTNnlriN0twpsTR17e3tIZfLcevWLbi6
uorlqampSE9PL9W+2uUVHR2Nx48fY9euXejYsaNY/vrOOq+7f/++uAVgoZs3bwKAuEOIk5MTLl26hM6dO5fpbpNmZmYYMmQIhgwZgqysLHTs2BFBQUHlSsx37tyJFy9ewNvb+1/rtmnTBm3atEFISAi2bt2KQYMGYdu2bRg2bFiF3z2zuCU7N2/eVNjBxdTUtNglI2/OalfV9yAR0btwjTlRBejbty/U1dURHBxcZFZNEAQ8fvxYqf0XzvC93ndsbCxiYmJK3EZh8pmenv6vdQt33yhJ3e7duwMAli5dqlBeOMPco0ePEsdYXsU9T7m5uVi9enWx9fPz87F27VqFumvXroWFhQVatGgBAPjss8/w999/Y/369UWuf/HiBbKzs98az5vvCwMDA9StW/dft1l8l0uXLiEwMBCmpqbw9/d/a72nT58Wea8W7t5T2H9pXueS2LNnD/7++2/x+Ny5c4iNjUW3bt3EMicnJ9y4cUPh7qWXLl3CmTNnFNqqqu9BIqJ34Yw5UQVwcnLCnDlzMHXqVKSkpKB3794wNDREcnIydu/ejREjRmDChAlK679nz57YtWsX+vTpgx49eiA5ORlhYWFwc3NDVlZWicdgYmKCsLAwGBoaQl9fH61bty52Pbquri7c3Nywfft21KtXD2ZmZmjYsGGxSyeaNGkCHx8frFu3TlxKcu7cOURERKB379744IMPyj3+150/fx5z5swpUu7p6Ym2bdvC1NQUPj4+GDNmDGQyGTZv3vzWJQq2traYN28eUlJSUK9ePWzfvh0JCQlYt26d+L2BL7/8Ejt27MDIkSNx4sQJtGvXDgUFBbhx4wZ27NiBI0eOvHUJlJubGzw9PdGiRQuYmZnh/Pnz2LlzJwICAko01t9//x0vX75EQUEBHj9+jDNnzmDfvn0wNjbG7t27YW1t/dZrIyIisHr1avTp0wdOTk549uwZ1q9fDyMjIzGRLc3rXBJ169ZF+/btMWrUKOTk5GDp0qUwNzfHpEmTxDpDhw7F4sWL4e3tDT8/P6SlpSEsLAwNGjRQ+E6AlN+DRERlpqLdYIiqjMJt5t61rVyhX3/9VWjfvr2gr68v6OvrC/Xr1xf8/f2FpKQksU6nTp2EBg0alLj/wq3fHj169NY6crlcmDt3rmBvby9oa2sLzZo1E/bv3//WbesWLFhQbDt79+4V3NzcBA0NDYWtE99sRxAE4Y8//hBatGghaGlpKWxb9+ZWdYIgCHl5eUJwcLDg6OgoaGpqCnZ2dsLUqVMVticUhFdb1RW3feDbttB7E4C3PmbPni0IgiCcOXNGaNOmjaCrqyvY2toKkyZNEo4cOSIAEE6cOKHQZ4MGDYTz588LHh4ego6OjmBvby+sXLmySL+5ubnCvHnzhAYNGgja2tqCqamp0KJFCyE4OFjIyMhQGN/r2yXOmTNHaNWqlWBiYiLo6uoK9evXF0JCQsStGN+mcLvEwoempqZgYWEhdOzYUQgJCSmyFaYgFN0u8cKFC8LAgQOF2rVrC9ra2oKlpaXQs2dP4fz58wrXve119vHxEfT19YuN713vu0WLFgl2dnaCtra20KFDB+HSpUtFrv/pp5+EOnXqCFpaWkLTpk2FI0eOVJn3IBFRecgEgd9mISIiIiJSNa4xJyIiIiKSACbmREREREQSwMSciIiIiEgCmJgTEREREUkAE3MiIiIiIglgYk5EREREJAG8wVAFkcvluH//PgwNDSv8NtZERESkHIIg4NmzZ7C1tYWaGucrSbWYmFeQ+/fvw87OTtVhEBERURncu3cPtWrVUnUY9J5TaWJ+6tQpLFiwAPHx8Xjw4AF2796N3r17i+cFQcDMmTOxfv16pKeno127dlizZg2cnZ3FOk+ePMHo0aPx22+/QU1NDf369cOyZctgYGAg1rl8+TL8/f0RFxcHCwsLjB49WuEW0ADwyy+/YPr06UhJSYGzszPmzZsn3pa6JAwNDQG8+odtZGRUxmeEiIiIKlNm
Zibs7OzE3+NEqqTSxDw7OxtNmjTB0KFD0bdv3yLn58+fj+XLlyMiIgKOjo6YPn06vL29cf36dejo6AAABg0ahAcPHiAyMhJ5eXkYMmQIRowYga1btwJ49Q+uS5cu8PLyQlhYGK5cuYKhQ4fCxMQEI0aMAAD88ccfGDhwIEJDQ9GzZ09s3boVvXv3xoULF9CwYcMSjaVw+YqRkRETcyIioiqGy1BJCmSCIAiqDgJ49Q/i9RlzQRBga2uLb775BhMmTAAAZGRkwMrKChs3bsSAAQOQmJgINzc3xMXFwd3dHQBw+PBhdO/eHf/9739ha2uLNWvWYNq0aXj48CG0tLQAAFOmTMGePXtw48YNAED//v2RnZ2N/fv3i/G0adMGTZs2RVhYWIniz8zMhLGxMTIyMpiYExERVRH8/U1SItlvOSQnJ+Phw4fw8vISy4yNjdG6dWvExMQAAGJiYmBiYiIm5QDg5eUFNTU1xMbGinU6duwoJuUA4O3tjaSkJDx9+lSs83o/hXUK+yEiIiIiUjbJfvnz4cOHAAArKyuFcisrK/Hcw4cPYWlpqXBeQ0MDZmZmCnUcHR2LtFF4ztTUFA8fPnxnP8XJyclBTk6OeJyZmVma4RERERERKZDsjLnUhYaGwtjYWHxwRxYiIiIiKg/JJubW1tYAgNTUVIXy1NRU8Zy1tTXS0tIUzufn5+PJkycKdYpr4/U+3lan8Hxxpk6dioyMDPFx79690g6RiIiIiEgk2cTc0dER1tbWiIqKEssyMzMRGxsLDw8PAICHhwfS09MRHx8v1jl+/Djkcjlat24t1jl16hTy8vLEOpGRkXBxcYGpqalY5/V+CusU9lMcbW1tcQcW7sRCREREROWl0sQ8KysLCQkJSEhIAPDqC58JCQm4e/cuZDIZAgMDMWfOHOzbtw9XrlzB4MGDYWtrK+7c4urqiq5du2L48OE4d+4czpw5g4CAAAwYMAC2trYAgM8//xxaWlrw8/PDtWvXsH37dixbtgzjx48X4xg7diwOHz6MRYsW4caNGwgKCsL58+cREBBQ2U8JEREREb2vBBU6ceKEAKDIw8fHRxAEQZDL5cL06dMFKysrQVtbW+jcubOQlJSk0Mbjx4+FgQMHCgYGBoKRkZEwZMgQ4dmzZwp1Ll26JLRv317Q1tYWatasKXz//fdFYtmxY4dQr149QUtLS2jQoIFw4MCBUo0lIyNDACBkZGSU7kkgIiIileHvb5ISyexjXtVxH1QiIqKqh7+/SUoku8aciIiIiOh9wsSciIiIiEgCmJgTEREREUkAE3MiIiIiIglgYk5EREREJAFMzImIiIiIJEBD1QEQEREpW50N/ZTex59+vyq9DyKq3jhjTkREREQkAUzMiYiIiIgkgIk5EREREZEEMDEnIiIiIpIAJuZERERERBLAxJyIiIiISAKYmBMRERERSQATcyIiIiIiCWBiTkREREQkAUzMiYiIiIgkgIk5EREREZEEMDEnIiIiIpIAJuZERERERBLAxJyIiIiISAKYmBMRERERSQATcyIiIiIiCWBiTkREREQkAUzMiYiIiIgkQEPVARARESlb33pmqg6BiOhfccaciIiIiEgCmJgTEREREUkAE3MiIiIiIglgYk5EREREJAFMzImIiIiIJICJORERERGRBDAxJyIiIiKSACbmREREREQSwMSciIiIiEgCmJgTEREREUkAE3MiIiIiIglgYk5EREREJAFMzImIiIiIJICJORERERGRBDAxJyIiIiKSACbmREREREQSwMSciIiIiEgCmJgTEREREUkAE3MiIiIiIglgYk5EREREJAFMzImIiIiIJICJORERERGRBDAxJyIiIiKSAA1VB0BERKRsk9w7qDoEIqJ/xRlzIiIiIiIJYGJORERERCQBXMpCREREVAnkcjlyc3NVHQZVIk1NTairq5e4PhNzIiIiIiXLzc1FcnIy5HK5qkOhSmZiYgJra2vIZLJ/rcvEnIiIiEiJ
BEHAgwcPoK6uDjs7O6ipcSXx+0AQBDx//hxpaWkAABsbm3+9hok5ERERkRLl5+fj+fPnsLW1hZ6enqrDoUqkq6sLAEhLS4OlpeW/LmvhRzYiIiIiJSooKAAAaGlpqTgSUoXCD2N5eXn/WpeJOREREVElKMkaY6p+SvO6MzEnIiIiIpIAJuZEREREVCE2btwIExOTcrcjk8mwZ8+ecrdT1TAxJyIiIiKRr68vevfureow3ktMzImIiIiIJEDSiXlBQQGmT58OR0dH6OrqwsnJCbNnz4YgCGIdQRAwY8YM2NjYQFdXF15eXrh165ZCO0+ePMGgQYNgZGQEExMT+Pn5ISsrS6HO5cuX0aFDB+jo6MDOzg7z58+vlDESERERVRWLFy9Go0aNoK+vDzs7O3z99ddFcioA2LNnD5ydnaGjowNvb2/cu3dP4fzevXvRvHlz6OjooE6dOggODkZ+fn6xfebm5iIgIAA2NjbQ0dGBvb09QkNDlTI+VZN0Yj5v3jysWbMGK1euRGJiIubNm4f58+djxYoVYp358+dj+fLlCAsLQ2xsLPT19eHt7Y2XL1+KdQYNGoRr164hMjIS+/fvx6lTpzBixAjxfGZmJrp06QJ7e3vEx8djwYIFCAoKwrp16yp1vERERERSpqamhuXLl+PatWuIiIjA8ePHMWnSJIU6z58/R0hICDZt2oQzZ84gPT0dAwYMEM///vvvGDx4MMaOHYvr169j7dq12LhxI0JCQortc/ny5di3bx927NiBpKQkbNmyBQ4ODsocpspI+gZDf/zxB3r16oUePXoAABwcHPDzzz/j3LlzAF7Nli9duhTfffcdevXqBQDYtGkTrKyssGfPHgwYMACJiYk4fPgw4uLi4O7uDgBYsWIFunfvjoULF8LW1hZbtmxBbm4ufvzxR2hpaaFBgwZISEjA4sWLFRJ4IiIiovdZYGCg+LODgwPmzJmDkSNHYvXq1WJ5Xl4eVq5cidatWwMAIiIi4OrqinPnzqFVq1YIDg7GlClT4OPjAwCoU6cOZs+ejUmTJmHmzJlF+rx79y6cnZ3Rvn17yGQy2NvbK3eQKiTpGfO2bdsiKioKN2/eBABcunQJp0+fRrdu3QAAycnJePjwIby8vMRrjI2N0bp1a8TExAAAYmJiYGJiIiblAODl5QU1NTXExsaKdTp27Kiw8b+3tzeSkpLw9OlTpY+TiIiIqCo4duwYOnfujJo1a8LQ0BBffvklHj9+jOfPn4t1NDQ00LJlS/G4fv36MDExQWJiIoBX+dysWbNgYGAgPoYPH44HDx4otFPI19cXCQkJcHFxwZgxY3D06FHlD1RFJD1jPmXKFGRmZqJ+/fpQV1dHQUEBQkJCMGjQIADAw4cPAQBWVlYK11lZWYnnHj58CEtLS4XzGhoaMDMzU6jj6OhYpI3Cc6ampkViy8nJQU5OjnicmZlZnqESERERSVpKSgp69uyJUaNGISQkBGZmZjh9+jT8/PyQm5sr3uHy32RlZSE4OBh9+/Ytck5HR6dIWfPmzZGcnIxDhw7h2LFj+Oyzz+Dl5YWdO3eWe0xSI+nEfMeOHdiyZQu2bt0qLi8JDAyEra2t+OcPVQkNDUVwcLBKYyAiIiKqLPHx8ZDL5Vi0aBHU1F4tutixY0eRevn5+Th//jxatWoFAEhKSkJ6ejpcXV0BvEq0k5KSULdu3RL3bWRkhP79+6N///745JNP0LVrVzx58gRmZmYVMDLpkHRiPnHiREyZMkX8wkCjRo3w119/ITQ0FD4+PrC2tgYApKamwsbGRrwuNTUVTZs2BQBYW1sjLS1Nod38/Hw8efJEvN7a2hqpqakKdQqPC+u8aerUqRg/frx4nJmZCTs7u3KMloiIiEgaMjIykJCQoFBWo0YN5OXlYcWKFfj4449x5swZhIWFFblWU1MTo0ePxvLly6GhoYGAgAC0adNGTNRnzJiBnj17onbt2vjkk0+gpqaGS5cu4erVq5gzZ06R9hYv
XgwbGxs0a9YMampq+OWXX2BtbV0hNzKSGkmvMX/+/Ln4iayQuro65HI5AMDR0RHW1taIiooSz2dmZiI2NhYeHh4AAA8PD6SnpyM+Pl6sc/z4ccjlcvFLCR4eHjh16hTy8vLEOpGRkXBxcSl2GQsAaGtrw8jISOFBREREVB1ER0ejWbNmCo/Nmzdj8eLFmDdvHho2bIgtW7YUu22hnp4eJk+ejM8//xzt2rWDgYEBtm/fLp739vbG/v37cfToUbRs2RJt2rTBkiVL3vqlTkNDQ8yfPx/u7u5o2bIlUlJScPDgwSI5YnUgE17fFFxifH19cezYMaxduxYNGjTAxYsXMWLECAwdOhTz5s0D8GpLxe+//x4RERFwdHTE9OnTcfnyZVy/fl1cp9StWzekpqYiLCwMeXl5GDJkCNzd3bF161YArz4Vuri4oEuXLpg8eTKuXr2KoUOHYsmSJSXelSUzMxPGxsbIyMhgkk5EJDFpLzYpvQ9L3cFK74MqXmX8/n758iWSk5Ph6OhY7Bpqqt5K8/pLeinLihUrMH36dHz99ddIS0uDra0tvvrqK8yYMUOsM2nSJGRnZ2PEiBFIT09H+/btcfjwYYWBb9myBQEBAejcuTPU1NTQr18/LF++XDxvbGyMo0ePwt/fHy1atECNGjUwY8YMbpVIVY5sVBul9yGsOav0PoiIiN5Hkp4xr0o4Y05SwMScqHicMae34Yw5KVtpXv/qtziHiIiIiKgKYmJORERERCQBTMyJiIiIiCSAiTkRERERkQQwMSciIiIikgAm5kREREREEsDEnIiIiIhIApiYExEREZFKpaSkQCaTISEhQdWhqJSk7/xJREREVF1Vxk3hXleWG8R5enqiadOmWLp0acUHREVwxpyIiIiIykQQBOTn56s6jGqDiTkRERERFeHr64uTJ09i2bJlkMlkkMlk2LhxI2QyGQ4dOoQWLVpAW1sbp0+fhq+vL3r37q1wfWBgIDw9PcVjuVyO+fPno27dutDW1kbt2rUREhJSbN8FBQUYOnQo6tevj7t37ypxlNLCpSxEREREVMSyZctw8+ZNNGzYELNmzQIAXLt2DQAwZcoULFy4EHXq1IGpqWmJ2ps6dSrWr1+PJUuWoH379njw4AFu3LhRpF5OTg4GDhyIlJQU/P7777CwsKi4QUkcE3MiIiIiKsLY2BhaWlrQ09ODtbU1AIiJ9KxZs/DRRx+VuK1nz55h2bJlWLlyJXx8fAAATk5OaN++vUK9rKws9OjRAzk5OThx4gSMjY0raDRVA5eyEBEREVGpuLu7l6p+YmIicnJy0Llz53fWGzhwILKzs3H06NH3LikHmJgTERERUSnp6+srHKupqUEQBIWyvLw88WddXd0Stdu9e3dcvnwZMTEx5Q+yCmJiTkRERETF0tLSQkFBwb/Ws7CwwIMHDxTKXt+T3NnZGbq6uoiKinpnO6NGjcL333+P//znPzh58mSZYq7KuMaciIiIiIrl4OCA2NhYpKSkwMDAAHK5vNh6H374IRYsWIBNmzbBw8MDP/30E65evYpmzZoBAHR0dDB58mRMmjQJWlpaaNeuHR49eoRr167Bz89Poa3Ro0ejoKAAPXv2xKFDh4qsQ6/OOGNORERERMWaMGEC1NXV4ebmBgsLi7duXejt7Y3p06dj0qRJaNmyJZ49e4bBgwcr1Jk+fTq++eYbzJgxA66urujfvz/S0tKKbS8wMBDBwcHo3r07/vjjjwofl1TJhDcXBFGZZGZmwtjYGBkZGTAyMlJ1OPSeqoy7yJXlznFEqpb2YpPS+7DUHfzvlUhyKuP398uXL5GcnAxHR0fo6OgopQ+SrtK8/pwxJyIiIiKSACbmREREREQSwMSciIiIiEgCmJgTEREREUkAE3MiIiIiIglgYk5EREREJAFMzImIiIiIJICJORERERGRBDAxJyIiIiKSACbmRERERFQqvr6+6N279zvrODg4YOnSpZUST3WhoeoAiIiIiN5H
dTb0q9T+/vT7tVL7i4uLg76+fqX2WdUxMSciIiKiCmdhYaHqEKocLmUhIiIiomLt3LkTjRo1gq6uLszNzeHl5YXs7Gzx/MKFC2FjYwNzc3P4+/sjLy9PPPfmUhaZTIY1a9agW7du0NXVRZ06dbBz587KHI7kMTEnIiIioiIePHiAgQMHYujQoUhMTER0dDT69u0LQRAAACdOnMCdO3dw4sQJREREYOPGjdi4ceM725w+fTr69euHS5cuYdCgQRgwYAASExMrYTRVA5eyEBEREVERDx48QH5+Pvr27Qt7e3sAQKNGjcTzpqamWLlyJdTV1VG/fn306NEDUVFRGD58+Fvb/PTTTzFs2DAAwOzZsxEZGYkVK1Zg9erVyh1MFcEZcyIiIiIqokmTJujcuTMaNWqETz/9FOvXr8fTp0/F8w0aNIC6urp4bGNjg7S0tHe26eHhUeSYM+b/j4k5ERERERWhrq6OyMhIHDp0CG5ublixYgVcXFyQnJwMANDU1FSoL5PJIJfLVRFqtcHEnIiIiIiKJZPJ0K5dOwQHB+PixYvQ0tLC7t27y9ze2bNnixy7urqWN8xqg2vMiYiIiKiI2NhYREVFoUuXLrC0tERsbCwePXoEV1dXXL58uUxt/vLLL3B3d0f79u2xZcsWnDt3Dhs2bKjgyKsuJuZEREREVISRkRFOnTqFpUuXIjMzE/b29li0aBG6deuG7du3l6nN4OBgbNu2DV9//TVsbGzw888/w83NrYIjr7qYmBMRUbVn8aISOtGthD6oWqnsO3GWlqurKw4fPlzsueK2RXx9z3IASElJKVLH1tYWR48erYDoqieuMSciIiIikgAm5kREREREEsClLERERESkdIV3DKW344w5EREREZEEMDEnIiIiIpIAJuZERERERBLAxJyIiIiISAKYmBMRERERSQB3ZVGxCb8PV2r7CzusV2r7RERERFQxOGNOREREREV4enoiMDBQ1WG8VzhjTkRERKQCyv6r+Zv4V3Tp44w5ERERESldbm6uqkOQPCbmRERERFQsuVyOSZMmwczMDNbW1ggKChLP3b17F7169YKBgQGMjIzw2WefITU1VTwfFBSEpk2b4ocffoCjoyN0dHQAADt37kSjRo2gq6sLc3NzeHl5ITs7W7zuhx9+gKurK3R0dFC/fn2sXr260saralzKQkRERETFioiIwPjx4xEbG4uYmBj4+vqiXbt26Ny5s5iUnzx5Evn5+fD390f//v0RHR0tXn/79m38+uuv2LVrF9TV1fHgwQMMHDgQ8+fPR58+ffDs2TP8/vvvEAQBALBlyxbMmDEDK1euRLNmzXDx4kUMHz4c+vr68PHxUdGzUHmYmBMRERFRsRo3boyZM2cCAJydnbFy5UpERUUBAK5cuYLk5GTY2dkBADZt2oQGDRogLi4OLVu2BPBq+cqmTZtgYWEBALhw4QLy8/PRt29f2NvbAwAaNWok9jdz5kwsWrQIffv2BQA4Ojri+vXrWLt27XuRmHMpCxEREREVq3HjxgrHNjY2SEtLQ2JiIuzs7MSkHADc3NxgYmKCxMREscze3l5MygGgSZMm6Ny5Mxo1aoRPP/0U69evx9OnTwEA2dnZuHPnDvz8/GBgYCA+5syZgzt37ih5pNLAGXMiIiIiKpampqbCsUwmg1wuL/H1+vr6Csfq6uqIjIzEH3/8gaNHj2LFihWYNm0aYmNjoaenBwBYv349WrduXeS69wFnzImIiIioVFxdXXHv3j3cu3dPLLt+/TrS09Ph5ub2zmtlMhnatWuH4OBgXLx4EVpaWti9ezesrKxga2uLP//8E3Xr1lV4ODo6KntIksAZcyIiIiIqFS8vLzRq1AiDBg3C0qVLkZ+fj6+//hqdOnWCu7v7W6+LjY1FVFQUunTpAktLS8TGxuLRo0dwdXUFAAQHB2PMmDEwNjZG165dkZOTg/Pnz+Pp06cYP358ZQ1PZZiYExEREVGpyGQy7N27F6NHj0bH
jh2hpqaGrl27YsWKFe+8zsjICKdOncLSpUuRmZkJe3t7LFq0CN26dQMADBs2DHp6eliwYAEmTpwIfX19NGrU6L25A6lMKNyfhsolMzMTxsbGyMjIgJGRUYmvU/Zdv3iXr/eLbFQbpfchrDmr9D6IKprwZJPS+5CZDVZ6H1Txyvr7uzRevnyJ5ORkhb286f1Rmtefa8yJiIiIiCRA8ktZ/v77b0yePBmHDh3C8+fPUbduXYSHh4vrlwRBwMyZM7F+/Xqkp6ejXbt2WLNmDZydncU2njx5gtGjR+O3336Dmpoa+vXrh2XLlsHAwECsc/nyZfj7+yMuLg4WFhYYPXo0Jk2apPTxedU2VXofRERERCR9kp4xf/r0Kdq1awdNTU0cOnQI169fx6JFi2Bq+v/J7Pz587F8+XKEhYUhNjYW+vr68Pb2xsuXL8U6gwYNwrVr1xAZGYn9+/fj1KlTGDFihHg+MzMTXbp0gb29PeLj47FgwQIEBQVh3bp1lTpeIiIiInp/SXrGfN68ebCzs0N4eLhY9vp2OYIgYOnSpfjuu+/Qq1cvAK/uOmVlZYU9e/ZgwIABSExMxOHDhxEXFyfOsq9YsQLdu3fHwoULYWtriy1btiA3Nxc//vgjtLS00KBBAyQkJGDx4sUKCTwRERERkbJIesZ83759cHd3x6effgpLS0s0a9YM69f//5cZk5OT8fDhQ3h5eYllxsbGaN26NWJiYgAAMTExMDExUdi6x8vLC2pqaoiNjRXrdOzYEVpaWmIdb29vJCUliXejelNOTg4yMzMVHkREREREZSXpxPzPP/8U14sfOXIEo0aNwpgxYxAREQEAePjwIQDAyspK4TorKyvx3MOHD2FpaalwXkNDA2ZmZgp1imvj9T7eFBoaCmNjY/Hx+i1piYiIiIhKS9KJuVwuR/PmzTF37lw0a9YMI0aMwPDhwxEWFqbq0DB16lRkZGSIj9fvfEVEREREVFqSXmNuY2NT5Laurq6u+PXXXwEA1tbWAIDU1FTY2NiIdVJTU9G0aVOxTlpamkIb+fn5ePLkiXi9tbU1UlNTFeoUHhfWeZO2tja0tbXLODIiIqLS4X0KiKo/Sc+Yt2vXDklJSQplN2/ehL29PYBXXwS1trZGVFSUeD4zMxOxsbHw8PAAAHh4eCA9PR3x8fFinePHj0Mul6N169ZinVOnTiEvL0+sExkZCRcXF4UdYIiIiIiIlEXSifm4ceNw9uxZzJ07F7dv38bWrVuxbt06+Pv7A3h1O9jAwEDMmTMH+/btw5UrVzB48GDY2tqid+/eAF7NsHft2hXDhw/HuXPncObMGQQEBGDAgAGwtbUFAHz++efQ0tKCn58frl27hu3bt2PZsmUYP368qoZOREREpHKCIGDEiBEwMzODTCZDQkKCqkOq1iS9lKVly5bYvXs3pk6dilmzZsHR0RFLly7FoEGDxDqTJk1CdnY2RowYgfT0dLRv3x6HDx9WuOXpli1bEBAQgM6dO4s3GFq+fLl43tjYGEePHoW/vz9atGiBGjVqYMaMGdwqkYiIiJTm8F/Kv5Hh67razy/1NYcPH8bGjRsRHR2NOnXqoEaNGkqIjApJOjEHgJ49e6Jnz55vPS+TyTBr1izMmjXrrXXMzMywdevWd/bTuHFj/P7772WOk4iIiKi6uXPnDmxsbNC2bdtiz+fm5ipsN03lI/nEnKSPX0giIiKqfnx9fcUtqmUyGezt7eHg4ICGDRtCQ0MDP/30Exo1aoQTJ07g5MmTmDhxIi5dugQzMzP4+Phgzpw50NB4lWo+e/YMI0eOxJ49e2BkZIRJkyZh7969aNq0KZYuXarCUUqLpNeYExEREZFqLFu2DLNmzUKtWrXw4MEDxMXFAQAiIiKgpaWFM2fOICwsDH///Te6d++Oli1b4tKlS1izZg02bNiAOXPmiG2NHz8eZ86cwb59+xAZGYnff/8dFy5cUNXQJIsz5kRERERUhLGxMQwNDaGurq6wfbSz
szPmz///9erTpk2DnZ0dVq5cCZlMhvr16+P+/fuYPHkyZsyYgezsbERERGDr1q3o3LkzACA8PFzchIP+HxNzIiIiIiqxFi1aKBwnJibCw8MDMplMLGvXrh2ysrLw3//+F0+fPkVeXh5atWolnjc2NoaLi0ulxVxVMDEnIqJqT7is/D+ZyzwHK70PIinQ19dXdQjVFhNzomrE0b2mqkMgIqL3TOFd2QVBEGfNz5w5A0NDQ9SqVQumpqbQ1NREXFwcateuDQDIyMjAzZs30bFjR1WGLjlMzFWsuWVDVYdAREREVGZff/01li5ditGjRyMgIABJSUmYOXMmxo8fDzU1NRgaGsLHxwcTJ06EmZkZLC0tMXPmTKipqSksfyEm5ipn8ULJHegquX0iIiJ6r9WsWRMHDx7ExIkT0aRJE5iZmcHPzw/fffedWGfx4sUYOXIkevbsKW6XeO/ePYUbQhITcyIiIiKVKMudOCtbYGAgAgMDxePo6Ohi63Xq1Annzp17azuGhobYsmWLeJydnY3g4GDeZf0NZdrHvE6dOnj8+HGR8vT0dNSpU6fcQRERERFR9XHx4kX8/PPPuHPnDi5cuIBBgwYBAHr16qXiyKSlTDPmKSkpKCgoKFKek5ODv//+u9xBEREREVH1snDhQiQlJUFLSwstWrTA77//jho1aqg6LEkpVWK+b98+8ecjR47A2NhYPC4oKEBUVBQcHBwqLDgiIiIiqvqaNWuG+Ph4VYcheaVKzHv37g0AkMlk8PHxUTinqakJBwcHLFq0qMKCIyIiIiJ6X5QqMZfL5QAAR0dHxMXF8c8PREREREQVpExrzJOTkys6DiIiIiKi91qZt0uMiopCVFQU0tLSxJn0Qj/++GO5AyMiIiIiep+UKTEPDg7GrFmz4O7uDhsbG961iYiIiIionMqUmIeFhWHjxo348ssvKzoeIiIiIqL3UpluMJSbm4u2bdtWdCxEREREJHGenp4KdwOlilOmGfNhw4Zh69atmD59ekXHQ0RERPReSHuxqVL7s9QdXKn9UemVKTF/+fIl1q1bh2PHjqFx48bQ1NRUOL948eIKCY6IiIiI6H1RpqUsly9fRtOmTaGmpoarV6/i4sWL4iMhIaGCQyQiIiIiVcjOzsbgwYNhYGAAGxubIjeSfPr0KQYPHgxTU1Po6emhW7duuHXrlkKd9evXw87ODnp6eujTpw8WL14MExOTShxF1VGmGfMTJ05UdBxEREREJDETJ07EyZMnsXfvXlhaWuLbb7/FhQsX0LRpUwCAr68vbt26hX379sHIyAiTJ09G9+7dcf36dWhqauLMmTMYOXIk5s2bh//85z84duwYl0K/Q5n3MaeKIVy+oNT2ZZ5cT0ZERESll5WVhQ0bNuCnn35C586dAQARERGoVasWAIgJ+ZkzZ8RNQbZs2QI7Ozvs2bMHn376KVasWIFu3bphwoQJAIB69erhjz/+wP79+1UzKIkrU2L+wQcfvHPv8uPHj5c5ICIiIiJSvTt37iA3NxetW7cWy8zMzODi4gIASExMhIaGhsJ5c3NzuLi4IDExEQCQlJSEPn36KLTbqlUrJuZvUabEvPDPF4Xy8vKQkJCAq1evwsfHpyLiIiIiIiJ6r5QpMV+yZEmx5UFBQcjKyipXQERERESkek5OTtDU1ERsbCxq164N4NWXPW/evIlOnTrB1dUV+fn5iI2NFZeyPH78GElJSXBzcwMAuLi4IC4uTqHdN4/p/5VpV5a3+eKLL/Djjz9WZJNEREREpAIGBgbw8/PDxIkTcfz4cVy9ehW+vr5QU3uVPjo7O6NXr14YPnw4Tp8+jUuXLuGLL75AzZo10atXLwDA6NGjcfDgQSxevBi3bt3C2rVrcejQoXcuiX6fVWhiHhMTAx0dnYpskoiIiIhUZMGCBejQoQM+/vhjeHl5oX379mjRooV4Pjw8HC1atEDPnj3h4eEBQRBw8OBB8R437dq1Q1hYGBYvXowmTZrg8OHD
GDduHPPFtyjTUpa+ffsqHAuCgAcPHuD8+fPcAoeIiIioBKrCnTgNDAywefNmbN68WSybOHGi+LOpqSk2bXr3HUyHDx+O4cOHKxzXrVu34oOtBsqUmBsbGyscq6mpwcXFBbNmzUKXLl0qJDAiIiIiqvoWLlyIjz76CPr6+jh06BAiIiKwevVqVYclSWVKzMPDwys6DiIiIiKqhs6dO4f58+fj2bNnqFOnDpYvX45hw4apOixJKtcNhuLj48V9Khs0aIBmzZpVSFBUtRya0lHVIRAREZFE7dixQ9UhVBllSszT0tIwYMAAREdHw8TEBACQnp6ODz74ANu2bYOFhUVFxkhEREREVO2VaVeW0aNH49mzZ7h27RqePHmCJ0+e4OrVq8jMzMSYMWMqOkYiIiIiomqvTDPmhw8fxrFjx+Dq6iqWubm5YdWqVfzyJxERERFRGZRpxlwul4v7U75OU1MTcrm83EEREREREb1vypSYf/jhhxg7dizu378vlv39998YN24cOnfuXGHBERERERG9L8qUmK9cuRKZmZlwcHCAk5MTnJyc4OjoiMzMTKxYsaKiYyQiIiIiqvbKtMbczs4OFy5cwLFjx3Djxg0AgKurK7y8vCo0OCIiIiKSFk9PTzRt2hRLly5VdSjVTqkS8+PHjyMgIABnz56FkZERPvroI3z00UcAgIyMDDRo0ABhYWHo0KGDUoIlIiIiqi6EJ+++lX1Fk5kNrtT+qPRKtZRl6dKlGD58OIyMjIqcMzY2xldffYXFixdXWHBERERE9H7Jzc1VdQgqU6rE/NKlS+jatetbz3fp0gXx8fHlDoqIiIiIVC87OxuDBw+GgYEBbGxssGjRIoXzOTk5mDBhAmrWrAl9fX20bt0a0dHRCnVOnz6NDh06QFdXF3Z2dhgzZgyys7PF8w4ODpg9ezYGDx4MIyMjjBgxojKGJkmlSsxTU1OL3SaxkIaGBh49elTuoIiIiIhI9SZOnIiTJ09i7969OHr0KKKjo3HhwgXxfEBAAGJiYrBt2zZcvnwZn376Kbp27Ypbt24BAO7cuYOuXbuiX79+uHz5MrZv347Tp08jICBAoZ+FCxeiSZMmuHjxIqZPn16pY5SSUq0xr1mzJq5evYq6desWe/7y5cuwsbGpkMCIiIiISHWysrKwYcMG/PTTT+J22BEREahVqxYA4O7duwgPD8fdu3dha2sLAJgwYQIOHz6M8PBwzJ07F6GhoRg0aBACAwMBAM7Ozli+fDk6deqENWvWQEdHB8Crrbi/+eabyh+kxJQqMe/evTumT5+Orl27ik9koRcvXmDmzJno2bNnhQZIRERERJXvzp07yM3NRevWrcUyMzMzuLi4AACuXLmCgoIC1KtXT+G6nJwcmJubA3i1DPry5cvYsmWLeF4QBMjlciQnJ4t3kXd3d1f2cKqEUiXm3333HXbt2oV69eohICBAfGFu3LiBVatWoaCgANOmTVNKoERERO8zR/eaqg6BSEFWVhbU1dURHx8PdXV1hXMGBgZina+++gpjxowpcn3t2rXFn/X19ZUbbBVRqsTcysoKf/zxB0aNGoWpU6dCEAQAgEwmg7e3N1atWgUrKyulBEpERERElcfJyQmampqIjY0Vk+inT5/i5s2b6NSpE5o1a4aCggKkpaW9davs5s2b4/r1629dBk2KSn2DIXt7exw8eBBPnz7F7du3IQgCnJ2dYWpqqoz4iIiIiEgFDAwM4Ofnh4kTJ8Lc3ByWlpaYNm0a1NRe7R1Sr149DBo0CIMHD8aiRYvQrFkzPHr0CFFRUWjcuDF69OiByZMno02bNggICMCwYcOgr6+P69evIzIyEitXrlTxCKWnTHf+BABTU1O0bNmyImMhIiIiem9UhRv+LFiwAFlZWfj4449haGiIb775BhkZGeL58PBwzJkzB9988w3+/vtv1KhRA23atBG/c9i4cWOcPHkS06ZNQ4cOHSAIApycnNC/f39VDUnSypyYExEREVH1
ZmBggM2bN2Pz5s1i2cSJE8WfNTU1ERwcjODg4Le20bJlSxw9evSt51NSUiok1uqgVPuYExERERGRcjAxJyIiIiKSACbmREREREQSwDXmRNVI33pmqg6BiIiIyogz5kREREREEsDEnIiIiIhIApiYExERERFJANeYE5HkyEa1UXofwpqzSu+DiIioNDhjTkREREQkAUzMiYiIiIgkgEtZiIiIiFRAHh1Yqf2peS6t1P6CgoKwZ88eJCQkVGq/VVmVmjH//vvvIZPJEBgYKJa9fPkS/v7+MDc3h4GBAfr164fU1FSF6+7evYsePXpAT08PlpaWmDhxIvLz8xXqREdHo3nz5tDW1kbdunWxcePGShgREREREdErVSYxj4uLw9q1a9G4cWOF8nHjxuG3337DL7/8gpMnT+L+/fvo27eveL6goAA9evRAbm4u/vjjD0RERGDjxo2YMWOGWCc5ORk9evTABx98gISEBAQGBmLYsGE4cuRIpY2PiIiISGrkcjnmz5+PunXrQltbG7Vr10ZISAgAYPLkyahXrx709PRQp04dTJ8+HXl5eQCAjRs3Ijg4GJcuXYJMJoNMJuOkZwlUiaUsWVlZGDRoENavX485c+aI5RkZGdiwYQO2bt2KDz/8EAAQHh4OV1dXnD17Fm3atMHRo0dx/fp1HDt2DFZWVmjatClmz56NyZMnIygoCFpaWggLC4OjoyMWLVoEAHB1dcXp06exZMkSeHt7q2TMRERERKo2depUrF+/HkuWLEH79u3x4MED3LhxAwBgaGiIjRs3wtbWFleuXMHw4cNhaGiISZMmoX///rh69SoOHz6MY8eOAQCMjY1VOZQqoUok5v7+/ujRowe8vLwUEvP4+Hjk5eXBy8tLLKtfvz5q166NmJgYtGnTBjExMWjUqBGsrKzEOt7e3hg1ahSuXbuGZs2aISYmRqGNwjqvL5l5U05ODnJycsTjzMzMChgpEVUndTb0U2r7f/r9qtT2iej99uzZMyxbtgwrV66Ej48PAMDJyQnt27cHAHz33XdiXQcHB0yYMAHbtm3DpEmToKurCwMDA2hoaMDa2lol8VdFkk/Mt23bhgsXLiAuLq7IuYcPH0JLSwsmJiYK5VZWVnj48KFY5/WkvPB84bl31cnMzMSLFy+gq6tbpO/Q0FAEBweXeVxEREREUpaYmIicnBx07ty52PPbt2/H8uXLcefOHWRlZSE/Px9GRkaVHGX1Iuk15vfu3cPYsWOxZcsW6OjoqDocBVOnTkVGRob4uHfvnqpDIiIiIqowxU1MFoqJicGgQYPQvXt37N+/HxcvXsS0adOQm5tbiRFWP5JOzOPj45GWlobmzZtDQ0MDGhoaOHnyJJYvXw4NDQ1YWVkhNzcX6enpCtelpqaKfzaxtrYusktL4fG/1TEyMnrrm1JbWxtGRkYKDyIiIqLqwtnZGbq6uoiKiipy7o8//oC9vT2mTZsGd3d3ODs746+//lKoo6WlhYKCgsoKt1qQ9FKWzp0748qVKwplQ4YMQf369TF58mTY2dlBU1MTUVFR6Nfv1VrOpKQk3L17Fx4eHgAADw8PhISEIC0tDZaWlgCAyMhIGBkZwc3NTaxz8OBBhX4iIyPFNoiIiIjeNzo6Opg8eTImTZoELS0ttGvXDo8ePcK1a9fg7OyMu3fvYtu2bWjZsiUOHDiA3bt3K1zv4OCA5ORkJCQkoFatWjA0NIS2traKRlM1SDoxNzQ0RMOGDRXK9PX1YW5uLpb7+flh/PjxMDMzg5GREUaPHg0PDw+0adMGANClSxe4ubnhyy+/xPz58/Hw4UN899138Pf3F98cI0eOxMqVKzFp0iQMHToUx48fx44dO3DgwIHKHTARERG9Nyr7hj9lMX36dGhoaGDGjBm4f/8+bGxsMHLkSPj5+WHcuHEICAhATk4OevTogenTpyMoKEi8tl+/fti1axc++OADpKenIzw8HL6+viobS1Ug6cS8JJYsWQI1NTX069cP
OTk58Pb2xurVq8Xz6urq2L9/P0aNGgUPDw/o6+vDx8cHs2bNEus4OjriwIEDGDduHJYtW4ZatWrhhx9+4FaJRERE9F5TU1PDtGnTMG3atCLn5s+fj/nz5yuUvb6jnba2Nnbu3KnsEKuVKpeYR0dHKxzr6Ohg1apVWLVq1Vuvsbe3L7JU5U2enp64ePFiRYRIRERERFRqkv7yJxERERHR+4KJORERERGRBFS5pSxEVP05utdUdQhERESVjjPmREREREQSwMSciIiIiEgCmJgTEREREUkAE3MiIiIiIgnglz+JSHL61jNTdQhERESVjok5EUmOV21TVYdARPTe8/T0RNOmTbF06dJizzs4OCAwMFDhbp8lERQUhD179iAhIaHcMVY3TMyJiIiIVODFtO6V2p9uyLvvgl5acXFx0NfXr9A233dMzImIqNrLibyp9D50PZXeBZGkWFhYvPN8Xl4eNDU1Kyma6oFf/iQiIiKiYuXn5yMgIADGxsaoUaMGpk+fDkEQALxayvL6MheZTIY1a9bgP//5D/T19RESEgIA+P7772FlZQVDQ0P4+fnh5cuXqhhKlcDEnIiIiIiKFRERAQ0NDZw7dw7Lli3D4sWL8cMPP7y1flBQEPr06YMrV65g6NCh2LFjB4KCgjB37lycP38eNjY2WL16dSWOoGrhUhYiIiXh7jJEVNXZ2dlhyZIlkMlkcHFxwZUrV7BkyRIMHz682Pqff/45hgwZIh4PGDAAfn5+8PPzAwDMmTMHx44d46z5W3DGnIiIiIiK1aZNG8hkMvHYw8MDt27dQkFBQbH13d3dFY4TExPRunVrhTIPD4+KD7Sa4Iw5EZGScNtHInrfcJeW8mFiTkSkJM0tG6o6BCKicomNjVU4Pnv2LJydnaGurl6i611dXREbG4vBgwcrtEHF41IWIiIiIirW3bt3MX78eCQlJeHnn3/GihUrMHbs2BJfP3bsWPz4448IDw/HzZs3MXPmTFy7dk2JEVdtnDEnIiIiUoGKvuGPMgwePBgvXrxAq1atoK6ujrFjx2LEiBElvr5///64c+cOJk2ahJcvX6Jfv34YNWoUjhw5osSoqy4m5kRERERURHR0tPjzmjVripxPSUlROC7c3/xN3377Lb799luFsnnz5pU7vuqIS1mIiIiIiCSAiTkRERERkQQwMSciIiIikgCuMSciyeE2g0RE9D7ijDkRERERkQQwMSciIiIikgAm5kREREREEsA15iqWE3lTqe3reiq1eSIiIiKqIJwxJyIiIiKSACbmRERERPRe2LhxI0xMTN5ZJygoCE2bNhWPfX190bt3b6XGVYhLWYiIiIhU4FbrBpXan3PstUrtD3iVCAcGBiI9Pb3S+y6rCRMmYPTo0Srpm4k5EREREdH/GBgYwMDAQCV9cykLERFVe/899pfSH0TV0eHDh9G+fXuYmJjA3NwcPXv2xJ07dwAA0dHRkMlkCrPhCQkJkMlkSElJQXR0NIYMGYKMjAzIZDLIZDIEBQUBAJ4+fYrBgwfD1NQUenp66NatG27duiW2U7jkZP/+/XBxcYGenh4++eQTPH/+HBEREXBwcICpqSnGjBmDgoIC8bp/a7fQnj174OzsDB0dHXh7e+PevXviuTeXsrxJLpcjNDQUjo6O0NXVRZMmTbBz584yPsOKOGNORKQkFi+U3IGuktsnUgLZqDZK70NYc1bpfbwvsrOzMX78eDRu3BhZWVmYMWMG+vTpg4SEhH+9tm3btli6dClmzJiBpKQkABBnon19fXHr1i3s27cPRkZGmDx5Mrp3747r169DU1MTAPD8+XMsX74c27Ztw7Nnz9C3b1/06dMHJiYmOHjwIP7880/069cP7dq1Q//+/UvVbkhICDZt2gQtLS18/fXXGDBgAM6cOVOi5yQ0NBQ//fQTwsLC4OzsjFOnTuGLL76AhYUFOnXqVNqnWAETcyIiIqo0h6Z0VHUIVAr9+vVTOP7xxx9hYWGB69ev/+u1
WlpaMDY2hkwmg7W1tVhemDifOXMGbdu2BQBs2bIFdnZ22LNnDz799FMAQF5eHtasWQMnJycAwCeffILNmzcjNTUVBgYGcHNzwwcffIATJ06gf//+pWp35cqVaN26NQAgIiICrq6uOHfuHFq1avXOMeXk5GDu3Lk4duwYPDw8AAB16tTB6dOnsXbtWibmRERERKQct27dwowZMxAbG4t//vkHcrkcAHD37l3o6emVqc3ExERoaGiIiTEAmJubw8XFBYmJiWKZnp6emJQDgJWVFRwcHBTWf1tZWSEtLa1U7WpoaKBly5bicf369WFiYoLExMR/Tcxv376N58+f46OPPlIoz83NRbNmzUr6FLwVE3MiIiIiKtbHH38Me3t7rF+/Hra2tpDL5WjYsCFyc3PFBFkQBLF+Xl5ehfVduPSkkEwmK7as8MNCZcjKygIAHDhwADVr1lQ4p62tXe72mZgTERFRpWlu2VDVIVAJPX78GElJSVi/fj06dOgAADh9+rR43sLCAgDw4MEDmJqaAkCRtedaWloKX84EAFdXV+Tn5yM2NlZcclLYl5ubW5njLWm7+fn5OH/+vDg7npSUhPT0dLi6uv5rH25ubtDW1sbdu3fLvWylOEzMiYiIqNLUiL2g/E48Byu/j/eAqakpzM3NsW7dOtjY2ODu3buYMmWKeL5u3bqws7NDUFAQQkJCcPPmTSxatEihDQcHB2RlZSEqKgpNmjSBnp4enJ2d0atXLwwfPhxr166FoaEhpkyZgpo1a6JXr15ljrek7WpqamL06NFYvnw5NDQ0EBAQgDZt2vzrMhYAMDQ0xIQJEzBu3DjI5XK0b98eGRkZOHPmDIyMjODj41Pm+AEm5kREREQqoYob/pSGmpoatm3bhjFjxqBhw4ZwcXHB8uXL4enpCeBVgvvzzz9j1KhRaNy4MVq2bIk5c+aIX7IEXu3MMnLkSPTv3x+PHz/GzJkzERQUhPDwcIwdOxY9e/ZEbm4uOnbsiIMHDxZZqlJaJWlXT08PkydPxueff46///4bHTp0wIYNG0rcx+zZs2FhYYHQ0FD8+eefMDExQfPmzfHtt9+WK3YAkAmvLwyiMsvMzISxsTEyMjJgZGRU4uteTOuuxKgA3ZCDSm0fAA7/NUnpfXS1n6/0PqqDCb8PV3ofCzusV3ofaS82Kb0PS13lz6gJT5Q7DpkZZwVLqjLusKjsJKvOhn7/Xqmc/vT7Vel9KPv3HlC6331l/f1dGi9fvkRycjIcHR2ho6OjlD5Iukrz+vMGQ0REREREEsDEnIiIiIhIApiYExERERFJABNzIiIiIiIJYGJORERERCQBTMyJiIiIiCSAiTkRERERkQQwMSciIiIikgAm5kRERERUaikpKZDJZEhISCh3W76+vujdu3e526nqNFQdABERERFVPXZ2dnjw4AFq1Kih6lCqDSbmRERERCqwVeZSqf19LiRVaHvq6uqwtrZ+63lBEFBQUAANDaabJcWlLERERERUrMOHD6N9+/YwMTGBubk5evbsiTt37gAoupQlOjoaMpkMhw4dQosWLaCtrY3Tp08jKCgITZs2xdq1a2FnZwc9PT189tlnyMjIKFO/r/e9a9cufPDBB9DT00OTJk0QExOj0M7p06fRoUMH6Orqws7ODmPGjEF2dnbFP1EVhB9hiKqRSe4dVB0CERFVI9nZ2Rg/fjwaN26MrKwszJgxA3369HnnuvIpU6Zg4cKFqFOnDkxNTREdHY3bt29jx44d+O2335CZmQk/Pz98/fXX2LJlS6n7VVP7/3nladOmYeHChXB2dsa0adMwcOBA3L59GxoaGrhz5w66du2KOXPm4Mcff8SjR48QEBCAgIAAhIeHV/RTVSGYmBMRERFRsfr166dw/OOPP8LCwgLXr1+HgYFBsdfMmjULH330kULZy5cvsWnTJtSsWRMAsGLFCvTo0QOLFi0qdjnMu/pt2LChWD5hwgT06NEDABAcHIwGDRrg9u3bqF+/
PkJDQzFo0CAEBgYCAJydnbF8+XJ06tQJa9asgY6OTumejErAxJyIiKgK6FvPTNUh0Hvo1q1bmDFjBmJjY/HPP/9ALpcDAO7evQs3N7dir3F3dy9SVrt2bTEpBwAPDw/I5XIkJSUVm5i/q9/XE/PGjRuLP9vY2AAA0tLSUL9+fVy6dAmXL19WmJUXBAFyuRzJyclwdXUtzVNRKZiYExEREVGxPv74Y9jb22P9+vWwtbWFXC5Hw4YNkZub+9Zr9PX1K61fTU1N8WeZTAYAYhKflZWFr776CmPGjCnSfu3atcsdozIwMSciIiKiIh4/foykpCSsX78eHTq8+g7T6dOny9TW3bt3cf/+fdja2gIAzp49CzU1Nbi4FN2ZpqL6bd68Oa5fv466deuWKWZVYGJORERElea/x/5Seh/OIUrv4r1gamoKc3NzrFu3DjY2Nrh79y6mTJlSprZ0dHTg4+ODhQsXIjMzE2PGjMFnn31W7DKWiup38uTJaNOmDQICAjBs2DDo6+vj+vXriIyMxMqVK8s0DmVjYk5UjVi8qIROdCuhDyIiUjk1NTVs27YNY8aMQcOGDeHi4oLly5fD09Oz1G3VrVsXffv2Rffu3fHkyRP07NkTq1evVmq/jRs3xsmTJzFt2jR06NABgiDAyckJ/fv3L3X8lYWJOREREZEKVPQNf5TBy8sL169fVygTBKHYnz09PRWO3zRq1CiMGjWq2HMbN24sVb8ODg5F+jIxMSlS1rJlSxw9evStMUkNbzBERERERCQBkk7MQ0ND0bJlSxgaGsLS0hK9e/dGUpLip8uXL1/C398f5ubmMDAwQL9+/ZCamqpQ5+7du+jRowf09PRgaWmJiRMnIj8/X6FOdHQ0mjdvDm1tbdStW7fIJzciIiIiImWS9FKWkydPwt/fHy1btkR+fj6+/fZbdOnSBdevXxe34hk3bhwOHDiAX375BcbGxggICEDfvn1x5swZAEBBQQF69OgBa2tr/PHHH3jw4AEGDx4MTU1NzJ07FwCQnJyMHj16YOTIkdiyZQuioqIwbNgw2NjYwNvbW2XjJyot4fIFpfch8xys9D6IiKj6CAoKQlBQkKrDqBIknZgfPnxY4Xjjxo2wtLREfHw8OnbsiIyMDGzYsAFbt27Fhx9+CAAIDw+Hq6srzp49izZt2uDo0aO4fv06jh07BisrKzRt2hSzZ8/G5MmTERQUBC0tLYSFhcHR0RGLFi0CALi6uuL06dNYsmQJE3MiIiIiqhSSTszflJGRAQAwM3t197P4+Hjk5eXBy8tLrFO/fn3Url0bMTExaNOmDWJiYtCoUSNYWVmJdby9vTFq1Chcu3YNzZo1Q0xMjEIbhXUKb+FanJycHOTk5IjHmZmZFTFEonLJibyp9D50PZXeBRER0XtJ0mvMXyeXyxEYGIh27dqJt2J9+PAhtLS0YGJiolDXysoKDx8+FOu8npQXni889646mZmZePGi+P3nQkNDYWxsLD7s7OzKPUYiIiKqvt61YwlVX6V53atMYu7v74+rV69i27Ztqg4FADB16lRkZGSIj3v37qk6JCIiIpIgdXV1AHjnbeyp+nr+/DkAQFNT81/rVomlLAEBAdi/fz9OnTqFWrVqieXW1tbIzc1Fenq6wqx5amqqeCcpa2trnDt3TqG9wl1bXq/z5k4uqampMDIygq5u8XdT0dbWhra2drnHRkRERNWbhoYG9PT08OjRI2hqakJNrcrMi1I5CIKA58+fIy0tDSYmJuIHtHeRdGIuCAJGjx6N3bt3Izo6Go6OjgrnW7RoAU1NTURFRaFfv34AgKSkJNy9exceHh4AAA8PD4SEhCAtLQ2WlpYAgMjISBgZGcHNzU2sc/DgQYW2IyMjxTaIiN5XdTb0U3off/r9qvQ+iFRJJpPBxsYGycnJ+Ouvv1QdDlUyExMTcTL430g6Mff398fWrVuxd+9eGBoaimvCjY2NoaurC2NjY/j5+WH8+PEwMzODkZERRo8e
DQ8PD7Rp0wYA0KVLF7i5ueHLL7/E/Pnz8fDhQ3z33Xfw9/cXZ7xHjhyJlStXYtKkSRg6dCiOHz+OHTt24MCBAyobOxEREVUfWlpacHZ25nKW94ympmaJZsoLSToxX7NmDYBXt3h9XXh4OHx9fQEAS5YsgZqaGvr164ecnBx4e3tj9erVYl11dXXs378fo0aNgoeHB/T19eHj44NZs2aJdRwdHXHgwAGMGzcOy5YtQ61atfDDDz9wq0Qieu/1rWem6hCIqg01NTXo6OioOgySMEkn5iX5FquOjg5WrVqFVatWvbWOvb19kaUqb/L09MTFixdLHSMRERERUUXgtw+IiIiIiCSAiTkRERERkQQwMSciIiIikgAm5kREREREEsDEnIiIiIhIApiYExERERFJABNzIiIiIiIJkPQ+5u+D/x5T7q15nUOU2jwRERERVRDOmBMRERERSQATcyIiIiIiCeBSFiq3TuuuKr8TLskhIiKiao4z5kREREREEsAZcyIieiuv2qaqDoGI6L3BGXMiIiIiIgngjDmV2+65d5Tex+dcY05ERETVHBNzIiJ6q+aWDVUdAhHRe4NLWYiIiIiIJICJORERERGRBDAxJyIiIiKSAK4xJyKit7J4UQmd6FZCH0REVQBnzImIiIiIJICJORERERGRBDAxJyIiIiKSACbmREREREQSwC9/EhERUaWJO5ev9D6cld4DkXJwxpyIiIiISAI4Y070P3U29FN6H3/6/ar0PoiIiKhq4ow5EREREZEEMDEnIiIiIpIALmUh+p++9cxUHQIRERG9x5iYE/2PV21TVYdARERE7zEuZSEiIiIikgDOmBNVI/899pfS+3AOUXoXJCHC5QtK70PmOVjpfRARVQWcMSciIiIikgAm5kREREREEsDEnIiIiIhIArjGXMXizuUrtX1npbZORERERBWFiTkRSY7Fi0roRLcS+iCqQJPcO6g6BCJSMibmRNWIsv8CA1TOX2FeLtqm9D50Q7gTCBERSQvXmBMRERERSQBnzImIiKqAGrHK31Me3FOeSKU4Y05EREREJAFMzImIiIiIJIBLWYiIiKqAnMibSu9D11PpXRDRO3DGnIiIiIhIApiYExERERFJABNzIiIiIiIJYGJORERERCQB/PIn0f80t2yo6hCIiIjoPcbEnOh/LF5UQie6ldAHERERVUlMzImI6K22fXBI6X18LixVeh9ERFUB15gTEREREUkAE3MiIiIiIgngUhYiIiURLl9Qavsyz8FKbZ+IiCoXE3MiIiVR9i3Ueft0IqLqhYk5ERFRFfDfY38pvQ/nEKV3QUTvwMSciIioCog7l6/0PpyV3gMRvQu//ElEREREJAFMzImIiIiIJICJORERERGRBHCNORGRkij7y3r8oh4RUfXCxPwNq1atwoIFC/Dw4UM0adIEK1asQKtWrVQdFhERlQO/OElEVQGXsrxm+/btGD9+PGbOnIkLFy6gSZMm8Pb2RlpamqpDIyIiIqJqjjPmr1m8eDGGDx+OIUOGAADCwsJw4MAB/Pjjj5gyZYqKoyOiqkbZs7ScoSUiql6YmP9Pbm4u4uPjMXXqVLFMTU0NXl5eiImJUWFkRO8f3kiFiIjeR0zM/+eff/5BQUEBrKysFMqtrKxw48aNIvVzcnKQk5MjHmdkZAAAMjMzS9XvcxSUIdqSK208ZaHsMQCVM44XczcrvQ/dGb2V2n51eS2yCqrHOPjvu2Q4jpKpDmMApDeOwrqCICgrHKISY2JeRqGhoQgODi5Sbmdnp4Jo3m64sbGqQ6gQ1WUcWFT1x1FtXotqMI7q8lpwHNJRHcYAlG0cz549g3E1GT9VXUzM/6dGjRpQV1dHamqqQnlqaiqsra2L1J86dSrGjx8vHsvlcjx58gTm5uaQyWRKiTEzMxN2dna4d+8ejIyMlNJHZagO46gOYwCqxziqwxgAjkNKqsMYgOoxjsoYgyAIePbsGWxtbZXSPlFpMDH/Hy0tLbRo
0QJRUVHo3bs3gFfJdlRUFAICAorU19bWhra2tkKZiYlJJUQKGBkZVdn/ZF9XHcZRHcYAVI9xVIcxAByHlFSHMQDVYxzKHgNnykkqmJi/Zvz48fDx8YG7uztatWqFpUuXIjs7W9ylhYiIiIhIWZiYv6Z///549OgRZsyYgYcPH6Jp06Y4fPhwkS+EEhERERFVNCbmbwgICCh26YoUaGtrY+bMmUWW0FQ11WEc1WEMQPUYR3UYA8BxSEl1GANQPcZRHcZAVBoygfsDERERERGpnJqqAyAiIiIiIibmRERERESSwMSciIiIiEgCmJgTEREREUkAE/MqYtWqVXBwcICOjg5at26Nc+fOqTqkUjt16hQ+/vhj2NraQiaTYc+ePaoOqdRCQ0PRsmVLGBoawtLSEr1790ZSUpKqwyqVNWvWoHHjxuINOzw8PHDo0CFVh1Vu33//PWQyGQIDA1UdSqkEBQVBJpMpPOrXr6/qsErt77//xhdffAFzc3Po6uqiUaNGOH/+vKrDKhUHB4cir4VMJoO/v7+qQyuxgoICTJ8+HY6OjtDV1YWTkxNmz56NqrjPw7NnzxAYGAh7e3vo6uqibdu2iIuLU3VYRErFxLwK2L59O8aPH4+ZM2fiwoULaNKkCby9vZGWlqbq0EolOzsbTZo0wapVq1QdSpmdPHkS/v7+OHv2LCIjI5GXl4cuXbogOztb1aGVWK1atfD9998jPj4e58+fx4cffohevXrh2rVrqg6tzOLi4rB27Vo0btxY1aGUSYMGDfDgwQPxcfr0aVWHVCpPnz5Fu3btoKmpiUOHDuH69etYtGgRTE1NVR1aqcTFxSm8DpGRkQCATz/9VMWRldy8efOwZs0arFy5EomJiZg3bx7mz5+PFStWqDq0Uhs2bBgiIyOxefNmXLlyBV26dIGXlxf+/vtvVYdGpDwCSV6rVq0Ef39/8bigoECwtbUVQkNDVRhV+QAQdu/ereowyi0tLU0AIJw8eVLVoZSLqamp8MMPP6g6jDJ59uyZ4OzsLERGRgqdOnUSxo4dq+qQSmXmzJlCkyZNVB1GuUyePFlo3769qsOocGPHjhWcnJwEuVyu6lBKrEePHsLQoUMVyvr27SsMGjRIRRGVzfPnzwV1dXVh//79CuXNmzcXpk2bpqKoiJSPM+YSl5ubi/j4eHh5eYllampq8PLyQkxMjAojIwDIyMgAAJiZmak4krIpKCjAtm3bkJ2dDQ8PD1WHUyb+/v7o0aOHwr+RqubWrVuwtbVFnTp1MGjQINy9e1fVIZXKvn374O7ujk8//RSWlpZo1qwZ1q9fr+qwyiU3Nxc//fQThg4dCplMpupwSqxt27aIiorCzZs3AQCXLl3C6dOn0a1bNxVHVjr5+fkoKCiAjo6OQrmurm6V+4sSUWnwzp8S988//6CgoABWVlYK5VZWVrhx44aKoiIAkMvlCAwMRLt27dCwYUNVh1MqV65cgYeHB16+fAkDAwPs3r0bbm5uqg6r1LZt24YLFy5U6XWnrVu3xsaNG+Hi4oIHDx4gODgYHTp0wNWrV2FoaKjq8Erkzz//xJo1azB+/Hh8++23iIuLw5gxY6ClpQUfHx9Vh1cme/bsQXp6Onx9fVUdSqlMmTIFmZmZqF+/PtTV1VFQUICQkBAMGjRI1aGViqGhITw8PDB79my4urrCysoKP//8M2JiYlC3bl1Vh0ekNEzMicrI398fV69erZKzNy4uLkhISEBGRgZ27twJHx8fnDx5skol5/fu3cPYsWMRGRlZZFatKnl9JrNx48Zo3bo17O3tsWPHDvj5+akwspKTy+Vwd3fH3LlzAQDNmjXD1atXERYWVmUT8w0bNqBbt26wtbVVdSilsmPHDmzZsgVbt25FgwYNkJCQgMDAQNja2la512Lz5s0YOnQoatasCXV1dTRv3hwDBw5EfHy8qkMjUhom5hJXo0YNqKurIzU1VaE8NTUV1tbWKoqKAgICsH//fpw6dQq1atVS
dTilpqWlJc46tWjRAnFxcVi2bBnWrl2r4shKLj4+HmlpaWjevLlYVlBQgFOnTmHlypXIycmBurq6CiMsGxMTE9SrVw+3b99WdSglZmNjU+RDnaurK3799VcVRVQ+f/31F44dO4Zdu3apOpRSmzhxIqZMmYIBAwYAABo1aoS//voLoaGhVS4xd3JywsmTJ5GdnY3MzEzY2Nigf//+qFOnjqpDI1IarjGXOC0tLbRo0QJRUVFimVwuR1RUVJVdE1yVCYKAgIAA7N69G8ePH4ejo6OqQ6oQcrkcOTk5qg6jVDp37owrV64gISFBfLi7u2PQoEFISEiokkk5AGRlZeHOnTuwsbFRdSgl1q5duyLbht68eRP29vYqiqh8wsPDYWlpiR49eqg6lFJ7/vw51NQUf7Wrq6tDLperKKLy09fXh42NDZ4+fYojR46gV69eqg6JSGk4Y14FjB8/Hj4+PnB3d0erVq2wdOlSZGdnY8iQIaoOrVSysrIUZgGTk5ORkJAAMzMz1K5dW4WRlZy/vz+2bt2KvXv3wtDQEA8fPgQAGBsbQ1dXV8XRlczUqVPRrVs31K5dG8+ePcPWrVsRHR2NI0eOqDq0UjE0NCyytl9fXx/m5uZVas3/hAkT8PHHH8Pe3h7379/HzJkzoa6ujoEDB6o6tBIbN24c2rZti7lz5+Kzzz7DuXPnsG7dOqxbt07VoZWaXC5HeHg4fHx8oKFR9X5FfvzxxwgJCUHt2rXRoEEDXLx4EYsXL8bQoUNVHVqpHTlyBIIgwMXFBbdv38bEiRNRv379Kve7j6hUVL0tDJXMihUrhNq1awtaWlpCq1athLNnz6o6pFI7ceKEAKDIw8fHR9WhlVhx8QMQwsPDVR1aiQ0dOlSwt7cXtLS0BAsLC6Fz587C0aNHVR1WhaiK2yX2799fsLGxEbS0tISaNWsK/fv3F27fvq3qsErtt99+Exo2bChoa2sL9evXF9atW6fqkMrkyJEjAgAhKSlJ1aGUSWZmpjB27Fihdu3ago6OjlCnTh1h2rRpQk5OjqpDK7Xt27cLderUEbS0tARra2vB399fSE9PV3VYREolE4QqeDswIiIiIqJqhmvMiYiIiIgkgIk5EREREZEEMDEnIiIiIpIAJuZERERERBLAxJyIiIiISAKYmBMRERERSQATcyIiIiIiCWBiTkTvtZSUFMhkMiQkJLyznqenJwIDAyslJiIiej8xMSciyfH19YVMJoNMJoOWlhbq1q2LWbNmIT8/v9zt9u7dW6HMzs4ODx48QMOGDQEA0dHRkMlkSE9PV6i3a9cuzJ49u1z9/5s3PyQUHhc+DA0N0aBBA/j7++PWrVtKjYWIiCofE3MikqSuXbviwYMHuHXrFr755hsEBQVhwYIFZWqroKAAcrm82HPq6uqwtraGhobGO9swMzODoaFhmfovr2PHjuHBgwe4dOkS5s6di8TERDRp0gRRUVEqiYeIiJSDiTkRSZK2tjasra1hb2+PUaNGwcvLC/v27QMALF68GI0aNYK+vj7s7Ozw9ddfIysrS7x248aNMDExwb59++Dm5gZtbW0MHToUERER2Lt3rzgDHR0drTBLnZKSgg8++AAAYGpqCplMBl9fXwBFl7I8ffoUgwcPhqmpKfT09NCtWzeFWezCGI4cOQJXV1cYGBiIHzZKy9zcHNbW1qhTpw569eqFY8eOoXXr1vDz80NBQUEZnl0iIpIiJuZEVCXo6uoiNzcXAKCmpobly5fj2rVriIiIwPHjxzFp0iSF+s+fP8e8efPwww8/4Nq1a1i+fDk+++wzMTl+8OAB2rZtq3CNnZ0dfv31VwBAUlISHjx4gGXLlhUbj6+vL86fP499+/YhJiYGgiCge/fuyMvLU4hh4cKF2Lx5M06dOoW7d+9iwoQJ5X4u1NTUMHbsWPz111+Ij48vd3tERCQN7/7bLRGRigmCgKioKBw5cgSjR48GAIWZawcHB8yZMwcjR47E6tWrxfK8vDys
Xr0aTZo0Ect0dXWRk5MDa2vrYvtSV1eHmZkZAMDS0hImJibF1rt16xb27duHM2fOiMn9li1bYGdnhz179uDTTz8VYwgLC4OTkxMAICAgALNmzSrbE/GG+vXrA3i1Dr1Vq1YV0iYREakWE3MikqT9+/fDwMAAeXl5kMvl+PzzzxEUFATg1Zrr0NBQ3LhxA5mZmcjPz8fLly/x/Plz6OnpAQC0tLTQuHFjpcSWmJgIDQ0NtG7dWiwzNzeHi4sLEhMTxTI9PT0xKQcAGxsbpKWlVUgMgiAAAGQyWYW0R0REqselLEQkSR988AESEhJw69YtvHjxAhEREdDX10dKSgp69uyJxo0b49dff0V8fDxWrVoFAOJSF+DV7Liqk1ZNTU2FY5lMJibU5VX4AcDR0bFC2iMiItXjjDkRSZK+vj7q1q1bpDw+Ph5yuRyLFi2CmtqruYUdO3aUqE0tLa1//bKklpYWALyznqurK/Lz8xEbGysuZXn8+DGSkpLg5uZWoljKQy6XY/ny5XB0dESzZs2U3h8REVUOzpgTUZVSt25d5OXlYcWKFfjzzz+xefNmhIWFlehaBwcHXL58GUlJSfjnn38UvqhZyN7eHjKZDPv378ejR48Udnsp5OzsjF69emH48OE4ffo0Ll26hC+++AI1a9ZEr169yj3GNz1+/BgPHz7En3/+iX379sHLywvnzp3Dhg0boK6uXuH9ERGRajAxJ6IqpUmTJli8eDHmzZuHhg0bYsuWLQgNDS3RtcOHD4eLiwvc3d1hYWGBM2fOFKlTs2ZNBAcHY8qUKbCyskJAQECxbYWHh6NFixbo2bMnPDw8IAgCDh48WGT5SkXw8vKCjY0NGjVqhClTpsDV1RWXL18Wt3YkIqLqQSZU1IJHIiIiIiIqM86YExERERFJABNzIiIiIiIJYGJORERERCQBTMyJiIiIiCSAiTkRERERkQQwMSciIiIikgAm5kREREREEsDEnIiIiIhIApiYExERERFJABNzIiIiIiIJYGJORERERCQBTMyJiIiIiCTg/wD1w8OR9+ukgwAAAABJRU5ErkJggg==",
+ "text/plain": [
+ "
"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, ax, df = plot_label_distributions(\n",
+ " partitioner,\n",
+ " label_name=\"label\",\n",
+ " plot_type=\"bar\",\n",
+ " size_unit=\"absolute\",\n",
+ " partition_id_axis=\"x\",\n",
+ " legend=True,\n",
+ " verbose_labels=True,\n",
+ " title=\"Per Partition Labels Distribution\",\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "be05badab744d9f7",
+ "metadata": {},
+ "source": [
+ "You can configure many details directly using the function parameters. The ones that can interest you the most are:\n",
+ "\n",
+ "* `size_unit` to have the sizes normalized such that they sum up to 1 and express the fraction of the data in each partition,\n",
+ "* `legend` and `verbose_labels` in case the dataset has more descriptive names and not numbers,\n",
 + "* `cmap` to change the colors of the bars (for an overview of the available colors, have a look at [link](https://matplotlib.org/stable/users/explain/colors/colormaps.html); check out `cmap=\"tab20b\"`) \n",
+ "\n",
+ " And for even greater control, you can specify `plot_kwargs` and `legend_kwargs` as `Dict`, which will be further passed to the `plot` and `legend` functions."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3dbf6dc4ede79f05",
+ "metadata": {},
+ "source": [
+ "You can also inspect the exact numbers that were used to create this plot. Three objects are returned (see reference [here](https://flower.ai/docs/datasets/ref-api/flwr_datasets.visualization.plot_label_distributions.html#flwr_datasets.visualization.plot_label_distributions)). Let's inspect the returned DataFrame."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f6edd14d8b260e9e",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "
airplane
\n",
+ "
automobile
\n",
+ "
bird
\n",
+ "
cat
\n",
+ "
deer
\n",
+ "
dog
\n",
+ "
frog
\n",
+ "
horse
\n",
+ "
ship
\n",
+ "
truck
\n",
+ "
\n",
+ "
\n",
+ "
Partition ID
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ " \n",
+ " \n",
+ "
\n",
+ "
0
\n",
+ "
817
\n",
+ "
794
\n",
+ "
1462
\n",
+ "
2123
\n",
+ "
432
\n",
+ "
25
\n",
+ "
456
\n",
+ "
384
\n",
+ "
14
\n",
+ "
9
\n",
+ "
\n",
+ "
\n",
+ "
1
\n",
+ "
1416
\n",
+ "
6
\n",
+ "
97
\n",
+ "
5
\n",
+ "
3
\n",
+ "
0
\n",
+ "
3409
\n",
+ "
0
\n",
+ "
3
\n",
+ "
868
\n",
+ "
\n",
+ "
\n",
+ "
2
\n",
+ "
0
\n",
+ "
4
\n",
+ "
11
\n",
+ "
2
\n",
+ "
454
\n",
+ "
3
\n",
+ "
511
\n",
+ "
15
\n",
+ "
84
\n",
+ "
21
\n",
+ "
\n",
+ "
\n",
+ "
3
\n",
+ "
762
\n",
+ "
159
\n",
+ "
1100
\n",
+ "
51
\n",
+ "
120
\n",
+ "
166
\n",
+ "
2
\n",
+ "
1982
\n",
+ "
1351
\n",
+ "
2175
\n",
+ "
\n",
+ "
\n",
+ "
4
\n",
+ "
2
\n",
+ "
43
\n",
+ "
714
\n",
+ "
2
\n",
+ "
19
\n",
+ "
2400
\n",
+ "
425
\n",
+ "
1
\n",
+ "
151
\n",
+ "
477
\n",
+ "
\n",
+ "
\n",
+ "
5
\n",
+ "
67
\n",
+ "
79
\n",
+ "
170
\n",
+ "
25
\n",
+ "
2552
\n",
+ "
477
\n",
+ "
27
\n",
+ "
44
\n",
+ "
590
\n",
+ "
0
\n",
+ "
\n",
+ "
\n",
+ "
6
\n",
+ "
422
\n",
+ "
2
\n",
+ "
4
\n",
+ "
486
\n",
+ "
380
\n",
+ "
92
\n",
+ "
90
\n",
+ "
380
\n",
+ "
50
\n",
+ "
6
\n",
+ "
\n",
+ "
\n",
+ "
7
\n",
+ "
122
\n",
+ "
2811
\n",
+ "
597
\n",
+ "
2174
\n",
+ "
1038
\n",
+ "
1727
\n",
+ "
1
\n",
+ "
682
\n",
+ "
515
\n",
+ "
4
\n",
+ "
\n",
+ "
\n",
+ "
8
\n",
+ "
256
\n",
+ "
29
\n",
+ "
342
\n",
+ "
75
\n",
+ "
1
\n",
+ "
84
\n",
+ "
8
\n",
+ "
1511
\n",
+ "
2240
\n",
+ "
1417
\n",
+ "
\n",
+ "
\n",
+ "
9
\n",
+ "
1136
\n",
+ "
1073
\n",
+ "
503
\n",
+ "
57
\n",
+ "
1
\n",
+ "
26
\n",
+ "
71
\n",
+ "
1
\n",
+ "
2
\n",
+ "
23
\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " airplane automobile bird cat deer dog frog horse ship \\\n",
+ "Partition ID \n",
+ "0 817 794 1462 2123 432 25 456 384 14 \n",
+ "1 1416 6 97 5 3 0 3409 0 3 \n",
+ "2 0 4 11 2 454 3 511 15 84 \n",
+ "3 762 159 1100 51 120 166 2 1982 1351 \n",
+ "4 2 43 714 2 19 2400 425 1 151 \n",
+ "5 67 79 170 25 2552 477 27 44 590 \n",
+ "6 422 2 4 486 380 92 90 380 50 \n",
+ "7 122 2811 597 2174 1038 1727 1 682 515 \n",
+ "8 256 29 342 75 1 84 8 1511 2240 \n",
+ "9 1136 1073 503 57 1 26 71 1 2 \n",
+ "\n",
+ " truck \n",
+ "Partition ID \n",
+ "0 9 \n",
+ "1 868 \n",
+ "2 21 \n",
+ "3 2175 \n",
+ "4 477 \n",
+ "5 0 \n",
+ "6 6 \n",
+ "7 4 \n",
+ "8 1417 \n",
+ "9 23 "
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "df"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "2902213a",
+ "metadata": {},
+ "source": [
 + "Each row represents a unique partition ID, and the columns represent unique labels (either in the verbose version if `verbose_labels=True`, or typically `int` values otherwise).\n",
 + "Note that you can index the DataFrame with `df.loc[partition_id, label_id]` to get the number of samples in `partition_id` for the specified `label_id`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8ffe4039",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "714"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "df.loc[4, \"bird\"]"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "2e6c17af529a668f",
+ "metadata": {},
+ "source": [
 + "Let's see a plot with `size_unit=\"percent\"`, which is another excellent way to understand the partitions. In this mode, the number of datapoints for each class in a given partition is normalized, so the values sum up to 100."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "a241894a47f3cc9f",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAtQAAAHHCAYAAACfh89YAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy80BEi2AAAACXBIWXMAAA9hAAAPYQGoP6dpAABgdklEQVR4nO3deVxU5f///+ew76KkLIqIior7brikJe/MpVxLzRJyKwOX3MtccE1Tc0stK7dssXJr00zT0gy3MEtzS9NPuWUqggkC5/eHP+fbCBowDAP4uN9uc7txrnPOdb0OTPnk4ppzTIZhGAIAAACQKw72LgAAAAAozAjUAAAAgBUI1AAAAIAVCNQAAACAFQjUAAAAgBUI1AAAAIAVCNQAAACAFQjUAAAAgBUI1AAAAIAVCNRAERcdHa1y5cpl69jx48fLZDLZtqB80KJFC1WvXj1P+yxXrpyio6PztM/sWrp0qUwmk06ePGnzsW5/v5w8eVImk0kzZsyw+dhS0XkPAri3EKhRpN0KIrdebm5uqlSpkmJjY3Xu3Dmbj38rHNx6eXh4qGrVqnr55ZeVmJiYZ+P8+eefGj9+vBISEv7z2GvXrmn8+PHaunVrno2fF0wmk2JjY+1dhs1t3brV4j3h6uoqf39/tWjRQlOmTNGFCxfyZJyC+nOWCnZtAJAbBGrcEyZMmKAVK1Zo/vz5aty4sRYuXKiIiAhdu3YtX8ZfuHChVqxYoVmzZqlKlSqaPHmyHnnkERmGkSf9//nnn4qLi8syUC9evFiHDx82b1+7dk1xcXFZhpmXX35Z//zzT57UhLsbOHCgVqxYoTfffFPDhw9XiRIlNG7cOIWHh2vLli0Wxz799NP6559/FBISku3+7/Zzvpvb3y+2wHsQQFHjZO8CgPzQunVr1a9fX5LUp08f+fn5adasWVq3bp26d+9uVd/Xrl2Th4fHXY/p0qWL7rvvPknSc889p86dO2v16tX64YcfFBERkeux09LSlJGRcddjnJ2ds92fk5OTnJz430J+aNasmbp06WLRtn//fj388MPq3LmzDh48qMDAQEmSo6OjHB0dbVpPcnKyPD09c/R+sQXegwAKI2aocU966KGHJEknTpwwt7377ruqV6+e3N3dVaJECXXr1k2nT5+2OO/W2ty9e/fqgQcekIeHh1566SWrxk9NTdXYsWNVr149FStWTJ6enmrWrJm++eYbi3P+vZZ19uzZqlChglxdXbVgwQI1aNBAkvTMM8+YlxIsXbpUkuWa2JMnT6pkyZKSpLi4OPOx48ePl5T1+tW0tDRNnDjRPF65cuX00ksvKSUlxeK4cuXKqV27dtq+fbsaNmwoNzc3lS9fXsuXL8/x9+dO1q1bp7Zt2yooKEiurq6qUKGCJk6cqPT09CyP37t3rxo3bix3d3eFhoZq0aJFmY5JSUnRuHHjVLFiRbm6uio4OFgjRozIdH23u3HjhuLi4hQWFiY3Nzf5+fmpadOm2rRpU66vr1atWpo9e7YuX76s+fPnm9uzWkO9Z88etWrVSvfdd5/5+nr16iXpv3/O0dHR8vLy0vHjx9WmTRt5e3urR48e5n13WnP/2muvKSQkRO7u7mrevLl+/vlni/0tWrRQixYtMp1XlN6DAJAVpgFwTzp+/Lgkyc/PT5I0efJkjRkzRk888YT69OmjCxcuaN68eXrggQf0448/ytfX13zuxYsX1bp1a3Xr1k1PPfWU/P39rRo/MTFRb731lrp3766+ffvq6tWrevvtt9WqVSvt2rVLtWvXtjh3yZIlun79uvr16ydXV1d17NhRV69e1dixY9WvXz81a9ZMktS4ceNM45YsWVILFy5U//791bFjR3Xq1EmSVLNmzTvW2qdPHy1btkxdunTR0KFDFR8fr6lTp+rQoUNas2aNxbHHjh1Tly5d1Lt3b0VFRemdd95RdHS06tWrp2rVquX4+3S7pUuXysvLS0OGDJGXl5e2bNmi
sWPHKjExUa+++qrFsZcuXVKbNm30xBNPqHv37lq1apX69+8vFxcXc/DMyMjQY489pu3bt6tfv34KDw/XgQMH9Nprr+nIkSNau3btHWsZP368pk6dqj59+qhhw4ZKTEzUnj17tG/fPv3vf//L9TXe+v599dVXmjx5cpbHnD9/Xg8//LBKliypUaNGydfXVydPntTq1aslZe/nnJaWplatWqlp06aaMWPGf/6VZfny5bp69apiYmJ0/fp1zZkzRw899JAOHDiQo/8GCvt7EACyZABF2JIlSwxJxtdff21cuHDBOH36tPHBBx8Yfn5+hru7u/F///d/xsmTJw1HR0dj8uTJFuceOHDAcHJysmhv3ry5IclYtGhRtsYfN26cIck4fPiwceHCBePEiRPGG2+8Ybi6uhr+/v5GcnKykZaWZqSkpFicd+nSJcPf39/o1auXue3EiROGJMPHx8c4f/68xfG7d+82JBlLlizJVENUVJQREhJi3r5w4YIhyRg3btwd670lISHBkGT06dPH4rhhw4YZkowtW7aY20JCQgxJxrfffmtuO3/+vOHq6moMHTr0rt8nwzAMSUZMTMxdj7l27Vqmtmeffdbw8PAwrl+/bm679XOaOXOmuS0lJcWoXbu2UapUKSM1NdUwDMNYsWKF4eDgYHz33XcWfS5atMiQZOzYscPi+qKioszbtWrVMtq2bfuf13W7b775xpBkfPTRR3c8platWkbx4sXN27fexydOnDAMwzDWrFljSDJ27959xz7u9nOOiooyJBmjRo3Kct+/3y+33ne3/nu5JT4+3pBkvPDCC+a25s2bG82bN//PPgvqexAAcoslH7gnREZGqmTJkgoODla3bt3k5eWlNWvWqHTp0lq9erUyMjL0xBNP6K+//jK/AgICFBYWlmnphaurq5555pkcjV+5cmWVLFlSoaGhevbZZ1WxYkV9/vnn8vDwkKOjo1xcXCTdnDH9+++/lZaWpvr162vfvn2Z+urcubP5T+a29sUXX0iShgwZYtE+dOhQSdLnn39u0V61alXzDLl0czaycuXK+u233/KkHnd3d/PXV69e1V9//aVmzZrp2rVr+vXXXy2OdXJy0rPPPmvednFx0bPPPqvz589r7969kqSPPvpI4eHhqlKlisXP/taSnNt/9v/m6+urX375RUePHs2Ta/s3Ly8vXb169a5jS9Jnn32mGzdu5Hqc/v37Z/vYDh06qHTp0ubthg0bqlGjRub3iK0UtPcgAGSFJR+4J7z++uuqVKmSnJyc5O/vr8qVK8vB4ebvk0ePHpVhGAoLC8vy3Ns/pFW6dGlzAM6uTz75RD4+PnJ2dlaZMmVUoUIFi/3Lli3TzJkz9euvv1oEpNDQ0Ex9ZdVmK7///rscHBxUsWJFi/aAgAD5+vrq999/t2gvW7Zspj6KFy+uS5cu5Uk9v/zyi15++WVt2bIl020Hr1y5YrEdFBQkT09Pi7ZKlSpJurmO9/7779fRo0d16NChO/6Ccv78+TvWMmHCBLVv316VKlVS9erV9cgjj+jpp5++69KF7EpKSpK3t/cd9zdv3lydO3dWXFycXnvtNbVo0UIdOnTQk08+KVdX12yN4eTkpDJlymS7pqz++6hUqZJWrVqV7T5yo6C9BwEgKwRq3BMaNmxovsvH7TIyMmQymfTll19meScFLy8vi+1/z5Jm1wMPPGC+y8ft3n33XUVHR6tDhw4aPny4SpUqJUdHR02dOtW81tra8a2V3Qdt3OlOFEYe3B7w8uXLat68uXx8fDRhwgRVqFBBbm5u2rdvn0aOHPmfdzvJSkZGhmrUqKFZs2ZluT84OPiO5z7wwAM6fvy41q1bp6+++kpvvfWWXnvtNS1atEh9+vTJcS233LhxQ0eOHLnrg2lMJpM+/vhj/fDDD/r000+1ceNG9erVSzNnztQPP/yQ6T2bFVdXV/MvlXnFZDJl+bO+04dGc9p3dtjyPQgAd0Kgxj2vQoUKMgxDoaGh
5hnM/PTxxx+rfPnyWr16tUVoGDduXLb7yMmT5XJybEhIiDIyMnT06FGFh4eb28+dO6fLly/n6L7I1tq6dasuXryo1atX64EHHjC3//tOLf/2559/mm8Fd8uRI0ckyXzHiQoVKmj//v1q2bJlrp7OV6JECT3zzDN65plnlJSUpAceeEDjx4+3KlB//PHH+ueff9SqVav/PPb+++/X/fffr8mTJ+u9995Tjx499MEHH6hPnz55/rTBrJa2HDlyxOKOIMWLF89yacXts8iF9T0IAHfCGmrc8zp16iRHR0fFxcVlmsUyDEMXL1606fi3ZtT+PXZ8fLx27tyZ7T5uhcbLly//57G37uaQnWPbtGkjSZo9e7ZF+60Z3bZt22a7Rmtl9X1KTU3VggULsjw+LS1Nb7zxhsWxb7zxhkqWLKl69epJkp544gn98ccfWrx4cabz//nnHyUnJ9+xntvfF15eXqpYseJ/3m7vbvbv36/BgwerePHiiomJueNxly5dyvRevXU3mFvj5+TnnB1r167VH3/8Yd7etWuX4uPj1bp1a3NbhQoV9Ouvv1o87XH//v3asWOHRV+F9T0IAHfCDDXueRUqVNCkSZP04osv6uTJk+rQoYO8vb114sQJrVmzRv369dOwYcNsNn67du20evVqdezYUW3bttWJEye0aNEiVa1aVUlJSdm+Bl9fXy1atEje3t7y9PRUo0aNslxv7e7urqpVq+rDDz9UpUqVVKJECVWvXj3LJQa1atVSVFSU3nzzTfOSi127dmnZsmXq0KGDHnzwQauv/9/27NmjSZMmZWpv0aKFGjdurOLFiysqKkoDBw6UyWTSihUr7vin/KCgIE2bNk0nT55UpUqV9OGHHyohIUFvvvmmeV38008/rVWrVum5557TN998oyZNmig9PV2//vqrVq1apY0bN95xqVDVqlXVokUL1atXTyVKlNCePXv08ccfZ/vx6d99952uX7+u9PR0Xbx4UTt27ND69etVrFgxrVmzRgEBAXc8d9myZVqwYIE6duyoChUq6OrVq1q8eLF8fHzMATQnP+fsqFixopo2bar+/fsrJSVFs2fPlp+fn0aMGGE+plevXpo1a5ZatWql3r176/z581q0aJGqVatmsea9IL8HASBX7HR3ESBf3Lrd2N1uL3bLJ598YjRt2tTw9PQ0PD09jSpVqhgxMTHG4cOHzcc0b97cqFatWrbHv3ULsAsXLtzxmIyMDGPKlClGSEiI4erqatSpU8f47LPP7nj7sldffTXLftatW2dUrVrVcHJysriF3u39GIZhfP/990a9evUMFxcXi9uX3X7LMsMwjBs3bhhxcXFGaGio4ezsbAQHBxsvvviixW3qDOPmLcuyuo3cnW6ldjtJd3xNnDjRMAzD2LFjh3H//fcb7u7uRlBQkDFixAhj48aNhiTjm2++sRizWrVqxp49e4yIiAjDzc3NCAkJMebPn59p3NTUVGPatGlGtWrVDFdXV6N48eJGvXr1jLi4OOPKlSsW1/fv2+ZNmjTJaNiwoeHr62u4u7sbVapUMSZPnmy+Jd+d3Lpt3q2Xs7OzUbJkSeOBBx4wJk+enOmWiIaR+bZ5+/btM7p3726ULVvWcHV1NUqVKmW0a9fO2LNnj8V5d/o5R0VFGZ6enlnWd7f33cyZM43g4GDD1dXVaNasmbF///5M57/77rtG+fLlDRcXF6N27drGxo0bC817EAByy2QYfFIDAAAAyC3WUAMAAABWIFADAAAAViBQAwAAAFYgUAMAAABWIFADAAAAViBQAwAAAFbgwS6SMjIy9Oeff8rb2zvPH9cLAABswzAMXb16VUFBQXJwYI4Q9kOglvTnn38qODjY3mUAAIBcOH36tMqUKWPvMnAPI1BL8vb2lnTzP0gfHx87VwMAALIjMTFRwcHB5n/HAXshUEvmZR4+Pj4EagAAChmWa8LeWHAEAAAAWIFADQAAAFiBQA0AAABYgUANAAAAWIFADQAA
AFiBQA0AAABYgUANAAAAWIFADQAAAFiBQA0AAABYgUANAAAAWMGugfrbb7/Vo48+qqCgIJlMJq1du9Ziv2EYGjt2rAIDA+Xu7q7IyEgdPXrU4pi///5bPXr0kI+Pj3x9fdW7d28lJSXl41UAAADgXmbXQJ2cnKxatWrp9ddfz3L/9OnTNXfuXC1atEjx8fHy9PRUq1atdP36dfMxPXr00C+//KJNmzbps88+07fffqt+/frl1yUAAADgHmcyDMOwdxGSZDKZtGbNGnXo0EHSzdnpoKAgDR06VMOGDZMkXblyRf7+/lq6dKm6deumQ4cOqWrVqtq9e7fq168vSdqwYYPatGmj//u//1NQUFC2xk5MTFSxYsV05coV+fj42OT6AABA3uLfbxQUBXYN9YkTJ3T27FlFRkaa24oVK6ZGjRpp586dkqSdO3fK19fXHKYlKTIyUg4ODoqPj8/3mgEAAHDvcbJ3AXdy9uxZSZK/v79Fu7+/v3nf2bNnVapUKYv9Tk5OKlGihPmYrKSkpCglJcW8nZiYmFdlAwAA4B5TYAO1LU2dOlVxcXFW9fFep6zXfeelJ1fH2HyME0sO2bT/0GfCbdq/ZPtrkLiO7CoK1yDx33d28bPIvqLw30ZRuAbAVgpsoA4ICJAknTt3ToGBgeb2c+fOqXbt2uZjzp8/b3FeWlqa/v77b/P5WXnxxRc1ZMgQ83ZiYqKCg4NzVN9D/3PL0fEAkN92frrFpv0TfgDgpgK7hjo0NFQBAQHavHmzuS0xMVHx8fGKiIiQJEVEROjy5cvau3ev+ZgtW7YoIyNDjRo1umPfrq6u8vHxsXgBAAAAuWHXGeqkpCQdO3bMvH3ixAklJCSoRIkSKlu2rAYPHqxJkyYpLCxMoaGhGjNmjIKCgsx3AgkPD9cjjzyivn37atGiRbpx44ZiY2PVrVu3bN/hAwAA/LcfPxpm8zFCn/nc5mMAtmDXQL1nzx49+OCD5u1byzCioqK0dOlSjRgxQsnJyerXr58uX76spk2basOGDXJz+3/LLVauXKnY2Fi1bNlSDg4O6ty5s+bOnZvv1wIAAIB7k10DdYsWLXS322CbTCZNmDBBEyZMuOMxJUqU0HvvvWeL8gAAAID/VGA/lAhkl60/eCXx4SsAAHBnBfZDiQAAAEBhQKAGAAAArECgBgAAAKzAGmoAKKJ4ABUA5A9mqAEAAAArMEMNAAD+U+MJbexdAlBgMUMNAAAAWIFADQAAAFiBJR8AAOA/7XM6ZPMxWFSCwooZagAAAMAKBGoAAADACgRqAAAAwAqsoQYAFFg8nAZAYcAMNQAAAGAFAjUAAABgBZZ85NL3n662+Rid+ve2+RgAAGTH6lX7bD5Gm9o2HwKwCWaoAQAAACsQqAEAAAArEKgBAAAAKxCoAQAAACsQqAEAAAArEKgBAAAAKxCoAQAAACsQqAEAAAArEKgBAAAAKxCoAQAAACvw6HEUeg/9z83eJQAAgHsYM9QAAACAFQjUAAAAgBUI1AAAAIAVCNQAAACAFfhQIgq97z9dbfMxOvXvbfMxAABA4USgBpBndn66xeZjhD4TbvMxAADICZZ8AAAAAFYgUAMAAABWIFADAAAAVmANNQCg4Gpw3d4VAMB/YoYaAAAAsAKBGgAAALACSz5yqfGENvYuAQAA5JP09HTduHHD3mUgHzk7O8vR0TFbxxKoAQAA7sAwDJ09e1aXL1+2dymwA19fXwUEBMhkMt31OAJ1Lq1c/rbNxxhaP8bmYwAAgDu7FaZLlSolDw+P/wxWKBoMw9C1a9d0/vx5SVJgYOBdjydQAwAAZCE9Pd0cpv38/OxdDvKZu7u7JOn8+fMqVarUXZd/8KFEAACALNxaM+3h4WHnSmAvt372/7V+nhlqALjNQ/9zs3cJAAoQlnncu7L7sydQ3+N2frrFpv2HPhNu0/5RsLhd/yIfRuGzBQCAgoVAfY+zfQAi/AAAUNAtXbpUgwcPtvpuJiaTSWvWrFGH
Dh3ypK7CgjXUAAAARUB0dPQ9F2QLCmaoAeA233+62uZjdOrf2+ZjAADyBzPUAAAARdysWbNUo0YNeXp6Kjg4WM8//7ySkpIyHbd27VqFhYXJzc1NrVq10unTpy32r1u3TnXr1pWbm5vKly+vuLg4paWlZTlmamqqYmNjFRgYKDc3N4WEhGjq1Kk2uT57Y4YaAG7TeEIbe5cAAHnKwcFBc+fOVWhoqH777Tc9//zzGjFihBYsWGA+5tq1a5o8ebKWL18uFxcXPf/88+rWrZt27NghSfruu+/Us2dPzZ07V82aNdPx48fVr18/SdK4ceMyjTl37lytX79eq1atUtmyZXX69OlMAb2oIFADAAAUcYMHDzZ/Xa5cOU2aNEnPPfecRaC+ceOG5s+fr0aNGkmSli1bpvDwcO3atUsNGzZUXFycRo0apaioKElS+fLlNXHiRI0YMSLLQH3q1CmFhYWpadOmMplMCgkJse1F2hFLPgAAAIq4r7/+Wi1btlTp0qXl7e2tp59+WhcvXtS1a9fMxzg5OalBgwbm7SpVqsjX11eHDh2SJO3fv18TJkyQl5eX+dW3b1+dOXPGop9boqOjlZCQoMqVK2vgwIH66quvbH+hdkKgBgAAKMJOnjypdu3aqWbNmvrkk0+0d+9evf7665JurnPOrqSkJMXFxSkhIcH8OnDggI4ePSo3t8wPxKpbt65OnDihiRMn6p9//tETTzyhLl265Nl1FSQs+QAAACjC9u7dq4yMDM2cOVMODjfnUletWpXpuLS0NO3Zs0cNGzaUJB0+fFiXL19WePjNh7TVrVtXhw8fVsWKFbM9to+Pj7p27aquXbuqS5cueuSRR/T333+rRIkSeXBlBQeBGgAAoIi4cuWKEhISLNruu+8+3bhxQ/PmzdOjjz6qHTt2aNGiRZnOdXZ21oABAzR37lw5OTkpNjZW999/vzlgjx07Vu3atVPZsmXVpUsXOTg4aP/+/fr55581adKkTP3NmjVLgYGBqlOnjhwcHPTRRx8pICBAvr6+trh0u2LJBwAAQBGxdetW1alTx+K1YsUKzZo1S9OmTVP16tW1cuXKLG9f5+HhoZEjR+rJJ59UkyZN5OXlpQ8//NC8v1WrVvrss8/01VdfqUGDBrr//vv12muv3fHDht7e3po+fbrq16+vBg0a6OTJk/riiy/Ms+RFCTPUAAAARcDSpUu1dOnSO+5/4YUXLLaffvpp89fR0dGKjo6WJHXq1OmOfbRq1UqtWrW6437DMMxf9+3bV3379v2PqouGovcrAgAAAJCPCnSgTk9P15gxYxQaGip3d3dVqFBBEydOtPjtxzAMjR07VoGBgXJ3d1dkZKSOHj1qx6oBAABwLynQgXratGlauHCh5s+fr0OHDmnatGmaPn265s2bZz5m+vTpmjt3rhYtWqT4+Hh5enqqVatWun79uh0rBwAAwL2iQK+h/v7779W+fXu1bdtW0s0n+7z//vvatWuXpJuz07Nnz9bLL7+s9u3bS5KWL18uf39/rV27Vt26dbNb7QAAALg3FOgZ6saNG2vz5s06cuSIpJtP6Nm+fbtat24tSTpx4oTOnj2ryMhI8znFihVTo0aNtHPnzjv2m5KSosTERIsXAAAAkBsFeoZ61KhRSkxMVJUqVeTo6Kj09HRNnjxZPXr0kCSdPXtWkuTv729xnr+/v3lfVqZOnaq4uDjbFQ4AAIB7RoEO1KtWrdLKlSv13nvvqVq1akpISNDgwYMVFBSkqKioXPf74osvasiQIebtxMREBQcH56iP8F6Ncz0+AAAAio4CHaiHDx+uUaNGmddC16hRQ7///rumTp2qqKgoBQQESJLOnTunwMBA83nnzp1T7dq179ivq6urXF1dbVo7AAAA7g0Feg31tWvXMj1Nx9HRURkZGZKk0NBQBQQEaPPmzeb9iYmJio+PV0RERL7WCgAAgHtTgQ7Ujz76qCZPnqzPP/9cJ0+e1Jo1azRr1ix17NhRkmQy
mTR48GBNmjRJ69ev14EDB9SzZ08FBQWpQ4cO9i0eAACgiDt58qRMJpMSEhLsXYpdFeglH/PmzdOYMWP0/PPP6/z58woKCtKzzz6rsWPHmo8ZMWKEkpOT1a9fP12+fFlNmzbVhg0b5ObmZsfKAQBAUXZiyaF8HS/0mfAcHd+iRQvVrl1bs2fPtk1BsFCgA7W3t7dmz5591zeDyWTShAkTNGHChPwrDAAAoBAzDEPp6elycirQUbDQKNBLPgAAAJAz0dHR2rZtm+bMmSOTySSTyaSlS5fKZDLpyy+/VL169eTq6qrt27crOjo60zLZwYMHq0WLFubtjIwMTZ8+XRUrVpSrq6vKli2ryZMnZzl2enq6evXqpSpVqujUqVM2vMqChV9LAAAAipA5c+boyJEjql69uvkv+L/88oukm8/4mDFjhsqXL6/ixYtnq78XX3xRixcv1muvvaamTZvqzJkz+vXXXzMdl5KSou7du+vkyZP67rvvVLJkyby7qAKOQA0AAFCEFCtWTC4uLvLw8DDfYvhWAJ4wYYL+97//Zbuvq1evas6cOZo/f775GSAVKlRQ06ZNLY5LSkpS27ZtlZKSom+++UbFihXLo6spHAjUAFBEff/papv236l/b5v2DyDv1a9fP0fHHzp0SCkpKWrZsuVdj+vevbvKlCmjLVu2yN3d3ZoSCyXWUAMAANwjPD09LbYdHBxkGIZF240bN8xfZzcct2nTRj/99JN27txpfZGFEIEaAACgiHFxcVF6evp/HleyZEmdOXPGou3f95QOCwuTu7u7xUP0stK/f3+98soreuyxx7Rt27Zc1VyYseQDAAAb2/npFpuPkdP7FOeUW/ICm/aPvFWuXDnFx8fr5MmT8vLyMj9l+nYPPfSQXn31VS1fvlwRERF699139fPPP6tOnTqSJDc3N40cOVIjRoyQi4uLmjRpogsXLuiXX35R796Wy74GDBig9PR0tWvXTl9++WWmddZFGTPUAAAARcywYcPk6OioqlWrqmTJkne8hV2rVq00ZswYjRgxQg0aNNDVq1fVs2dPi2PGjBmjoUOHauzYsQoPD1fXrl11/vz5LPsbPHiw4uLi1KZNG33//fd5fl0FFTPUAAAAOWTrvwhYq1KlSpnWM0dHR2d5bFxcnOLi4u7Yl4ODg0aPHq3Ro0dn2leuXLlMa7CHDBmiIUOG5LzoQowZagAAAMAKzFADyDONJ7SxdwkAAOQ7ZqgBAAAAKxCoAQAAACsQqAEAAAArEKgBAAAAKxCoAQAAACsQqAEAAAArEKgBAAAAKxCoAQAA7hHR0dHq0KHDXY8pV66cZs+enS/1FBU82AUAACCH3uv0er6O9+TqmHwba/fu3fL09My38YoCAjVQQOz8dIvNxwh9JtzmY6Dg4MmVAHKjZMmS9i6h0GHJBwAAQBHz8ccfq0aNGnJ3d5efn58iIyOVnJxs3j9jxgwFBgbKz89PMTExunHjhnnf7Us+TCaTFi5cqNatW8vd3V3ly5fXxx9/nJ+XU+AxQw0At1m5/G2bjzG0fv79+RbAveXMmTPq3r27pk+fro4dO+rq1av67rvvZBiGJOmbb75RYGCgvvnmGx07dkxdu3ZV7dq11bdv3zv2OWbMGL3yyiuaM2eOVqxYoW7duunAgQMKD+cvnxKBGgAAoEg5c+aM0tLS1KlTJ4WEhEiSatSoYd5fvHhxzZ8/X46OjqpSpYratm2rzZs33zVQP/744+rTp48kaeLEidq0aZPmzZunBQsW2PZiCgmWfAAAABQhtWrVUsuWLVWjRg09/vjjWrx4sS5dumTeX61aNTk6Opq3AwMDdf78+bv2GRERkWn70KFDeVt4IUagBgAAKEIcHR21adMmffnll6patarmzZunypUr68SJE5IkZ2dni+NNJpMyMjLsUWqRQaAGAAAoYkwmk5o0aaK4uDj9+OOPcnFx0Zo1a3Ld3w8//JBpm/XT/w9rqAEAAIqQ+Ph4bd68WQ8//LBK
lSql+Ph4XbhwQeHh4frpp59y1edHH32k+vXrq2nTplq5cqV27dqlt9+2/Qe4CwsCNQAAQBHi4+Ojb7/9VrNnz1ZiYqJCQkI0c+ZMtW7dWh9++GGu+oyLi9MHH3yg559/XoGBgXr//fdVtWrVPK688CJQAwAKLG5hiIIqP59cmFPh4eHasGFDlvuWLl2aqe32x4yfPHky0zFBQUH66quv8qC6ook11AAAAIAVCNQAAACAFVjyAQAAgDu69YRF3Bkz1AAAAIAVCNQAAACAFQjUAAAAgBVYQw0ARZStbznH7eYA4CZmqAEAAAArEKgBAAAAKxCoAQAAipgWLVpo8ODB9i7jnsEaagAAgBw6u9C2n1G4XUD/3vk6HnKGGWoAAADcVWpqqr1LKNAI1AAAAEVQRkaGRowYoRIlSiggIEDjx4837zt16pTat28vLy8v+fj46IknntC5c+fM+8ePH6/atWvrrbfeUmhoqNzc3CRJH3/8sWrUqCF3d3f5+fkpMjJSycnJ5vPeeusthYeHy83NTVWqVNGCBQvy7XrtiSUfAAAARdCyZcs0ZMgQxcfHa+fOnYqOjlaTJk3UsmVLc5jetm2b0tLSFBMTo65du2rr1q3m848dO6ZPPvlEq1evlqOjo86cOaPu3btr+vTp6tixo65evarvvvvO/GjylStXauzYsZo/f77q1KmjH3/8UX379pWnp6eioqLs9F3IHwRqAACAIqhmzZoaN26cJCksLEzz58/X5s2bJUkHDhzQiRMnFBwcLElavny5qlWrpt27d6tBgwaSbi7zWL58uUqWLClJ2rdvn9LS0tSpUyeFhIRIkmrUqGEeb9y4cZo5c6Y6deokSQoNDdXBgwf1xhtvFPlAzZIPAACAIqhmzZoW24GBgTp//rwOHTqk4OBgc5iWpKpVq8rX11eHDh0yt4WEhJjDtCTVqlVLLVu2VI0aNfT4449r8eLFunTpkiQpOTlZx48fV+/eveXl5WV+TZo0ScePH7fxldofM9QAAABFkLOzs8W2yWRSRkZGts/39PS02HZ0dNSmTZv0/fff66uvvtK8efM0evRoxcfHy8PDQ5K0ePFiNWrUKNN5RR0z1AAAAPeQ8PBwnT59WqdPnza3HTx4UJcvX1bVqlXveq7JZFKTJk0UFxenH3/8US4uLlqzZo38/f0VFBSk3377TRUrVrR4hYaG2vqS7I4ZagAAgHtIZGSkatSooR49emj27NlKS0vT888/r+bNm6t+/fp3PC8+Pl6bN2/Www8/rFKlSik+Pl4XLlxQeHi4JCkuLk4DBw5UsWLF9MgjjyglJUV79uzRpUuXNGTIkPy6PLsgUAMAANxDTCaT1q1bpwEDBuiBBx6Qg4ODHnnkEc2bN++u5/n4+Ojbb7/V7NmzlZiYqJCQEM2cOVOtW7eWJPXp00ceHh569dVXNXz4cHl6eqpGjRr3xBMbCdQAAAA5VNCfXPjv29/dsnbtWvPXZcuW1bp16+54/vjx4y3uWy3dXCqyYcOGu4775JNP6sknn8xJqUUCa6gBAAAAKxCoAQAAACsQqAEAAAArEKgBAAAAKxCoAQAAACsQqAEAAAAr5DpQnzlzRl26dFHJkiVVokQJPfroo/rtt9/ysjYAAACgwMt1oO7Vq5eqV6+ubdu2acuWLfL3978n7zsIAACAe1u2A/WgQYOUnJxs3j527JhGjhypqlWrqnbt2ho0aJAOHz6c5wX+8ccfeuqpp+Tn5yd3d3fVqFFDe/bsMe83DENjx45VYGCg3N3dFRkZqaNHj+Z5HQAAAEBWsh2oy5Qpo3r16mn9+vWSpK5du6pRo0YaNWqUhg4dqscee0w9evTI0+IuXbqkJk2ayNnZWV9++aUOHjyomTNnqnjx4uZjpk+frrlz52rRokWKj4+Xp6enWrVqpevXr+dpLQAAAIWFYRjq16+fSpQoIZPJpISEBHuXVKRl+9Hjw4cPV5cuXfT8889r6dKlmjdvnho1aqStW7cq
PT1d06dPV5cuXfK0uGnTpik4OFhLliwxt4WGhpq/NgxDs2fP1ssvv6z27dtLkpYvXy5/f3+tXbtW3bp1y9N6AAAAJGl1m7b5Ol6nLz7P0fEbNmzQ0qVLtXXrVpUvX1733XefjSqDlMM11KGhofryyy/VuXNnNW/eXCdPntSMGTM0e/ZsPf744zKZTHla3Pr161W/fn09/vjjKlWqlOrUqaPFixeb9584cUJnz55VZGSkua1YsWJq1KiRdu7cmae1AAAAFBbHjx9XYGCgGjdurICAADk5Wc6hpqam2qmyoinHH0q8ePGievTood27d+vHH39URESEfvrpJ1vUpt9++00LFy5UWFiYNm7cqP79+2vgwIFatmyZJOns2bOSJH9/f4vz/P39zfuykpKSosTERIsXAABAURAdHa0BAwbo1KlTMplMKleunFq0aKHY2FgNHjxY9913n1q1aiVJ2rZtmxo2bChXV1cFBgZq1KhRSktLM/d19epV9ejRQ56engoMDNRrr72mFi1aaPDgwXa6uoIp24F68+bN8vf3V8mSJVWmTBn9+uuveueddzR16lR1795dI0aM0D///JOnxWVkZKhu3bqaMmWK6tSpo379+qlv375atGiRVf1OnTpVxYoVM7+Cg4PzqGIAAAD7mjNnjiZMmKAyZcrozJkz2r17tyRp2bJlcnFx0Y4dO7Ro0SL98ccfatOmjRo0aKD9+/dr4cKFevvttzVp0iRzX0OGDNGOHTu0fv16bdq0Sd9995327dtnr0srsLIdqGNiYjRixAhdu3ZN8+fPN/9m8uCDD2rfvn1ydnZW7dq187S4wMBAVa1a1aItPDxcp06dkiQFBARIks6dO2dxzLlz58z7svLiiy/qypUr5tfp06fztG4AAAB7KVasmLy9veXo6KiAgACVLFlSkhQWFqbp06ercuXKqly5shYsWKDg4GDNnz9fVapUUYcOHRQXF6eZM2cqIyNDV69e1bJlyzRjxgy1bNlS1atX15IlS5Senm7nKyx4sh2oz5w5o7Zt28rNzU2PPPKILly4YN7n6uqqyZMna/Xq1XlaXJMmTTLdiu/IkSMKCQmRdHNNd0BAgDZv3mzen5iYqPj4eEVERNyxX1dXV/n4+Fi8AAAAirJ69epZbB86dEgREREWn4Fr0qSJkpKS9H//93/67bffdOPGDTVs2NC8v1ixYqpcuXK+1VxYZPsuH4899pi6dOmixx57TNu3b1ebNm0yHVOtWrU8Le6FF15Q48aNNWXKFD3xxBPatWuX3nzzTb355puSJJPJpMGDB2vSpEkKCwtTaGioxowZo6CgIHXo0CFPawEA5L/wXo3tXQJQZHh6etq7hCIr2zPUb7/9tp599llduXJFTz31lGbPnm3Dsm5q0KCB1qxZo/fff1/Vq1fXxIkTNXv2bIv7XY8YMUIDBgxQv3791KBBAyUlJWnDhg1yc3OzeX0AAACFVXh4uHbu3CnDMMxtO3bskLe3t8qUKaPy5cvL2dnZvAZbkq5cuaIjR47Yo9wCLdsz1C4uLhowYIAta8lSu3bt1K5duzvuN5lMmjBhgiZMmJCPVQEAABRuzz//vGbPnq0BAwYoNjZWhw8f1rhx4zRkyBA5ODjI29tbUVFRGj58uEqUKKFSpUpp3LhxcnBwyPNbJRd2Ob5tHgAAAAq/0qVL64svvtCuXbtUq1YtPffcc+rdu7defvll8zGzZs1SRESE2rVrp8jISDVp0kTh4eGsBLhNtmeoAQAAcFNOn1yY3wYPHmxxr+itW7dmeVzz5s21a9euO/bj7e2tlStXmreTk5MVFxenfv365VWpRQKBGgAAAFn68ccf9euvv6phw4a6cuWKeYlt+/bt7VxZwUKgBgAAwB3NmDFDhw8flouLi+rVq6fvvvtO9913n73LKlByHKjLly+v3bt3y8/Pz6L98uXLqlu3rn777bc8Kw4AAAD2U6dOHe3du9feZRR4Of5Q4smTJ7N8Qk5KSor+
+OOPPCkKAAAAKCyyPUO9fv1689cbN25UsWLFzNvp6enavHmzypUrl6fFAQAAAAVdtgP1rScPmkwmRUVFWexzdnZWuXLlNHPmzDwtDgAAACjosh2oMzIyJEmhoaHavXs3i9EBAAAA5eJDiSdOnLBFHQAAAEChlKvb5m3evFmbN2/W+fPnzTPXt7zzzjt5Uhhwr3nofzx1CgCAwijHd/mIi4vTww8/rM2bN+uvv/7SpUuXLF4AAAAoeFq0aGHx9ETknRzPUC9atEhLly7V008/bYt6AAAACryze17P1/EC6sfk63jImRzPUKempqpx48a2qAUAAAAodHIcqPv06aP33nvPFrUAAAAgDyQnJ6tnz57y8vJSYGBgplsbX7p0ST179lTx4sXl4eGh1q1b6+jRoxbHLF68WMHBwfLw8FDHjh01a9Ys+fr65uNVFB45XvJx/fp1vfnmm/r6669Vs2ZNOTs7W+yfNWtWnhUHAACAnBs+fLi2bdumdevWqVSpUnrppZe0b98+1a5dW5IUHR2to0ePav369fLx8dHIkSPVpk0bHTx4UM7OztqxY4eee+45TZs2TY899pi+/vprjRkzxr4XVYDlOFD/9NNP5h/Gzz//bLHPZDLlSVEAAADInaSkJL399tt699131bJlS0nSsmXLVKZMGUkyB+kdO3aYl/GuXLlSwcHBWrt2rR5//HHNmzdPrVu31rBhwyRJlSpV0vfff6/PPvvMPhdVwOU4UH/zzTe2qAMAAAB54Pjx40pNTVWjRo3MbSVKlFDlypUlSYcOHZKTk5PFfj8/P1WuXFmHDh2SJB0+fFgdO3a06Ldhw4YE6jvI8RrqW44dO6aNGzfqn3/+kSQZhpFnRQEAAACFRY4D9cWLF9WyZUtVqlRJbdq00ZkzZyRJvXv31tChQ/O8QAAAAGRfhQoV5OzsrPj4eHPbpUuXdOTIEUlSeHi40tLSLPZfvHhRhw8fVtWqVSVJlStX1u7duy36vX0b/0+OA/ULL7wgZ2dnnTp1Sh4eHub2rl27asOGDXlaHAAAAHLGy8tLvXv31vDhw7Vlyxb9/PPPio6OloPDzdgXFham9u3bq2/fvtq+fbv279+vp556SqVLl1b79u0lSQMGDNAXX3yhWbNm6ejRo3rjjTf05Zdf8nm5O8hxoP7qq680bdo088L2W8LCwvT777/nWWEAAADInVdffVXNmjXTo48+qsjISDVt2lT16tUz71+yZInq1aundu3aKSIiQoZh6IsvvjDfva1JkyZatGiRZs2apVq1amnDhg164YUX5ObmZq9LKtBy/KHE5ORki5npW/7++2+5urrmSVEAAAAFWUF/cqGXl5dWrFihFStWmNuGDx9u/rp48eJavnz5Xfvo27ev+vbta7FdsWLFvC+2CMjxDHWzZs0sfgAmk0kZGRmaPn26HnzwwTwtDgAAAPYxY8YM7d+/X8eOHdO8efO0bNkyRUVF2busAinHM9TTp09Xy5YttWfPHqWmpmrEiBH65Zdf9Pfff2vHjh22qBEAAAD5bNeuXZo+fbquXr2q8uXLa+7cuerTp4+9yyqQchyoq1evriNHjmj+/Pny9vZWUlKSOnXqpJiYGAUGBtqiRgAAAOSzVatW2buEQiPHgVqSihUrptGjR+d1LQAAAEChk+M11EuWLNFHH32Uqf2jjz7SsmXL8qQoAAAAoLDIcaCeOnWq7rvvvkztpUqV0pQpU/KkKAAAAKCwyHGgPnXqlEJDQzO1h4SE6NSpU3lSFAAAAFBY5DhQlypVSj/99FOm9v3798vPzy9PigIAAAAKixwH6u7du2vgwIH65ptvlJ6ervT0dG3ZskWDBg1St27dbFEjAAAAUGDl+C4fEydO1MmTJ9WyZUs5Od08PSMjQz179mQNNQAAQAHVokUL1a5dW7Nnz7Z3KUVOjgK1YRg6e/asli5dqkmTJikhIUHu7u6qUaOGQkJCbFUjAABAgTJzYN18HW/o3H35Oh5yJseB
umLFivrll18UFhamsLAwW9UFAACAQiQ1NVUuLi72LsMucrSG2sHBQWFhYbp48aKt6gEAAICVkpOT1bNnT3l5eSkwMFAzZ8602J+SkqJhw4apdOnS8vT0VKNGjbR161aLY7Zv365mzZrJ3d1dwcHBGjhwoJKTk837y5Urp4kTJ6pnz57y8fFRv3798uPSCqQcr6F+5ZVXNHz4cC1cuFDVq1e3RU0AgDwQ3quxvUsAYCfDhw/Xtm3btG7dOpUqVUovvfSS9u3bp9q1a0uSYmNjdfDgQX3wwQcKCgrSmjVr9Mgjj+jAgQMKCwvT8ePH9cgjj2jSpEl65513dOHCBcXGxio2NlZLliwxjzNjxgyNHTtW48aNs9OVFgw5DtQ9e/bUtWvXVKtWLbm4uMjd3d1i/99//51nxQEAACBnkpKS9Pbbb+vdd99Vy5YtJUnLli1TmTJlJN18psiSJUt06tQpBQUFSZKGDRumDRs2aMmSJZoyZYqmTp2qHj16aPDgwZKksLAwzZ07V82bN9fChQvl5uYmSXrooYc0dOjQ/L/IAibHgZpPhgIAABRcx48fV2pqqho1amRuK1GihCpXrixJOnDggNLT01WpUiWL81JSUszPFNm/f79++uknrVy50rzfMAxlZGToxIkTCg8PlyTVr1/f1pdTKOQ4UEdFRdmijkJn9Srbf9q2TW2bDwEAAO4xSUlJcnR01N69e+Xo6Gixz8vLy3zMs88+q4EDB2Y6v2zZsuavPT09bVtsIZHjQC3d/M1nyZIlOn78uObMmaNSpUrpyy+/VNmyZVWtWrW8rhEAAADZVKFCBTk7Oys+Pt4cfi9duqQjR46oefPmqlOnjtLT03X+/Hk1a9Ysyz7q1q2rgwcPqmLFivlZeqGV4yclbtu2TTVq1FB8fLxWr16tpKQkSTf/NHCvL0gHAACwNy8vL/Xu3VvDhw/Xli1b9PPPPys6OloODjdjX6VKldSjRw/17NlTq1ev1okTJ7Rr1y5NnTpVn3/+uSRp5MiR+v777xUbG6uEhAQdPXpU69atU2xsrD0vrcDK8Qz1qFGjNGnSJA0ZMkTe3t7m9oceekjz58/P0+IAAAAKooL+oJVXX31VSUlJevTRR+Xt7a2hQ4fqypUr5v1LlizRpEmTNHToUP3xxx+67777dP/996tdu3aSpJo1a2rbtm0aPXq0mjVrJsMwVKFCBXXt2tVel1Sg5ThQHzhwQO+9916m9lKlSumvv/7Kk6IAAACQe15eXlqxYoVWrFhhbhs+fLj5a2dnZ8XFxSkuLu6OfTRo0EBfffXVHfefPHkyT2otCnK85MPX11dnzpzJ1P7jjz+qdOnSeVIUAAAAUFjkOFB369ZNI0eO1NmzZ2UymZSRkaEdO3Zo2LBh6tmzpy1qBAAAAAqsHAfqKVOmqEqVKgoODlZSUpKqVq2qBx54QI0bN9bLL79sixoBAACAAivHa6hdXFy0ePFijR07VgcOHFBSUpLq1KmjsLAwW9QHAAAAFGjZDtQZGRl69dVXtX79eqWmpqply5YaN25cpkePAwAAAPeSbC/5mDx5sl566SV5eXmpdOnSmjNnjmJiYmxZGwAAAFDgZTtQL1++XAsWLNDGjRu1du1affrpp1q5cqUyMjJsWR8AAABQoGU7UJ86dUpt2rQxb0dGRspkMunPP/+0SWEAAABAYZDtQJ2WliY3NzeLNmdnZ924cSPPiwIAAAAKi2x/KNEwDEVHR8vV1dXcdv36dT333HPy9PQ0t61evTpvKwQAAAAKsGwH6qioqExtTz31VJ4WA+RG4wlt/vsgAADy0BcJsfk6Xpva8/NtrPHjx2vt2rVKSEjItzELu2wH6iVLltiyDiDXVi5/2+ZjDK3PHW0AAEDWcvykRAAAABRsGRkZmj59uipWrChXV1eVLVtWkydPliSNHDlSlSpVkoeHh8qXL68xY8aYPxO3dOlSxcXFaf/+/TKZTDKZTFq6dKkdr6RwyPGTEgEAAFCw
vfjii1q8eLFee+01NW3aVGfOnNGvv/4qSfL29tbSpUsVFBSkAwcOqG/fvvL29taIESPUtWtX/fzzz9qwYYO+/vprSVKxYsXseSmFAoEaAACgCLl69armzJmj+fPnmz8DV6FCBTVt2lSS9PLLL5uPLVeunIYNG6YPPvhAI0aMkLu7u7y8vOTk5KSAgAC71F8YEagBAACKkEOHDiklJUUtW7bMcv+HH36ouXPn6vjx40pKSlJaWpp8fHzyucqipVCtoX7llVdkMpk0ePBgc9v169cVExMjPz8/eXl5qXPnzjp37pz9igQAALAjd3f3O+7buXOnevTooTZt2uizzz7Tjz/+qNGjRys1NTUfKyx6Ck2g3r17t9544w3VrFnTov2FF17Qp59+qo8++kjbtm3Tn3/+qU6dOtmpSgAAAPsKCwuTu7u7Nm/enGnf999/r5CQEI0ePVr169dXWFiYfv/9d4tjXFxclJ6enl/lFgmFYslHUlKSevToocWLF2vSpEnm9itXrujtt9/We++9p4ceekjSzdv7hYeH64cfftD9999vr5IBAADsws3NTSNHjtSIESPk4uKiJk2a6MKFC/rll18UFhamU6dO6YMPPlCDBg30+eefa82aNRbnlytXTidOnFBCQoLKlCkjb29viwf7IbNCEahjYmLUtm1bRUZGWgTqvXv36saNG4qMjDS3ValSRWXLltXOnTvvGKhTUlKUkpJi3k5MTLRd8QAAoMjJzwet5MaYMWPk5OSksWPH6s8//1RgYKCee+459e7dWy+88IJiY2OVkpKitm3basyYMRo/frz53M6dO2v16tV68MEHdfnyZS1ZskTR0dF2u5bCoMAH6g8++ED79u3T7t27M+07e/asXFxc5Ovra9Hu7++vs2fP3rHPqVOnKi4uLq9LBQAAKBAcHBw0evRojR49OtO+6dOna/r06RZt//58mqurqz7++GNbl1ikFOg11KdPn9agQYO0cuVKubm55Vm/L774oq5cuWJ+nT59Os/6BgAAwL2lQAfqvXv36vz586pbt66cnJzk5OSkbdu2ae7cuXJycpK/v79SU1N1+fJli/POnTt313snurq6ysfHx+IFAAAA5EaBXvLRsmVLHThwwKLtmWeeUZUqVTRy5EgFBwfL2dlZmzdvVufOnSVJhw8f1qlTpxQREWGPkgEAKJL27V2VD6PUzocxgLxXoAO1t7e3qlevbtHm6ekpPz8/c3vv3r01ZMgQlShRQj4+PhowYIAiIiK4wwcAAADyRYEO1Nnx2muvycHBQZ07d1ZKSopatWqlBQsW2LssAADMHvpf3n0OCEDBU+gC9datWy223dzc9Prrr+v111+3T0EAAAC4pxXoDyUCAAAABR2BGgAAALACgRoAAACwAoEaAACgiGnRooXF0w9vV65cOc2ePTvH/Y4fP161a9fOdV1FVaH7UCIAAIC99Xmpcb6O99aU7/O0v927d8vT0zNP+7yXEagBAAXW6lX7bD5Gm9o2HwIocEqWLHnX/Tdu3JCzs3M+VVP4seQDAACgCEpLS1NsbKyKFSum++67T2PGjJFhGJIyL/kwmUxauHChHnvsMXl6emry5MmSpFdeeUX+/v7y9vZW7969df36dXtcSoFHoAYAACiCli1bJicnJ+3atUtz5szRrFmz9NZbb93x+PHjx6tjx446cOCAevXqpVWrVmn8+PGaMmWK9uzZo8DAQB6edwcs+QAAACiCgoOD9dprr8lkMqly5co6cOCAXnvtNfXt2zfL45988kk988wz5u1u3bqpd+/e6t27tyRp0qRJ+vrrr5mlzgIz1AAAAEXQ/fffL5PJZN6OiIjQ0aNHlZ6enuXx9evXt9g+dOiQGjVqZNEWERGR94UWAQRqAAAAcNcPKxCoAQAAiqD4+HiL7R9++EFhYWFydHTM1vnh4eFZ9oHMCNQAAABF0KlTpzRkyBAdPnxY77//vubNm6dBgwZl+/xBgwbpnXfe0ZIlS3Tk
yBGNGzdOv/zyiw0rLrz4UCIAAEAO5fWDVmyhZ8+e+ueff9SwYUM5Ojpq0KBB6tevX7bP79q1q44fP64RI0bo+vXr6ty5s/r376+NGzfasOrCiUANAABQxGzdutX89cKFCzPtP3nypMX2rftT3+6ll17SSy+9ZNE2bdo0q+sraljyAQAAAFiBQA0AAABYgUANAAAAWIFADQAAAFiBQA0AAABYgUANAAAAWIFADQAAAFiBQA0AAABYgUANAAAAWIFADQAAgAJv6dKl8vX1vesx48ePV+3atc3b0dHR6tChg03rknj0OAAAQI7FDkrI1/Hmz6mdr+NJNwPs4MGDdfny5XwfO7eGDRumAQMG5Pu4BGoAAAAUCV5eXvLy8sr3cVnyAQAosNySF9j8BRRVGzZsUNOmTeXr6ys/Pz+1a9dOx48flyRt3bpVJpPJYvY5ISFBJpNJJ0+e1NatW/XMM8/oypUrMplMMplMGj9+vCTp0qVL6tmzp4oXLy4PDw+1bt1aR48eNfdza2nGZ599psqVK8vDw0NdunTRtWvXtGzZMpUrV07FixfXwIEDlZ6ebj7vv/q9Ze3atQoLC5Obm5tatWql06dPm/fdvuTjdhkZGZo6dapCQ0Pl7u6uWrVq6eOPP87ld/j/YYYaKCgaXLd3BQCAIiQ5OVlDhgxRzZo1lZSUpLFjx6pjx45KSEj4z3MbN26s2bNna+zYsTp8+LAkmWd+o6OjdfToUa1fv14+Pj4aOXKk2rRpo4MHD8rZ2VmSdO3aNc2dO1cffPCBrl69qk6dOqljx47y9fXVF198od9++02dO3dWkyZN1LVr1xz1O3nyZC1fvlwuLi56/vnn1a1bN+3YsSNb35OpU6fq3Xff1aJFixQWFqZvv/1WTz31lEqWLKnmzZvn9FtsRqAGAAAogjp37myx/c4776hkyZI6ePDgf57r4uKiYsWKyWQyKSAgwNx+K/Du2LFDjRs3liStXLlSwcHBWrt2rR5//HFJ0o0bN7Rw4UJVqFBBktSlSxetWLFC586dk5eXl6pWraoHH3xQ33zzjbp27ZqjfufPn69GjRpJkpYtW6bw8HDt2rVLDRs2vOs1paSkaMqUKfr6668VEREhSSpfvry2b9+uN954g0ANAAAAS0ePHtXYsWMVHx+vv/76SxkZGZKkU6dOycPDI1d9Hjp0SE5OTuZAK0l+fn6qXLmyDh06ZG7z8PAwh2lJ8vf3V7ly5SzWN/v7++v8+fM56tfJyUkNGjQwb1epUkW+vr46dOjQfwbqY8eO6dq1a/rf//5n0Z6amqo6depk91uQJQI1AABAEfToo48qJCREixcvVlBQkDIyMlS9enWlpqaag61hGObjb9y4kWdj31qicYvJZMqy7VbIzw9JSUmSpM8//1ylS5e22Ofq6mpV33woEQAAoIi5ePGiDh8+rJdfflktW7ZUeHi4Ll26ZN5fsmRJSdKZM2fMbbevrXZxcbH40KAkhYeHKy0tTfHx8ZnGqlq1aq7rzW6/aWlp2rNnj3n78OHDunz5ssLDw/9zjKpVq8rV1VWnTp1SxYoVLV7BwcG5rl1ihhoAAKDIKV68uPz8/PTmm28qMDBQp06d0qhRo8z7b4XI8ePHa/LkyTpy5Ihmzpxp0Ue5cuWUlJSkzZs3q1atWvLw8FBYWJjat2+vvn376o033pC3t7dGjRql0qVLq3379rmuN7v9Ojs7a8CAAZo7d66cnJwUGxur+++//z+Xe0iSt7e3hg0bphdeeEEZGRlq2rSprly5oh07dsjHx0dRUVG5rp9ADQAAkEP2eNBKTjg4OOiDDz7QwIEDVb16dVWuXFlz585VixYtJN0Mpu+//7769++vmjVrqkGDBpo0aZL5w3/SzTt9PPfcc+ratasuXryocePGafz48VqyZIkGDRqkdu3aKTU1VQ888IC++OKLTEs6cio7/Xp4eGjkyJF68skn9ccff6hZs2Z6++23sz3GxIkTVbJkSU2dOlW/
/fabfH19VbduXb300ktW1W4y/r145h6VmJioYsWK6cqVK/Lx8cnWOX1eamzjqqS3pnxv8zFWt2lr0/47ffG5TfuXpJkD69p8jKFz99l8jLN7Xrf5GAH1Y2zaf1G4BqnovKe+SIi1af9tas+3af9S/jyNLj+C0dmF2f8HP7cC+ve2af+Nm1oXOLLj++1TcnR8bv79zonr16/rxIkTCg0NlZubW573j4Ivu+8BZqiBAmLlctv/gzs0H8IoAAD3Gj6UCAAAAFiBQA0AAABYgUANAAAAWIFADQAAAFiBDyXe49ymhNq7BPz/wnvZ/s4xAAAg7xGo73GrV9n21l1tatu0ewAAALsjUAMFhK1/uZH4BQcAAFtgDTUAAMA95uTJkzKZTEpISLC6r+joaHXo0MHqfgozZqgBAADuMcHBwTpz5ozuu+8+e5dSJBCoAQAAcig/HsX+bzl9LPt/cXR0VEBAwB33G4ah9PR0OTkRFbODJR+55Ja8wOYvAACA3NqwYYOaNm0qX19f+fn5qV27djp+/LikzEs+tm7dKpPJpC+//FL16tWTq6urtm/frvHjx6t27dp64403FBwcLA8PDz3xxBO6cuVKrsb999irV6/Wgw8+KA8PD9WqVUs7d+606Gf79u1q1qyZ3N3dFRwcrIEDByo5OTnvv1F5gF877nEHv21h7xIAAIANJCcna8iQIapZs6aSkpI0duxYdezY8a7rpkeNGqUZM2aofPnyKl68uLZu3apjx45p1apV+vTTT5WYmKjevXvr+eef18qVK3M8roPD/5vLHT16tGbMmKGwsDCNHj1a3bt317Fjx+Tk5KTjx4/rkUce0aRJk/TOO+/owoULio2NVWxsrJYsWZLX3yqrEagBAACKoM6dO1tsv/POOypZsqQOHjwoLy+vLM+ZMGGC/ve//1m0Xb9+XcuXL1fp0qUlSfPmzVPbtm01c+bMLJeN3G3c6tWrm9uHDRumtm3bSpLi4uJUrVo1HTt2TFWqVNHUqVPVo0cPDR48WJIUFhamuXPnqnnz5lq4cKHc3Nxy9s2wMZZ8AAAAFEFHjx5V9+7dVb58efn4+KhcuXKSpFOnTt3xnPr162dqK1u2rDlMS1JERIQyMjJ0+PBhq8atWbOm+evAwEBJ0vnz5yVJ+/fv19KlS+Xl5WV+tWrVShkZGTpx4sR/X3w+Y4YaAACgCHr00UcVEhKixYsXKygoSBkZGapevbpSU1PveI6np2e+jevs7Gz+2mQySZIyMjIkSUlJSXr22Wc1cODATP2XLVvW6hrzGoEaAACgiLl48aIOHz6sxYsXq1mzZpJufsgvN06dOqU///xTQUFBkqQffvhBDg4Oqly5ss3GrVu3rg4ePKiKFSvmqub8RqAGAAAoYooXLy4/Pz+9+eabCgwM1KlTpzRq1Khc9eXm5qaoqCjNmDFDiYmJGjhwoJ544oks10/n1bgjR47U/fffr9jYWPXp00eenp46ePCgNm3apPnz5+fqOmyJNdQAAABFjIODgz744APt3btX1atX1wsvvKBXX301V31VrFhRnTp1Ups2bfTwww+rZs2aWrAg69v75tW4NWvW1LZt23TkyBE1a9ZMderU0dixY82z5AUNM9QAUEStXrXPpv23qW3T7oECLa8ftGILkZGROnjwoEWbYRhZft2iRQuL7dv1799f/fv3z3Lf0qVLczRuuXLlMo3l6+ubqa1Bgwb66quv7lhTQUKgBoAiivvMA0D+YMkHAAAAYAUCNQAAALI0fvz4uz5ZETcV6EA9depUNWjQQN7e3ipVqpQ6dOiQ6Sbi169fV0xMjPz8/OTl5aXOnTvr3LlzdqoYAAAA95oCHai3bdummJgY/fDDD9q0aZNu3Lihhx9+WMnJyeZjXnjhBX366af66KOPtG3bNv3555/q1KmTHasGAADAvaRAfyhxw4YNFttLly5VqVKltHfvXj3wwAO6cuWK3n77bb333nt66KGHJElLlixReHi4fvjh
B91///32KBsAABQhd7v7BYq27P7sC/QM9e2uXLkiSSpRooQkae/evbpx44YiIyPNx1SpUkVly5bVzp0779hPSkqKEhMTLV4AAAD/duvR2NeuXbNzJbCXWz/7fz8mPSsFeob63zIyMjR48GA1adJE1atXlySdPXtWLi4u8vX1tTjW399fZ8+evWNfU6dOVVxcnC3LBQAAhZyjo6N8fX11/vx5SZKHh4dMJpOdq0J+MAxD165d0/nz5+Xr6ytHR8e7Hl9oAnVMTIx+/vnnXD+H/t9efPFFDRkyxLydmJio4OBgq/sFAABFy63Ha98K1bi3+Pr6ZvmI9dsVikAdGxurzz77TN9++63KlCljbg8ICFBqaqouX75sMUt97ty5u168q6urXF1dbVkyAAAoAkwmkwIDA1WqVCnduHHD3uUgHzk7O//nzPQtBTpQG4ahAQMGaM2aNdq6datCQ0Mt9terV0/Ozs7avHmzOnfuLEk6fPiwTp06pYiICHuUDAAAiiBHR8dshyvcewp0oI6JidF7772ndevWydvb27wuulixYnJ3d1exYsXUu3dvDRkyRCVKlJCPj48GDBigiIgI7vABAACAfFGgA/XChQslSS1atLBoX7JkiaKjoyVJr732mhwcHNS5c2elpKSoVatWWrBgQT5XCgAAgHtVgQ7U2bn3n5ubm15//XW9/vrr+VARgLtZufxtm48xtH6MzccAACAnCnSgBlC4hPdqbO8SAADIdwRqAHlm9ap9Nh+jTW2bDwEAQI4UqiclAgAAAAUNgRoAAACwAoEaAAAAsAKBGgAAALACgRoAAACwAoEaAAAAsAKBGgAAALACgRoAAACwAoEaAAAAsAKBGgAAALACgRoAAACwAoEaAAAAsAKBGgAAALACgRoAAACwAoEaAAAAsIKTvQsAAKDIa3Dd3hUAsCFmqAEAAAArEKgBAAAAK7DkI5f27V2VD6PUzocxAAAAYA1mqAEAAAArMEMNAICN7XM6ZPMx2th8BAB3QqAGCgi35AX2LgEAAOQCSz4AAAAAKxCoAQAAACsQqAEAAAArsIYaAAAbW71qn83HaFPb5kMAuANmqAEAAAArEKgBAAAAK7DkAyggePomAACFEzPUAAAAgBUI1AAAAIAVCNQAAACAFVhDDQC3Ce/V2N4lAAAKEWaoAQAAACswQw0At+EhHACAnGCGGgAAALACgRoAAACwAoEaAAAAsAKBGgAAALACgRoAAACwAoEaAAAAsAKBGgAAALACgRoAAACwAoEaAAAAsAKBGgAAALACjx4HAMDG3JIX2LsEADbEDDUAAABgBWaoAQCwsX17V+XDKLXzYQwAWWGGGgAAALACM9QAcBvWuwIAcoIZagAAAMAKBGoAAADACgRqAAAAwAoEagAAAMAKBGoAAADACgRqAAAAwArcNg+FXnivxvYuAUUMD+EAAOREkQnUr7/+ul599VWdPXtWtWrV0rx589SwYUN7l4V8sHrVPpuP0aa2zYcAkAV+uQFQGBSJQP3hhx9qyJAhWrRokRo1aqTZs2erVatWOnz4sEqVKmXv8oB7Bg9EAQDci4rEGupZs2apb9++euaZZ1S1alUtWrRIHh4eeuedd+xdGgAAAIq4Qj9DnZqaqr179+rFF180tzk4OCgyMlI7d+60Y2XIL8yKAgAAeyr0gfqvv/5Senq6/P39Ldr9/f3166+/ZnlOSkqKUlJSzNtXrlyRJCUmJmZ73LS0lP8+yEo5qSe3bH0d+XENqSlJNh+jKPwsJNtfBz+L7CsK11EUrkHiOrKrIF7DreMNw7BFOUC2mYxC/i78888/Vbp0aX3//feKiIgwt48YMULbtm1TfHx8pnPGjx+vuLi4/CwTAADYyOnTp1WmTBl7l4F7WKGfob7vvvvk6Oioc+fOWbSfO3dOAQEBWZ7z4osvasiQIebtjIwM/f333/Lz85PJZMrzGhMTExUcHKzTp0/Lx8cnz/vPL1xHwVEUrkEqGtdRFK5B
4joKkqJwDVL+XIdhGLp69aqCgoJs0j+QXYU+ULu4uKhevXravHmzOnToIOlmQN68ebNiY2OzPMfV1VWurq4Wbb6+vjauVPLx8SnU/3O8hesoOIrCNUhF4zqKwjVIXEdBUhSuQbL9dRQrVsxmfQPZVegDtSQNGTJEUVFRql+/vho2bKjZs2crOTlZzzzzjL1LAwAAQBFXJAJ1165ddeHCBY0dO1Znz55V7dq1tWHDhkwfVAQAAADyWpEI1JIUGxt7xyUe9ubq6qpx48ZlWmZS2HAdBUdRuAapaFxHUbgGiesoSIrCNUhF5zqA7Cj0d/kAAAAA7KlIPCkRAAAAsBcCNQAAAGAFAjUAAABgBQI1AAAAYAUCdT54/fXXVa5cObm5ualRo0batWuXvUvKkW+//VaPPvqogoKCZDKZtHbtWnuXlGNTp05VgwYN5O3trVKlSqlDhw46fPiwvcvKsYULF6pmzZrmByVEREToyy+/tHdZVnnllVdkMpk0ePBge5eSI+PHj5fJZLJ4ValSxd5l5coff/yhp556Sn5+fnJ3d1eNGjW0Z88ee5eVbeXKlcv0szCZTIqJibF3aTmSnp6uMWPGKDQ0VO7u7qpQoYImTpyownbvgKtXr2rw4MEKCQmRu7u7GjdurN27d9u7LMCmCNQ29uGHH2rIkCEaN26c9u3bp1q1aqlVq1Y6f/68vUvLtuTkZNWqVUuvv/66vUvJtW3btikmJkY//PCDNm3apBs3bujhhx9WcnKyvUvLkTJlyuiVV17R3r17tWfPHj300ENq3769fvnlF3uXliu7d+/WG2+8oZo1a9q7lFypVq2azpw5Y35t377d3iXl2KVLl9SkSRM5Ozvryy+/1MGDBzVz5kwVL17c3qVl2+7duy1+Dps2bZIkPf7443auLGemTZumhQsXav78+Tp06JCmTZum6dOna968efYuLUf69OmjTZs2acWKFTpw4IAefvhhRUZG6o8//rB3aYDtGLCphg0bGjExMebt9PR0IygoyJg6daodq8o9ScaaNWvsXYbVzp8/b0gytm3bZu9SrFa8eHHjrbfesncZOXb16lUjLCzM2LRpk9G8eXNj0KBB9i4pR8aNG2fUqlXL3mVYbeTIkUbTpk3tXUaeGjRokFGhQgUjIyPD3qXkSNu2bY1evXpZtHXq1Mno0aOHnSrKuWvXrhmOjo7GZ599ZtFet25dY/To0XaqCrA9ZqhtKDU1VXv37lVkZKS5zcHBQZGRkdq5c6cdK8OVK1ckSSVKlLBzJbmXnp6uDz74QMnJyYqIiLB3OTkWExOjtm3bWvz3UdgcPXpUQUFBKl++vHr06KFTp07Zu6QcW79+verXr6/HH39cpUqVUp06dbR48WJ7l5Vrqampevfdd9WrVy+ZTCZ7l5MjjRs31ubNm3XkyBFJ0v79+7V9+3a1bt3azpVlX1pamtLT0+Xm5mbR7u7uXij/ggNkV5F5UmJB9Ndffyk9PT3TI9D9/f3166+/2qkqZGRkaPDgwWrSpImqV69u73Jy7MCBA4qIiND169fl5eWlNWvWqGrVqvYuK0c++OAD7du3r1Cvq2zUqJGWLl2qypUr68yZM4qLi1OzZs30888/y9vb297lZdtvv/2mhQsXasiQIXrppZe0e/duDRw4UC4uLoqKirJ3eTm2du1aXb58WdHR0fYuJcdGjRqlxMREValSRY6OjkpPT9fkyZPVo0cPe5eWbd7e3oqIiNDEiRMVHh4uf39/vf/++9q5c6cqVqxo7/IAmyFQ454TExOjn3/+udDOllSuXFkJCQm6cuWKPv74Y0VFRWnbtm2FJlSfPn1agwYN0qZNmzLNYhUm/541rFmzpho1aqSQkBCtWrVKvXv3tmNlOZORkaH69etrypQpkqQ6dero559/1qJFiwploH777bfVunVrBQUF2buUHFu1apVWrlyp9957T9WqVVNCQoIGDx6soKCgQvWzWLFihXr16qXSpUvL0dFRdevW
Vffu3bV37157lwbYDIHahu677z45Ojrq3LlzFu3nzp1TQECAnaq6t8XGxuqzzz7Tt99+qzJlyti7nFxxcXExz/TUq1dPu3fv1pw5c/TGG2/YubLs2bt3r86fP6+6deua29LT0/Xtt99q/vz5SklJkaOjox0rzB1fX19VqlRJx44ds3cpORIYGJjpl7Hw8HB98skndqoo937//Xd9/fXXWr16tb1LyZXhw4dr1KhR6tatmySpRo0a+v333zV16tRCFagrVKigbdu2KTk5WYmJiQoMDFTXrl1Vvnx5e5cG2AxrqG3IxcVF9erV0+bNm81tGRkZ2rx5c6Fc81qYGYah2NhYrVmzRlu2bFFoaKi9S8ozGRkZSklJsXcZ2dayZUsdOHBACQkJ5lf9+vXVo0cPJSQkFMowLUlJSUk6fvy4AgMD7V1KjjRp0iTTLSSPHDmikJAQO1WUe0uWLFGpUqXUtm1be5eSK9euXZODg+U/y46OjsrIyLBTRdbx9PRUYGCgLl26pI0bN6p9+/b2LgmwGWaobWzIkCGKiopS/fr11bBhQ82ePVvJycl65pln7F1atiUlJVnMup04cUIJCQkqUaKEypYta8fKsi8mJkbvvfee1q1bJ29vb509e1aSVKxYMbm7u9u5uux78cUX1bp1a5UtW1ZXr17Ve++9p61bt2rjxo32Li3bvL29M61d9/T0lJ+fX6Fa0z5s2DA9+uijCgkJ0Z9//qlx48bJ0dFR3bt3t3dpOfLCCy+ocePGmjJlip544gnt2rVLb775pt588017l5YjGRkZWrJkiaKiouTkVDj/aXv00Uc1efJklS1bVtWqVdOPP/6oWbNmqVevXvYuLUc2btwowzBUuXJlHTt2TMOHD1eVKlUK1b97QI7Z+zYj94J58+YZZcuWNVxcXIyGDRsaP/zwg71LypFvvvnGkJTpFRUVZe/Ssi2r+iUZS5YssXdpOdKrVy8jJCTEcHFxMUqWLGm0bNnS+Oqrr+xdltUK423zunbtagQGBhouLi5G6dKlja5duxrHjh2zd1m58umnnxrVq1c3XF1djSpVqhhvvvmmvUvKsY0bNxqSjMOHD9u7lFxLTEw0Bg0aZJQtW9Zwc3Mzypcvb4wePdpISUmxd2k58uGHHxrly5c3XFxcjICAACMmJsa4fPmyvcsCbMpkGIXsEUwAAABAAcIaagAAAMAKBGoAAADACgRqAAAAwAoEagAAAMAKBGoAAADACgRqAAAAwAoEagAAAMAKBGoAhdLJkydlMpmUkJBw1+NatGihwYMH50tNAIB7E4EaQJ6Jjo6WyWSSyWSSi4uLKlasqAkTJigtLc3qfjt06GDRFhwcrDNnzpgfV75161aZTCZdvnzZ4rjVq1dr4sSJVo3/X24P97e2b728vb1VrVo1xcTE6OjRozatBQCQ/wjUAPLUI488ojNnzujo0aMaOnSoxo8fr1dffTVXfaWnpysjIyPLfY6OjgoICJCTk9Nd+yhRooS8vb1zNb61vv76a505c0b79+/XlClTdOjQIdWqVUubN2+2Sz0AANsgUAPIU66urgoICFBISIj69++vyMhIrV+/XpI0a9Ys1ahRQ56engoODtbzzz+vpKQk87lLly6Vr6+v1q9fr6pVq8rV1VW9evXSsmXLtG7dOvOM79atWy1mhU+ePKkHH3xQklS8eHGZTCZFR0dLyrzk49KlS+rZs6eKFy8uDw8PtW7d2mLW+FYNGzduVHh4uLy8vMy/JOSUn5+fAgICVL58ebVv315ff/21GjVqpN69eys9PT0X310AQEFEoAZgU+7u7kpNTZUkOTg4aO7cufrll1+0bNkybdmyRSNGjLA4/tq1a5o2bZreeust/fLLL5o7d66eeOIJc6g9c+aMGjdubHFOcHCwPvnkE0nS4cOHdebMGc2ZMyfLeqKjo7Vnzx6tX79eO3fulGEYatOmjW7cuGFRw4wZM7RixQp9++23OnXqlIYNG2b198LBwUGD
Bg3S77//rr1791rdHwCgYLj730oBIJcMw9DmzZu1ceNGDRgwQJIsZorLlSunSZMm6bnnntOCBQvM7Tdu3NCCBQtUq1Ytc5u7u7tSUlIUEBCQ5ViOjo4qUaKEJKlUqVLy9fXN8rijR49q/fr12rFjhzmUr1y5UsHBwVq7dq0ef/xxcw2LFi1ShQoVJEmxsbGaMGFC7r4Rt6lSpYqkm+usGzZsmCd9AgDsi0ANIE999tln8vLy0o0bN5SRkaEnn3xS48ePl3RzTfHUqVP166+/KjExUWlpabp+/bquXbsmDw8PSZKLi4tq1qxpk9oOHTokJycnNWrUyNzm5+enypUr69ChQ+Y2Dw8Pc5iWpMDAQJ0/fz5PajAMQ5JkMpnypD8AgP2x5ANAnnrwwQeVkJCgo0eP6p9//tGyZcvk6empkydPql27dqpZs6Y++eQT7d27V6+//rokmZeESDdno+0dNp2dnS22TSaTOQhb61ZwDw0NzZP+AAD2xww1gDzl6empihUrZmrfu3evMjIyNHPmTDk43PxdftWqVdnq08XF5T8/xOfi4iJJdz0uPDxcaWlpio+PNy/5uHjxog4fPqyqVatmqxZrZGRkaO7cuQoNDVWdOnVsPh4AIH8wQw0gX1SsWFE3btzQvHnz9Ntvv2nFihVatGhRts4tV66cfvrpJx0+fFh//fWXxQcIbwkJCZHJZNJnn32mCxcuWNw95JawsDC1b99effv21fbt27V//3499dRTKl26tNq3b2/1Nd7u4sWLOnv2rH777TetX79ekZGR2rVrl95++205Ojrm+XgAAPsgUAPIF7Vq1dKsWbM0bdo0Va9eXStXrtTUqVOzdW7fvn1VuXJl1a9fXyVLltSOHTsyHVO6dGnFxcVp1KhR8vf3V2xsbJZ9LVmyRPXq1VO7du0UEREhwzD0xRdfZFrmkRciIyMVGBioGjVqaNSoUQoPD9dPP/1kvsUfAKBoMBl5tTAQAAAAuAcxQw0AAABYgUANAAAAWIFADQAAAFiBQA0AAABYgUANAAAAWIFADQAAAFiBQA0AAABYgUANAAAAWIFADQAAAFiBQA0AAABYgUANAAAAWIFADQAAAFjh/wNDLA/3AnNA0AAAAABJRU5ErkJggg==",
+ "text/plain": [
+ "
"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, ax, df = plot_label_distributions(\n",
+ " partitioner,\n",
+ " label_name=\"label\",\n",
+ " plot_type=\"heatmap\",\n",
+ " size_unit=\"absolute\",\n",
+ " partition_id_axis=\"x\",\n",
+ " legend=True,\n",
+ " verbose_labels=True,\n",
+ " title=\"Per Partition Labels Distribution\",\n",
+ " plot_kwargs={\"annot\": True},\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5167593e67fa3dbb",
+ "metadata": {},
+ "source": [
+ "Note: we used the `plot_kwargs={\"annot\": True}` to add the number directly to the plot."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e2e41273551ac32a",
+ "metadata": {},
+ "source": [
+ "If you are a `pandas` fan, then you might be interested that a similar heatmap can be created with the DataFrame object for visualization in jupyter notebook:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fcc90b52bfd650cf",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "
airplane
\n",
+ "
automobile
\n",
+ "
bird
\n",
+ "
cat
\n",
+ "
deer
\n",
+ "
dog
\n",
+ "
frog
\n",
+ "
horse
\n",
+ "
ship
\n",
+ "
truck
\n",
+ "
\n",
+ "
\n",
+ "
Partition ID
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ "
\n",
+ " \n",
+ " \n",
+ "
\n",
+ "
0
\n",
+ "
817
\n",
+ "
794
\n",
+ "
1462
\n",
+ "
2123
\n",
+ "
432
\n",
+ "
25
\n",
+ "
456
\n",
+ "
384
\n",
+ "
14
\n",
+ "
9
\n",
+ "
\n",
+ "
\n",
+ "
1
\n",
+ "
1416
\n",
+ "
6
\n",
+ "
97
\n",
+ "
5
\n",
+ "
3
\n",
+ "
0
\n",
+ "
3409
\n",
+ "
0
\n",
+ "
3
\n",
+ "
868
\n",
+ "
\n",
+ "
\n",
+ "
2
\n",
+ "
0
\n",
+ "
4
\n",
+ "
11
\n",
+ "
2
\n",
+ "
454
\n",
+ "
3
\n",
+ "
511
\n",
+ "
15
\n",
+ "
84
\n",
+ "
21
\n",
+ "
\n",
+ "
\n",
+ "
3
\n",
+ "
762
\n",
+ "
159
\n",
+ "
1100
\n",
+ "
51
\n",
+ "
120
\n",
+ "
166
\n",
+ "
2
\n",
+ "
1982
\n",
+ "
1351
\n",
+ "
2175
\n",
+ "
\n",
+ "
\n",
+ "
4
\n",
+ "
2
\n",
+ "
43
\n",
+ "
714
\n",
+ "
2
\n",
+ "
19
\n",
+ "
2400
\n",
+ "
425
\n",
+ "
1
\n",
+ "
151
\n",
+ "
477
\n",
+ "
\n",
+ "
\n",
+ "
5
\n",
+ "
67
\n",
+ "
79
\n",
+ "
170
\n",
+ "
25
\n",
+ "
2552
\n",
+ "
477
\n",
+ "
27
\n",
+ "
44
\n",
+ "
590
\n",
+ "
0
\n",
+ "
\n",
+ "
\n",
+ "
6
\n",
+ "
422
\n",
+ "
2
\n",
+ "
4
\n",
+ "
486
\n",
+ "
380
\n",
+ "
92
\n",
+ "
90
\n",
+ "
380
\n",
+ "
50
\n",
+ "
6
\n",
+ "
\n",
+ "
\n",
+ "
7
\n",
+ "
122
\n",
+ "
2811
\n",
+ "
597
\n",
+ "
2174
\n",
+ "
1038
\n",
+ "
1727
\n",
+ "
1
\n",
+ "
682
\n",
+ "
515
\n",
+ "
4
\n",
+ "
\n",
+ "
\n",
+ "
8
\n",
+ "
256
\n",
+ "
29
\n",
+ "
342
\n",
+ "
75
\n",
+ "
1
\n",
+ "
84
\n",
+ "
8
\n",
+ "
1511
\n",
+ "
2240
\n",
+ "
1417
\n",
+ "
\n",
+ "
\n",
+ "
9
\n",
+ "
1136
\n",
+ "
1073
\n",
+ "
503
\n",
+ "
57
\n",
+ "
1
\n",
+ "
26
\n",
+ "
71
\n",
+ "
1
\n",
+ "
2
\n",
+ "
23
\n",
+ "
\n",
+ " \n",
+ "
\n"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "df.style.background_gradient(axis=None, cmap=\"Greens\", vmin=0)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "37d85e1b40d54918",
+ "metadata": {},
+ "source": [
+ "## Plot Comparison of Label Distributions"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4f49259a3de7dd17",
+ "metadata": {},
+ "source": [
+ "Now, once you know how to visualize a single partitioned dataset, you'll learn how to compare a few of them on a single plot.\n",
+ "\n",
+ "Let's compare:\n",
+ "\n",
+ "- IidPartitioner,\n",
+ "- DirichletPartitioner,\n",
+ "- ShardPartitioner\n",
+ "still using the `cifar10` dataset.\n",
+ "\n",
+ "We need to create a list of partitioners. Each partitioner needs to have a dataset assigned to it (it does not have to be the same dataset so you can also compare the same partitioning on different datasets)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "9e84a9192a266f3e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from flwr_datasets import FederatedDataset\n",
+ "from flwr_datasets.partitioner import (\n",
+ " IidPartitioner,\n",
+ " DirichletPartitioner,\n",
+ " ShardPartitioner,\n",
+ ")\n",
+ "\n",
+ "partitioner_list = []\n",
+ "title_list = [\"IidPartitioner\", \"DirichletPartitioner\", \"ShardPartitioner\"]\n",
+ "\n",
+ "## IidPartitioner\n",
+ "fds = FederatedDataset(\n",
+ " dataset=\"cifar10\",\n",
+ " partitioners={\n",
+ " \"train\": IidPartitioner(num_partitions=10),\n",
+ " },\n",
+ ")\n",
+ "partitioner_list.append(fds.partitioners[\"train\"])\n",
+ "\n",
+ "## DirichletPartitioner\n",
+ "fds = FederatedDataset(\n",
+ " dataset=\"cifar10\",\n",
+ " partitioners={\n",
+ " \"train\": DirichletPartitioner(\n",
+ " num_partitions=10,\n",
+ " partition_by=\"label\",\n",
+ " alpha=1.0,\n",
+ " min_partition_size=0,\n",
+ " ),\n",
+ " },\n",
+ ")\n",
+ "partitioner_list.append(fds.partitioners[\"train\"])\n",
+ "\n",
+ "## ShardPartitioner\n",
+ "fds = FederatedDataset(\n",
+ " dataset=\"cifar10\",\n",
+ " partitioners={\n",
+ " \"train\": ShardPartitioner(\n",
+ " num_partitions=10, partition_by=\"label\", num_shards_per_partition=2\n",
+ " )\n",
+ " },\n",
+ ")\n",
+ "partitioner_list.append(fds.partitioners[\"train\"])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d18bae80",
+ "metadata": {},
+ "source": [
+ "Now let's visualize them side by side"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f2ee2864",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAA5UAAAHlCAYAAABlFdg7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy80BEi2AAAACXBIWXMAAA9hAAAPYQGoP6dpAACMIElEQVR4nOzdd1QUZ9sG8GtpSwdFUFCkKyp2xVgQE1Ek9t4Sxd7QoLHEGBUsIfYa0RiDRk3sJbGLLbHGHisqATFqhBiKIIKwz/eHH/u60pZ1l2Xh+p2z5zAzzzxzz87cAzfTJEIIASIiIiIiIiIV6Gk7ACIiIiIiItJdLCqJiIiIiIhIZSwqiYiIiIiISGUsKomIiIiIiEhlLCqJiIiIiIhIZSwqiYiIiIiISGUsKomIiIiIiEhlLCqJiIiIiIhIZSwqiYiIiIiISGUsKolITiKRICQkRNthvLeNGzfC09MThoaGsLa21nY4BQoMDISzs7NSbUNCQiCRSDQaz8mTJyGRSHDy5EmNLqc4ODs7o0OHDtoOg4iIqNRjUUn0lujoaIwYMQKurq4wNjaGpaUlmjdvjmXLliE9PV3b4ZES7t69i8DAQLi5uWHt2rX47rvv8m2bU6TlfExNTVGzZk189dVXSElJUVtMT548QUhICK5du1Zo25cvXyIkJKRUFHWquHHjBnr06AEnJycYGxujcuXKaNOmDVasWKHt0CgPJ0+eRLdu3VCpUiUYGRnBzs4OHTt2xK5du+RtYmNjIZFIsHDhQoX53s69tz99+vRRWMaBAwcgkUjg4OAAmUyWZxzOzs4KfZiZmcHb2xs//vhjnu3nzp2LTp06oWLFioX+M+3x48fo1asXrK2tYWlpic6dO+Ovv/4qwrdERFT6GWg7AKKSYv/+/ejZsyekUikGDBgALy8vZGZm4vTp05g0aRJu3bpVYIFSGqSnp8PAQLcPCydPnoRMJsOyZcvg7u6u1Dzh4eEwNzdHamoqjhw5grlz5+L48eM4c+aMWs4MPnnyBKGhoXB2dka9evUUpq1du1bhD+WXL18iNDQUANCqVSuFtl999RW++OKL946nIC1btkR6ejqMjIw0upy8nD17Fh9++CGqVq2KYcOGoVKlSnj06BHOnz+PZcuWYezYscUeE+Vv5syZmDVrFjw8PDBixAg4OTnh+fPnOHDgALp3747NmzejX79+BfYxbtw4NG7cWGHcu2fuN2/eDGdnZ8TGxuL48ePw8/PLs6969erh888/BwA8ffoU33//PQYOHIiMjAwMGzZMoe1XX32FSpUqoX79+jh8+HC+8aWmpuLDDz9EcnIyvvzySxgaGmLJkiXw9fXFtWvXYGNjU+D6ERGVFbr91yORmsTExKBPnz5wcnLC8ePHYW9vL582ZswYPHjwAPv379dihJojk8mQmZkJY2NjGBsbazuc9xYfHw8ARbrstUePHqhQoQIAYOTIkejevTt27dqF8+fPo2nTpirHkpWVle+ZlRyGhoZK92dgYKDxol9PT09r+8HcuXNhZWWFixcv5tp+OduVSoYdO3Zg1qxZ6NGjB3766SeF/XjSpEk4fPgwXr9+XWg/Pj4+6NGjR77T09LSsHfvXoSFhSEiIgKbN2/Ot6isXLkyPvnkE/lwYGAgXF1dsWTJklxFZUxMDJydnfHvv//C1tY23+WvWrUK9+/fxx9//CEvfgMCAuDl5YVFixbh66+/LnQdiYjKAl7+SgRg/vz5SE1Nxbp16xQKyhzu7u747LPP5MNZWVmYPXs23NzcIJVK4ezsjC+//BIZGRkK8+Xc03Xy5Ek0atQIJiYmqF27tvzSxl27dqF27dowNjZGw4YNcfXqVYX5AwMDYW5ujr/++gv+/v4wMzODg4MDZs2aBSGEQtuFCxeiWbNmsLGxgYmJCRo2bIgdO3bkWheJRIKgoCBs3rwZtWrVglQqxaFDh+TT3r4M7MWLFwgODoazszOkUins7OzQpk0b
XLlyRaHP7du3o2HDhjAxMUGFChXwySef4PHjx3muy+PHj9GlSxeYm5vD1tYWEydORHZ2dj5bRtGqVavkMTs4OGDMmDFISkpS+L5nzpwJALC1tVX5HtGPPvoIwJs/PDMzMzFjxgw0bNgQVlZWMDMzg4+PD06cOKEwz9uX+C1dulS+b6xatUr+x+igQYPkl+etX79e/r3knJmJjY2V/4EbGhoqb5uzDnndU1nUffH06dPw9vaGsbExXF1dc10emNc9la1atYKXlxdu376NDz/8EKampqhcuTLmz5+f67t7+PAhOnXqBDMzM9jZ2WH8+PE4fPiwUvdpRkdHo1atWnn+Q8DOzi7XuE2bNsHb2xumpqYoV64cWrZsiSNHjuRqV9g6A0BSUhKCg4Ph6OgIqVQKd3d3zJs3T+GfAm9v42+//Raurq4wNTVF27Zt8ejRIwghMHv2bFSpUgUmJibo3Lkz/vvvv1zLOnjwIHx8fGBmZgYLCwu0b98et27dUmjzzz//YNCgQahSpQqkUins7e3RuXNnxMbGFvgdAsDx48fl/VtbW6Nz5864c+eOQpucfenBgwcIDAyEtbU1rKysMGjQILx8+bLQZUyfPh3ly5fHDz/8kOc/Rvz9/dVyP+vu3buRnp6Onj17ok+fPti1axdevXql1Ly2trbw9PREdHR0rmnK3se8Y8cONG7cWOFsqqenJ1q3bo1t27Yp1QcRUVnAopIIwK+//gpXV1c0a9ZMqfZDhw7FjBkz0KBBA/mlUGFhYbnuBQKABw8eoF+/fujYsSPCwsKQmJiIjh07YvPmzRg/fjw++eQThIaGIjo6Gr169cp1Zis7Oxvt2rVDxYoVMX/+fDRs2BAzZ86UF085li1bhvr162PWrFn4+uuvYWBggJ49e+Z5hvX48eMYP348evfujWXLluX7B9bIkSMRHh6O7t27Y9WqVZg4cSJMTEwU/kBdv349evXqBX19fYSFhWHYsGHYtWsXWrRooVDw5ayLv78/bGxssHDhQvj6+mLRokVKXVYcEhKCMWPGwMHBAYsWLUL37t2xZs0atG3bVn5GZOnSpejatSuAN5e0bty4Ed26dSu073fl/BFqY2ODlJQUfP/992jVqhXmzZuHkJAQJCQkwN/fP897JCMiIrBixQoMHz4cixYtQteuXTFr1iwAwPDhw7Fx40Zs3LgRLVu2zDWvra0twsPDAQBdu3aVty1oHYq6L/bo0QNt2rTBokWLUK5cOQQGBuYqaPKSmJiIdu3aoW7duli0aBE8PT0xZcoUHDx4UN4mLS0NH330ESIjIzFu3DhMmzYNZ8+exZQpUwrtHwCcnJxw+fJl3Lx5s9C2oaGh+PTTT2FoaIhZs2YhNDQUjo6OOH78eJHX+eXLl/D19cWmTZswYMAALF++HM2bN8fUqVMxYcKEXMvevHkzVq1ahbFjx+Lzzz/HqVOn0KtXL3z11Vc4dOgQpkyZguHDh+PXX3/FxIkTFebduHEj2rdvD3Nzc8ybNw/Tp0/H7du30aJFC4WCsXv37ti9ezcGDRqEVatWYdy4cXjx4gXi4uIK/F4iIyPh7++P+Ph4hISEYMKECTh79iyaN2+eZ0Haq1cvvHjxAmFhYejVqxfWr18vv/w6P/fv38fdu3fRpUsXWFhYFNi2MC9evMC///6r8Hn7GLh582Z8+OGHqFSpEvr06YMXL17g119/VarvrKws/P333yhXrpxKsclkMvz5559o1KhRrmne3t6Ijo7GixcvVOqbiKjUEURlXHJysgAgOnfurFT7a9euCQBi6NChCuMnTpwoAIjjx4/Lxzk5OQkA4uzZs/Jxhw8fFgCEiYmJePjwoXz8mjVrBABx4sQJ+biBAwcKAGLs2LHycTKZTLRv314YGRmJhIQE+fiXL18qxJOZmSm8vLzERx99pDAegNDT0xO3bt3KtW4AxMyZM+XDVlZWYsyYMfl+F5mZmcLOzk54eXmJ9PR0+fh9+/YJAGLGjBm51mXW
rFkKfdSvX180bNgw32UIIUR8fLwwMjISbdu2FdnZ2fLxK1euFADEDz/8IB83c+ZMAUDhu8lPTtuoqCiRkJAgYmJixJo1a4RUKhUVK1YUaWlpIisrS2RkZCjMl5iYKCpWrCgGDx4sHxcTEyMACEtLSxEfH6/Q/uLFiwKAiIiIyBXDwIEDhZOTk3w4ISEh13Z4N94cquyLv/32m3xcfHy8kEql4vPPP5ePO3HiRK790NfXVwAQP/74o3xcRkaGqFSpkujevbt83KJFiwQAsWfPHvm49PR04enpmavPvBw5ckTo6+sLfX190bRpUzF58mRx+PBhkZmZqdDu/v37Qk9PT3Tt2lVhfxDiTX4UdZ1nz54tzMzMxL179xT6+uKLL4S+vr6Ii4sTQvxvG9va2oqkpCR5u6lTpwoAom7duuL169fy8X379hVGRkbi1atXQgghXrx4IaytrcWwYcMUlvPPP/8IKysr+fjExEQBQCxYsKDA7ysv9erVE3Z2duL58+fycdevXxd6enpiwIAB8nE5+9Lb+7AQQnTt2lXY2NgUuIy9e/cKAGLJkiVKxZTzvb29Pjn7WV6fmJgYIYQQz549EwYGBmLt2rXy+Zo1a5bnsdrJyUm0bdtWJCQkiISEBHHjxg3x6aefCgAFHsMKyrecae8es4QQ4ttvvxUAxN27d5X6DoiISjueqaQyL+cpn8r+x/3AgQMAkOsMRs4DIt49M1izZk2F+/KaNGkC4M0lllWrVs01Pq+nCgYFBcl/zrl8NTMzE5GRkfLxJiYm8p8TExORnJwMHx+fXJeqAoCvry9q1qxZyJq+uS/xwoULePLkSZ7TL126hPj4eIwePVrhPrz27dvD09Mzz7OkI0eOVBj28fEp9EmKkZGRyMzMRHBwMPT0/nfYGjZsGCwtLd/7ftfq1avD1tYWLi4uGDFiBNzd3bF//36YmppCX19f/tAamUyG//77D1lZWWjUqFGe32337t0LvEdLnVTZF318fOTDtra2qF69ulJPsjQ3N1e4X83IyAje3t4K8x46dAiVK1dGp06d5OOMjY1z3c+WnzZt2uDcuXPo1KkTrl+/jvnz58Pf3x+VK1fGL7/8Im+3Z88eyGQyzJgxQ2F/AJDr8mBl1nn79u3w8fFBuXLlFM6Y+fn5ITs7G7/99ptCnz179oSVlZV8OCd3P/nkE4V7Xps0aYLMzEz5peBHjx5FUlIS+vbtq7AcfX19NGnSRH5JtYmJCYyMjHDy5EkkJiYq9d0Bbx5Oc+3aNQQGBqJ8+fLy8XXq1EGbNm3k+8vb8srH58+fF/j046IeMwsyY8YMHD16VOFTqVIlAMCWLVugp6eH7t27y9v37dsXBw8ezPN7OXLkCGxtbWFra4vatWtj48aNGDRoEBYsWKBSbDlP/JZKpbmm5Rzv+FRwIqI3+KAeKvMsLS0BQOnLmB4+fAg9Pb1cTxatVKkSrK2t8fDhQ4XxbxeOAOR/jDo6OuY5/t0/lvT09ODq6qowrlq1agCgcDnbvn37MGfOHFy7dk3hfrq8nl7q4uKS7/q9bf78+Rg4cCAcHR3RsGFDfPzxxxgwYIA8npx1rV69eq55PT09cfr0aYVxxsbGuQqucuXKFfqHc37LMTIygqura67vvKh27twJS0tLGBoaokqVKnBzc1OYvmHDBixatAh3795VePhIXt+jst+tOrzvvggo9/0DQJUqVXLtS+XKlcOff/6pEI+bm1uudso+hRcAGjdujF27diEzMxPXr1/H7t27sWTJEvTo0QPXrl1DzZo1ER0dDT09PaX+MaLMOt+/fx9//vlnvv8MePchQarm9P379wH8757dd+Uci6RSKebNm4fPP/8cFStWxAcffIAOHTpgwIAB8oIrLwXlY40aNXD48GGkpaXBzMws33XJuVQ0MTFRHk9+carj0s/atWvn++CdnHtmnz9/jufPnwMA6tevj8zMTGzfvh3Dhw9XaN+kSRPMmTMH2dnZuHnzJubMmYPE
xESVn2Sc84+6d+9PBiC/r/Ptf+YREZVlLCqpzLO0tISDg4NS93G9TdlXTejr6xdpvHjnATzK+P3339GpUye0bNkSq1atgr29PQwNDREREYGffvopV3tl/xDq1asXfHx8sHv3bhw5cgQLFizAvHnzsGvXLgQEBBQ5zvzWWdtatmwpf/rruzZt2oTAwEB06dIFkyZNgp2dnfz+0bweAKKNPzLfd19UZp9T5/6qDCMjI/kDUqpVq4ZBgwZh+/btue4lLowycctkMrRp0waTJ0/Os23OP3EK67OwZeXcK7hx48Y8i8O3z3IGBwejY8eO2LNnDw4fPozp06cjLCwMx48fR/369fNcjipU2a6enp4A3rxTVFPu37+PixcvAgA8PDxyTd+8eXOuorJChQryAtXf3x+enp7o0KEDli1blue9sYUpX748pFIpnj59mmtazjgHB4ci90tEVBqxqCQC0KFDB3z33Xc4d+5coa+QcHJygkwmw/3791GjRg35+GfPniEpKQlOTk5qjU0mk+Gvv/5S+MP23r17AP73BMOdO3fC2NgYhw8fVrhUKyIi4r2Xb29vj9GjR2P06NGIj49HgwYNMHfuXAQEBMjXNSoqKtfZl6ioKLV9F28v5+2ztpmZmYiJicn3TIc67NixA66urti1a5dC8VaU4qYo77osStvi3heVief27dsQQiisx4MHD96r35wHpeT8Ie/m5gaZTIbbt2/neu+nKtzc3JCamqrR/ShnOcCbJ9kqsyw3Nzd8/vnn+Pzzz3H//n3Uq1cPixYtwqZNm/Js/3aevOvu3buoUKGCwllKVVWrVg3Vq1fH3r17sWzZMpibm793n+/avHkzDA0NsXHjxlyF7+nTp7F8+XLExcXleSY6R/v27eHr64uvv/4aI0aMKPK66+npoXbt2rh06VKuaRcuXICrq6taLgEmIioNeE8lEYDJkyfDzMwMQ4cOxbNnz3JNj46OxrJlywAAH3/8MYA3Txp92+LFiwG8+UNG3VauXCn/WQiBlStXwtDQEK1btwbw5myDRCJReDVHbGws9uzZo/Iys7OzkZycrDDOzs4ODg4O8svBGjVqBDs7O6xevVrhErGDBw/izp07avsu/Pz8YGRkhOXLlyucQVm3bh2Sk5M18p3nyPmD9u3lXrhwAefOnVO6j5w/Zt99Gm5eTE1NlW6rjX2xIP7+/nj8+LHC/Y+vXr3C2rVrlZr/xIkTeZ4hy7kXMOeyzi5dukBPTw+zZs3K9bRkVc6c9urVC+fOncPhw4dzTUtKSkJWVlaR+8yLv78/LC0t8fXXX+f5DseEhAQAb55G++5rM9zc3GBhYZHnpZg57O3tUa9ePWzYsEFh/7l58yaOHDki31/UITQ0FM+fP8fQoUPz/H6OHDmCffv2qdz/5s2b4ePjg969e6NHjx4Kn0mTJgEAfv7550L7mTJlCp4/f670PviuHj164OLFiwqFZVRUFI4fP46ePXuq1CcRUWnEM5VEePMH208//YTevXujRo0aGDBgALy8vJCZmYmzZ89i+/btCAwMBADUrVsXAwcOxHfffYekpCT4+vrijz/+wIYNG9ClSxd8+OGHao3N2NgYhw4dwsCBA9GkSRMcPHgQ+/fvx5dffim/B6x9+/ZYvHgx2rVrh379+iE+Ph7ffvst3N3dFe55K4oXL16gSpUq6NGjB+rWrQtzc3NERkbi4sWLWLRoEQDA0NAQ8+bNw6BBg+Dr64u+ffvi2bNn8teUjB8/Xi3fga2tLaZOnYrQ0FC0a9cOnTp1QlRUlPwdkG8/QEbdOnTogF27dqFr165o3749YmJisHr1atSsWROpqalK9eHm5gZra2usXr0aFhYWMDMzQ5MmTfK8/9LExAQ1a9bE1q1bUa1aNZQvXx5eXl7w8vLK1ba498XCjBgxAitXrkTfvn3x2Wefwd7eHps3b5Y/1KSws7Bjx47Fy5cv0bVrV3h6esrzb+vWrXB2dsagQYMAvLlHc9q0aZg9
ezZ8fHzQrVs3SKVSXLx4EQ4ODggLCytS3JMmTcIvv/yCDh06IDAwEA0bNkRaWhpu3LiBHTt2IDY2Nt/Lo4vC0tIS4eHh+PTTT9GgQQP06dMHtra2iIuLw/79+9G8eXOsXLkS9+7dQ+vWrdGrVy/UrFkTBgYG2L17N549e5bnq2LetmDBAgQEBKBp06YYMmQI0tPTsWLFClhZWan0ztb89O7dGzdu3MDcuXNx9epV9O3bF05OTnj+/DkOHTqEY8eO5XnpvTIuXLiABw8eKDyg7G2VK1dGgwYNsHnz5kJfVxMQEAAvLy8sXrwYY8aMkb9Tc+PGjXj48KH8nZy//fYb5syZAwD49NNP5Wd9R48ejbVr16J9+/aYOHEiDA0NsXjxYlSsWFH+QCwiIgJfKUL0tnv37olhw4YJZ2dnYWRkJCwsLETz5s3FihUr5K8FEEKI169fi9DQUOHi4iIMDQ2Fo6OjmDp1qkIbId485r59+/a5loM8HnOf12P3Bw4cKMzMzER0dLRo27atMDU1FRUrVhQzZ87M9SqFdevWCQ8PDyGVSoWnp6eIiIjI9QqK/Jb99rScR+tnZGSISZMmibp16woLCwthZmYm6tatK1atWpVrvq1bt4r69esLqVQqypcvL/r37y/+/vtvhTY56/KuvGLMz8qVK4Wnp6cwNDQUFStWFKNGjRKJiYl59leUV4oU1FYmk4mvv/5aODk5CalUKurXry/27duX61UgeW2/t+3du1fUrFlTGBgYKLxe5N1+hBDi7NmzomHDhsLIyEhhm+T1Xb3vvujr6yt8fX3lw/m9UqRWrVq55s0r9r/++ku0b99emJiYCFtbW/H555+LnTt3CgDi/PnzeX43OQ4ePCgGDx4sPD09hbm5uTAyMhLu7u5i7Nix4tmzZ7na//DDD/L9rly5csLX11ccPXq0yOssxJvXfUydOlW4u7sLIyMjUaFCBdGsWTOxcOFC+StN8tvGOd/Z9u3bFcZHREQIAOLixYu52vv7+wsrKythbGws3NzcRGBgoLh06ZIQQoh///1XjBkzRnh6egozMzNhZWUlmjRpIrZt21bg95cjMjJSNG/eXJiYmAhLS0vRsWNHcfv2bYU2+e37OTHnvNajMMeOHROdO3cWdnZ2wsDAQNja2oqOHTuKvXv3ytsU9EqRd78zIYQYO3asACCio6PzXW5ISIgAIK5fvy6EyH9bCyHE+vXrc73SJ+c1OXl93n31zaNHj0SPHj2EpaWlMDc3Fx06dBD3799X5ushIiozJEJo6CkLRPTeAgMDsWPHDqXPiBGVREuXLsX48ePx999/o3LlytoOh4iIiNSM91QSEZHavPvevlevXmHNmjXw8PBgQUlERFRK8Z5KIiJSm27duqFq1aqoV68ekpOTsWnTJty9exebN2/WdmhERESkISwqiYhIbfz9/fH9999j8+bNyM7ORs2aNbFlyxb07t1b26ERERGRhvCeSiIiIiIiIlIZ76kkIiIiIiIilbGoJCIiIiIiIpWxqCQiIiIiIiKVsagkIiIiIiIilbGoJCIiIiIiIpWxqCQiIiIiIiKVsagkIiIiIiIilbGoJCIiIiIiIpWxqCQiIiIiIiKVsagkIiIiIiIilbGoJCIiIiIiIpWxqCQiIiIiIiKVsagkIiIiIiIilbGoJCIiIiIiIpWxqCQiIiIiIiKVsagkIiIiIiIilbGoJCIiIiIiIpWxqCQiIiIiIiKVsagkIiIiIiIilbGoLGNCQkIgkUiUaiuRSBASEqLZgIqoKPGvX78eEokEsbGxmg2K6D0UZZ8GVMvLnFy4dOlSoW1btWqFVq1aFal/XcBjB5UkEokEQUFBWo2hJOb6yZMnIZFIcPLkyULbxsbGQiKRYP369RqPi4gKx6KylCvKH5PK9pXzMTY2RrVq1RAUFIRnz56pIdo3Xr58iZCQEKV+qQDA119/jT179qht+UTvI688cXBwgL+/P5YvX44XL15oO0SNyS8Xeeyg
suTGjRvo0aMHnJycYGxsjMqVK6NNmzZYsWKFtkMrUE6RlvPR19dH1apV0bVrV1y7dk2ty1q1apXSxeBPP/2EpUuXqnX5RKR+LCrLmK+++grp6env1cesWbOwceNGrFy5Es2aNUN4eDiaNm2Kly9fqiXGly9fIjQ0NM8/DPOKP78/DD/99FOkp6fDyclJLXERFUVOnoSHh2Ps2LEAgODgYNSuXRt//vmnvF1RczI9PR1fffWV2uNVh8KKNB47qLQ7e/YsGjVqhOvXr2PYsGFYuXIlhg4dCj09PSxbtkzb4Smlb9++2LhxI3744Qf069cPx48fxwcffKDWwjK/orJly5ZIT09Hy5Yt5ePyKyqdnJyQnp6OTz/9VG1xEZHqDLQdABUvAwMDGBi832YPCAhAo0aNAABDhw6FjY0NFi9ejL1796Jv374q9yuTyZCZmVlgm6LEr6+vD319fZXj0bSc9TU2NtZ2KKQBb+cJAEydOhXHjx9Hhw4d0KlTJ9y5cwcmJiZK7dNv7yu6vL/w2KEePHaUXHPnzoWVlRUuXrwIa2trhWnx8fHFGktaWhrMzMyKPF+DBg3wySefyIebN2+OTp06ITw8HGvWrHmvmF6+fAlTU9N8p+vp6Sm9X+dc9VCSFba+RKUJz1SWMXndV5SRkYHx48fD1tYWFhYW6NSpE/7++2+l+/zoo48AADExMQCAhQsXolmzZrCxsYGJiQkaNmyIHTt25Jov556SzZs3o1atWpBKpVi9ejVsbW0BAKGhofLLcHLuIXs3folEgrS0NGzYsEHeNjAwEED+90WtWrVKvjwHBweMGTMGSUlJCm1atWoFLy8v3L59Gx9++CFMTU1RuXJlzJ8/P9d6ZGRkYObMmXB3d4dUKoWjoyMmT56MjIyMQtf30KFDSn/PpPs++ugjTJ8+HQ8fPsSmTZsA5J2TBe0red1T+fjxYwwZMgQODg6QSqVwcXHBqFGjchVaGRkZmDBhAmxtbWFmZoauXbsiISGh0LiV2ccLysWCvg+Axw4eO0qP6Oho1KpVK1dBCQB2dna5xu3ZswdeXl6QSqWoVatWru368OFDjB49GtWrV4eJiQlsbGzQs2fPXPtmzj576tQpjB49GnZ2dqhSpYp8+nfffQc3NzeYmJjA29sbv//+u9Lr9G6e7t27F+3bt5cfb9zc3DB79mxkZ2crzJeTC5cvX0bLli1hamqKL7/8Es7Ozrh16xZOnTolz72cezvfvaeyVatW2L9/Px4+fChv6+zsDCD/eyqPHz8OHx8fmJmZwdraGp07d8adO3cU2uQcDx48eIDAwEBYW1vDysoKgwYNyvPKiU2bNqFhw4YwMTFB+fLl0adPHzx69Eip9SUqK3imkjB06FBs2rQJ/fr1Q7NmzXD8+HG0b99e6fmjo6MBADY2NgCAZcuWoVOnTujfvz8yMzOxZcsW9OzZE/v27cvV7/Hjx7Ft2zYEBQWhQoUKqFu3LsLDwzFq1Ch07doV3bp1AwDUqVMnz2Vv3LgRQ4cOhbe3N4YPHw4AcHNzyzfWkJAQhIaGws/PD6NGjUJUVBTCw8Nx8eJFnDlzBoaGhvK2iYmJaNeuHbp164ZevXphx44dmDJlCmrXro2AgAAAb84YdOrUCadPn8bw4cNRo0YN3LhxA0uWLMG9e/dyXVr37vrm/HKksuPTTz/Fl19+iSNHjmDYsGH5tlN2X3ny5Am8vb2RlJSE4cOHw9PTE48fP8aOHTvw8uVLGBkZyduOHTsW5cqVw8yZMxEbG4ulS5ciKCgIW7duzTcOZffxouYiwGMHjx2lj5OTE86dO4ebN2/Cy8urwLanT5/Grl27MHr0aFhYWGD58uXo3r074uLi5Dlx8eJFnD17Fn369EGVKlUQGxuL8PBwtGrVCrdv3851Fmz06NGwtbXFjBkzkJaWBgBYt24dRowYgWbNmiE4OBh//fUXOnXqhPLly8PR0bHQdXo3T9evXw9zc3NMmDAB5ubmOH78OGbM
mIGUlBQsWLBAYd7nz58jICAAffr0wSeffIKKFSuiVatWGDt2LMzNzTFt2jQAQMWKFfNc9rRp05CcnIy///4bS5YsAQCYm5vnG2tkZCQCAgLg6uqKkJAQpKenY8WKFWjevDmuXLmSK2969eoFFxcXhIWF4cqVK/j+++9hZ2eHefPmydvMnTsX06dPR69evTB06FAkJCRgxYoVaNmyJa5evarwD4S81peozBBUqkVERAgA4uLFi0IIIWbOnCne3uzXrl0TAMTo0aMV5uvXr58AIGbOnJmrr8jISJGQkCAePXoktmzZImxsbISJiYn4+++/hRBCvHz5UqGvzMxM4eXlJT766COF8QCEnp6euHXrlsL4hISEXMvO8W78QghhZmYmBg4cmO+6x8TECCGEiI+PF0ZGRqJt27YiOztb3m7lypUCgPjhhx/k43x9fQUA8eOPP8rHZWRkiEqVKonu3bvLx23cuFHo6emJ33//XWHZq1evFgDEmTNnCl1fKl3ezbm8WFlZifr16wsh8t6nC9pX3s2NAQMGCD09vTyXJ5PJFGLy8/OTjxNCiPHjxwt9fX2RlJQkH+fr6yt8fX3lw0XZxwvLRR47eOwo7Y4cOSL09fWFvr6+aNq0qZg8ebI4fPiwyMzMVGgHQBgZGYkHDx7Ix12/fl0AECtWrJCPezcnhBDi3LlzufaxnH22RYsWIisrSz4+MzNT2NnZiXr16omMjAz5+O+++04AUMj1mJgYAUCEhoaKhIQE8c8//4iTJ0+K+vXrCwBi586d+cY0YsQIYWpqKl69eiUfl5MLq1evztW+Vq1aCsvOceLECQFAnDhxQj6uffv2wsnJKVfbnHgjIiLk4+rVqyfs7OzE8+fP5eOuX78u9PT0xIABA+Tjco4HgwcPVuiza9euwsbGRj4cGxsr9PX1xdy5cxXa3bhxQxgYGCiML2h9icoCXv5axh04cAAAMG7cOIXxwcHB+c7j5+cHW1tbODo6ok+fPjA3N8fu3btRuXJlAICJiYm8bWJiIpKTk+Hj44MrV67k6svX1xc1a9ZUw5oULjIyEpmZmQgODoae3v92/WHDhsHS0hL79+9XaG9ubq5wX4mRkRG8vb3x119/ycdt374dNWrUgKenJ/7991/5J+dyoRMnTij0WZzrSyWXubl5oU+BVWZfkclk2LNnDzp27Khw/2aOdy+rHT58uMI4Hx8fZGdn4+HDh/kuo6j7eEF47OCxo7Rr06YNzp07h06dOuH69euYP38+/P39UblyZfzyyy8Kbf38/BTOjtepUweWlpYK+8nbOfH69Ws8f/4c7u7usLa2zjMvhg0bpnA/8KVLlxAfH4+RI0cqXLUQGBgIKyurPNdh5syZsLW1RaVKldCqVStER0dj3rx58rP/b8f04sUL/Pvvv/Dx8cHLly9x9+5dhb6kUikGDRpU4HemLk+fPsW1a9cQGBiI8uXLy8fXqVMHbdq0kf+987aRI0cqDPv4+OD58+dISUkBAOzatQsymQy9evVSyNNKlSrBw8MjV54W5/oSlTS8/LWMe/jwIfT09HJd9lW9evV85/n2229RrVo1GBgYoGLFiqhevbrCH1r79u3DnDlzcO3atVz3XL3LxcVFDWuhnJw/nN9dNyMjI7i6uub6w7pKlSq5Yi5XrpzCkzvv37+PO3fuyO/lete7D2YozvWlkis1NTXP+6vepsy+kpCQgJSUlEIvs8tRtWpVheFy5coBeFPA5aeo+3hBeOzgsaMsaNy4MXbt2oXMzExcv34du3fvxpIlS9CjRw9cu3ZN/s+Bd/MReLOfvJ2P6enpCAsLQ0REBB4/fgwhhHxacnJyrvnf3U9y9k0PDw+F8YaGhnB1dc0z/uHDh6Nnz57Q09ODtbW1/D7eHLdu3cJXX32F48ePy4uv/GKqXLmyQjGrSfnlKQDUqFEDhw8fzvXwooKOiZaWlrh//z6EELm+vxxvX/YOFO/6EpU0LCqpyLy9vfM8KwIAv//+Ozp1
6oSWLVti1apVsLe3h6GhISIiIvDTTz/lav/2fzxLmvye/vj2L3WZTIbatWtj8eLFebZ9936Vkry+VDz+/vtvJCcnw93dvcB2mthXlNmn31XUfbwgPHbw2FGWGBkZoXHjxmjcuDGqVauGQYMGYfv27Zg5cyYA5faTsWPHIiIiAsHBwWjatCmsrKwgkUjQp08fyGSyXPOqYz/x8PCAn59fntOSkpLg6+sLS0tLzJo1C25ubjA2NsaVK1cwZcqUXDGV9P22sG0gk8kgkUhw8ODBPNu+e39nSV9fIk1iUVnGOTk5QSaTITo6WuG/e1FRUSr1t3PnThgbG+Pw4cMK/9mMiIhQuo+8zkqoo33OO+eioqIU/kObmZmJmJiYfH+JFsTNzQ3Xr19H69atixw3lU0bN24EAPj7+793X7a2trC0tMTNmzffu6/8FGUff58c4LGDSrOcf6Y8ffq0SPPt2LEDAwcOxKJFi+TjXr16leupw/nJ2Xfv378vv7QaeHMpbUxMDOrWrVukeE6ePInnz59j165dCu+SzHkyrLKKss+rkqfvunv3LipUqFDkV6y4ublBCAEXFxdUq1atSPMSlTW8p7KMy3kS4fLlyxXG5/WiYWXo6+tDIpEoPFo8Nja2wBeivyvnaXbK/tI0MzNTqq2fnx+MjIywfPlyhf8Er1u3DsnJyUV64m2OXr164fHjx1i7dm2uaenp6fKn7xEBb57gOXv2bLi4uKB///7v3Z+enh66dOmCX3/9FZcuXco1vaAzkMoqyj6ubC7mhceO/+GxQ3edOHEiz7zLuZ+voFtL8qKvr5+rvxUrVuR6fUd+GjVqBFtbW6xevVrhFUPr169XKVdzzta9HVNmZiZWrVpVpH6KcqwwMzPL81Lfd9nb26NevXrYsGGDQt83b97EkSNH8PHHHxcpRgDo1q0b9PX1ERoamms7CCHw/PnzIvdJVFrxTGUZV69ePfTt2xerVq1CcnIymjVrhmPHjuHBgwcq9de+fXssXrwY7dq1Q79+/RAfH49vv/0W7u7uCvcTFcTExAQ1a9bE1q1bUa1aNZQvXx5eXl753jfWsGFDREZGYvHixXBwcICLiwuaNGmSq52trS2mTp2K0NBQtGvXDp06dUJUVBRWrVqFxo0bKzxYQ1mffvoptm3bhpEjR+LEiRNo3rw5srOzcffuXWzbtg2HDx/O93I/Kt0OHjyIu3fvIisrC8+ePcPx48dx9OhRODk54ZdfflHbS7u//vprHDlyBL6+vvJXUzx9+hTbt2/H6dOn83xfXlEUZR9XNhfzwmMHjx2lwdixY/Hy5Ut07doVnp6eyMzMxNmzZ7F161Y4OzsX+SEuHTp0wMaNG2FlZYWaNWvi3LlziIyMlL/eozCGhoaYM2cORowYgY8++gi9e/dGTEwMIiIi8r2nsiDNmjVDuXLlMHDgQIwbNw4SiQQbN24s8j+wGjZsiPDwcMyZMwfu7u6ws7NTOJP6btutW7diwoQJaNy4MczNzdGxY8c82y5YsAABAQFo2rQphgwZIn+liJWVVa73+yrDzc0Nc+bMwdSpUxEbG4suXbrAwsICMTEx2L17N4YPH46JEycWuV+iUqnYnzdLxaqwV4oIIUR6eroYN26csLGxEWZmZqJjx47i0aNH+b5SpKBXJQghxLp164SHh4eQSqXC09NTRERE5PvahDFjxuTZx9mzZ0XDhg2FkZGRQhx59XP37l3RsmVLYWJiIgDIXxHw7msBcqxcuVJ4enoKQ0NDUbFiRTFq1CiRmJio0MbX11fUqlUrV1wDBw7M9WjzzMxMMW/ePFGrVi0hlUpFuXLlRMOGDUVoaKhITk5Wan2p9MjZ73I+RkZGolKlSqJNmzZi2bJlIiUlRaF9UXPj3bwUQoiHDx+KAQMGCFtbWyGVSoWrq6sYM2aM/BUC+eVuXo/vf/eVIkIov48Xlos8djgpjOOxo/Q5ePCgGDx4sPD09BTm5ubCyMhIuLu7i7Fjx4pnz57J
2+W3TZ2cnBRec5OYmCgGDRokKlSoIMzNzYW/v7+4e/durnaF5diqVauEi4uLkEqlolGjRuK3337Lles5r+hYsGBBget45swZ8cEHHwgTExPh4OAgf21KXseSvHJBCCH++ecf0b59e2FhYaHwapO8jkmpqamiX79+wtraWgCQ51FerxQRQojIyEjRvHlzYWJiIiwtLUXHjh3F7du3FdrkHA8SEhIUxueX+zt37hQtWrQQZmZmwszMTHh6eooxY8aIqKgopdaXqCyQCKGG66OIiIiIiIioTOI9lURERERERKQyFpVERERERESkMhaVREREREREpDIWlURERERERKQyFpVERERERESkMhaVREREREREpDIDbQdQ3GQyGZ48eQILCwtIJBJth0NUJggh8OLFCzg4OEBPT/3/y2JeExU/TeY1c5qo+Gn6dzWVbmWuqHzy5AkcHR21HQZRmfTo0SNUqVJF7f0yr4m0RxN5zZwm0h5N/a6m0q3MFZUWFhYA3iSMpaWllqMhKhtSUlLg6Ogozz91Y14TFT9N5jVzmqj4afp3NZVuZa6ozLmMxtLSkr+oiIqZpi5jY14TaY8m8po5TaQ9vOScVMELpomIiIiIiEhlLCqJiIiIiIhIZSwqiYiIiIiISGUsKomIiIiIiEhlLCqJiIiIiIhIZSwqiYiIiIiISGVl7pUiObZZNYAp9NXWX2Nv9X+VVfyc1N6ntE01tfcpqdNArf0lmKi1OwDAlfib6u/0/0XGJaq9z133/lN7nzGXHqu9TxF+Xu19vo/3yeu8clgTOQi8Xx6+m2+q5sv75IQq+7wm9ml10URu6LKSlNfiv58hsjTwS4HU5vALzf1+JfVp5zRf2yFQKcczlURERERERKQyFpVERERERESkMhaVREREREREpDKdKypfvHiB4OBgODk5wcTEBM2aNcPFixe1HRYREREREVGZpHNF5dChQ3H06FFs3LgRN27cQNu2beHn54fHj/mgBSIiIiIiouKmU0Vleno6du7cifnz56Nly5Zwd3dHSEgI3N3dER4eru3wiIiIiIiIyhydeqVIVlYWsrOzYWxsrDDexMQEp0+fznOejIwMZGRkyIdTUlI0GiMREREREVFZolNnKi0sLNC0aVPMnj0bT548QXZ2NjZt2oRz587h6dOnec4TFhYGKysr+cfR0bGYoyYiIiIiIiq9dKqoBICNGzdCCIHKlStDKpVi+fLl6Nu3L/T08l6VqVOnIjk5Wf559OhRMUdMRERERERUeunU5a8A4ObmhlOnTiEtLQ0pKSmwt7dH79694erqmmd7qVQKqVRazFESERERERGVDTp3pjKHmZkZ7O3tkZiYiMOHD6Nz587aDomIiIiIiKjM0bkzlYcPH4YQAtWrV8eDBw8wadIkeHp6YtCgQdoOjYiIiIiIqMzRuTOVycnJGDNmDDw9PTFgwAC0aNEChw8fhqGhobZDIyIiIiIiKnN07kxlr1690KtXL22HQURERERERNDBM5VERERERERUckiEEELbQRSnlJQUWFlZITk5GZaWltoOh6hM0HTeMa+Jip8m8445TVT8mHf0PnimkoiIiIiIiFTGopKIiIiIiIhUxqKSiIiIiIiIVMaikoiIiIiIiFTGopKIiIiIiIhUxqKSiIiIiIiIVMaikoiIiIiIiFTGopKIiIiIiIhUxqKSiIiIiIiIVMaikoiIiIiIiFTGopKIiIiIiIhUxqKSiIiIiIiIVMaikoiIiIiIiFTGopKIiIiIiIhUxqKSiIiIiIiIVMaikoiIiIiIiFRmoO0AtGWbVQOYQl+tfTb2Vv/XWcXPSe19AoC0TTW19iep00Ct/SWYqLU7uSvxN9XaX2Rcolr7A4Bd9/5Te585Yi49Vmt/Ivy8Wvt7X9EfecNcX7m8Liy31J0j2qZKjiaYqD9nNEETeahp6s5zdeZ2ScprTfyuJvX66nsvbYdASvhryE5th0ClHM9UEhERERERkcpYVBIRERER
EZHKWFQSERERERGRylhUEhERERERkcp0qqjMzs7G9OnT4eLiAhMTE7i5uWH27NkQQmg7NCIiIiIiojJJp57+Om/ePISHh2PDhg2oVasWLl26hEGDBsHKygrjxo3TdnhERERERERljk4VlWfPnkXnzp3Rvn17AICzszN+/vln/PHHH1qOjIiIiIiIqGzSqctfmzVrhmPHjuHevXsAgOvXr+P06dMICAjId56MjAykpKQofIiIiIiIiEg9dOpM5RdffIGUlBR4enpCX18f2dnZmDt3Lvr375/vPGFhYQgNDS3GKImIiIiIiMoOnSoqt23bhs2bN+Onn35CrVq1cO3aNQQHB8PBwQEDBw7Mc56pU6diwoQJ8uGUlBQ4OjoWV8hERERERDotOzsbr1+/1nYYVIz09fVhYGAAiUSiVHudKionTZqEL774An369AEA1K5dGw8fPkRYWFi+RaVUKoVUKi3OMImIiIiISoXU1FT8/ffffNtCGWRqagp7e3sYGRkV2lanisqXL19CT0/xNlB9fX3IZDItRUREREREVDplZ2fj77//hqmpKWxtbZU+a0W6TQiBzMxMJCQkICYmBh4eHrlqsHfpVFHZsWNHzJ07F1WrVkWtWrVw9epVLF68GIMHD9Z2aEREREREpcrr168hhICtrS1MTEy0HQ4VIxMTExgaGuLhw4fIzMyEsbFxge11qqhcsWIFpk+fjtGjRyM+Ph4ODg4YMWIEZsyYoe3QiIiIiIhKJZ6hLJsKOzv5Np0qKi0sLLB06VIsXbpU26EQERERERERdOw9lURERERERFSySEQZe5RTSkoKrKyskJycDEtLS22HQ1QmaDrvmNdExU+TececJip+eeXdq1evEBMTAxcXl0LvqdN169evR3BwMJKSkt6rH4lEgt27d6NLly5qiUubirL9eaaSiIiIiIh0XmBgYKko5nQRi0oiIiIiIiJSGYtKIiIiIiIq1RYvXozatWvDzMwMjo6OGD16NFJTU3O127NnDzw8PGBsbAx/f388evRIYfrevXvRoEEDGBsbw9XVFaGhocjKyspzmZmZmQgKCoK9vT2MjY3h5OSEsLAwjayftrGoJCIiIiKiUk1PTw/Lly/HrVu3sGHDBhw/fhyTJ09WaPPy5UvMnTsXP/74I86cOYOkpCT06dNHPv3333/HgAED8Nlnn+H27dtYs2YN1q9fj7lz5+a5zOXLl+OXX37Btm3bEBUVhc2bN8PZ2VmTq6k1OvVKESIiIiIioqIKDg6W/+zs7Iw5c+Zg5MiRWLVqlXz869evsXLlSjRp0gQAsGHDBtSoUQN//PEHvL29ERoaii+++AIDBw4EALi6umL27NmYPHkyZs6cmWuZcXFx8PDwQIsWLSCRSODk5KTZldQinqkkIiIiIqJSLTIyEq1bt0blypVhYWGBTz/9FM+fP8fLly/lbQwMDNC4cWP5sKenJ6ytrXHnzh0AwPXr1zFr1iyYm5vLP8OGDcPTp08V+skRGBiIa9euoXr16hg3bhyOHDmi+RXVEhaVRERERERUasXGxqJDhw6oU6cOdu7cicuXL+Pbb78F8Oa+R2WlpqYiNDQU165dk39u3LiB+/fv5/nKjQYNGiAmJgazZ89Geno6evXqhR49eqhtvUoSXv5KRERERESl1uXLlyGTybBo0SLo6b05p7Zt27Zc7bKysnDp0iV4e3sDAKKiopCUlIQaNWoAeFMkRkVFwd3dXellW1paonfv3ujduzd69OiBdu3a4b///kP58uXVsGYlB4tKIiIiIiIqFZKTk3Ht2jWFcRUqVMDr16+xYsUKdOzYEWfOnMHq1atzzWtoaIixY8di+fLlMDAwQFBQED744AN5kTljxgx06NABVatWRY8ePaCnp4fr16/j5s2bmDNnTq7+Fi9eDHt7e9SvXx96enrYvn07KlWqBGtra02sulbx8lciIiIiIioVTp48ifr16yt8Nm7ciMWLF2PevHnw8vLC5s2b83y1
h6mpKaZMmYJ+/fqhefPmMDc3x9atW+XT/f39sW/fPhw5cgSNGzfGBx98gCVLluT7AB4LCwvMnz8fjRo1QuPGjREbG4sDBw7Iz5aWJhIhhNB2EMUpJSUFVlZWSE5OhqWlpbbDISoTNJ13zGui4qfJvGNOExW/vPLu1atXiImJgYuLS573DFLpVpTtX/rKZCIiIiIiIio2LCqJiIiIiIhIZSwqiYiIiIiISGUsKomIiIiIiEhlLCqJiIiIiIhIZWX2PZXbrBrAFPpq66+xt2a+yip+eT+iWFXSNtXU2h8ASOo0UHufAJBgov4+r8TfVHufkXGJau1v173/1NofAMRceqz2PgFAhJ/XSL+qUndelzZFOU69e+zRxLHjbeo6jqhy3FDHcUEXjgMFefsYUZLyOvojb5jrM6dLMnX/nUKaYTL3gLZDoFKOZyqJiIiIiIhIZSwqiYiIiIiISGUsKomIiIiIiEhlZfaeSiIiIiIiKjrJqA+KdXkl6V7vt8XGxsLFxQVXr15FvXr1tB2OVunUmUpnZ2dIJJJcnzFjxmg7NCIiIiIiKgFatWqF4OBgbYdRpujUmcqLFy8iOztbPnzz5k20adMGPXv21GJURERERESkK4QQyM7OhoGBTpVCJZpOnam0tbVFpUqV5J99+/bBzc0Nvr6+2g6NiIiIiIi0LDAwEKdOncKyZcvkVzWuX78eEokEBw8eRMOGDSGVSnH69GkEBgaiS5cuCvMHBwejVatW8mGZTIb58+fD3d0dUqkUVatWxdy5c/NcdnZ2NgYPHgxPT0/ExcVpcC1LHp0tzzMzM7Fp0yZMmDABEokk33YZGRnIyMiQD6ekpBRHeEREREREVMyWLVuGe/fuwcvLC7NmzQIA3Lp1CwDwxRdfYOHChXB1dUW5cuWU6m/q1KlYu3YtlixZghYtWuDp06e4e/durnYZGRno27cvYmNj8fvvv8PW1lZ9K6UDdLao3LNnD5KSkhAYGFhgu7CwMISGhhZPUEREREREpDVWVlYwMjKCqakpKlWqBADyInDWrFlo06aN0n29ePECy5Ytw8qVKzFw4EAAgJubG1q0aKHQLjU1Fe3bt0dGRgZOnDgBKysrNa2N7tCpy1/ftm7dOgQEBMDBwaHAdlOnTkVycrL88+jRo2KKkIiIiIiISopGjRoVqf2dO3eQkZGB1q1bF9iub9++SEtLw5EjR8pkQQnoaFH58OFDREZGYujQoYW2lUqlsLS0VPgQEREREVHZYmZmpjCsp6cHIYTCuNevX8t/NjExUarfjz/+GH/++SfOnTv3/kHqKJ0sKiMiImBnZ4f27dtrOxQiIiIiIipBjIyMFN4YkR9bW1s8ffpUYdy1a9fkP3t4eMDExATHjh0rsJ9Ro0bhm2++QadOnXDq1CmVYtZ1OndPpUwmQ0REBAYOHMjHABMRERERkQJnZ2dcuHABsbGxMDc3h0wmy7PdRx99hAULFuDHH39E06ZNsWnTJty8eRP169cHABgbG2PKlCmYPHkyjIyM0Lx5cyQkJODWrVsYMmSIQl9jx45FdnY2OnTogIMHD+a677K007mqLDIyEnFxcRg8eLC2QyEiIiIiKnNE+Hlth1CgiRMnYuDAgahZsybS09MRERGRZzt/f39Mnz4dkydPxqtXrzB48GAMGDAAN27ckLeZPn06DAwMMGPGDDx58gT29vYYOXJknv0FBwdDJpPh448/xqFDh9CsWTONrF9JpHNFZdu2bXNd+0xERERERAQA1apVy3V/Y35vjAgNDS3wTRF6enqYNm0apk2blmuas7NzrrpkwoQJmDBhQtGD1nE6eU8lERERERERlQwSUcZO+6WkpMDKygrJycl8EixRMdF03jGviYqfJvOOOU1U/PLKu1evXiEmJgYuLi4wNjbWcoRU3Iqy/XmmkoiIiIiIiFTGopKIiIiIiIhUxqKSiIiIiIiIVMaikoiIiIiIiFTGopKIiIiIiIhUxqKSiIiIiIiIVMaikoiI
iIiIiFRmoO0AiIiIiIhId7iu616sy/tryE619RUYGIikpCTs2bMn3zbOzs4IDg5GcHCw2pZb2rGoJCIiIiIi+n8XL16EmZmZtsPQKSwqiYiIiIiI/p+tra22Q9A5vKeSiIiIiIhKlR07dqB27dowMTGBjY0N/Pz8kJaWJp++cOFC2Nvbw8bGBmPGjMHr16/l05ydnbF06VL5sEQiQXh4OAICAmBiYgJXV1fs2LGjOFenxGNRSUREREREpcbTp0/Rt29fDB48GHfu3MHJkyfRrVs3CCEAACdOnEB0dDROnDiBDRs2YP369Vi/fn2BfU6fPh3du3fH9evX0b9/f/Tp0wd37twphrXRDbz8lYiIiIiISo2nT58iKysL3bp1g5OTEwCgdu3a8unlypXDypUroa+vD09PT7Rv3x7Hjh3DsGHD8u2zZ8+eGDp0KABg9uzZOHr0KFasWIFVq1ZpdmV0BM9UEhERERFRqVG3bl20bt0atWvXRs+ePbF27VokJibKp9eqVQv6+vryYXt7e8THxxfYZ9OmTXMN80zl/7CoJCIiIiKiUkNfXx9Hjx7FwYMHUbNmTaxYsQLVq1dHTEwMAMDQ0FChvUQigUwm00aopQaLSiIiIiIiKlUkEgmaN2+O0NBQXL16FUZGRti9e7fK/Z0/fz7XcI0aNd43zFKjzN5Tuc2qAUyhX3hDJTT2Vv/XWMXPSe19AoC0TTWN9Cup00Aj/SaYqL/PK/E31d5nZFxi4Y2KaNe9/9TeZ8ylx2rtT4SfL7xRMVJXXmsip/Ojaq5rKpc1paBjhKp5/nYuFzUHNZFfuqKw40BJyuuE9J/xylADvwhIbSpO4P1kuqAk5XVxuHDhAo4dO4a2bdvCzs4OFy5cQEJCAmrUqIE///xTpT63b9+ORo0aoUWLFti8eTP++OMPrFu3Ts2R664yW1QSEREREVHR/TVkp7ZDKJClpSV+++03LF26FCkpKXBycsKiRYsQEBCArVu3qtRnaGgotmzZgtGjR8Pe3h4///wzatasqebIdReLSiIiIiIiKjVq1KiBQ4cO5Tktr1eHvP1OSgCIjY3N1cbBwQFHjhxRQ3SlE++pJCIiIiIiIpWxqCQiIiIiIiKV6VxR+fjxY3zyySewsbGBiYkJateujUuXLmk7LCIiIiIiKoWEEOjSpYu2wyjRdOqeysTERDRv3hwffvghDh48CFtbW9y/fx/lypXTdmhERERERERlkk4VlfPmzYOjoyMiIiLk41xcXAqcJyMjAxkZGfLhlJQUjcVHRERERERU1ujU5a+//PILGjVqhJ49e8LOzg7169fH2rVrC5wnLCwMVlZW8o+jo2MxRUtERERERFT66VRR+ddffyE8PBweHh44fPgwRo0ahXHjxmHDhg35zjN16lQkJyfLP48ePSrGiImIiIiIiEo3nbr8VSaToVGjRvj6668BAPXr18fNmzexevVqDBw4MM95pFIppFJpcYZJRERERERUZujUmUp7e3vUrFlTYVyNGjUQFxenpYiIiIiIiIjKNp06U9m8eXNERUUpjLt37x6cnJy0FBERERERUdky8fdhxbq8hT4FP0PlXa1atUK9evWwdOlSzQREuejUmcrx48fj/Pnz+Prrr/HgwQP89NNP+O677zBmzBhth0ZERERERFQm6VRR2bhxY+zevRs///wzvLy8MHv2bCxduhT9+/fXdmhERERERFQKZWZmajuEEk+nikoA6NChA27cuIFXr17hzp07GDaseE+/ExERERFRySaTyTB58mSUL18elSpVQkhIiHxaXFwcOnfuDHNzc1haWqJXr1549uyZfHpISAjq1auH77//Hi4uLjA2NgYA7NixA7Vr14aJiQlsbGzg5+eHtLQ0+Xzff/89atSoAWNjY3h6emLVqlXFtr7apvI9lampqYiNjcWLFy9gYWEBFxcXmJmZqTM2IiIiIiKiItuwYQMmTJiACxcu4Ny5cwgMDETz5s3RunVreUF56tQp
ZGVlYcyYMejduzdOnjwpn//BgwfYuXMndu3aBX19fTx9+hR9+/bF/Pnz0bVrV7x48QK///47hBAAgM2bN2PGjBlYuXIl6tevj6tXr2LYsGEwMzPL9y0VpUmRi8pDhw5h7ty5OH/+PGQymXy8vr4+mjVrhmnTpqFNmzZqDVITeiVfgaWlpbbDoELYaaDPdhp4rpMm+lzoo/4+MUQDfZYgzGvdpGqev513Rc1BjeSXrtCh44CtSV9YmjCnSzIRPkDbIRDlqU6dOpg5cyYAwMPDAytXrsSxY8cAADdu3EBMTAwcHR0BAD/++CNq1aqFixcvonHjxgDeXPL6448/wtbWFgBw5coVZGVloVu3bvKHhNauXVu+vJkzZ2LRokXo1q0bAMDFxQW3b9/GmjVrWFS+a8mSJZg4cSL09fXRqlUreHl5wdzcHKmpqbhx4wZ+++03BAQEYMmSJRg7dqymYiYiIiIiIspXnTp1FIbt7e0RHx+PO3fuwNHRUV5QAkDNmjVhbW2NO3fuyItKJycneUEJAHXr1kXr1q1Ru3Zt+Pv7o23btujRowfKlSuHtLQ0REdHY8iQIQq35mVlZcHKykrDa1oyKF1U3rlzB1OmTMEHH3yALVu2KGyIHHFxcejbty8mTpyINm3awNPTU63BEhERERERFcbQ0FBhWCKRKFxlWZh3b+vT19fH0aNHcfbsWRw5cgQrVqzAtGnTcOHCBZiamgIA1q5diyZNmuSaryxQ+kE9a9asgbm5Ofbt25dnQQkAVatWxa+//gozMzOsXVu098kQERERERFpUo0aNfDo0SM8evRIPu727dtISkpCzZo1C5xXIpGgefPmCA0NxdWrV2FkZITdu3ejYsWKcHBwwF9//QV3d3eFj4uLi6ZXqURQ+kzl6dOn0bNnT5QrV67AduXLl0fPnj1x6tSp9w6OiIiIiIhIXfz8/FC7dm30798fS5cuRVZWFkaPHg1fX180atQo3/kuXLiAY8eOoW3btrCzs8OFCxeQkJCAGjVqAABCQ0Mxbtw4WFlZoV27dsjIyMClS5eQmJiICRMmFNfqaY3SRWVMTAwGDx6sVNu6detix44dKgdFREREREQl00If3b0iUSKRYO/evRg7dixatmwJPT09tGvXDitWrChwPktLS/z2229YunQpUlJS4OTkhEWLFiEgIAAAMHToUJiammLBggWYNGkSzMzMULt2bQQHBxfDWmmf0kVlSkqK0jeaWlpaIiUlReWgiIiIiIiIVPH2q0Fy7NmzR/5z1apVsXfv3nznDwkJUXivJfDmstlDhw4VuNx+/fqhX79+RQm11FD6nsrs7GxIJBKl2hb1RlgiIiIiIiLSTUV6pciPP/6I8+fPF9ru3r17KgdEREREREREuqNIReWRI0dw5MgRpdoqe1aTiIiIiIiIdJfSRSUvZyUiIiIiIqJ3KX1PJREREREREdG7WFQSERERERGRypS+/LVTp05F6jjnHTBERERERERUeildVP75559FevgOH9RDRERERERU+ildVMbGxmowDCIiIiIiovcnhMCIESOwY8cOJCYm4urVq6hXr562wyrVivRKESIiIiIiKtsOPZxcrMtr5zS/SO0PHTqE9evX4+TJk3B1dUWFChU0FBnlKLNF5TarBjCF/nv309hb/V9hFT8ntfeZQ9qmmtr7lNRpoPY+ASDBRP19Xom/qdb+IuMS1dofAOy695/a+4y59FjtfQKACD+vkX5Vpa68Logmcl5T3vdYUtKPF5o4RhQkv+OHJo4Dynr7eKGuPC9JeZ0+qzsMpYbaDoMKUMu1mBORVPLXkJ3aDqFYRUdHw97eHs2aNctzemZmJoyMjIo5qtKNT38lIiIiIqJSITAwEGPHjkVcXBwkEgmcnZ3RqlUrBAUFITg4GBUqVIC/vz8A4NSpU/D29oZUKoW9vT2++OILZGVlyft68eIF+vfvDzMzM9jb22PJkiVo1aoVgoODtbR2JReLSiIiIiIiKhWW
LVuGWbNmoUqVKnj69CkuXrwIANiwYQOMjIxw5swZrF69Go8fP8bHH3+Mxo0b4/r16wgPD8e6deswZ84ceV8TJkzAmTNn8Msvv+Do0aP4/fffceXKFW2tWommO9dxERERERERFcDKygoWFhbQ19dHpUqV5OM9PDwwf/7/7s2cNm0aHB0dsXLlSkgkEnh6euLJkyeYMmUKZsyYgbS0NGzYsAE//fQTWrduDQCIiIiAg4NDsa+TLtCpM5UhISGQSCQKH09PT22HRUREREREJVjDhg0Vhu/cuYOmTZsqvAaxefPmSE1Nxd9//42//voLr1+/hre3t3y6lZUVqlevXmwx6xKdO1NZq1YtREZGyocNDHRuFYiIiIiIqBiZmZlpO4RSTeWK7PDhw1i3bh3++usvJCYmQgihMF0ikSA6Ovq9A3yXgYGBwqlsIiIiIiKioqhRowZ27twJIYT8bOWZM2dgYWGBKlWqoFy5cjA0NMTFixdRtWpVAEBycjLu3buHli1bajP0EkmlonLBggX44osvULFiRXh7e6N27drqjitf9+/fh4ODA4yNjdG0aVOEhYXJN3ReMjIykJGRIR9OSUkpjjCJiIiIiKiEGj16NJYuXYqxY8ciKCgIUVFRmDlzJiZMmAA9PT1YWFhg4MCBmDRpEsqXLw87OzvMnDkTenp6CpfM0hsqFZXLli3DRx99hAMHDsDQsPjeH9WkSROsX78e1atXx9OnTxEaGgofHx/cvHkTFhYWec4TFhaG0NDQYouRiIiIiIhKtsqVK+PAgQOYNGkS6tati/Lly2PIkCH46quv5G0WL16MkSNHokOHDrC0tMTkyZPx6NEjGBsbazHykkmlojIxMRE9evQo1oISAAICAuQ/16lTB02aNIGTkxO2bduGIUOG5DnP1KlTMWHCBPlwSkoKHB0dNR4rEREREVFp1M5pfuGNtCg4OFjhXZInT57Ms52vry/++OOPfPuxsLDA5s2b5cNpaWkIDQ3F8OHD1RVqqaFSUent7Y2oqCh1x1Jk1tbWqFatGh48eJBvG6lUCqlUWoxRERERERGRrrt69Sru3r0Lb29vJCcnY9asWQCAzp07azmykkelV4qsWrUKu3btwk8//aTueIokNTUV0dHRsLe312ocRERERERU+ixcuBB169aFn58f0tLS8Pvvv6NChQraDqvEUelMZe/evZGVlYVPP/0Uo0aNQpUqVaCvr6/QRiKR4Pr162oJMsfEiRPRsWNHODk54cmTJ5g5cyb09fXRt29ftS6HiIiIiIjKtvr16+Py5cvaDkMnqFRUli9fHjY2NvDw8FB3PAX6+++/0bdvXzx//hy2trZo0aIFzp8/D1tb22KNg4iIiIiIiN5QqajM72ZXTduyZYtWlktERERERER5U+meSiIiIiIiIiJAxTOVAJCdnY1NmzZh//79ePjwIQDAyckJHTp0QP/+/XPdY0lERERERESlj0QIIYo6U3JyMvz9/XHx4kVYWFjA1dUVABATE4OUlBR4e3vj8OHDsLS0VHvA7yslJQVWVlZITk4ukfERlUaazjvmNVHx02TeMaeJil9eeffq1SvExMTAxcUFxsbGWo6QiltRtr9Kl79OmzYNly9fxooVK5CQkIArV67gypUriI+Px8qVK3Hp0iVMmzZNpeCJiIiIiIhId6hUVO7evRujR4/G6NGjYWhoKB9vaGiIUaNGYdSoUdi5c6fagiQiIiIiInofrVq1QnBwsLbDKJVUuqfy+fPnqF69er7TPT098d9//6kcFBERERERlUzx6T8W6/LsTAYU6/Ko6FQ6U+nu7o5ffvkl3+m//PIL3NzcVA6KiIiIiIiIdINKReXo0aNx5MgRfPzxxzhy5AhiY2MRGxuLw4cPo3379jh69CiCgoLUHSsREREREVGh0tLSMGDAAJibm8Pe3h6LFi1SmJ6YmIgBAwagXLlyMDU1RUBAAO7fv6/QZu3atXB0dISpqSm6du2KxYsXw9rauhjX
QneodPnr6NGjER8fj2+++QaHDx9WmGZoaIgZM2Zg1KhRagmQiIiIiIioKCZNmoRTp05h7969sLOzw5dffokrV66gXr16AIDAwEDcv38fv/zyCywtLTFlyhR8/PHHuH37NgwNDXHmzBmMHDkS8+bNQ6dOnRAZGYnp06drd6VKMJXfUxkSEoKgoCBERkYqvKfSz88PFSpUUFuAREREREREykpNTcW6deuwadMmtG7dGgCwYcMGVKlSBQDkxeSZM2fQrFkzAMDmzZvh6OiIPXv2oGfPnlixYgUCAgIwceJEAEC1atVw9uxZ7Nu3TzsrVcKpXFQCQIUKFdCnTx91xUJERERERPReoqOjkZmZiSZNmsjHlS9fXv6g0Tt37sDAwEBhuo2NDapXr447d+4AAKKiotC1a1eFfr29vVlU5kOpojIuLg4AULVqVYXhwuS0JyIiIiIiotJJqaLS2dkZEokE6enpMDIykg8XJjs7+70DJCIiIiIiUpabmxsMDQ1x4cIF+UmuxMRE3Lt3D76+vqhRowaysrJw4cIF+eWvz58/R1RUFGrWrAkAqF69Oi5evKjQ77vD9D9KFZU//PADJBIJDA0NFYaJiIiIiIhKEnNzcwwZMgSTJk2CjY0N7OzsMG3aNOjpvXnxhYeHBzp37oxhw4ZhzZo1sLCwwBdffIHKlSujc+fOAICxY8eiZcuWWLx4MTp27Ijjx4/j4MGDrIHyoVRRGRgYWOAwERERERFRSbFgwQKkpqaiY8eOsLCwwOeff47k5GT59IiICHz22Wfo0KEDMjMz0bJlSxw4cEB+Eq158+ZYvXo1QkND8dVXX8Hf3x/jx4/HypUrtbVKJZpECCGKOtPgwYMxYsQIhZtb3/bHH39g9erV+OGHH947QHVLSUmBlZUVkpOTYWlpqe1wiMoETecd85qo+Gky75jTRMUvr7x79eoVYmJi4OLiAmNjYy1HqH3Dhg3D3bt38fvvv2s7lGJRlO2vp8oC1q9fj+jo6Hynx8TEYMOGDap0TUREREREpHULFy7E9evX8eDBA6xYsQIbNmzAwIEDtR1WifRerxTJz5MnT2BiYqKJromIiIiIiDTujz/+wPz58/HixQu4urpi+fLlGDp0qLbDKpGULir37t2LvXv3yoe/++47REZG5mqXlJSEyMhING7cWD0REhERERERFbNt27ZpOwSdoXRRefv2bWzfvh0AIJFIcOHCBVy+fFmhjUQigZmZmfxJSSXZNqsGMIW+Wvpq7K2RE74AgCp+TmrtT9qmmlr7e5ukTgO19peg5pPdV+JvqrdDAJFxiWrvEwB23ftPrf3FXHqs1v5yiPDzGulXVdEfecNcXz15/a73zUVpm2pqzxHKn7qPH2/TxLGkqN7n2PP28SWvY0NJymt1/q4mzdDk30CkPh4Xbmk7BCrllD4STJ06FVOnTgUA6OnpYd26dejXr5/GAiMiIiIiIqKST6V/L8lkMnXHQURERERERDpIpae/EhEREREREQFKFpV6enowMDBAZmamfFhfX7/Aj4GB5q+x/+abbyCRSBAcHKzxZREREREREVFuSlV+M2bMgEQikReKOcPadPHiRaxZswZ16tTRahxERERERERlmVJFZUhISIHDxS01NRX9+/fH2rVrMWfOHK3GQkREREREJV+rVq1Qr149LF26VNuhlDoqXaM6a9YsdOvWDV5eXnlOv3XrFnbu3IkZM2a8V3D5GTNmDNq3bw8/P79Ci8qMjAxkZGTIh1NSUjQSExERERFRWSD++7FYlycpP6BYl0dFp9KDekJCQvDnn3/mO/3mzZsIDQ1VOaiCbNmyBVeuXEFYWJhS7cPCwmBlZSX/ODo6aiQuIiIiIiIqu3KeP1MWaeTpr//99x+MjIzU3u+jR4/w2WefYfPmzTA2NlZqnqlTpyI5OVn+efTokdrjIiIiIiKikiMtLQ0DBgyAubk57O3tsWjRIoXpGRkZmDhxIipXrgwzMzM0adIEJ0+eVGhz
+vRp+Pj4wMTEBI6Ojhg3bhzS0tLk052dnTF79mwMGDAAlpaWGD58eHGsWomk9OWvv/32m8IXvWvXLjx48CBXu6SkJGzduhW1a9dWS4Bvu3z5MuLj49GgQQP5uOzsbPz2229YuXIlMjIyoK+vrzCPVCqFVCpVeyxERERERFQyTZo0CadOncLevXthZ2eHL7/8EleuXEG9evUAAEFBQbh9+za2bNkCBwcH7N69G+3atcONGzfg4eGB6OhotGvXDnPmzMEPP/yAhIQEBAUFISgoCBEREfLlLFy4EDNmzMDMmTO1tKYlg9JF5YkTJ+SXtEokEuzatQu7du3Ks23NmjWxYsUK9UT4ltatW+PGjRsK4wYNGgRPT09MmTIlV0FJRERERERlS2pqKtatW4dNmzahdevWAIANGzagSpUqAIC4uDhEREQgLi4ODg4OAICJEyfi0KFDiIiIwNdff42wsDD0799f/upCDw8PLF++HL6+vggPD5dfNfnRRx/h888/L/6VLGGULionT56MoKAgCCFgZ2eH1atXo3v37gptJBIJTE1Nlb40tagsLCxyPRzIzMwMNjY2+T40iIiIiIiIyo7o6GhkZmaiSZMm8nHly5dH9erVAQA3btxAdnY2qlWrpjBfRkYGbGxsAADXr1/Hn3/+ic2bN8unCyEgk8kQExODGjVqAAAaNWqk6dXRCUoXlSYmJjAxMUFGRgaWLFmC2rVry790IiIiIiIiXZCamgp9fX1cvnw515WO5ubm8jYjRozAuHHjcs1ftWpV+c9mZmaaDVZHFPmVIkZGRpg8eTKWLVuGpk2baiKmInn3hloiIiIiIiq73NzcYGhoiAsXLsgLwMTERNy7dw++vr6oX78+srOzER8fDx8fnzz7aNCgAW7fvg13d/fiDF1nFfnprxKJBB4eHvj33381EQ8REREREZHKzM3NMWTIEEyaNAnHjx/HzZs3ERgYCD29N6VPtWrV0L9/fwwYMAC7du1CTEwM/vjjD4SFhWH//v0AgClTpuDs2bMICgrCtWvXcP/+fezduxdBQUHaXLUSq8hnKgHgyy+/xIQJE9CzZ0/5tcm6plfyFVhaWmo7DCqAnZr7a+ek5g411CcALMz7n2aqG6Lm/koot+N/MK8JgPqPH2/TVN4XVwwKx5cSfmzg72oiUtWCBQuQmpqKjh07wsLCAp9//jmSk5Pl0yMiIjBnzhx8/vnnePz4MSpUqIAPPvgAHTp0AADUqVMHp06dwrRp0+Dj4wMhBNzc3NC7d29trVKJJhFCiKLONG7cOBw7dgz37t1Dq1at4OzsDBMTE8WOJRIsW7ZMbYGqS0pKCqysrJCcnMxfVETFRNN5x7wmKn6azDvmNFHxyyvvXr16hZiYGLi4uGjsQZxUchVl+6t0pnLlypXyn48dO5Znm5JaVBIREREREZH6qFRUymQydcdBREREREREOqjID+ohIiIiIiIiysGikoiIiIiIiFSmclF58OBBtGnTBjY2NjAwMIC+vn6uDxEREREREZVuKhWVO3fuRIcOHfDs2TP06dMHMpkMffv2RZ8+fWBiYoI6depgxowZ6o6ViIiIiIiIShiVisqwsDB4e3vj6tWrCA0NBQAMHjwYmzdvxs2bN/H06VO4uLioNVAiIiIiIiIqeVQqKm/fvo0+ffpAX18fBgZvHiD7+vVrAICzszNGjx6NefPmqS9KIiIiIiIiKpFUKipNTU1hZGQEALC2toZUKsXTp0/l0ytWrIiYmBj1REhEREREREQllkpFZfXq1XH79m35cL169bBx40ZkZWXh1atX+Omnn1C1alW1BUlEREREREQlk4EqM3Xt2hXLly/HwoULIZVKMW3aNHTu3BnW1taQSCRIS0vDDz/8oO5YiYiIiIhIy2Qng4t1eXqtlhbr8kJCQrBnzx5cu3atWJery1QqKidOnIiJEyfKhzt06ICTJ09i165d0NfXR/v27fHhhx+qLUgiIiIiIiIqmYp0+eurV6+w
detWfPPNN/j+++8V7qP08fHBkiVLsHDhQhaURERERESkNTKZDPPnz4e7uzukUimqVq2KuXPnAgCmTJmCatWqwdTUFK6urpg+fbr8oaPr169HaGgorl+/DolEAolEgvXr12txTXSD0mcq4+Pj0axZM8TExEAIAeDNA3v27NkDPz8/jQVIRERERERUFFOnTsXatWuxZMkStGjRAk+fPsXdu3cBABYWFli/fj0cHBxw48YNDBs2DBYWFpg8eTJ69+6Nmzdv4tChQ4iMjAQAWFlZaXNVdILSReXs2bMRGxuL8ePH46OPPsKDBw8we/ZsjBgxAtHR0ZqMkYiIiIiISCkvXrzAsmXLsHLlSgwcOBAA4ObmhhYtWgAAvvrqK3lbZ2dnTJw4EVu2bMHkyZNhYmICc3NzGBgYoFKlSlqJXxcpXVQeOXIEAwYMwMKFC+XjKlasiH79+iEqKgrVq1fXSICass2qAUyhr7b+GnurdHtqgar4Oam9T2mbamrtT1KngVr7A4AEE7V3iSvxN9Xf6f+LjEtUa3+77v2n1v5yxFx6rPY+Rfh5tff5Pt7O65yc1EQeqVNBOamJ/FInTeRqWaDq8UjZY01+xxBljwElKa9lv0+BzEyq7TCoAPpbS87+QvkrSXldHO7cuYOMjAy0bt06z+lbt27F8uXLER0djdTUVGRlZcHS0rKYoyxdlL6nMi4uTl7d52jRogWEEHj27JnaAyMiIiIiIioqE5P8/+t57tw59O/fHx9//DH27duHq1evYtq0acjMzCzGCEsfpYvKjIwMGBsbK4zLGc7KylJvVERERERERCrw8PCAiYkJjh07lmva2bNn4eTkhGnTpqFRo0bw8PDAw4cPFdoYGRkhOzu7uMItFYp0zWZsbCyuXLkiH05OTgYA3L9/H9bW1rnaN2hQsi/dIiIiIiKi0sXY2BhTpkzB5MmTYWRkhObNmyMhIQG3bt2Ch4cH4uLisGXLFjRu3Bj79+/H7t27FeZ3dnZGTEwMrl27hipVqsDCwgJSKS/FL0iRisrp06dj+vTpucaPHj1aYVgIAYlEwgqfiIiIiIiK3fTp02FgYIAZM2bgyZMnsLe3x8iRIzFkyBCMHz8eQUFByMjIQPv27TF9+nSEhITI5+3evTt27dqFDz/8EElJSYiIiEBgYKDW1kUXKF1URkREaDIOpYSHhyM8PByxsbEAgFq1amHGjBkICAjQbmBERERERGWEXqul2g6hUHp6epg2bRqmTZuWa9r8+fMxf/58hXHBwcHyn6VSKXbs2KHpEEsVpYvKnMfxalOVKlXwzTffwMPDA0IIbNiwAZ07d8bVq1dRq1YtbYdHRERERERU5qj/PRga1LFjR4XhuXPnIjw8HOfPn2dRSUREREREpAU6VVS+LTs7G9u3b0daWhqaNm2ab7uMjAxkZGTIh1NSUoojPCIiIiIiojJB6VeKlBQ3btyAubk5pFIpRo4cid27d6NmzZr5tg8LC4OVlZX84+joWIzREhERERERlW46V1RWr14d165dw4ULFzBq1CgMHDgQt2/fzrf91KlTkZycLP88evSoGKMlIiIiIiIq3XTu8lcjIyO4u7sDABo2bIiLFy9i2bJlWLNmTZ7tpVIp3ytDRERERESkITp3pvJdMplM4Z5JIiIiIiIiKj46daZy6tSpCAgIQNWqVfHixQv89NNPOHnyJA4fPqzt0IiIiIiIiMoknSoq4+PjMWDAADx9+hRWVlaoU6cODh8+jDZt2mg7NCIiIiIiojJJpy5/XbduHWJjY5GRkYH4+HhERkayoCQiIiIiIrlWrVohODg43+nOzs5YunRpkfsNCQlBvXr1VI6rNNOpM5VERERERKRd6dM+Ltblmcw9oNb+Ll68CDMzM7X2WdaV2aKyV/IVWFpaajsMyoOdBvps56SBTjXU90If9fYnN0RD/ZYgzOvipYlcLQtUPWYoO1++xxAdPAbo+cyDHnO6RBOttB0BUdHZ2toWOP3169cw
NDQspmhKB526/JWIiIiIiKgwWVlZCAoKgpWVFSpUqIDp06dDCAEg9+WvEokE4eHh6NSpE8zMzDB37lwAwDfffIOKFSvCwsICQ4YMwatXr7SxKjqBRSUREREREZUqGzZsgIGBAf744w8sW7YMixcvxvfff59v+5CQEHTt2hU3btzA4MGDsW3bNoSEhODrr7/GpUuXYG9vj1WrVhXjGuiWMnv5KxERERERlU6Ojo5YsmQJJBIJqlevjhs3bmDJkiUYNmxYnu379euHQYMGyYf79OmDIUOGYMiQN/cOzJkzB5GRkTxbmQ+eqSQiIiIiolLlgw8+gEQikQ83bdoU9+/fR3Z2dp7tGzVqpDB8584dNGnSRGFc06ZN1R9oKcGikoiIiIiIyjQ+Dfb9sKgkIiIiIqJS5cKFCwrD58+fh4eHB/T19ZWav0aNGnn2QXljUUlERERERKVKXFwcJkyYgKioKPz8889YsWIFPvvsM6Xn/+yzz/DDDz8gIiIC9+7dw8yZM3Hr1i0NRqzb+KAeIiIiIiIqVQYMGID09HR4e3tDX18fn332GYYPH670/L1790Z0dDQmT56MV69eoXv37hg1ahQOHz6swah1l0TkvLCljEhJSYGVlRWSk5P5knSiYqLpvGNeExU/TeYdc5qo+OWVd69evUJMTAxcXFxgbGys5QipuBVl+/PyVyIiIiIiIlIZi0oiIiIiIiJSGYtKIiIiIiIiUhmLSiIiIiIiIlIZi0oiIiIiIiJSGYtKIiIiIiIiUhmLSiIiIiIiIlKZgbYD0JZtVg1gCn219dfYW71fZRU/J7X2l0PapppG+pXUaaD2PhNM1Nvflfib6u3w/0XGJWqk3133/lNrfzGXHqu1PwAQ4efV3uf7UHde59CV/H5fmjo+vOvt44Wqea6pfFYXdR0X3j0OaCKP31WS8joh/We8MlTzLwNSq5Kei/RGO6f52g6BSjmeqSQiIiIiIiKVsagkIiIiIiIqwdavXw9ra+sC24SEhKBevXry4cDAQHTp0kWjceUos5e/EhERERFR0d1vUqtYl+dx4VaxLg94U8QFBwcjKSmp2JetqokTJ2Ls2LFaWTaLSiIiIiIiIh1nbm4Oc3NzrSxbpy5/DQsLQ+PGjWFhYQE7Ozt06dIFUVFR2g6LiIiIiIhKkEOHDqFFixawtraGjY0NOnTogOjoaADAyZMnIZFIFM5CXrt2DRKJBLGxsTh58iQGDRqE5ORkSCQSSCQShISEAAASExMxYMAAlCtXDqampggICMD9+/fl/eRcprpv3z5Ur14dpqam6NGjB16+fIkNGzbA2dkZ5cqVw7hx45CdnS2fr7B+c+zZswceHh4wNjaGv78/Hj16JJ/27uWv75LJZAgLC4OLiwtMTExQt25d7NixQ8VvWJFOFZWnTp3CmDFjcP78eRw9ehSvX79G27ZtkZaWpu3QiIiIiIiohEhLS8OECRNw6dIlHDt2DHp6eujatStkMlmh8zZr1gxLly6FpaUlnj59iqdPn2LixIkA3tyneOnSJfzyyy84d+4chBD4+OOP8fr1a/n8L1++xPLly7FlyxYcOnQIJ0+eRNeuXXHgwAEcOHAAGzduxJo1axQKOmX7nTt3Ln788UecOXMGSUlJ6NOnj9LfSVhYGH788UesXr0at27dwvjx4/HJJ5/g1KlTSveRH526/PXQoUMKw+vXr4ednR0uX76Mli1baikqIiIiIiIqSbp3764w/MMPP8DW1ha3b98udF4jIyNYWVlBIpGgUqVK8vH379/HL7/8gjNnzqBZs2YAgM2bN8PR0RF79uxBz549AQCvX79GeHg43NzcAAA9evTAxo0b8ezZM5ibm6NmzZr48MMPceLECfTu3btI/a5cuRJNmjQBAGzYsAE1atTAH3/8AW9v7wLXKSMjA19//TUiIyPRtGlTAICrqytOnz6NNWvWwNfXt9DvpSA6VVS+Kzk5GQBQvnz5fNtkZGQgIyNDPpySkqLxuIiI
iIiISHvu37+PGTNm4MKFC/j333/lZyjj4uJgamqqUp937tyBgYGBvKgDABsbG1SvXh137tyRjzM1NZUXlABQsWJFODs7K9zvWLFiRcTHxxepXwMDAzRu3Fg+7OnpCWtra9y5c6fQovLBgwd4+fIl2rRpozA+MzMT9evXV/YryJfOFpUymQzBwcFo3rw5vLy88m0XFhaG0NDQYoyMiIiIiIi0qWPHjnBycsLatWvh4OAAmUwGLy8vZGZmyos7IYS8/duXmb4vQ0NDhWGJRJLnOGUuxVWX1NRUAMD+/ftRuXJlhWlSqfS9+9epeyrfNmbMGNy8eRNbtmwpsN3UqVORnJws/7x9MysREREREZUuz58/R1RUFL766iu0bt0aNWrUQGJiony6ra0tAODp06fycdeuXVPow8jISOFBOgBQo0YNZGVl4cKFC7mWVbNmTZXjVbbfrKwsXLp0ST4cFRWFpKQk1KhRo9Bl1KxZE1KpFHFxcXB3d1f4ODo6qhx7Dp08UxkUFIR9+/bht99+Q5UqVQpsK5VK1VJ9ExERERFRyVeuXDnY2Njgu+++g729PeLi4vDFF1/Ip+cUUiEhIZg7dy7u3buHRYsWKfTh7OyM1NRUHDt2DHXr1oWpqSk8PDzQuXNnDBs2DGvWrIGFhQW++OILVK5cGZ07d1Y5XmX7NTQ0xNixY7F8+XIYGBggKCgIH3zwQaGXvgKAhYUFJk6ciPHjx0Mmk6FFixZITk7GmTNnYGlpiYEDB6ocP6BjZyqFEAgKCsLu3btx/PhxuLi4aDskIiIiIiIqQfT09LBlyxZcvnwZXl5eGD9+PBYsWCCfbmhoiJ9//hl3795FnTp1MG/ePMyZM0ehj2bNmmHkyJHo3bs3bG1tMX/+fABAREQEGjZsiA4dOqBp06YQQuDAgQO5Lm8tKmX6NTU1xZQpU9CvXz80b94c5ubm2Lp1q9LLmD17NqZPn46wsDDUqFED7dq1w/79+9VSU0nE2xcTl3CjR4/GTz/9hL1796J69ery8VZWVjAxMVGqj5SUFFhZWWEt3GAKfbXF1thbvSd9q/g5qbW/HNI21TTSr6ROA7X3maDcJlXalfib6u3w/0XGJRbeSAW77v2n1v5iLj1Wa38AIMLPK9UuJ++Sk5NhaWmp9jg0ldc5dCW/35emjg/vevt4oWqeayqf1UVdx4V3jwOayON3lYS8zun7wT+rYWGp5l8GpFYlPRfpjXZO8wttk1dOv3r1CjExMXBxcYGxsbGmw6QSpijbX6fOVIaHhyM5ORmtWrWCvb29/FOUCp2IiIiIiIjUR6fuqdShk6pERERERERlgk6dqSQiIiIiIqKSRafuqVQHTd/bRUS5Fdc9lcxrouJTHPdUMqeJig/vqaR3ldp7KomIiIiIiKhkYVFJREREREREKmNRSURERERERCpjUUlEREREREQqY1FJREREREREKmNRSUREREREZUZsbCwkEgmuXbv23n0FBgaiS5cu792PrjPQdgBERERERKQ7fpJUL9bl9RNRau3P0dERT58+RYUKFdTab1nGopKIiIiIiMoMfX19VKpUKd/pQghkZ2fDwIClkrJ4+SsREREREZUqhw4dQosWLWBtbQ0bGxt06NAB0dHRAHJf/nry5ElIJBIcPHgQDRs2hFQqxenTpxESEoJ69ephzZo1cHR0hKmpKXr16oXk5GSVlvv2snft2oUPP/wQpqamqFu3Ls6dO6fQz+nTp+Hj4wMTExM4Ojpi3LhxSEtLU/8XpSYsKomIiIiIqFRJS0vDhAkTcOnSJRw7dgx6enro2rUrZDJZvvN88cUX+Oabb3Dnzh3UqVMHAPDgwQNs27YNv/76Kw4dOoSrV69i9OjR773cadOmYeLEibh27RqqVauGvn37IisrCwAQHR2Ndu3aoXv37vjzzz+xdetWnD59GkFBQWr4ZjSD53SJiIiIiKhU6d69u8LwDz/8AFtbW9y+fRvm5uZ5zjNr1iy0adNGYdyr
V6/w448/onLlygCAFStWoH379li0aFGel9AWtFwvLy/5+IkTJ6J9+/YAgNDQUNSqVQsPHjyAp6cnwsLC0L9/fwQHBwMAPDw8sHz5cvj6+iI8PBzGxsZF+zKKAc9UEhERERFRqXL//n307dsXrq6usLS0hLOzMwAgLi4u33kaNWqUa1zVqlXlBSUANG3aFDKZDFFReT88SNnl5pwJBQB7e3sAQHx8PADg+vXrWL9+PczNzeUff39/yGQyxMTEFL7yWsAzlUREREREVKp07NgRTk5OWLt2LRwcHCCTyeDl5YXMzMx85zEzMyu25RoaGsp/lkgkACC/RDY1NRUjRozAuHHjcvVftWrV945RE1hUEhERERFRqfH8+XNERUVh7dq18PHxAfDmwTeqiIuLw5MnT+Dg4AAAOH/+PPT09FC9eu7XqqhruQ0aNMDt27fh7u6uUszawKKSiIiIiIhKjXLlysHGxgbfffcd7O3tERcXhy+++EKlvoyNjTFw4EAsXLgQKSkpGDduHHr16pXn/ZTqWu6UKVPwwQcfICgoCEOHDoWZmRlu376No0ePYuXKlSqth6bxnkoiIiIiIio19PT0sGXLFly+fBleXl4YP348FixYoFJf7u7u6NatGz7++GO0bdsWderUwapVqzS63Dp16uDUqVO4d+8efHx8UL9+fcyYMUN+trQkkgghhLaDKE4pKSmwsrLCWrjBFPpq7buxt3pP/Fbxc1Jrf2+TtqmmkX4ldRqovc8EE7V3iSvxN9XeZ2Rcotr62nXvP7X1lSPm0mO19ynCzyvVLifvkpOTYWlpqfY4NJnX71J3nhcmr+OAJvJXUqeByrmmiXwqiDpzTZ0Ky1tN5KAmlIS8zuk7cd9IWJpJ1do3qddk/ZL73jz6n4U+awttk1dOv3r1CjExMXBxcSmRTxzVtJCQEOzZs0f+Psuypijbn2cqiYiIiIiISGUsKomIiIiIiEhlLCqJiIiIiIjeERISUmYvfS0qnSsqf/vtN3Ts2BEODg6QSCTYs2ePtkMiIiIiIiIqs3SuqExLS0PdunXx7bffajsUIiIiIqJSr4w915P+X1G2u869pzIgIAABAQHaDoOIiIiIqFTT13/zRPXMzEyYmGjgcfxUor18+RIAYGhoWGhbnSsqiyojIwMZGRny4ZSUFC1GQ0RERESkGwwMDGBqaoqEhAQYGhpCT0/nLnIkFQgh8PLlS8THx8Pa2lr+z4WClPqiMiwsDKGhodoOg4iIiIhIp0gkEtjb2yMmJgYPHz7UdjhUzKytrVGpUiWl2pb6onLq1KmYMGGCfDglJQWOjo5ajIiIiIiISDcYGRnBw8MDmZmZ2g6FipGhoaFSZyhzlPqiUiqVQiqVajsMIiIiIiKdpKenB2NjY22HQSUYL4wmIiIiIiIilencmcrU1FQ8ePBAPhwTE4Nr166hfPnyqFq1qhYjIyIiIiIiKnt0rqi8dOkSPvzwQ/lwzv2SAwcOxPr167UUFRERERERUdmkc0Vlq1at+AJWIiIiIiKiEoL3VBIREREREZHKWFQSERERERGRyiSijF1LmpKSAisrKyQnJ8PS0lLb4RCVCZrOO+Y1UfHTZN4xp4mKH/OO3gfPVBIREREREZHKWFQSERERERGRylhUEhERERERkcpYVBIREREREZHKWFQSERERERGRylhUEhERERERkcpYVBIREREREZHKWFQSERERERGRylhUEhERERERkcpYVBIREREREZHKWFQSERERERGRylhUEhERERERkcpYVBIREREREZHKWFQSERERERGRylhUEhERERERkcpYVBIREREREZHKDLQdgLZss2oAU+irpa/G3ur/Gqv4Oam1P2mbamrpR1KngVr6eVeCiUa6xZX4mxrpNzIuUSP9AsCue/+pvc+YS4/V3icAiPDzGulXVerMa1Vp4nhQVOo+fqhDUY9Byhxr3ue4kXNsUDWXNZGn2pDXsaEk5bX472eILA39giC1mHTr
d22HQEpY6LNW2yFQKcczlURERERERKQyFpVERERERESkMhaVREREREREpDKdLCq//fZbODs7w9jYGE2aNMEff/yh7ZCIiIiIiIjKJJ0rKrdu3YoJEyZg5syZuHLlCurWrQt/f3/Ex8drOzQiIiIiIqIyR+eKysWLF2PYsGEYNGgQatasidWrV8PU1BQ//PCDtkMjIiIiIiIqc3SqqMzMzMTly5fh5+cnH6enpwc/Pz+cO3cuz3kyMjKQkpKi8CEiIiIiIiL10Kmi8t9//0V2djYqVqyoML5ixYr4559/8pwnLCwMVlZW8o+jo2NxhEpERERERFQm6FRRqYqpU6ciOTlZ/nn06JG2QyIiIiIiIio1DLQdQFFUqFAB+vr6ePbsmcL4Z8+eoVKlSnnOI5VKIZVKiyM8IiIiIiKiMkenzlQaGRmhYcOGOHbsmHycTCbDsWPH0LRpUy1GRkREREREVDbp1JlKAJgwYQIGDhyIRo0awdvbG0uXLkVaWhoGDRqk7dCIiIiIiIjKHJ0rKnv37o2EhATMmDED//zzD+rVq4dDhw7lengPERERERERaZ7OFZUAEBQUhKCgIG2HQUREREREVObp1D2VREREREREVLKwqCQiIiIiIiKVSYQQQttBFKeUlBRYWVkhOTkZlpaW2g6HqEzQdN4xr4mKnybzjjlNVPyYd/Q+eKaSiIiIiIiIVMaikoiIiIiIiFTGopKIiIiIiIhUxqKSiIiIiIiIVMaikoiIiIiIiFTGopKIiIiIiIhUZqDtAIpbzhtUUlJStBwJUdmRk2+aeoMR85qo+Gkyr5nTRMVP07+rqXQrc0Xl8+fPAQCOjo5ajoSo7Hnx4gWsrKzU3i/zmkh7NJHXzGki7dHU72oq3cpcUVm+fHkAQFxcXKlImJSUFDg6OuLRo0el5kW1XCfdUJR1EkLgxYsXcHBw0EgszOuSj+ukG0pKXpe2nAa4v+iKsrxOmv5dTaVbmSsq9fTe3EZqZWVVag4WAGBpaVmq1gfgOukKZddJk38YMq91B9dJN2g7r0trTgNle3/RJWV1nUrLP3Go+PFBPURERERERKQyFpVERERERESksjJXVEqlUsycORNSqVTboahFaVsfgOukK0rSOpWkWNShtK0PwHXSFSVlnUpKHOrEddINXCci1UgEnxtMREREREREKipzZyqJiIiIiIhIfVhUEhERERERkcpYVBIREREREZHKWFQSERERERGRyspUUfntt9/C2dkZxsbGaNKkCf744w9th6S0sLAwNG7cGBYWFrCzs0OXLl0QFRWl0KZVq1aQSCQKn5EjR2op4sKFhITkitfT01M+/dWrVxgzZgxsbGxgbm6O7t2749mzZ1qMuHDOzs651kkikWDMmDEASv42+u2339CxY0c4ODhAIpFgz549CtOFEJgxYwbs7e1hYmICPz8/3L9/X6HNf//9h/79+8PS0hLW1tYYMmQIUlNTNRYz87pkYV6XvG3EvC5epS2vmdMlc/voYl5T6VZmisqtW7diwoQJmDlzJq5cuYK6devC398f8fHx2g5NKadOncKYMWNw/vx5HD16FK9fv0bbtm2Rlpam0G7YsGF4+vSp/DN//nwtRaycWrVqKcR7+vRp+bTx48fj119/xfbt23Hq1Ck8efIE3bp102K0hbt48aLC+hw9ehQA0LNnT3mbkryN0tLSULduXXz77bd5Tp8/fz6WL1+O1atX48KFCzAzM4O/vz9evXolb9O/f3/cunULR48exb59+/Dbb79h+PDhGomXeV0yMa9L1jZiXhev0pjXzOmSt310La+pDBBlhLe3txgzZox8ODs7Wzg4OIiwsDAtRqW6+Ph4AUCcOnVKPs7X11d89tln2guqiGbOnCnq1q2b57SkpCRhaGgotm/fLh93584dAUCcO3eumCJ8f5999plwc3MTMplMCKFb2wiA2L17t3xYJpOJSpUqiQULFsjHJSUlCalUKn7++WchhBC3b98WAMTF
ixflbQ4ePCgkEol4/Pix2mNkXpc8zOuSjXld/HQ9r5nTJZ8u5DWVfmXiTGVmZiYuX74MPz8/+Tg9PT34+fnh3LlzWoxMdcnJyQCA8uXLK4zfvHkzKlSoAC8vL0ydOhUvX77URnhKu3//PhwcHODq6or+/fsjLi4OAHD58mW8fv1aYZt5enqiatWqOrPNMjMzsWnTJgwePBgSiUQ+Xte2UY6YmBj8888/CtvEysoKTZo0kW+Tc+fOwdraGo0aNZK38fPzg56eHi5cuKDWeJjXJRfzuuRvoxzMa80rDXnNnC7Z2+ddJS2vqWww0HYAxeHff/9FdnY2KlasqDC+YsWKuHv3rpaiUp1MJkNwcDCaN28OLy8v+fh+/frByckJDg4O+PPPPzFlyhRERUVh165dWow2f02aNMH69etRvXp1PH36FKGhofDx8cHNmzfxzz//wMjICNbW1grzVKxYEf/88492Ai6iPXv2ICkpCYGBgfJxuraN3pbzveeVRznT/vnnH9jZ2SlMNzAwQPny5dW+3ZjXJXOfYV6X/G30Nua1ZpWGvGZOl+ztk5eSltdUNpSJorK0GTNmDG7evKlwTwMAhevga9euDXt7e7Ru3RrR0dFwc3Mr7jALFRAQIP+5Tp06aNKkCZycnLBt2zaYmJhoMTL1WLduHQICAuDg4CAfp2vbiIoP81o3MK+pKEpDXjOnS/b2ISopysTlrxUqVIC+vn6up5E9e/YMlSpV0lJUqgkKCsK+fftw4sQJVKlSpcC2TZo0AQA8ePCgOEJ7b9bW1qhWrRoePHiASpUqITMzE0lJSQptdGWbPXz4EJGRkRg6dGiB7XRpG+V87wXlUaVKlXI9TCMrKwv//fef2rcb87rk7zMA87qkY15rTmnNa+Z0yVfS8prKhjJRVBoZGaFhw4Y4duyYfJxMJsOxY8fQtGlTLUamPCEEgoKCsHv3bhw/fhwuLi6FznPt2jUAgL29vYajU4/U1FRER0fD3t4eDRs2hKGhocI2i4qKQlxcnE5ss4iICNjZ2aF9+/YFttOlbeTi4oJKlSopbJOUlBRcuHBBvk2aNm2KpKQkXL58Wd7m+PHjkMlk8l/K6sK8Lvn7DMC8LumY1+pX2vOaOV3ylbS8pjJCyw8KKjZbtmwRUqlUrF+/Xty+fVsMHz5cWFtbi3/++UfboSll1KhRwsrKSpw8eVI8ffpU/nn58qUQQogHDx6IWbNmiUuXLomYmBixd+9e4erqKlq2bKnlyPP3+eefi5MnT4qYmBhx5swZ4efnJypUqCDi4+OFEEKMHDlSVK1aVRw/flxcunRJNG3aVDRt2lTLURcuOztbVK1aVUyZMkVhvC5soxcvXoirV6+Kq1evCgBi8eLF4urVq+Lhw4dCCCG++eYbYW1tLfbu3Sv+/PNP0blzZ+Hi4iLS09PlfbRr107Ur19fXLhwQZw+fVp4eHiIvn37aiRe5nXJw7wueduIeV28SlteM6dL5vbRtbym0q/MFJVCCLFixQpRtWpVYWRkJLy9vcX58+e1HZLSAOT5iYiIEEIIERcXJ1q2bCnKly8vpFKpcHd3F5MmTRLJycnaDbwAvXv3Fvb29sLIyEhUrlxZ9O7dWzx48EA+PT09XYwePVqUK1dOmJqaiq5du4qnT59qMWLlHD58WAAQUVFRCuN1YRudOHEiz/1s4MCBQog3jymfPn26qFixopBKpaJ169a51vP58+eib9++wtzcXFhaWopBgwaJFy9eaCxm5nXJwrwueduIeV28SlteM6dL5vbRxbym0k0ihBAaPBFKREREREREpViZuKeSiIiIiIiININFJREREREREamMRSURERERERGpjEUlERERERERqYxFJREREREREamMRSURERERERGpjEUlERERERERqYxFJREREREREamMRSURERERERGpjEUlERERERERqYxFJREREREREamMRSURERERERGpjEUlERERERERqYxFJREREREREamMRSUR
ERERERGpjEUlERERERERqYxFJREREREREamMRSURERERERGpjEUlERERERERqYxFJREREREREamMRSURERERERGpjEUlERERERERqYxFJREREREREamMRSURERERERGpjEUlERERERERqYxFJREREREREamMRSURERERERGpjEUlERERERERqYxFJRGRCpydnREYGKjtMJQSEhICiUSiMK644o+NjYVEIsH69evl4wIDA2Fubq7xZeeQSCQICQkptuURERGVNSwqiYjeEh0djREjRsDV1RXGxsawtLRE8+bNsWzZMqSnp2s7PK06cOBAiS3OSnJsREREpZ2BtgMgIiop9u/fj549e0IqlWLAgAHw8vJCZmYmTp8+jUmTJuHWrVv47rvvtB2mWkRFRUFPr2j/Vzxw4AC+/fbbIhVvTk5OSE9Ph6GhYREjLJqCYktPT4eBAX/dERERaQp/yxIRAYiJiUGfPn3g5OSE48ePw97eXj5tzJgxePDgAfbv36/FCNVLKpVqtP+srCzIZDIYGRnB2NhYo8sqjLaXT0REVNrx8lciIgDz589Hamoq1q1bp1BQ5nB3d8dnn32W7/z//fcfJk6ciNq1a8Pc3ByWlpYICAjA9evXc7VdsWIFatWqBVNTU5QrVw6NGjXCTz/9JJ/+4sULBAcHw9nZGVKpFHZ2dmjTpg2uXLlS6HqcPn0ajRs3hrGxMdzc3LBmzZo82717T+Xr168RGhoKDw8PGBsbw8bGBi1atMDRo0cBvLkP8ttvvwXw5h7FnA/wv/smFy5ciKVLl8LNzQ1SqRS3b9/O857KHH/99Rf8/f1hZmYGBwcHzJo1C0II+fSTJ09CIpHg5MmTCvO922dBseWMe/cM5tWrVxEQEABLS0uYm5ujdevWOH/+vEKb9evXQyKR4MyZM5gwYQJsbW1hZmaGrl27IiEhIe8NQEREVAbxTCUREYBff/0Vrq6uaNasmUrz//XXX9izZw969uwJFxcXPHv2DGvWrIGvry9u374NBwcHAMDatWsxbtw49OjRA5999hlevXqFP//8ExcuXEC/fv0AACNHjsSOHTsQFBSEmjVr4vnz5zh9+jTu3LmDBg0a5BvDjRs30LZtW9ja2iIkJARZWVmYOXMmKlasWGj8ISEhCAsLw9ChQ+Ht7Y2UlBRcunQJV65cQZs2bTBixAg8efIER48excaNG/PsIyIiAq9evcLw4cMhlUpRvnx5yGSyPNtmZ2ejXbt2+OCDDzB//nwcOnQIM2fORFZWFmbNmlVovG9TJra33bp1Cz4+PrD8v/buLSTKrY0D+F8nNWfGs5MpmaV51oQEpTxTjJFGgpUSpQhZMKkXhQZRTYkGKVGpSILmTdsKCwwSjyCWKWF0YYZpechDYCKpF+VpZu0Lab6m0bT52hd7+/9dzaxZ61nP+yLCw7vWeq2tkZOTAzMzM5SVlSE6Ohqtra0IDQ3V65+ZmQk7Ozuo1WoMDQ3h1q1byMjIwMOHD38rTyIiov8qFpVEtO7NzMxgbGwMhw4dMjpGYGAg+vr69PYpnjhxAj4+PqioqMClS5cALO3b9Pf3R3V19YqxamtrkZ6ejhs3bujacnJyVs3h8uXLEELg+fPn2Lp1KwAgMTERgYGBq46tra3FgQMHVtwzunv3bnh5eaGpqQnHjx9fts/o6Cg+fPgAhUKhaxsaGlq27+zsLPbv34+ioiIAgEqlwsGDB3H9+nVkZWXB0dFx1Zx/J7cfXbx4EQsLC2hra4O7uzsAICUlBd7e3sjJyUFra6tefwcHBzQ2Nuqefmq1WhQVFWF6eho2NjZrzpOIiOi/istfiWjdm5mZAQBYWVkZHcPCwkJXUGo0GkxOTkIul8Pb21tv2aqtrS1GR0fR2dm5YixbW1u8fPkSnz59WvP8Go0GDQ0NSEhI0BWUAODr64vY2NhVx9va2uLt27d4//79muf8WWJiol5BuZqMjAzdZxMTE2RkZGB+fh7Nzc1G57AajUaD
xsZGJCQk6ApKAHB2dsaxY8fQ1tam+3v47tSpU3rLaSMiIqDRaPDx48d/LE8iIqJ/ExaVRLTuWVtbA1jay2gsrVaLmzdvwtPTExYWFnB0dIRCoUBXVxemp6d1/c6fPw+5XI6QkBB4enrizJkzePHihV6sgoICdHd3w9XVFSEhIbhy5QoGBgZ+Of/ExAS+ffsGT09Pg9+8vb1XzT83NxdTU1Pw8vJCYGAgsrOz0dXVtcarX7J9+/Y19zU1NdUr6gDAy8sLwMpPN/+EiYkJfP36ddl74uvrC61Wi5GREb32H4t0ALCzswMAfPny5R/Lk4iI6N+ERSURrXvW1tZwcXFBd3e30TGuXbuGs2fPIjIyEvfu3UNDQwOamprg7++vt6/Q19cXvb29ePDgAcLDw/H48WOEh4dDrVbr+hw9ehQDAwMoLi6Gi4sLCgsL4e/vj7q6uv/rOn8lMjIS/f39uHv3LgICAlBeXo5du3ahvLx8zTEsLS3/aE4/Ph38kUaj+aPzrEYikSzb/uOhQkREROsZi0oiIgDx8fHo7+9HR0eHUeMfPXqEmJgYVFRUIDk5GUqlEvv27cPU1JRBX5lMhqSkJFRWVmJ4eBhxcXHIz8/H7Oysro+zszNUKhVqamowODgIBwcH5Ofnrzi/QqGApaXlsstXe3t713QN9vb2SEtLw/379zEyMoKdO3fqnZq6UpFnDK1Wa/D0ta+vD8DSybTA/54I/nwPl1t2utbcFAoFpFLpsvfk3bt3MDU1haur65piERER0RIWlUREWDoIRyaT4eTJkxgfHzf4vb+/H7dv315xvEQiMXhyVV1djbGxMb22yclJve/m5ubw8/ODEAILCwvQaDR6y2UBYNOmTXBxccHc3Nwv54+NjUVNTQ2Gh4d17T09PWhoaFhx3Ep5yeVy7NixQ29OmUwGwLDIM1ZJSYnusxACJSUlMDMzw969ewEAbm5ukEgkePbsmd640tJSg1hrzU0ikUCpVOLJkyd6y2zHx8dRVVWF8PBw3XJoIiIiWhue/kpEBMDDwwNVVVVISkqCr68vUlJSEBAQgPn5ebS3t6O6ulrvvY4/i4+PR25uLtLS0rBnzx68efMGf/31l8G+QaVSic2bNyMsLAxOTk7o6elBSUkJ4uLiYGVlhampKWzZsgWHDx9GUFAQ5HI5mpub0dnZqXca7HKuXr2K+vp6REREQKVSYXFxUfdOzNX2R/r5+SE6OhrBwcGwt7fHq1evdK81+S44OBgAkJWVhdjYWEgkEiQnJ69yZ5e3ceNG1NfXIzU1FaGhoairq0NtbS0uXLigO+zHxsYGR44cQXFxMUxMTODh4YGnT5/i8+fPBvF+J7e8vDw0NTUhPDwcKpUKGzZsQFlZGebm5lBQUGDU9RAREa1rgoiIdPr6+kR6errYtm2bMDc3F1ZWViIsLEwUFxeL2dlZXT83NzeRmpqq+z47OyvOnTsnnJ2dhaWlpQgLCxMdHR0iKipKREVF6fqVlZWJyMhI4eDgICwsLISHh4fIzs4W09PTQggh5ubmRHZ2tggKChJWVlZCJpOJoKAgUVpauqb8W1tbRXBwsDA3Nxfu7u7izp07Qq1Wi5//3f+cf15enggJCRG2trbC0tJS+Pj4iPz8fDE/P6/rs7i4KDIzM4VCoRAmJia6mIODgwKAKCwsNMjn+2+VlZW6ttTUVCGTyUR/f79QKpVCKpUKJycnoVarhUaj0Rs/MTEhEhMThVQqFXZ2duL06dOiu7vbIOZKuQkhBAChVqv14r5+/VrExsYKuVwupFKpiImJEe3t7Xp9KisrBQDR2dmp197S0iIAiJaWFoPrJSIiWo9MhOBJA0RERERERGQc7qkkIiIiIiIio7GoJCIiIiIiIqOxqCQiIiIiIiKjsagkIiIiIiIio7GoJCIiIiIiIqOxqCQiIiIiIiKjsagkIiIiIiIio7GoJCIiIiIiIqOxqCQiIiIiIiKjsagkIiIiIiIio7GoJCIiIiIiIqOx
qCQiIiIiIiKj/Q3rqt/t87q2rwAAAABJRU5ErkJggg==",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from flwr_datasets.visualization import plot_comparison_label_distribution\n",
+ "\n",
+ "fig, axes, df_list = plot_comparison_label_distribution(\n",
+ " partitioner_list=partitioner_list,\n",
+ " label_name=\"label\",\n",
+ " subtitle=\"Comparison of Partitioning Schemes on CIFAR10\",\n",
+ " titles=title_list,\n",
+ " legend=True,\n",
+ " verbose_labels=True,\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "862899eb04695380",
+ "metadata": {},
+ "source": [
+ "## Bonus: Natural Id Dataset"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4f3f8aaf",
+ "metadata": {},
+ "source": [
+ "Nothing stops you from using the `NaturalIdPartitioner` to visualize a dataset that already comes with pre-existing, natural partitions (identified by a column such as `id`) and therefore does not need artificial partitioning. Let's look at the `speech-commands` dataset, which has a `speaker_id` column. There are quite a few speakers, so we will show only the first 20 partitions. And since we have quite a few different labels, let's specify `legend_kwargs={\"ncols\": 2}` to display them in two columns (we will also shift the legend slightly to the right)."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f016d21a",
+ "metadata": {},
+ "source": [
+ "You'll be using the [Google SpeechCommands](https://huggingface.co/datasets/google/speech_commands) dataset, which is a speech-based dataset. For this, you'll need to install the `\"audio\"` extension for Flower Datasets. It can be easily done like this:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fd5ca8f4",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "! pip install -q \"flwr-datasets[audio]\""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "90ea3642",
+ "metadata": {},
+ "source": [
+ "With everything ready, let's visualize the partitions for a naturally partitioned dataset."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4fe70116",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAxQAAAHHCAYAAAAmth45AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy80BEi2AAAACXBIWXMAAA9hAAAPYQGoP6dpAACYNUlEQVR4nOzdeXhM1/8H8Pdk31dZyUpCQjZiiS2pLbZ+RexSkSJaxE5oFUkIpXZalDYJpWqn2lqaSlQQsQWVBinf+GqIImQh6/z+UPfXaYLMmCXL+/U88zy59557zufkTpjP3HPuEYnFYjGIiIiIiIhkoKbqAIiIiIiIqPZiQkFERERERDJjQkFERERERDJjQkFERERERDJjQkFERERERDJjQkFERERERDJjQkFERERERDJjQkFERERERDJjQkFERERERDJjQkFUx4WFhcHR0bFaZaOioiASiRQbkBIEBASgRYsWcq3T0dERYWFhcq2zuuLj4yESiXD79m2Ft/Xv98vt27chEomwbNkyhbcN1J33IBFRfcKEguq0lx/EXr50dHTg6uqKiIgI3L9/X+Htv/xw9PKlp6cHd3d3fPLJJ3j69Knc2vnzzz8RFRWFS5cuvbFsUVERoqKikJSUJLf25UEkEiEiIkLVYShcUlKSxHtCW1sbVlZWCAgIwKJFi/DgwQO5tFNTrzNQs2MjIiLpMaGgeiEmJgZbt27FunXr0L59e6xfvx5+fn4oKipSSvvr16/H1q1bsWLFCjRr1gyxsbHo2bMnxGKxXOr/888/ER0dXWVCsWnTJmRmZgrbRUVFiI6OrvLD3CeffIJnz57JJSZ6vUmTJmHr1q348ssvMXPmTJiZmWH+/Plwc3PDL7/8IlF2xIgRePbsGRwcHKpd/+uu8+v8+/2iCHwPEhHVLRqqDoBIGXr16gVfX18AwJgxY2Bubo4VK1bgwIEDGDZs2FvVXVRUBD09vdeWGThwIBo0aAAA+PDDDzFgwADs3bsXZ86cgZ+fn8xtl5WVoaKi4rVlNDU1q12fhoYGNDT4z4IydOrUCQMHDpTYl56ejh49emDAgAG4du0abGxsAADq6upQV1dXaDyFhYXQ19eX6v2iCHwPEhHVPrxDQfVSly5dAAC3bt0S9n3zzTdo1aoVdHV1YWZmhqFDh+LOnTsS570cm3/+/Hl07twZenp6+Pjjj9+q/ZKSEsybNw+tWrWCsbEx9PX10alTJxw/flzinH+OZV+1ahUaN24MbW1tfPHFF2jdujUA4P333xeG0sTHxwOQHBN/+/ZtWFhYAACio6OFslFRUQCqHr9eVlaGBQsWCO05Ojri448/RnFxsUQ5R0dH9O3bFydPnkSbNm2go6MDZ2dnbNmyRerfz6scOHAAffr0ga2tLbS1tdG4cWMsWLAA5eXlVZY/f/482rdvD11dXTg5OWHDhg2VyhQXF2P+/Plo0qQJtLW1YWdnh8jIyEr9+7fS0lJER0fDxcUFOjo6MDc3R8eOHXHs2DGZ++fl5YVVq1YhLy8P69atE/ZXNYfi3LlzCAwMRIMGDYT+jRo1CsCbr3NYWBgMDAyQlZWF3r17w9DQECEhIcKxV825WblyJRwcHKCrqwt/f39cvXpV4nhAQAACAgIqnVeX3oNERFQZvwaieikrKwsAYG5uDgCIjY3F3LlzMXjwYIwZMwYPHjzA2rVr0blzZ1y8eBEmJibCuQ8fPkSvXr0wdOhQvPfee7Cysnqr9p8+fYrNmzdj2LBhCA8PR35+Pr766isEBgbi7Nmz8Pb2ljg3Li4Oz58/x9ixY6GtrY3+/fsjPz8f8+bNw9ixY9GpUycAQPv27Su1a2FhgfXr12PcuHHo378/goODAQCenp6vjHXMmDFISEjAwIEDMX36dKSmpmLx4sXIyMjAvn37JMrevHkTAwcOxOjRozFy5Eh8/fXXCAsLQ6tWrdC8eXOpf0//Fh8fDwMDA0ybNg0GBgb45ZdfMG/e
PDx9+hSfffaZRNnHjx+jd+/eGDx4MIYNG4adO3di3Lhx0NLSEj54V1RU4D//+Q9OnjyJsWPHws3NDVeuXMHKlStx/fp17N+//5WxREVFYfHixRgzZgzatGmDp0+f4ty5c7hw4QK6d+8ucx9f/v6OHj2K2NjYKsvk5uaiR48esLCwwOzZs2FiYoLbt29j7969AKp3ncvKyhAYGIiOHTti2bJlb7zLtmXLFuTn52PChAl4/vw5Vq9ejS5duuDKlStS/Q3U9vcgERFVQUxUh8XFxYkBiH/++WfxgwcPxHfu3BHv2LFDbG5uLtbV1RX/73//E9++fVusrq4ujo2NlTj3ypUrYg0NDYn9/v7+YgDiDRs2VKv9+fPniwGIMzMzxQ8ePBDfunVLvHHjRrG2trbYyspKXFhYKC4rKxMXFxdLnPf48WOxlZWVeNSoUcK+W7duiQGIjYyMxLm5uRLl09LSxADEcXFxlWIYOXKk2MHBQdh+8OCBGIB4/vz5r4z3pUuXLokBiMeMGSNRbsaMGWIA4l9++UXY5+DgIAYgPnHihLAvNzdXrK2tLZ4+ffprf09isVgMQDxhwoTXlikqKqq074MPPhDr6emJnz9/Lux7eZ2WL18u7CsuLhZ7e3uLLS0txSUlJWKxWCzeunWrWE1NTfzrr79K1LlhwwYxAHFKSopE/0aOHClse3l5ifv06fPGfv3b8ePHxQDEu3btemUZLy8vsampqbD98n1869YtsVgsFu/bt08MQJyWlvbKOl53nUeOHCkGIJ49e3aVx/75fnn5vnv59/JSamqqGIB46tSpwj5/f3+xv7//G+usqe9BIiKSDYc8Ub3QrVs3WFhYwM7ODkOHDoWBgQH27duHhg0bYu/evaioqMDgwYPx119/CS9ra2u4uLhUGnqkra2N999/X6r2mzZtCgsLCzg5OeGDDz5AkyZN8MMPP0BPTw/q6urQ0tIC8OIb80ePHqGsrAy+vr64cOFCpboGDBggDBlRtB9//BEAMG3aNIn906dPBwD88MMPEvvd3d2FOyTAi2+jmzZtij/++EMu8ejq6go/5+fn46+//kKnTp1QVFSE33//XaKshoYGPvjgA2FbS0sLH3zwAXJzc3H+/HkAwK5du+Dm5oZmzZpJXPuXQ9L+fe3/ycTEBL/99htu3Lghl779k4GBAfLz81/bNgAcOnQIpaWlMrczbty4apcNCgpCw4YNhe02bdqgbdu2wntEUWrae5CIiCrjkCeqFz7//HO4urpCQ0MDVlZWaNq0KdTUXuTTN27cgFgshouLS5Xn/nuSasOGDYUEoLr27NkDIyMjaGpqolGjRmjcuLHE8YSEBCxfvhy///67xAdEJyenSnVVtU9R/vvf/0JNTQ1NmjSR2G9tbQ0TExP897//ldhvb29fqQ5TU1M8fvxYLvH89ttv+OSTT/DLL79UeuzukydPJLZtbW2hr68vsc/V1RXAi3H87dq1w40bN5CRkfHKBC03N/eVscTExKBfv35wdXVFixYt0LNnT4wYMeK1Q3eqq6CgAIaGhq887u/vjwEDBiA6OhorV65EQEAAgoKCMHz4cGhra1erDQ0NDTRq1KjaMVX19+Hq6oqdO3dWuw5Z1LT3IBERVcaEguqFNm3aCE95+reKigqIRCL89NNPVT5Jx8DAQGL7n9+SV1fnzp2Fpzz92zfffIOwsDAEBQVh5syZsLS0hLq6OhYvXizMtXjb9t9WdRcae9WTiMRyeDxuXl4e/P39YWRkhJiYGDRu3Bg6Ojq4cOECZs2a9canXVWloqICHh4eWLFiRZXH7ezsXnlu586dkZWVhQMHDuDo0aPYvHkzVq5ciQ0bNmDMmDFSx/JSaWkprl+//tqF+UQiEXbv3o0zZ87g+++/x5EjRzBq1CgsX74cZ86cqfSerYq2traQVMuLSCSq8lq/atK8tHVXhyLfg0REVDUmFFTvNW7cGGKxGE5OTsI32Mq0e/duODs7Y+/evRIfmubP
n1/tOqRZWViasg4ODqioqMCNGzfg5uYm7L9//z7y8vKkWhfhbSUlJeHhw4fYu3cvOnfuLOz/55O6/unPP/8UHoX60vXr1wFAeOJQ48aNkZ6ejq5du8q0OrOZmRnef/99vP/++ygoKEDnzp0RFRX1VgnF7t278ezZMwQGBr6xbLt27dCuXTvExsZi+/btCAkJwY4dOzBmzBi5rzZd1dCu69evSzwRytTUtMqhRf++i1Bb34NERFQ1zqGgei84OBjq6uqIjo6u9C2mWCzGw4cPFdr+y29U/9l2amoqTp8+Xe06Xn5ozsvLe2PZl0/zqU7Z3r17AwBWrVolsf/lN/p9+vSpdoxvq6rfU0lJCb744osqy5eVlWHjxo0SZTdu3AgLCwu0atUKADB48GDcvXsXmzZtqnT+s2fPUFhY+Mp4/v2+MDAwQJMmTd74uNnXSU9Px5QpU2BqaooJEya8stzjx48rvVdfPg3sZfvSXOfq2L9/P+7evStsnz17FqmpqejVq5ewr3Hjxvj9998lVvtOT09HSkqKRF219T1IRERV4x0KqvcaN26MhQsX4qOPPsLt27cRFBQEQ0ND3Lp1C/v27cPYsWMxY8YMhbXft29f7N27F/3790efPn1w69YtbNiwAe7u7igoKKh2H0xMTLBhwwYYGhpCX18fbdu2rXK+ha6uLtzd3fHdd9/B1dUVZmZmaNGiRZVDbLy8vDBy5Eh8+eWXwpCjs2fPIiEhAUFBQXjnnXfeuv//dO7cOSxcuLDS/oCAALRv3x6mpqYYOXIkJk2aBJFIhK1bt75yKIutrS2WLFmC27dvw9XVFd999x0uXbqEL7/8UpgXM2LECOzcuRMffvghjh8/jg4dOqC8vBy///47du7ciSNHjrxyqJy7uzsCAgLQqlUrmJmZ4dy5c9i9ezciIiKq1ddff/0Vz58/R3l5OR4+fIiUlBQcPHgQxsbG2LdvH6ytrV95bkJCAr744gv0798fjRs3Rn5+PjZt2gQjIyPhA7g017k6mjRpgo4dO2LcuHEoLi7GqlWrYG5ujsjISKHMqFGjsGLFCgQGBmL06NHIzc3Fhg0b0Lx5c4k5LzX5PUhERDJQ0dOliJTi5eM2X/d4zZf27Nkj7tixo1hfX1+sr68vbtasmXjChAnizMxMoYy/v7+4efPm1W7/5SMwHzx48MoyFRUV4kWLFokdHBzE2traYh8fH/GhQ4de+fjOzz77rMp6Dhw4IHZ3dxdraGhIPEL23/WIxWLxqVOnxK1atRJraWlJPL7z34/sFIvF4tLSUnF0dLTYyclJrKmpKbazsxN/9NFHEo9pFYtfPLKzqseovupRov8G4JWvBQsWiMVisTglJUXcrl07sa6urtjW1lYcGRkpPnLkiBiA+Pjx4xJtNm/eXHzu3Dmxn5+fWEdHR+zg4CBet25dpXZLSkrES5YsETdv3lysra0tNjU1Fbdq1UocHR0tfvLkiUT//vnY2IULF4rbtGkjNjExEevq6oqbNWsmjo2NFR5J+yovHxv78qWpqSm2sLAQd+7cWRwbG1vpkcBiceXHxl64cEE8bNgwsb29vVhbW1tsaWkp7tu3r/jcuXMS573qOo8cOVKsr69fZXyve98tX75cbGdnJ9bW1hZ36tRJnJ6eXun8b775Ruzs7CzW0tISe3t7i48cOVJr3oNERCQbkVjMmWpERERERCQbzqEgIiIiIiKZMaEgIiIiIiKZMaEgIiIiIiKZMaEgIiIiIiKZMaEgIiIiIiKZMaEgIiIiIiKZcWE7ABUVFfjzzz9haGgIkUik6nCIiIioGsRiMfLz82Fraws1NX5HSqQqTCgA/Pnnn7Czs1N1GERERCSDO3fuoFGjRqoOg6jeYkIBwNDQEMCLf5CMjIxUHA0RERFVx9OnT2FnZyf8P05EqsGEAhCGORkZGTGhICIiqmU4XJlItTjgkIiIiIiIZMaEgoiIiIiIZMaEgoiIiIiIZMaEgoiIiIiIZMaEgoiI
iIiIZMaEgoiIiIiIZMaEgoiIiIiIZMaEgoiIiIiIZMaEgoiIiIiIZMaVsomIiKjeKi8vR2lpqarDIKpxNDU1oa6uXq2yKk0oTpw4gc8++wznz59HTk4O9u3bh6CgIOG4WCzG/PnzsWnTJuTl5aFDhw5Yv349XFxchDKPHj3CxIkT8f3330NNTQ0DBgzA6tWrYWBgoIIeERERUW0gFotx79495OXlqToUohrLxMQE1tbWEIlEry2n0oSisLAQXl5eGDVqFIKDgysdX7p0KdasWYOEhAQ4OTlh7ty5CAwMxLVr16CjowMACAkJQU5ODo4dO4bS0lK8//77GDt2LLZv367s7hAREVEt8TKZsLS0hJ6e3hs/MBHVJ2KxGEVFRcjNzQUA2NjYvLa8SCwWi5UR2JuIRCKJOxRisRi2traYPn06ZsyYAQB48uQJrKysEB8fj6FDhyIjIwPu7u5IS0uDr68vAODw4cPo3bs3/ve//8HW1rZabT99+hTGxsZ48uQJjIyMFNI/IiIiki9Z//8uLy/H9evXYWlpCXNzcwVGSFS7PXz4ELm5uXB1dX3t8KcaOyn71q1buHfvHrp16ybsMzY2Rtu2bXH69GkAwOnTp2FiYiIkEwDQrVs3qKmpITU1VekxExERUc33cs6Enp6eiiMhqtle/o28aZ5RjZ2Ufe/ePQCAlZWVxH4rKyvh2L1792BpaSlxXENDA2ZmZkKZqhQXF6O4uFjYfvr0qbzCJiIiolqCw5yIXq+6fyM1NqFQpMWLFyM6OrpaZc2W9JKq7kezfpI6Ht3ZAVKVf/ZpkkLrr6lt1IVroYw2eL2rj9e7emw39Je6jT8/3CdVeWVcC2W8p6T9XSn69wTUzPeUMq4FESlPjR3yZG1tDQC4f/++xP779+8Lx6ytrYXJIi+VlZXh0aNHQpmqfPTRR3jy5InwunPnjpyjJyIiIqKqxMfHw8TE5K3rEYlE2L9//1vXQ2+vxiYUTk5OsLa2RmJiorDv6dOnSE1NhZ+fHwDAz88PeXl5OH/+vFDml19+QUVFBdq2bfvKurW1tWFkZCTxIiIiIqLqCQsLk3jUP9VvKh3yVFBQgJs3bwrbt27dwqVLl2BmZgZ7e3tMmTIFCxcuhIuLi/DYWFtbW+EN7Obmhp49eyI8PBwbNmxAaWkpIiIiMHTo0Go/4YnkQ9dUV9Uh0N94LYiIiEiZVHqH4ty5c/Dx8YGPjw8AYNq0afDx8cG8efMAAJGRkZg4cSLGjh2L1q1bo6CgAIcPHxbWoACAbdu2oVmzZujatSt69+6Njh074ssvv1RJf4iIiIjquxUrVsDDwwP6+vqws7PD+PHjUVBQUKnc/v374eLiAh0dHQQGBlYagn7gwAG0bNkSOjo6cHZ2RnR0NMrKyqpss6SkBBEREbCxsYGOjg4cHBywePFihfSPKlPpHYqAgAC8bhkMkUiEmJgYxMTEvLKMmZkZF7EjIiIiqiHU1NSwZs0aODk54Y8//sD48eMRGRmJL774QihTVFSE2NhYbNmyBVpaWhg/fjyGDh2KlJQUAMCvv/6K0NBQrFmzBp06dUJWVhbGjh0LAJg/f36lNtesWYODBw9i586dsLe3x507dzhHVonq5VOeSP50jHXeXIiUgteCiIhUacqUKcLPjo6OWLhwIT788EOJhKK0tBTr1q0T5rwmJCTAzc0NZ8+eRZs2bRAdHY3Zs2dj5MiRAABnZ2csWLAAkZGRVSYU2dnZcHFxQceOHSESieDg4KDYTpKEGjspm4iIiIhqn59//hldu3ZFw4YNYWhoiBEjRuDhw4coKioSymhoaKB169bCdrNmzWBiYoKMjAwAQHp6OmJiYmBgYCC8wsPDkZOTI1HPS2FhYbh06RKaNm2KSZMm4ejRo4rvKAmYUBARERGRXNy+fRt9+/aFp6cn9uzZg/Pnz+Pzzz8H8GKeQ3UVFBQgOjoaly5dEl5XrlzBjRs3JObSvtSyZUvcunULCxYswLNnzzB4
8GAMHDhQbv2i1+OQJ5ILW0MtVYdAf+O1ICIiVTl//jwqKiqwfPlyqKm9+N56586dlcqVlZXh3LlzaNOmDQAgMzMTeXl5cHNzA/AiQcjMzESTJk2q3baRkRGGDBmCIUOGYODAgejZsycePXoEMzMzOfSMXocJBRERERFJ7cmTJ7h06ZLEvgYNGqC0tBRr167Fu+++i5SUFGzYsKHSuZqampg4cSLWrFkDDQ0NREREoF27dkKCMW/ePPTt2xf29vYYOHAg1NTUkJ6ejqtXr2LhwoWV6luxYgVsbGzg4+MDNTU17Nq1C9bW1nJZQI/ejAkFEUmtrqx10dzDStUh0N+U8Z7iAwuqp678fZPiJSUlCY/+f2n06NFYsWIFlixZgo8++gidO3fG4sWLERoaKlFOT08Ps2bNwvDhw3H37l106tQJX331lXA8MDAQhw4dQkxMDJYsWQJNTU00a9YMY8aMqTIWQ0NDLF26FDdu3IC6ujpat26NH3/8UbhLQorFhIKIiIiIpBIfH4/4+PhXHp86darE9ogRI4Sfw8LCEBYWBgAIDg5+ZR2BgYEIDAx85fF/Lj0QHh6O8PDwN0RNisKEguTC2oDj9msKXgsiIiJSJt4HIiIiIiIimTGhICIiIiIimXHIE8lFQ0NtVYdAf+O1ICIiImXiHQoiIiIiIpIZEwoiIiIiIpIZhzyRXDTQZW5aU/BaEBERkTLxkwcREREREcmMCQUREREREcmMQ55ILqz01FUdAv2N14KIiKjmE4vF+OCDD7B79248fvwYFy9ehLe3t6rDkgkTihqguYdVra5fWdo3a6DqEEiJdIx1VB2CXJg6miq0/tAeTRRaPwDYGip+9XXnRkYKb4Oqp6787clCd3aAUtt79mmSUturd4r2Kbc9vf5SFT98+DDi4+ORlJQEZ2dnNGhQez/nMKEgIiIiIlKyrKws2NjYoH379gpro6SkBFpaiv9SiHMoSC4a6KhJ/SLF4LUgIqqbDh06BBMTE5SXlwMALl26BJFIhNmzZwtlxowZg/feew8AsGfPHjRv3hza2tpwdHTE8uXLhXLr1q1DixYthO39+/dDJBJhw4YNwr5u3brhk08+UXS36qWwsDBMnDgR2dnZEIlEcHR0RHFxMSZNmgRLS0vo6OigY8eOSEtLE86Jj4+HiYmJRD0vr9tLUVFR8Pb2xubNm+Hk5AQdHeXcceQnCSIiIqJaoFOnTsjPz8fFixcBAMnJyWjQoAGSkpKEMsnJyQgICMD58+cxePBgDB06FFeuXEFUVBTmzp2L+Ph4AIC/vz+uXbuGBw8eVFlXaWkpTp8+jYCAACX2sP5YvXo1YmJi0KhRI+Tk5CAtLQ2RkZHYs2cPEhIScOHCBTRp0gSBgYF49OiRVHXfvHkTe/bswd69e3Hp0iXFdOBfmFAQERER1QLGxsbw9vYWPvQnJSVh6tSpuHjxIgoKCnD37l3cvHkT/v7+WLFiBbp27Yq5c+fC1dUVYWFhiIiIwGeffQYAaNGiBczMzJCcnCzUNX36dGH77NmzKC0tVehwnPrM2NgYhoaGUFdXh7W1NfT09LB+/Xp89tln6NWrF9zd3bFp0ybo6uriq6++kqrukpISbNmyBT4+PvD09FRQDyQxoSAiIiKqJfz9/ZGUlASxWIxff/0VwcHBcHNzw8mTJ5GcnAxbW1u4uLggIyMDHTp0kDi3Q4cOuHHjBsrLyyESidC5c2ckJSUhLy8P165dw/jx41FcXIzff/8dycnJaN26NfT09FTU0/olKysLpaWlEtdMU1MTbdq0QUZGhlR1OTg4wMLCQt4hvhYnZZNcmOnoqzoE+huvBRFR3RUQEICvv/4a6enp0NTURLNmzRAQEICkpCQ8fvwY/v7+UtX15Zdf4tdff4WPjw+MjIyEJCM5OVmqukjx1NTUIBaLJfaVlpZWKqevr/zPAbxDQURERFRLvJxHsXLlSuED/8uEIikpSZjz4ObmhpSUFIlzU1JS4OrqCnX1
F+sVvZxHsWvXLuG8gIAA/Pzzz0hJSeH8CSVq3LgxtLS0JK5ZaWkp0tLS4O7uDgCwsLBAfn4+CgsLhTLKmiPxJkwoiIiIiGoJU1NTeHp6Ytu2bcIH/s6dO+PChQu4fv26kGRMnz4diYmJWLBgAa5fv46EhASsW7cOM2bMEOry9PSEqakptm/fLpFQ7N+/H8XFxZWGTJHi6OvrY9y4cZg5cyYOHz6Ma9euITw8HEVFRRg9ejQAoG3bttDT08PHH3+MrKwsbN++XZhkr2oc8kRyYaLNMZY1Ba8FEZFsastCc/7+/rh06ZKQBJiZmcHd3R33799H06ZNAQAtW7bEzp07MW/ePCxYsAA2NjaIiYlBWFiYUI9IJEKnTp3www8/oGPHjgBeJBlGRkZo2rSpSobOyJWUC82p2qeffoqKigqMGDEC+fn58PX1xZEjR2Bq+mKBVDMzM3zzzTeYOXMmNm3ahK5duyIqKgpjx45VceRMKIiIiIhqlVWrVmHVqlUS+6oa+jJgwAAMGDDgtXXt379fYltNTU3qx5SSbKZMmYIpU6YI2zo6OlizZg3WrFnzynOCgoIQFBQksS88PFz4OSoqClFRUXKO9M045ImIiIiIiGTGOxRvoGOs+BUGrQ0UvyR6XdDQUFvVIciFrqmuqkN4a8r4u7A1VPzfRSsbQ4W30dLGQKH1929sptD6laUuXIu6Qhl/e0RUt/AOBRERERERyYx3KIiIiOSE3+4TUX3EOxRERERERCQzJhRERERERCQzJhRERERERCQzJhRERERERCQzJhRERERERCQzJhRERERERCqWlJQEkUiEvLw8VYciNT42lugfpvVzVXUIRESkImZLeim1vUezfpL6nICAAHh7e2PVqlXyD6iOET/YrNT2RBZjpCpfl64l71AQERER1RFisRhlZWWqDoOUpKSkRNUhAGBCQURERFQrhIWFITk5GatXr4ZIJIJIJEJ8fDxEIhF++ukntGrVCtra2jh58iQqKiqwePFiODk5QVdXF15eXti9e7dEfVevXkWvXr1gYGAAKysrjBgxAn/99ZeKele/VHUtb9++DQA4f/48fH19oaenh/bt2yMzM1M4LyoqCt7e3ti8eTOcnJygo6MDAMjLy8OYMWNgYWEBIyMjdOnSBenp6RJtHjhwAC1btoSOjg6cnZ0RHR0tt+STQ57egKue1hwNdJn/knzxPVU9rWwMFd6GMq6Fl6WuwtuoC5RxvXWMdRTeRl20evVqXL9+HS1atEBMTAwA4LfffgMAzJ49G8uWLYOzszNMTU2xePFifPPNN9iwYQNcXFxw4sQJvPfee7CwsIC/vz/y8vLQpUsXjBkzBitXrsSzZ88wa9YsDB48GL/88osqu1kvvO5azpkzB8uXL4eFhQU+/PBDjBo1CikpKcK5N2/exJ49e7B3716oq6sDAAYNGgRdXV389NNPMDY2xsaNG9G1a1dcv34dZmZm+PXXXxEaGoo1a9agU6dOyMrKwtixYwEA8+fPf+v+MKEguTDTMVB1CPQ3Xguiuqurn52qQyAVMjY2hpaWFvT09GBtbQ0A+P333wEAMTEx6N69OwCguLgYixYtws8//ww/Pz8AgLOzM06ePImNGzfC398f69atg4+PDxYtWiTU//XXX8POzg7Xr1+HqyvnFCrS665lbGws/P39AbxIFPv06YPnz58LdyNKSkqwZcsWWFhYAABOnjyJs2fPIjc3F9ra2gCAZcuWYf/+/di9ezfGjh2L6OhozJ49GyNHjgTw4v2wYMECREZGMqEgIiIiIsDX11f4+ebNmygqKhISjJdKSkrg4+MDAEhPT8fx48dhYFD5S6isrCwmFCrk6ekp/GxjYwMAyM3Nhb29PQDAwcFBSCaAF9eyoKAA5ubmEvU8e/YMWVlZQpmUlBTExsYKx8vLy/H8+XMUFRVBT0/vrWJmQkFERCQn1gYcJkuqoa+vL/xcUFAAAPjhhx/QsGFDiXIvv8EuKCjAu+++
iyVLllSq6+WHWFINTU1N4WeRSAQAqKioEPb981oDL66ljY0NkpKSKtVlYmIilImOjkZwcHClMi/vfLwNJhREREREtYSWlhbKy8tfW8bd3R3a2trIzs4Whs78W8uWLbFnzx44OjpCQ4MfB1WhOteyOlq2bIl79+5BQ0MDjo6OryyTmZmJJk2avHV7VeE7iIiIiKiWcHR0RGpqKm7fvg0DAwOJb65fMjQ0xIwZMzB16lRUVFSgY8eOePLkCVJSUmBkZISRI0diwoQJ2LRpE4YNG4bIyEiYmZnh5s2b2LFjBzZv3ixM9iXFqc61rI5u3brBz88PQUFBWLp0KVxdXfHnn3/ihx9+QP/+/eHr64t58+ahb9++sLe3x8CBA6Gmpob09HRcvXoVCxcufOu+MKEgIiIigmwLzSnbjBkzMHLkSLi7u+PZs2eIi4urstyCBQtgYWGBxYsX448//oCJiQlatmyJjz/+GABga2uLlJQUzJo1Cz169EBxcTEcHBzQs2dPqKnVjSfgSbvQnLJV91q+iUgkwo8//og5c+bg/fffx4MHD2BtbY3OnTvDysoKABAYGIhDhw4hJiYGS5YsgaamJpo1a4YxY+TzO2JCQURERFRLuLq64vTp0xL7wsLCKpUTiUSYPHkyJk+e/Mq6XFxcsHfvXnmHSNVUnWvp7e0NsVgsbEdFRSEqKqpSXYaGhlizZg3WrFnzyvYCAwMRGBj4VjG/St1IQYmIiIiISCWYUBARERERkcyYUBARERERkcw4h6IeqCvPRW9upvnmQqQUtoaKf0+1sjFUeBvK4GWpq+oQ3lr/xmYKb8NKT/FPlFFGG3WBMq63Mv4NISLl4R0KIiIiIiKSGRMKIiIiIiKSGRMKIiIiIiKSGRMKIiIiIiKSGRMKIiIiOWloqC3Vi0hewsLCEBQU9FZ1xMfHw8TERNiOioqCt7f3W9VJ9QOf8kRyYaBpouoQ6G+8FkREJIshQ4agd+/eqg6j3ggICIC3tzdWrVql6lDeGhMKIiIiIgC2G/ortb0/P9yn1PbeRFdXF7q6tf9R1y+J/7tMqe2JHGYotb2apEYPeSovL8fcuXPh5OQEXV1dNG7cGAsWLIBYLBbKiMVizJs3DzY2NtDV1UW3bt1w48YNFUZNREREpBi7d++Gh4cHdHV1YW5ujm7duqGwsFA4vmzZMtjY2MDc3BwTJkxAaWmpcKy4uBgzZsxAw4YNoa+vj7Zt2yIpKUk4/u8hT/+WlpaG7t27o0GDBjA2Noa/vz8uXLigiG7WeWFhYUhOTsbq1ashEokgEonQoEEDLFv2/0lQUFAQNDU1UVBQAAD43//+B5FIhJs3bwIAHj9+jNDQUJiamkJPTw+9evVS2WfgGp1QLFmyBOvXr8e6deuQkZGBJUuWYOnSpVi7dq1QZunSpVizZg02bNiA1NRU6OvrIzAwEM+fP1dh5ERERETylZOTg2HDhmHUqFHIyMhAUlISgoODhS9ajx8/jqysLBw/fhwJCQmIj49HfHy8cH5ERAROnz6NHTt24PLlyxg0aBB69uxZ7Q+h+fn5GDlyJE6ePIkzZ87AxcUFvXv3Rn5+viK6W6etXr0afn5+CA8PR05ODnJycjBixAghwROLxfj1119hYmKCkydPAgCSk5PRsGFDNGnSBMCLpOTcuXM4ePAgTp8+DbFYjN69e0skkcpSo4c8nTp1Cv369UOfPn0AAI6Ojvj2229x9uxZAC9+2atWrcInn3yCfv36AQC2bNkCKysr7N+/H0OHDlVZ7ERERETylJOTg7KyMgQHB8PBwQEA4OHhIRw3NTXFunXroK6ujmbNmqFPnz5ITExEeHg4srOzERcXh+zsbNja2gIAZsyYgcOHDyMuLg6LFi16Y/tdunSR2P7yyy9hYmKC5ORk9O3bV449rfuMjY2hpaUFPT09WFtbA3jx+42Li0N5eTmuXr0KLS0tDBkyBElJSejZsyeSkpLg7+8PALhx4wYOHjyIlJQUtG/f
HgCwbds22NnZYf/+/Rg0aJBS+1Oj71C0b98eiYmJuH79OgAgPT0dJ0+eRK9evQAAt27dwr1799CtWzfhHGNjY7Rt2xanT59+Zb3FxcV4+vSpxIuIiIioJvPy8kLXrl3h4eGBQYMGYdOmTXj8+LFwvHnz5lBXVxe2bWxskJubCwC4cuUKysvL4erqCgMDA+GVnJyMrKysarV///59hIeHw8XFBcbGxjAyMkJBQQGys7Pl29F6qlOnTsjPz8fFixeRnJwMf39/BAQECHctkpOTERAQAADIyMiAhoYG2rZtK5xvbm6Opk2bIiMjQ+mx1+g7FLNnz8bTp0/RrFkzqKuro7y8HLGxsQgJCQEA3Lt3DwBgZWUlcZ6VlZVwrCqLFy9GdHR0tWJoZWMoY/TVp+hHB9aVRxOa6eirOgS5cG5kpOoQaoUGuor/vsNKT/3NhWo4Z2NLVYcgFw10avT3W/WKibaewtuwNtBSeBt1kbq6Oo4dO4ZTp07h6NGjWLt2LebMmYPU1FQAgKampkR5kUiEiooKAEBBQQHU1dVx/vx5iaQDAAwMDKrV/siRI/Hw4UOsXr0aDg4O0NbWhp+fH0pKSuTQOzIxMYGXlxeSkpJw+vRpdO/eHZ07d8aQIUNw/fp13LhxQ7hDUdPU6H/Bd+7ciW3btmH79u24cOECEhISsGzZMiQkJLxVvR999BGePHkivO7cuSOniImIiIgURyQSoUOHDoiOjsbFixehpaWFffve/LQoHx8flJeXIzc3F02aNJF4vRxy8yYpKSmYNGkSevfujebNm0NbWxt//fXX23ap3tLS0kJ5ebnEPn9/fxw/fhwnTpxAQEAAzMzM4ObmhtjYWNjY2MDV1RUA4ObmhrKyMiGZBICHDx8iMzMT7u7uSu0HUMMTipkzZ2L27NkYOnQoPDw8MGLECEydOhWLFy8GAOEP4P79+xLn3b9//7V/HNra2jAyMpJ4EREREdVkqampWLRoEc6dO4fs7Gzs3bsXDx48gJub2xvPdXV1RUhICEJDQ7F3717cunULZ8+exeLFi/HDDz9Uq30XFxds3boVGRkZSE1NRUhISJ16zKyyOTo6IjU1Fbdv38Zff/2FiooKBAQE4MiRI9DQ0ECzZs0AvFivYtu2bRJ3J1xcXNCvXz+Eh4fj5MmTSE9Px3vvvYeGDRsK84qVqUYnFEVFRVBTkwxRXV1duH3n5OQEa2trJCYmCsefPn2K1NRU+Pn5KTVWIiIiIkUyMjLCiRMn0Lt3b7i6uuKTTz7B8uXLhbmlbxIXF4fQ0FBMnz4dTZs2RVBQENLS0mBvb1+t87/66is8fvwYLVu2xIgRIzBp0iRYWtaNYZeqMGPGDKirq8Pd3R0WFhbIzs5Gp06dUFFRIZE8BAQEoLy8XJg/8VJcXBxatWqFvn37ws/PD2KxGD/++GOloW/KUKPnULz77ruIjY2Fvb09mjdvjosXL2LFihUYNWoUgBe3/aZMmYKFCxfCxcUFTk5OmDt3Lmxtbd96+XkiIiKqX2raQnP/5ubmhsOHD1d57J+Ph33p3yswa2pqIjo6+pXzSMPCwhAWFiZsR0VFISoqStj28fFBWlqaxDkDBw6sVuyqUNMXmnN1da3yIUIvvzh/KSgoSGINtpdMTU2xZcsWhcUnjRqdUKxduxZz587F+PHjkZubC1tbW3zwwQeYN2+eUCYyMhKFhYUYO3Ys8vLy0LFjRxw+fBg6OjoqjJyIiOqj5ubK/2aQiEjVanRCYWhoiFWrVlXKsP9JJBIhJiYGMTExyguMiIiIiIgA1PA5FEREREREVLMxoSAiIiIiIpkxoSAiIiIiIpnV6DkUVHsYapmrOgT6G68FERERKRPvUBARERERkcx4h+INlPEIwAa6zOuqw0RbT9Uh0N8CnY1VHYJcuJoYKrwNKz11hbdRF5jp6Ks6BPqbmY6Bwtvo4aj4NohIefhJloiIiIiIZMaEgoiIiIikEhYWhqCgIFWHQTUEhzwRERER
AfDdNkSp7Z0L+U6p7cnT6tWrIRaLVR3Ga4kvzlVqeyKfBUptryZhQkFEREREAICSkhJoaWm9sZyxcd2YS0fywSFPRERERLVAQEAAJk6ciClTpsDU1BRWVlbYtGkTCgsL8f7778PQ0BBNmjTBTz/9BAAoLy/H6NGj4eTkBF1dXTRt2hSrV6+WqPPl0KXY2FjY2tqiadOm+Pjjj9G2bdtK7Xt5eSEmJkbivH/GNmnSJERGRsLMzAzW1taIiopS2O+iLiguLsakSZNgaWkJHR0ddOzYEWlpaQCApKQkiEQiJCYmwtfXF3p6emjfvj0yMzMl6jhw4ABatmwJHR0dODs7Izo6GmVlZUrvCxMKon+w0lOX6kVE9E8NdNSkehFJKyEhAQ0aNMDZs2cxceJEjBs3DoMGDUL79u1x4cIF9OjRAyNGjEBRUREqKirQqFEj7Nq1C9euXcO8efPw8ccfY+fOnRJ1JiYmIjMzE8eOHcOhQ4cQEhKCs2fPIisrSyjz22+/4fLlyxg+fPhrY9PX10dqaiqWLl2KmJgYHDt2TGG/i9ouMjISe/bsQUJCAi5cuIAmTZogMDAQjx49EsrMmTMHy5cvx7lz56ChoYFRo0YJx3799VeEhoZi8uTJuHbtGjZu3Ij4+HjExsYqvS/814yIiIiolvDy8sInn3wCFxcXfPTRR9DR0UGDBg0QHh4OFxcXzJs3Dw8fPsTly5ehqamJ6Oho+Pr6wsnJCSEhIXj//fcrJRT6+vrYvHkzmjdvLry8vLywfft2ocy2bdvQtm1bNGnS5JWxeXp6Yv78+XBxcUFoaCh8fX2RmJiosN9FbVZYWIj169fjs88+Q69eveDu7o5NmzZBV1cXX331lVAuNjYW/v7+cHd3x+zZs3Hq1Ck8f/4cABAdHY3Zs2dj5MiRcHZ2Rvfu3bFgwQJs3LhR6f1hQkFERERUS3h6ego/q6urw9zcHB4eHsI+KysrAEBubi4A4PPPP0erVq1gYWEBAwMDfPnll8jOzpao08PDo9K8iZCQECGhEIvF+PbbbxESElLt2ADAxsZGiIMkZWVlobS0FB06dBD2aWpqok2bNsjIyBD2/fN3amNjA+D/r216ejpiYmJgYGAgvMLDw5GTk4OioiIl9eQFTsomIiIiqiU0NSUX3BWJRBL7RCIRAKCiogI7duzAjBkzsHz5cvj5+cHQ0BCfffYZUlNTJerQ16+8sOSwYcMwa9YsXLhwAc+ePcOdO3cwZMjrn4JVVWwVFRVS9Y8kveraAkBBQQGio6MRHBxc6TwdHR3lBPg3JhREREREdVBKSgrat2+P8ePHC/v+OS/idRo1agR/f39s27YNz549Q/fu3WFpaamoUOudxo0bQ0tLCykpKXBwcAAAlJaWIi0tDVOmTKlWHS1btkRmZuZrh6EpCxMKIiIiojrIxcUFW7ZswZEjR+Dk5IStW7ciLS0NTk5O1To/JCQE8+fPR0lJCVauXKngaOsXfX19jBs3DjNnzoSZmRns7e2xdOlSFBUVYfTo0UhPT39jHfPmzUPfvn1hb2+PgQMHQk1NDenp6bh69SoWLlyohF78PyYUb1AXnsLRQLf294FqluZmmm8uVAvaUIa68G+IszG/lST5cjUxVHUI9cIHH3yAixcvYsiQIRCJRBg2bBjGjx8vPFb2TQYOHIiIiAioq6tzVWwF+PTTT1FRUYERI0YgPz8fvr6+OHLkCExNTat1fmBgIA4dOoSYmBgsWbIEmpqaaNasGcaMGaPgyCtjQkFERESEmr9ydVJSUqV9t2/frrTvnytYx8XFIS4uTuL44sWLhZ/j4+Nf2Z6JiYnwRKF/+/d5VcW2f//+V9atDDV95WodHR2sWbMGa9asqXQsICCg0krk3t7elfYFBgYiMDBQoXFWBxMKkgsDTRNVh0B/47UgIiIiZWJCQUREVEs0NNRWdQhERJXU/sG9RERERESkMkwoiIiIiIhIZkwoiIiIiIhIZkwoiIiIiIhIZkwo
iIiIiIhIZkwoiIiIiIhIZkwoiIiIiIhIZlyH4g3MdPQV3oaVnnqtrl9ZzHQMVB2CXFgbaKk6BPqbs7GlwttQ9L8hNvqNFVq/sijjWriaGCq8jbqAi2PWXAEBAfD29saqVatUHQqRBCYURERERAD6Hhih1PYO9duq1Pbqm4qkKUptTy1glVLbq0k45ImIiIiIqBYrKSlRaftMKIiIiIhqiYqKCkRGRsLMzAzW1taIiooSjmVnZ6Nfv34wMDCAkZERBg8ejPv37wvHw8LCEBQUJFHflClTEBAQIGzv3r0bHh4e0NXVhbm5Obp164bCwkLh+ObNm+Hm5gYdHR00a9YMX3zxhaK6Wqfdvn0bIpGo0uvltTh58iQ6deoEXV1d2NnZYdKkSRLXwdHREQsWLEBoaCiMjIwwduxYAMCePXvQvHlzaGtrw9HREcuXL1dKf5hQEBEREdUSCQkJ0NfXR2pqKpYuXYqYmBgcO3YMFRUV6NevHx49eoTk5GQcO3YMf/zxB4YMGVLtunNycjBs2DCMGjUKGRkZSEpKQnBwMMRiMQBg27ZtmDdvHmJjY5GRkYFFixZh7ty5SEhIUFR36yw7Ozvk5OQIr4sXL8Lc3BydO3dGVlYWevbsiQEDBuDy5cv47rvvcPLkSUREREjUsWzZMnh5eeHixYuYO3cuzp8/j8GDB2Po0KG4cuUKoqKiMHfuXMTHxyu8P5xDQURERFRLeHp6Yv78+QAAFxcXrFu3DomJiQCAK1eu4NatW7CzswMAbNmyBc2bN0daWhpat279xrpzcnJQVlaG4OBgODg4AAA8PDyE4/Pnz8fy5csRHBwMAHBycsK1a9ewceNGjBw5Uq79rOvU1dVhbW0NAHj+/DmCgoLg5+eHqKgojB07FiEhIZgyZQqAF9d5zZo18Pf3x/r166GjowMA6NKlC6ZPny7UGRISgq5du2Lu3LkAAFdXV1y7dg2fffYZwsLCFNof3qEgIiIiqiU8PT0ltm1sbJCbm4uMjAzY2dkJyQQAuLu7w8TEBBkZGdWq28vLC127doWHhwcGDRqETZs24fHjxwCAwsJCZGVlYfTo0TAwMBBeCxcuRFZWlvw6WA+NGjUK+fn52L59O9TU1JCeno74+HiJ33NgYCAqKipw69Yt4TxfX1+JejIyMtChQweJfR06dMCNGzdQXl6u0D7wDgXJhagoT/qT9OQeBoHXgoioLtPU1JTYFolEqKioqNa5ampqwvCll0pLS4Wf1dXVcezYMZw6dQpHjx7F2rVrMWfOHKSmpkJP78V/FJs2bULbtm0l6lBXrxuPp1eFhQsX4siRIzh79iwMDV882rqgoAAffPABJk2aVKm8vb298LO+vuKXNqguJhREREREtZybmxvu3LmDO3fuCHcprl27hry8PLi7uwMALCwscPXqVYnzLl26JJGkiEQidOjQAR06dMC8efPg4OCAffv2Ydq0abC1tcUff/yBkJAQ5XWsDtuzZw9iYmLw008/oXHj/19TqGXLlrh27RqaNGkiVX1ubm5ISUmR2JeSkgJXV1eFJ31MKIiIiIhquW7dusHDwwMhISFYtWoVysrKMH78ePj7+wtDY7p06YLPPvsMW7ZsgZ+fH7755htcvXoVPj4+AIDU1FQkJiaiR48esLS0RGpqKh48eAA3NzcAQHR0NCZNmgRjY2P07NkTxcXFOHfuHB4/foxp06aprO+10dWrVxEaGopZs2ahefPmuHfvHgBAS0sLs2bNQrt27RAREYExY8ZAX18f165dw7Fjx7Bu3bpX1jl9+nS0bt0aCxYswJAhQ3D69GmsW7dOKU/i4hwKIiIiolpOJBLhwIEDMDU1RefOndGtWzc4Ozvju+++E8oEBgZi7ty5iIyMROvWrZGfn4/Q0FDhuJGREU6cOIHevXvD1dUVn3zyCZYvX45evXoBAMaMGYPNmzcjLi4OHh4e8Pf3R3x8PJycnJTe39ru3LlzKCoqwsKFC2FjYyO8goOD4enp
ieTkZFy/fh2dOnWCj48P5s2bB1tb29fW2bJlS+zcuRM7duxAixYtMG/ePMTExCh8QjbAOxREREREAGr+ytVJSUmV9u3fv1/42d7eHgcOHHhtHdHR0YiOjq7ymJubGw4fPvza84cPH47hw4e/MdaaoCavXB0WFvbaD/qtW7fG0aNHX3n89u3bVe4fMGAABgwY8JbRSY8JxRuYaCt+tqqriaHC2yCSJzMdxU8EU0YbBpomCm9D0QzyHkp/koX843hbyrgWzsaWCm+DqkcZ/7cSkfJwyBMREREREcmMCQUREREREcmMCQUREREREcmMcyiIiIhqiQa6/B6QiGoe/stEREREREQyY0JBREREREQyY0JBREREREQy4xwKIiIiOVHG+ilERDUN71AQERER1QIBAQGYMmWKqsMgqoR3KIiIiIgAfPDLGKW2t7HLZqW2V99U7H1fqe2pBccptb2ahAkFyYW48KHU54j0FBAI8VoQERGRUnHIExGRAjkbW0r1qokMNE2kfhG9jpmOgVQv+n8VFRWIjIyEmZkZrK2tERUVJRxbsWIFPDw8oK+vDzs7O4wfPx4FBQXC8fj4eJiYmGD//v1wcXGBjo4OAgMDcefOHaFMVFQUvL29sXHjRtjZ2UFPTw+DBw/GkydPAAAnTpyApqYm7t27JxHXlClT0KlTJ8V2vo7Jz89HSEgI9PX1YWNjg5UrV0oMa3v8+DFCQ0NhamoKPT099OrVCzdu3FBt0K/AhIKIiIiolkhISIC+vj5SU1OxdOlSxMTE4NixYwAANTU1rFmzBr/99hsSEhLwyy+/IDIyUuL8oqIixMbGYsuWLUhJSUFeXh6GDh0qUebmzZvYuXMnvv/+exw+fBgXL17E+PHjAQCdO3eGs7Mztm7dKpQvLS3Ftm3bMGrUKAX3vm6ZNm0aUlJScPDgQRw7dgy//vorLly4IBwPCwvDuXPncPDgQZw+fRpisRi9e/dGaWmpCqOuGhMKIiIiolrC09MT8+fPh4uLC0JDQ+Hr64vExEQAL+4SvPPOO3B0dESXLl2wcOFC7Ny5U+L80tJSrFu3Dn5+fmjVqhUSEhJw6tQpnD17Vijz/PlzbNmyBd7e3ujcuTPWrl2LHTt2CHclRo8ejbi4/58v8P333+P58+cYPHiwEn4DdUN+fj4SEhKwbNkydO3aFS1atEBcXBzKy8sBADdu3MDBgwexefNmdOrUCV5eXti2bRvu3r2L/fv3qzb4KnAOBdE/uJoYqjoEIiKiV/L09JTYtrGxQW5uLgDg559/xuLFi/H777/j6dOnKCsrw/Pnz1FUVAQ9vReT5TQ0NNC6dWvh/GbNmsHExAQZGRlo06YNAMDe3h4NGzYUyvj5+aGiogKZmZmwtrZGWFgYPvnkE5w5cwbt2rVDfHw8Bg8eDH19Pja5uv744w+UlpYKv3MAMDY2RtOmTQEAGRkZ0NDQQNu2bYXj5ubmaNq0KTIyMpQe75swoSAiIqolmptpqjoEUjFNTcn3gEgkQkVFBW7fvo2+ffti3LhxiI2NhZmZGU6ePInRo0ejpKRESCjkwdLSEu+++y7i4uLg5OSEn376CUlJSXKrn2ofmYc85eTkYODAgbCwsICZmRneffdd/PHHH/KMjYiIiIiq4fz586ioqMDy5cvRrl07uLq64s8//6xUrqysDOfOnRO2MzMzkZeXBzc3N2Ffdna2xLlnzpyBmpqa8O05AIwZMwbfffcdvvzySzRu3BgdOnRQUM/qJmdnZ2hqaiItLU3Y9+TJE1y/fh0A4ObmhrKyMqSmpgrHHz58iMzMTLi7uys93jeROaEYNWoUWrRogeTkZPzyyy+wsrLC8OHD5RkbEREREVVDkyZNUFpairVr1+KPP/7A1q1bsWHDhkrlNDU1MXHiRKSmpuL8+fMICwtDu3btJIbe6OjoYOTIkUhPT8evv/6KSZMmYfDgwbC2thbKBAYGwsjICAsXLsT77yt3vYe6wNDQECNHjsTMmTNx
/Phx/Pbbbxg9ejTU1NQgEong4uKCfv36ITw8HCdPnkR6ejree+89NGzYEP369VN1+JVUO6GYPHkyCgsLhe2bN29i1qxZcHd3h7e3NyZPnozMzEy5B3j37l289957MDc3h66uLjw8PCQya7FYjHnz5sHGxga6urro1q1bjX2kFhEREZEieHl5YcWKFViyZAlatGiBbdu2YfHixZXK6enpYdasWRg+fDg6dOgAAwMDfPfddxJlmjRpguDgYPTu3Rs9evSAp6cnvvjiC4kyampqCAsLQ3l5OUJDQxXat7pqxYoV8PPzQ9++fdGtWzd06NABbm5u0NHRAQDExcWhVatW6Nu3L/z8/CAWi/Hjjz9WGvZWE1R7DkWjRo3QqlUrLF26FP/5z38wZMgQtG3bVnh81d69exESEiLX4B4/fowOHTrgnXfewU8//QQLCwvcuHEDpqamQpmlS5dizZo1SEhIgJOTE+bOnYvAwEBcu3ZNuCBEREREb1LTV66uap7CP5/4M3XqVEydOlXi+IgRIyqdExwcjODg4Ne2NW7cOIwbN+61Ze7evYvevXvDxsbmteVUpaavXG1oaIht27YJ24WFhYiOjsbYsWMBAKamptiyZYuqwpNKtROKmTNnYuDAgRg/fjzi4+Oxdu1atG3bFklJSSgvL8fSpUsxcOBAuQa3ZMkS2NnZSTyazMnJSfhZLBZj1apV+OSTT4TbP1u2bIGVlRX2799f6bnKRERERPR2njx5gitXrmD79u04ePCgqsOptS5evIjff/8dbdq0wZMnTxATEwMANXJI05tINYfi5Uz+AQMGwN/fH7dv38ayZcuwatUqDBo0CCKRSK7BHTx4EL6+vhg0aBAsLS3h4+ODTZs2Ccdv3bqFe/fuoVu3bsI+Y2NjtG3bFqdPn5ZrLERERET04gNvjx498OGHH6J79+6qDqdWW7ZsGby8vNCtWzcUFhbi119/RYMGDVQdltSknpT98OFDhISEIC0tDRcvXoSfnx8uX76siNjwxx9/YP369XBxccGRI0cwbtw4TJo0CQkJCQAgLLBiZWUlcZ6VlVWlJeH/qbi4GE+fPpV4EREREdVlYWFhyMvLe22ZqKgoXLp06bVlkpKSUFRUhJUrV8ovuHrIx8cH58+fR0FBAR49eoRjx47Bw8ND1WHJpNpDnhITEzF8+HA8ePAAtra22LVrF77++mscP34cw4YNQ58+fRAdHQ1dXV25BVdRUQFfX18sWrQIwItf/NWrV7FhwwaMHDlS5noXL16M6OjoapVta+0vczvV5WxsqfA26gIDTRNVh0B/M9GW3/PMVdkGVY+hlnmdaIOqx1bEa0FE0qn2HYoJEyYgMjISRUVFWLduHaZMmQIAeOedd3DhwgVoamrC29tbrsHZ2NhUetaum5sbsrOzAUB4fNn9+/clyty/f1/i0Wb/9tFHH+HJkyfC686dO3KNm4iIiIiovqh2QpGTk4M+ffpAR0cHPXv2xIMHD4Rj2traiI2Nxd69e+UaXIcOHSo9ivb69etwcHAA8GJOh7W1NRITE4XjT58+RWpqKvz8/F5Zr7a2NoyMjCReREREREQkvWoPefrPf/6DgQMH4j//+Q9OnjyJ3r17VyrTvHlzuQY3depUtG/fHosWLcLgwYNx9uxZfPnll/jyyy8BvFhufsqUKVi4cCFcXFyEx8ba2toiKChIrrHQGxTlqToCeonXgoiIiJSo2gnFV199hY0bN+L333/He++9h1GjRikyLgBA69atsW/fPnz00UeIiYmBk5MTVq1aJbHeRWRkJAoLCzF27Fjk5eWhY8eOOHz4MNegICIiIiJSgmonFFpaWpg4caIiY6lS37590bdv31ceF4lEiImJEZ7dS0REREREyiP1Y2OJiIiIqOYICAgQHpZDpArVvkNBREREVJfNOT1Wqe3F+n2p1Pbqm9KNQ5XanuYHO+RSz8v1Qvbv3y+X+pSBdyiIiIiIiEhmTCiIiIiIaonCwkKEhobC
wMAANjY2WL58ucTxx48fIzQ0FKamptDT00OvXr1w48YNiTKbNm2CnZ0d9PT00L9/f6xYsQImJiZK7AUBwO7du+Hh4QFdXV2Ym5ujW7dumDlzJhISEnDgwAGIRCKIRCIkJSUBAK5cuYIuXboI5ceOHYuCggKhvrCwMAQFBSE6OhoWFhYwMjLChx9+iJKSEoX3ReqEwtnZGQ8fPqy0Py8vD87OznIJioiIqDYy0daT6kUkrZkzZyI5ORkHDhzA0aNHkZSUhAsXLgjHw8LCcO7cORw8eBCnT5+GWCxG7969UVpaCgBISUnBhx9+iMmTJ+PSpUvo3r07YmNjVdWdeisnJwfDhg3DqFGjkJGRgaSkJAQHB2P+/PkYPHgwevbsiZycHOTk5KB9+/YoLCxEYGAgTE1NkZaWhl27duHnn39GRESERL2JiYlCfd9++y327t2L6OhohfdH6jkUt2/fRnl5eaX9xcXFuHv3rlyCIvlyNTFUdQhyYaPfWNUhyEVDQ21Vh/DWzHQMFN5GW2t/hbdhqGWu8DYU/r5VwrojBn9mSX+Sg3TFDUulrF9TyvIADDRNpD+pHhIXVv7S8E1EUuZGvBayKSgowFdffYVvvvkGXbt2BQAkJCSgUaNGAIAbN27g4MGDSElJQfv27QEA27Ztg52dHfbv349BgwZh7dq16NWrF2bMmAEAcHV1xalTp3Do0CHVdKqeysnJQVlZGYKDg4UFmz08PAAAurq6KC4uhrW1tVA+ISEBz58/x5YtW6Cvrw8AWLduHd59910sWbIEVlZWAF48lfXrr7+Gnp4emjdvjpiYGMycORMLFiyAmpriBiZVO6E4ePCg8PORI0dgbGwsbJeXlyMxMRGOjo5yDY6IiIiIXsjKykJJSQnatm0r7DMzM0PTpk0BABkZGdDQ0JA4bm5ujqZNmyIjIwMAkJmZif79+0vU26ZNGyYUSubl5YWuXbvCw8MDgYGB6NGjBwYOHAhTU9Mqy2dkZMDLy0tIJgCgQ4cOqKioQGZmppBQeHl5QU/v/zN8Pz8/FBQU4M6dO0LiogjVTiherjwtEokwcuRIiWOamppwdHSsNI6PiIiIiIgkqaur49ixYzh16hSOHj2KtWvXYs6cOUhNTVV1aDKp9r2PiooKVFRUwN7eHrm5ucJ2RUUFiouLkZmZ+doF6IiIiIhIdo0bN4ampqbEh87Hjx/j+vXrAAA3NzeUlZVJHH/48CEyMzPh7u4OAGjatCnS0tIk6v33NimHSCRChw4dEB0djYsXL0JLSwv79u2DlpZWpekFbm5uSE9PR2FhobAvJSUFampqwh0qAEhPT8ezZ8+E7TNnzsDAwAB2dnYK7YvUg6lu3bqFBg0aKCIWIiIiInoFAwMDjB49GjNnzsQvv/yCq1evIiwsTBgb7+Lign79+iE8PBwnT55Eeno63nvvPTRs2BD9+vUDAEycOBE//vgjVqxYgRs3bmDjxo346aefIBKJVNm1eic1NRWLFi3CuXPnkJ2djb179+LBgwdwc3ODo6MjLl++jMzMTPz1118oLS1FSEgIdHR0MHLkSFy9ehXHjx/HxIkTMWLECGG4EwCUlJRg9OjRuHbtGn788UfMnz8fERERCp0/Aci4sF1iYiISExOFOxX/9PXXX8slMCIiIiKS9Nlnn6GgoADvvvsuDA0NMX36dDx58kQ4HhcXh8mTJ6Nv374oKSlB586d8eOPP0JT88WTDDp06IANGzYgOjoan3zyCQIDAzF16lSsW7dOVV2ql4yMjHDixAmsWrUKT58+hYODA5YvX45evXrB19cXSUlJ8PX1RUFBAY4fP46AgAAcOXIEkydPRuvWraGnp4cBAwZgxYoVEvV27doVLi4u6Ny5M4qLizFs2DBERUUpvD9SJxTR0dGIiYmBr68vbGxsmNESEREpiZmO/psLkcxqw8rVBgYG2Lp1K7Zu3SrsmzlzpvCzqakptmzZ8to6wsPDER4eLrHdpEkT+QerYvJauVoR
3NzccPjw4SqPWVhY4OjRo5X2e3h44Jdffnlj3dHR0Up5VOw/SZ1QbNiwAfHx8RgxYoQi4iEiIiIiBVq2bBm6d+8OfX19/PTTT0hISMAXX3yh6rCoFpM6oSgpKRGebUwkePTkzWX+TXFPL6vfeC2IiOg1zp49i6VLlyI/Px/Ozs5Ys2YNxowZo+qwqBaTOqEYM2YMtm/fjrlz5yoiHiIiIiJSoJ07d6o6BFKA+Ph4lbUtdULx/PlzfPnll/j555/h6ekpTPJ56d+TQ4iIiIiIqO6SOqG4fPkyvL29AQBXr16VOMYJ2kREVJ+Z6RioOgQiIqWTOqE4fvy4IuKosURFedKdoPfmIv9moGki/Un1kDKuhTI0N9d8cyHi3141iW/9T+pzRDVwzoz4QZZU5WXpg41+Y+lPIoXgtSCqW2Re5eLmzZs4cuSIsBqfWCyWW1BERERERFQ7SJ1QPHz4EF27doWrqyt69+6NnJwcAMDo0aMxffp0uQdIREREREQ1l9QJxdSpU6GpqYns7Gzo6f3/GIMhQ4a8coEOIiIiIiKqm6ROKI4ePYolS5agUaNGEvtdXFzw3//+V26BERERERHVVykpKfDw8ICmpiaCgoJeua8mkHpSdmFhocSdiZcePXoEbW1tuQRFREREpGxr0scptb1JXuuV2l5UVBT279+PS5cuKbVdVXke865S29OZ971c65s2bRq8vb3x008/wcDA4JX7agKp71B06tQJW7ZsEbZFIhEqKiqwdOlSvPPOO3INjoiIiIioPsrKykKXLl3QqFEjmJiYvHJfTSB1QrF06VJ8+eWX6NWrF0pKShAZGYkWLVrgxIkTWLJkiSJiJCIiIiJA+BK3SZMm0NbWhr29PWJjYwEAs2bNgqurK/T09ODs7Iy5c+eitLQUwItVlKOjo5Geng6RSASRSKTSlZUJKC4uxqRJk2BpaQkdHR107NgRaWlpuH37NkQiER4+fIhRo0YJ16qqfTWF1AlFixYtcP36dXTs2BH9+vVDYWEhgoODcfHiRTRuzOdKExERESnKRx99hE8//RRz587FtWvXsH37dlhZWQEADA0NER8fj2vXrmH16tXYtGkTVq5cCeDFw3OmT5+O5s2bIycnBzk5ORgyZIgqu1LvRUZGYs+ePUhISMCFCxfQpEkTBAYGwtDQEDk5OTAyMsKqVauQk5ODQYMGVdpXk66f1HMoAMDY2Bhz5syRdyxERERE9Ar5+flYvXo11q1bh5EjRwIAGjdujI4dOwIAPvnkE6Gso6MjZsyYgR07diAyMhK6urowMDCAhoYGrK2tVRI//b/CwkKsX78e8fHx6NWrFwBg06ZNOHbsGL7++mvMnDkTIpEIxsbGwvXS19evtK+mkDqhiIuLg4GBAQYNGiSxf9euXSgqKhLe4EREREQkPxkZGSguLkbXrl2rPP7dd99hzZo1yMrKQkFBAcrKymBkZKTkKKk6srKyUFpaig4dOgj7NDU10aZNG2RkZKgwMtlIPeRp8eLFaNCgQaX9lpaWWLRokVyCIiIiIiJJurq6rzx2+vRphISEoHfv3jh06BAuXryIOXPmoKSkRIkRUn0l9R2K7OxsODk5Vdrv4OCA7OxsuQRF8mWiXfkxv/ImfpIv9TkiadvIvCBd/T79pWwBcDa2lPocaTXQkTqPl4oyroWrluLnS4kfZElVXuQgfRuiojzpTpDhT8lA00T6k2oY8a3/SX2O1Nfj0RPpystwvSHlewoW0j+5sC5cb6q5XFxcoKuri8TERIwZM0bi2KlTp+Dg4CAxJP3f64NpaWmhvLxcKbHS6zVu3BhaWlpISUmBg8OLf9BKS0uRlpaGKVOmqDY4GUidUFhaWuLy5ctwdHSU2J+eng5zc3N5xUVERERE/6Cjo4NZs2YhMjISWlpa6NChAx48eIDffvsNLi4uyM7Oxo4dO9C6dWv88MMP2Ldvn8T5jo6OuHXrFi5duoRGjRrB0NCQa4ipiL6+PsaNG4eZM2fC
zMwM9vb2WLp0KYqKijB69GhVhyc1qb8qHTZsGCZNmoTjx4+jvLwc5eXl+OWXXzB58mQMHTpUETESEREREYC5c+di+vTpmDdvHtzc3DBkyBDk5ubiP//5D6ZOnYqIiAh4e3vj1KlTmDt3rsS5AwYMQM+ePfHOO+/AwsIC3377rYp6QQDw6aefYsCAARgxYgRatmyJmzdv4siRIzA1NVV1aFKT+g7FggULcPv2bXTt2hUaGi9Or6ioQGhoKOdQEBERUa2l7JWrZaGmpoY5c+ZU+bTNpUuXYunSpRL7/jl8RltbG7t371Z0iDWGvFeuljcdHR2sWbMGa9asqfJ4Xl5etfbVBFIlFGKxGPfu3UN8fDwWLlyIS5cuQVdXFx4eHsL4LyIiIiIiqj+kTiiaNGkijNVzcXFRVFxERERERFQLSDWHQk1NDS4uLnj48KGi4iEiIiIiolpE6knZn376KWbOnImrV68qIh4iIiIiIqpFpJ6UHRoaiqKiInh5eUFLS6vSIiuPHj2SW3BERERERFSzSZ1QrFq1SgFhEBERERFRbSR1QjFy5EhFxEFERERERLWQ1AkFAGRlZSEuLg5ZWVlYvXo1LC0t8dNPP8He3h7NmzeXd4wqVXE2WaryagH9pW7DUEuxK4yb6RgotH6SjpmOvqpDeGviQukfzCDSk/KER0+kKy/Dk6ul7YfUfVCC8sx7Up+jFiD/OGqFojxVR1A7KOH3JJK2jRr4t0dE/0/qSdnJycnw8PBAamoq9u7di4KCAgBAeno65s+fL/cAiYiIagtDLXOpXkREdYHUCcXs2bOxcOFCHDt2DFpaWsL+Ll264MyZM3INjoiIiIheCAgIkFj5+t8cHR1lmusaFRUFb29vmeMiknrI05UrV7B9+/ZK+y0tLfHXX3/JJSgiIiIiZdueOUGp7Q1v+rlc60tLS4O+fu0fVisvhZO6KbU9/TU/K7W9mkTqOxQmJibIycmptP/ixYto2LChXIIiIiIiIulYWFhAT+/VE05KS0uVGA3VJ1InFEOHDsWsWbNw7949iEQiVFRUICUlBTNmzEBoaKgiYqTa4NET6V+kGLwWRER1VllZGSIiImBsbIwGDRpg7ty5EIvFACoPeRKJRFi/fj3+85//QF9fH7GxsQBeLFJsZWUFQ0NDjB49Gs+fP1dFV+q94uJiTJo0CZaWltDR0UHHjh2RlpYGAEhKSoJIJEJiYiJ8fX2hp6eH9u3bIzMzU8VRV03qhGLRokVo1qwZ7OzsUFBQAHd3d3Tu3Bnt27fHJ598oogYiYiIiAhAQkICNDQ0cPbsWaxevRorVqzA5s2bX1k+KioK/fv3x5UrVzBq1Cjs3LkTUVFRWLRoEc6dOwcbGxt88cUXSuwBvRQZGYk9e/YgISEBFy5cQJMmTRAYGCixSPScOXOwfPlynDt3DhoaGhg1apQKI341qedQaGlpYdOmTZg3bx6uXLmCgoIC+Pj4wMXFRRHxERER0d8CYanqEEjF7OzssHLlSohEIjRt2hRXrlzBypUrER4eXmX54cOH4/333xe2hw4ditGjR2P06NEAgIULF+Lnn3/mXQolKywsxPr16xEfH49evXoBADZt2oRjx47hq6++QuvWrQEAsbGx8Pf3B/DiwUh9+vTB8+fPoaOjo7LYq1LtOxQVFRVYsmQJOnTogNatW+Pzzz/HO++8g8GDBzOZICIiIlKCdu3aQSQSCdt+fn64ceMGysvLqyzv6+srsZ2RkYG2bdtK7PPz85N/oPRaWVlZKC0tRYcOHYR9mpqaaNOmDTIyMoR9np6ews82NjYAgNzcXOUFWk3VTihiY2Px8ccfw8DAAA0bNsTq1asxYYJyn4ZARERERNXHpz7VbpqamsLPLxPJiooKVYXzStVOKLZs2YIvvvgCR44cwf79+/H9999j27ZtNbJTRERERHVRamqqxPaZM2fg4uICdXX1ap3v5uZWZR2kXI0bN4aWlhZSUlKEfaWlpUhLS4O7u7sK
I5NNtROK7Oxs9O7dW9ju1q0bRCIR/vzzT4UERkRERESSsrOzMW3aNGRmZuLbb7/F2rVrMXny5GqfP3nyZHz99deIi4vD9evXMX/+fPz2228KjJiqoq+vj3HjxmHmzJk4fPgwrl27hvDwcBQVFQnzW2qTak/KLisrqzQBRFNTs+4/01gJj9Q00DRRaP02+o0VWr+yiJ/kS1Ve9OYiKuFsXAcmVRblKbyJ4u8vSVVex0f6NsS/pry50D+IgsdI34iCld8vlPoczTcXUTpl/H2Lb/1PujYcZGikLpDl/736+rtSgdDQUDx79gxt2rSBuro6Jk+ejLFjx1b7/CFDhiArKwuRkZF4/vw5BgwYgHHjxuHIkSMKjJqq8umnn6KiogIjRoxAfn4+fH19ceTIEZiamqo6NKlVO6EQi8UICwuDtra2sO/58+f48MMPJcbn7d27V74REhERESmBvFeulrekpCTh5/Xr11c6fvv2bYntl+tT/NvHH3+Mjz/+WGLfkiVL3jq+mqamr1yto6ODNWvWYM2aNZWOBQQEVLp+3t7er7ymqlbthGLkyJGV9r333ntyDYaIiKg2U/QdZyKimqjaCUVcXJwi4yAiIiIiolpI6pWyiYiIiIiIXmJCQUREREREMmNCQUREREREMqv2HAoiIiJSLWkffQvU48ffEpHS1Ko7FJ9++ilEIhGmTJki7Hv+/DkmTJgAc3NzGBgYYMCAAbh//77qgiQiIiIiqkdqzR2KtLQ0bNy4EZ6enhL7p06dih9++AG7du2CsbExIiIiEBwcLLGUOSle+YNnUp9Tq7LZWoTXgoiIiJSpVnyOKCgoQEhICDZt2iSxeuCTJ0/w1VdfYcWKFejSpQtatWqFuLg4nDp1CmfOnFFhxERERERE9UOtSCgmTJiAPn36oFu3bhL7z58/j9LSUon9zZo1g729PU6fPv3K+oqLi/H06VOJFxEREVFNFhAQIDHsWx7i4+NhYmIi1zpJdiKRCPv37692+aSkJIhEIuTl5Skspuqo8UOeduzYgQsXLiAtLa3SsXv37kFLS6vSH4KVlRXu3bv3yjoXL16M6OjoarVfcvUvqeLVCZaquFIY5D2U/iQL+cfxtsozX31Nq6IWoJg43hZX0q2e/GvSvW91FBTHW3uQJV15i3cUEwdRNYmf5Et9jkjaNgql+/sW6UnZgIwO/zdSOQ39rafDUqW2V988GNpeqe1Z7Dj11nXk5ORIjMaRh6ioKOzfvx+XLl2Sa73/VKPvUNy5cweTJ0/Gtm3boKMjv48LH330EZ48eSK87ty5I7e6iYiIiIikVVJSAmtra2hra6s6FKnV6ITi/PnzyM3NRcuWLaGhoQENDQ0kJydjzZo10NDQgJWVFUpKSird5rl//z6sra1fWa+2tjaMjIwkXkRERG/tQZZ0LyIplZWVISIiAsbGxmjQoAHmzp0LsVgM4MWQ7hkzZqBhw4bQ19dH27ZtkZSUJHF+fHw87O3toaenh/79++PhQxlGMZBcBAQEICIiAlOmTEGDBg0QGBhYacjTqVOn4O3tDR0dHfj6+mL//v0QiUSV7jacP38evr6+0NPTQ/v27ZGZmQngxfWOjo5Geno6RCIRRCIR4uPj5d6XGp1QdO3aFVeuXMGlS5eEl6+vL0JCQoSfNTU1kZiYKJyTmZmJ7Oxs+Pn5qTByIiIiIvlLSEiAhoYGzp49i9WrV2PFihXYvHkzACAiIgKnT5/Gjh07cPnyZQwaNAg9e/bEjRs3AACpqakYPXo0IiIicOnSJbzzzjtYuHChKrtT7yUkJEBLSwspKSnYsGGDxLGnT5/i3XffhYeHBy5cuIAFCxZg1qxZVdYzZ84cLF++HOfOnYOGhgZGjRoFABgyZAimT5+O5s2bIycnBzk5ORgyZIjc+1Gj51AYGhqiRYsWEvv09fVhbm4u7B89ejSmTZsGMzMzGBkZYeLEifDz80O7du1UETIRERGRwtjZ2WHlypUQiURo2rQprly5
gpUrVyIwMBBxcXHIzs6Gra0tAGDGjBk4fPgw4uLisGjRIqxevRo9e/ZEZOSLuSKurq44deoUDh8+rMou1WsuLi5YurTquTTbt2+HSCTCpk2boKOjA3d3d9y9exfh4eGVysbGxsLf3x8AMHv2bPTp0wfPnz+Hrq4uDAwMoKGh8drRO2+rRt+hqI6VK1eib9++GDBgADp37gxra2vs3btX1WERERERyV27du0gEv3/NHg/Pz/cuHEDV65cQXl5OVxdXWFgYCC8kpOTkZX1YnhdRkYG2rZtK1EfR3SoVqtWrV55LDMzE56enhLziNu0aVNl2X+u02ZjYwMAyM3NlVOUb1aj71BU5d9jAXV0dPD555/j888/V01ARERERCpWUFAAdXV1nD9/Hurq6hLHDAwMVBQVvYm+vr5c6tHU1BR+fplwVlRUyKXu6qh1CQURERFRfZWamiqxfebMGbi4uMDHxwfl5eXIzc1Fp06dqjzXzc2tyvOpZmratCm++eYbFBcXC09+qmoZhTfR0tJCeXm5vMOTUOuHPBERERHVF9nZ2Zg2bRoyMzPx7bffYu3atZg8eTJcXV0REhKC0NBQ7N27F7du3cLZs2exePFi/PDDDwCASZMm4fDhw1i2bBlu3LiBdevWcf5EDTZ8+HBUVFRg7NixyMjIwJEjR7Bs2TIAkBj29iaOjo64desWLl26hL/++gvFxcVyj5UJBdE/2Og3lupFRESkTKGhoXj27BnatGmDCRMmYPLkyRg7diwAIC4uDqGhoZg+fTqaNm2KoKAgpKWlwd7eHsCL+RebNm3C6tWr4eXlhaNHj+KTTz5RZXfoNYyMjPD999/j0qVL8Pb2xpw5czBv3jwAkGp9tgEDBqBnz5545513YGFhgW+//VbusXLIExERERFq/srV/5xHun79+krHNTU1ER0djejo6FfWMWrUKOGRoi9Nnz5dbjHWJPJYuVqR/j0vGICwpshL7du3R3p6urC9bds2aGpqCkliQEBApXO8vb0l9mlra2P37t1yjLwyJhRERERERDXQli1b4OzsjIYNGyI9PR2zZs3C4MGDoaurq+rQJDChqA/+91/pz7GQfxhvq/x+oVTlNd9chGT16In05zhIV/zZw2fSt0GKIcv1ptpLGde7KE/xbRDVAffu3cO8efNw79492NjYYNCgQYiNjVV1WJUwoSC5kPbDPsAP/IrCa0FERFQ3REZGCgsR1mSclE1ERERERDLjHQoiIiJ54VAeIqqHeIeCiIiIiIhkxoSCiIiIiIhkxoSCiIiIiIhkxjkUREREtQUf4UtENRDvUBARERHVASKRCPv371d1GFRNAQEBmDJlSrXL79+/H02aNIG6urpU5ykD71AQERERAThzL0qp7bWzlm97OTk5MDU1lWudtVl2dx+ltmd/7KJC6//ggw/w/vvvY9KkSTA0NERYWBjy8vJqRBLJhIKIiIioDrC2tlZ1CKQgBQUFyM3NRWBgIGxtbVUdTiVMKN4g99f/SVXeXpZGHmRJV97iHamKi5/kS1c/AJHUZyhe+V/PVB2CXNjoN1Z1CG+tzrynHkj3npJljKj4t6tSlRcFSFd/Xfm7KM+8J1V5tQDFxEFKIu1cEAfFhFHbBAQEwNPTEzo6Oti8eTO0tLTw4YcfIioqCsCLIU/79u1DUFAQbt++DScnJ+zZswdr165FamoqXFxcsGHDBvj5+Ql1njx5Eh999BHOnTuHBg0aoH///li8eDH09fVV1Mv6qbi4GHPmzMG3336LvLw8tGjRAkuWLEFAQACSkpLwzjsvPvt16dIFAODv74/k5GQAL647ABw/fhwBAQEqiZ9zKIiIiIhqiYSEBOjr6yM1NRVLly5FTEwMjh079sryc+bMwYwZM3Dp0iW4urpi2LBhKCsrAwBkZWWhZ8+eGDBgAC5fvozvvvsOJ0+eREREhLK6Q3+LiIjA6dOnsWPHDly+fBmDBg1Cz549cePGDbRv3x6ZmZkAgD179iAn
JwcHDx7E4MGD0bNnT+Tk5CAnJwft27dXWfxMKIiIiIhqCU9PT8yfPx8uLi4IDQ2Fr68vEhMTX1l+xowZ6NOnD1xdXREdHY3//ve/uHnzJgBg8eLFCAkJwZQpU+Di4oL27dtjzZo12LJlC54/f66sLtV72dnZiIuLw65du9CpUyc0btwYM2bMQMeOHREXFwctLS1YWloCAMzMzGBtbQ0jIyPo6upCW1sb1tbWsLa2hpaWlsr6wCFPRERERLWEp6enxLaNjQ1yc3OrVd7GxgYAkJubi2bNmiE9PR2XL1/Gtm3bhDJisRgVFRW4desW3Nzc5Bw9VeXKlSsoLy+Hq6urxP7i4mKYm5urKCrpMKEgIiIiqiU0NTUltkUiESoqKqpV/uVY+5flCwoK8MEHH2DSpEmVzrO3l2lWKMmgoKAA6urqOH/+PNTV1SWOGRgYqCgq6TChICIikhPx+d+kKi/iZGNSoZYtW+LatWto0qSJqkOp13x8fFBeXo7c3Fx06tSp2udpaWmhvLxcgZFVHxMKkou68qSZuoDXgoiIqmPWrFlo164dIiIiMGbMGOjr6+PatWs4duwY1q1bp+rw6g1XV1eEhIQgNDQUy5cvh4+PDx48eIDExER4enqiT58+VZ7n6OiII0eOIDMzE+bm5jA2Nq50B0tZOCmbiIiIqB7y9PREcnIyrl+/jk6dOsHHxwfz5s2rkesc1HVxcXEIDQ3F9OnT0bRpUwQFBSEtLe21Q8/Cw8PRtGlT+Pr6wsLCAikpKUqMWBLvUBARERFB/itXy1tSUlKlff9cJVksFgs/Ozo6SmwDgImJSaV9rVu3xtGjR+UaZ02h6JWr39Y/r6empiaio6MRHR1dZdmqrp2FhUWNuXa8Q0FERERERDJjQkFERERERDJjQkFERERERDLjHIqaoChP1REQkaI8eqLqCOglXotqeZZ0R+pz9IOlK1929oZU5TV9pKufiJSLdyiIiIiIiEhmTCiIiIiIiEhmTCiIiIiIiEhmTCiIiIiIiEhmTCiIiIiIiEhmTCiIiIiI6oCkpCSIRCLk5eVV+5yoqCh4e3srLCaSTkBAAKZMmaLqMKTGx8YSERERAbiet0yp7bmazJBrfe3bt0dOTg6MjY3lWm9AQAC8vb2xatUqudaraBnezZTantul35XaXk3ChIKIiIioDtDS0oK1tbWqw6B6iEOeiIiIiGqJiooKLF68GE5OTtDV1YWXlxd2794NoOohT5s2bYKdnR309PTQv39/rFixAiYmJpXq3bp1KxwdHWFsbIyhQ4ciPz8fABAWFobk5GSsXr0aIpEIIpEIt2/fVkJP677CwkKEhobCwMAANjY2WL58ucTxx48fIzQ0FKamptDT00OvXr1w48aLRSHFYjEsLCyEaw8A3t7esLGxEbZPnjwJbW1tFBUVAQBEIhE2b96M/v37Q09PDy4uLjh48KBc+sKEguSiKLdI6hcpBq8FEVHdtXjxYmzZsgUbNmzAb7/9hqlTp+K9995DcnJypbIpKSn48MMPMXnyZFy6dAndu3dHbGxspXJZWVnYv38/Dh06hEOHDiE5ORmffvopAGD16tXw8/NDeHg4cnJykJOTAzs7O4X3sz6YOXMmkpOTceDAARw9ehRJSUm4cOGCcDwsLAznzp3DwYMHcfr0aYjFYvTu3RulpaUQiUTo3LkzkpKSALxIPjIyMvDs2TP8/vuLoVfJyclo3bo19PT0hDqjo6MxePBgXL58Gb1790ZISAgePXr01n1hQkFERERUCxQXF2PRokX4+uuvERgYCGdnZ4SFheG9997Dxo0bK5Vfu3YtevXqhRkzZsDV1RXjx49Hr169KpWrqKhAfHw8WrRogU6dOmHEiBFITEwEABgbG0NLSwt6enqwtraGtbU11NXVFd7Xuq6goABfffUVli1bhq5du8LDwwMJCQkoKysDANy4cQMHDx7E5s2b0alTJ3h5eWHbtm24e/cu9u/fD+DF3JaXCcWJEyfg
4+MjsS8pKQn+/v4S7YaFhWHYsGFo0qQJFi1ahIKCApw9e/at+8M5FDXBoyfSlXdQcP01lLTfpOsrKI639iBLuvIW7ygmjrdQckLKPgDQCZB/HG+r5Le/pCqvKUMb/1t/Sary9sHS1f8w46F0J6Bm/m2U3y+Uqrws16L8wTOpytfXb9xkuWsp7XtKGde7Lrp58yaKiorQvXt3if0lJSXw8fGpVD4zMxP9+/eX2NemTRscOnRIYp+joyMMDQ2FbRsbG+Tm5soxcvq3rKwslJSUoG3btsI+MzMzNG3aFACQkZEBDQ0NiePm5uZo2rQpMjIyAAD+/v6YPHkyHjx4gOTkZAQEBMDa2hpJSUkYPXo0Tp06hcjISIl2PT09hZ/19fVhZGQkl2vNhIKIiIioFigoKAAA/PDDD2jYsKHEMW1tbWRlSf9lDwBoakqmbCKRCBUVFbIFSUrj4eEBMzMzJCcnIzk5GbGxsbC2tsaSJUuQlpaG0tJStG/fXuIcRV3r+voFDBEREVGt4u7uDm1tbWRnZ6NJkyYSr6rmNTRt2hRpaWkS+/69XR1aWlooLy+XOW6qrHHjxtDU1ERqaqqw7/Hjx7h+/ToAwM3NDWVlZRLHHz58iMzMTLi7uwN4kQx06tQJBw4cwG+//YaOHTvC09MTxcXF2LhxI3x9faGvr5x70rxDQURERFQLGBoaYsaMGZg6dSoqKirQsWNHPHnyBCkpKTAyMoKDg+SY6IkTJ6Jz585YsWIF3n33Xfzyyy/46aefIBKJpGrX0dERqampuH37NgwMDGBmZgY1NX4n/TYMDAwwevRozJw5E+bm5rC0tMScOXOE36uLiwv69euH8PBwbNy4EYaGhpg9ezYaNmyIfv36CfUEBARg+vTp8PX1hYGBAQCgc+fO2LZtG2bOnKm0/vDdQERERFRLLFiwAHPnzsXixYvh5uaGnj174ocffoCTk1Olsh06dMCGDRuwYsUKeHl54fDhw5g6dSp0dHSkanPGjBlQV1eHu7s7LCwskJ2dLa/u1GufffYZOnXqhHfffRfdunVDx44d0apVK+F4XFwcWrVqhb59+8LPzw9isRg//vijxLAlf39/lJeXIyAgQNgXEBBQaZ+i8Q4FEREREeS/crUiiEQiTJ48GZMnT67yuFgsltgODw9HeHi4xHaTJk2E7aioKERFRUmcM2XKFEyZMkXYdnV1xenTp98+eCWr6StXGxgYYOvWrdi6dauw7593FUxNTbFly5bX1uHt7V3pmv/7+r3073IAJNYseRtMKIj+weBPKSe0SfvELSKq0/gkKappli1bhu7du0NfXx8//fQTEhIS8MUXX6g6LKpjmFAQERER1VFnz57F0qVLkZ+fD2dnZ6xZswZjxoxRdVhUxzChICIiIqqjdu7cqeoQqB5gQkFERFRLSDukCuCwKiJSPP47Q0REREREMmNCQUREREREMuOQpxpA/CRfqvLSLUfDW+Qkf+V/Sf+eklahDO/bmkjR/VDG74n/htQvzx4q/j2Vf+2hVOWlWzWBiJSN/+YTEREREZHMeIeC5EIZ32hR9fBaEBERkTLxDgUREREREcmMdyiIiIiIAPxZuFmp7dnqc4E5RTrt1Eyp7fnd+l2p7dUkvENBREREREQyY0JBREQkJ+X3C6V6EUlr9+7d8PDwgK6uLszNzdGtWzcUFr54L23evBlubm7Q0dFBs2bN8MUXXwjntW/fHrNmzZKo68GDB9DU1MSJEycAAMXFxZgxYwYaNmwIfX19tG3bFklJSUL5+Ph4mJiY4MiRI3Bzc4OBgQF69uyJnJwcxXe8DnJ0dMSqVask9nl7eyMqKgoAIBKJsH79evTq1Qu6urpwdnbG7t27lR9oNdTohGLx4sVo3bo1DA0NYWlpiaCgIGRmZkqUef78OSZMmABzc3MYGBhgwIABuH//vooiJiIiIlKMnJwcDBs2DKNGjUJGRgaSkpIQHBwMsViMbdu2Yd68eYiNjUVGRgYWLVqE
uXPnIiEhAQAQEhKCHTt2QCwWC/V99913sLW1RadOnQAAEREROH36NHbs2IHLly9j0KBB6NmzJ27cuCGcU1RUhGXLlmHr1q04ceIEsrOzMWPGDOX+IuqRuXPnYsCAAUhPT0dISAiGDh2KjIwMVYdVSY1OKJKTkzFhwgScOXMGx44dQ2lpKXr06CFk4gAwdepUfP/999i1axeSk5Px559/Ijg4WIVRExEREclfTk4OysrKEBwcDEdHR3h4eGD8+PEwMDDA/PnzsXz5cgQHB8PJyQnBwcGYOnUqNm7cCAAYPHgw/vzzT5w8eVKob/v27Rg2bBhEIhGys7MRFxeHXbt2oVOnTmjcuDFmzJiBjh07Ii4uTjintLQUGzZsgK+vL1q2bImIiAgkJiYq/XdRXwwaNAhjxoyBq6srFixYAF9fX6xdu1bVYVVSoydlHz58WGI7Pj4elpaWOH/+PDp37ownT57gq6++wvbt29GlSxcAQFxcHNzc3HDmzBm0a9dOFWETEREphCzDpDQVEAephpeXF7p27QoPDw8EBgaiR48eGDhwILS0tJCVlYXRo0cjPDxcKF9WVgZjY2MAgIWFBXr06IFt27ahU6dOuHXrFk6fPi0kHFeuXEF5eTlcXV0l2iwuLoa5ubmwraenh8aNGwvbNjY2yM3NVWS36zU/P79K25cuXVJNMK9RoxOKf3vy5AkAwMzMDABw/vx5lJaWolu3bkKZZs2awd7eHqdPn35lQlFcXIzi4mJh++nTpwqMmoiIiOjtqaur49ixYzh16hSOHj2KtWvXYs6cOfj+++8BAJs2bULbtm0rnfNSSEgIJk2ahLVr12L79u3w8PCAh4cHAKCgoADq6uo4f/68xDkAYGBgIPysqSmZoopEIolhVFR9ampqlX53paWlKorm7dSahKKiogJTpkxBhw4d0KJFCwDAvXv3oKWlBRMTE4myVlZWuHfv3ivrWrx4MaKjo6vV7t2b0i0SZi9V6Ree7b0qVXn9AOnqryvfaP31+yOpylsoKI63VpSn6gjeWlFukdTn6Csgjrf1MOOhVOVrYh/qivxr0l0LHQXFQcrBBThlJxKJ0KFDB3To0AHz5s2Dg4MDUlJSYGtriz/++AMhISGvPLdfv34YO3YsDh8+jO3btyM0NFQ45uPjg/LycuTm5gpzKkixLCwsJCa0P336FLdu3ZIoc+bMGYnrdObMGfj4+CgtxuqqNQnFhAkTcPXqVYmxf7L66KOPMG3aNGH76dOnsLOze+t6iYiIiBQlNTUViYmJ6NGjBywtLZGamooHDx7Azc0N0dHRmDRpEoyNjdGzZ08UFxfj3LlzePz4sfCZR19fH0FBQZg7dy4yMjIwbNgwoW5XV1eEhIQgNDQUy5cvh4+PDx48eIDExER4enqiT58+qup2ndWlSxfEx8fj3XffhYmJCebNm1fp7tCuXbvg6+uLjh07Ytu2bTh79iy++uorFUX8arUioYiIiMChQ4dw4sQJNGrUSNhvbW2NkpIS5OXlSdyluH//PqytrV9Zn7a2NrS1tRUZMhEREZFcGRkZ4cSJE1i1ahWePn0KBwcHLF++HL169QLwYn7DZ599hpkzZ0JfXx8eHh6YMmWKRB0hISHo3bs3OnfuDHt7yXEVcXFxWLhwIaZPn467d++iQYMGaNeuHfr27ausLtYrH330EW7duoW+ffvC2NgYCxYsqHSHIjo6Gjt27MD48eNhY2ODb7/9Fu7u7iqK+NVqdEIhFosxceJE7Nu3D0lJSXBycpI43qpVK2hqaiIxMREDBgwAAGRmZiI7O7vSJBYiIiKi16npK1e7ublVemDNPw0fPhzDhw9/bR29evV65ZwHTU1NREdHv3JYeFhYGMLCwiT2BQUF1dg5FDV95WojIyPs2LFDYt/IkSMltm1tbXH06FFlhiWTGp1QTJgwAdu3b8eBAwdgaGgozIswNjaGrq4ujI2NMXr0aEybNg1mZmYwMjLCxIkT4efnxyc8ERER
EREpQY1OKNavXw8ACAgIkNgfFxcnZMgrV66EmpoaBgwYgOLiYgQGBkqsDElERERERIpToxOK6txC09HRweeff47PP/9cCRERERERESleTR1KVpUanVBQ7VH4gI8ArCl4LYiIiEiZmFAQERHJCdfTIKL6SE3VARARERERUe3FhIKIiIiIiGTGhIKIiIiIiGTGORREJLVnDxU/8TvvicKbUApF90MZv6ec3ZlSn2P/gQICeUvl9wulKq+poDiID48gqmt4h4KIiIiolgsLC0NQUJCqw6B6incoiIiIiADkl+5TanuGmv3lVtfq1atr1boFyvCjSVOlttc7T/q7uXUFEwoiIiKiWs7Y2FjVIVA9xiFPRERERLXE7t274eHhAV1dXZibm6Nbt24oLCyUGPL04MEDWFtbY9GiRcJ5p06dgpaWFhITE1UUOf1bQEAAJk2ahMjISJiZmcHa2hpRUVHC8ezsbPTr1w8GBgYwMjLC4MGDcf/+fdUF/BpMKIiIiIhqgZycHAwbNgyjRo1CRkYGkpKSEBwcXGmok4WFBb7++mtERUXh3LlzyM/Px4gRIxAREYGuXbuqKHqqSkJCAvT19ZGamoqlS5ciJiYGx44dQ0VFBfr164dHjx4hOTkZx44dwx9//IEhQ4aoOuQqccgTERERUS2Qk5ODsrIyBAcHw8HBAQDg4eFRZdnevXsjPDwcISEh8PX1hb6+PhYvXqzMcKkaPD09MX/+fACAi4sL1q1bJ9xFunLlCm7dugU7OzsAwJYtW9C8eXOkpaWhdevWKou5KrxDQURERFQLeHl5oWvXrvDw8MCgQYOwadMmPH78+JXlly1bhrKyMuzatQvbtm2Dtra2EqOl6vD09JTYtrGxQW5uLjIyMmBnZyckEwDg7u4OExMTZGRkKDvMN+IdCiIiIjn56/dHUpW3UFAcVDepq6vj2LFjOHXqFI4ePYq1a9dizpw5SE1NrbJ8VlYW/vzzT1RUVOD27duvvJtBqqOpKbnijUgkQkVFhYqikR3vUBARERHVEiKRCB06dEB0dDQuXrwILS0t7NtX+XG3JSUleO+99zBkyBAsWLAAY8aMQW5urgoiJlm4ubnhzp07uHPnjrDv2rVryMvLg7u7uwojqxrvUBAREdUS5X9xhen6LDU1FYmJiejRowcsLS2RmpqKBw8ewM3NDZcvX5YoO2fOHDx58gRr1qyBgYEBfvzxR4waNQqHDh1SUfQkjW7dusHDwwMhISFYtWoVysrKMH78ePj7+8PX11fV4VXChILkIu+JqiOgl3gtiIjqJiMjI5w4cQKrVq3C06dP4eDggOXLl6NXr1747rvvhHJJSUlYtWoVjh8/DiMjIwDA1q1b4eXlhfXr12PcuHGq6gJVk0gkwoEDBzBx4kR07twZampq6NmzJ9auXavq0KrEhKIGeJjxUKry+lLWn/vr/6Q8A7CX+gyqLvEt6a6HyEFBgbyFwgeK/5b08WPFr/iqjH4omjJ+T3VF/jXp/q3VUVAcNV1d+LuQlTxXrlYENzc3HD58uMpj8fHxws8BAQEoLS2VOO7o6IgnT+rXN041feXqpKSkSvv2798v/Gxvb48DBw4oL6C3wDkUREREREQkMyYUREREREQkMw55IvqHbx03SVV+uHiGgiIhIiIiqh14h4KIiIiIiGTGhIKIiIiIiGTGhIKIiIiIiGTGhIKIiIiIiGTGhIKIiIiIiGTGpzwRERHJycV06RYadFNQHEREysSEgoiIqJa4/P3/pD7Hb40CAiGVEYvF+OCDD7B79248fvwYxsbGCAsLw6pVq1QdGtVjTCiIiIiIAIhxXKntifCO1OccPnwY8fHxSEpKgrOzM9TU1KCrq6uA6Gq/7aKmSm1vuDhTqe3VJEwoaoDCB88UWv/dm9LXb6+AON7Wrdt1YyjBs71XpSqvH6CYOEg576nHj6VroyZSxr8hf/3+SKryFlLWDwBphx9KVb63DG1Q9eQ9UXUEtVdWVhZsbGzQ
vn17VYdCJOCkbCIiIqJaICwsDBMnTkR2djZEIhEcHR0REBCAKVOmAAA+/vhjtG3bttJ5Xl5eiImJEbY3b94MNzc36OjooFmzZvjiiy+U1QX625YtW2Bubo7i4mKJ/UFBQRgxYgQA4MCBA2jZsiV0dHTg7OyM6OholJWVAXgx9C0qKgr29vbQ1taGra0tJk2apPR+vMSEgoiIiKgWWL16NWJiYtCoUSPk5OQgLS1N4nhISAjOnj2LrKwsYd9vv/2Gy5cvY/jw4QCAbdu2Yd68eYiNjUVGRgYWLVqEuXPnIiEhQal9qe8GDRqE8vJyHDx4UNiXm5uLH374AaNGjcKvv/6K0NBQTJ48GdeuXcPGjRsRHx+P2NhYAMCePXuwcuVKbNy4ETdu3MD+/fvh4eGhqu5wyBPJR10Y1lFX8FoQEdVNxsbGMDQ0hLq6OqytrSsdb968Oby8vLB9+3bMnTsXwIsEom3btmjSpAkAYP78+Vi+fDmCg4MBAE5OTsIH1pEjRyqvM/Wcrq4uhg8fjri4OAwaNAgA8M0338De3h4BAQHo3r07Zs+eLVwTZ2dnLFiwAJGRkZg/fz6ys7NhbW2Nbt26QVNTE/b29mjTpo3K+sM7FERERER1REhICLZv3w7gxbCYb7/9FiEhIQCAwsJCZGVlYfTo0TAwMBBeCxculLirQcoRHh6Oo0eP4u7duwCA+Ph4hIWFQSQSIT09HTExMRLXKTw8HDk5OSgqKsKgQYPw7NkzODs7Izw8HPv27ROGQ6kC71AQERER1RHDhg3DrFmzcOHCBTx79gx37tzBkCFDAAAFBQUAgE2bNlWaa6Gurq70WOs7Hx8feHl5YcuWLejRowd+++03/PDDDwBeXKvo6GjhTtI/6ejowM7ODpmZmfj5559x7NgxjB8/Hp999hmSk5Ohqamp7K4woSAiIiKqKxo1agR/f39s27YNz549Q/fu3WFpaQkAsLKygq2tLf744w/hrgWp1pgxY7Bq1SrcvXsX3bp1g52dHQCgZcuWyMzMFIaqVUVXVxfvvvsu3n33XUyYMAHNmjXDlStX0LJlS2WFL2BCQURERFSHhISEYP78+SgpKcHKlSsljkVHR2PSpEkwNjZGz549UVxcjHPnzuHx48eYNm2aiiKuv4YPH44ZM2Zg06ZN2LJli7B/3rx56Nu3L+zt7TFw4ECoqakhPT0dV69excKFCxEfH4/y8nK0bdsWenp6+Oabb6CrqwsHBweV9INzKIiIiIjqkIEDB+Lhw4coKipCUFCQxLExY8Zg8+bNiIuLg4eHB/z9/REfHw8nJyfVBFvPGRsbY8CAATAwMJC4VoGBgTh06BCOHj2K1q1bo127dli5cqWQMJiYmGDTpk3o0KEDPD098fPPP+P777+Hubm5SvrBOxREREREkG3lamWbMmWKsO4EACQlJVUqY2JigufPn7+yjuHDhwuPka3LasvK1Xfv3kVISAi0tbUl9gcGBiIwMLDKc4KCgioli6rEhIKIiKiW4GOhieqOx48fIykpCUlJSbV+cUEmFG+gjH+8854otn7+B1SzFOUWSVVeX0Fx1HSK/rugmuViunT/TrnJ0AbfU9Vz67b0/2fIcj2I6jsfHx88fvwYS5YsQdOmTVUdzlthQkFEREREpGS3b99WdQhyw0nZREREREQkMyYUREREVC+JxRwSTPQ61f0bYUJBRERE9crLlYSLiqSb00ZU37z8G3nT6tucQ0FERET1irq6OkxMTJCbmwsA0NPTg0gkUnFURDWHWCxGUVERcnNzYWJiAnV19deWZ0JBRERE9Y61tTUACEkFEVVmYmIi/K28DhMKIiIiqndEIhFsbGxgaWmJ0tJSVYdDVONoamq+8c7ES0woSC74fPeag9eCiKj61NXVq/2hiYiqxknZREREREQkMyYUREREREQkMyYUREREREQkM86hqAEeP+bCOtXBuQE1x63b0r9n3RQQx9tSxntK0W0oow+yXG8/BcRBdeM9
C0j/nuL7iahm4x0KIiIiIiKSGRMKIiIiIiKSGRMKIiIiIiKSGRMKIiIiIiKSGRMKIiIiIiKSGRMKIiIiIiKSWZ1JKD7//HM4OjpCR0cHbdu2xdmzZ1UdEhERERFRnVcnEorvvvsO06ZNw/z583HhwgV4eXkhMDAQubm5qg6NiIiIiKhOqxMJxYoVKxAeHo73338f7u7u2LBhA/T09PD111+rOjQiIiIiojqt1icUJSUlOH/+PLp16ybsU1NTQ7du3XD69GkVRkZEREREVPdpqDqAt/XXX3+hvLwcVlZWEvutrKzw+++/V3lOcXExiouLhe0nT54AAJ4+fVqpbJG4XKp4qqrjTRTdhrT1y9QGal4bMl0LJbSRX1omVXntmngtauB7qqZeb0W3URP/9pTRRk28FspooyZeC2W08ar6X+4Xi8VS1UdE8iUS1/K/wj///BMNGzbEqVOn4OfnJ+yPjIxEcnIyUlNTK50TFRWF6OhoZYZJRERECnLnzh00atRI1WEQ1Vu1/g5FgwYNoK6ujvv370vsv3//Pqytras856OPPsK0adOE7YqKCjx69Ajm5uYQiURvbPPp06ews7PDnTt3YGRk9HYdqMNt1IU+sI2aUz/bqFlt1IU+sI2aU7+sbYjFYuTn58PW1lYhMRFR9dT6hEJLSwutWrVCYmIigoKCALxIEBITExEREVHlOdra2tDW1pbYZ2JiInXbRkZGCvuHtS61URf6wDZqTv1so2a1URf6wDZqTv2ytGFsbKzAaIioOmp9QgEA06ZNw8iRI+Hr64s2bdpg1apVKCwsxPvvv6/q0IiIiIiI6rQ6kVAMGTIEDx48wLx583Dv3j14e3vj8OHDlSZqExERERGRfNWJhAIAIiIiXjnESd60tbUxf/78SsOm2IZy62cbNauNutAHtlFz6mcbNauNutAHIlKcWv+UJyIiIiIiUp1av7AdERERERGpDhMK+r/27j2qyfv+A/j7IRCIkSEgmARMuCkIAlNQJro6aw7CrELtlDq0MKzn6MIK2lLtOqbVVqRWWqEUqqWUemlrVy8pXaVABWunoGAUHUO8YdUolYkKqMTk+/vDQ36CTiF80bX7vM7JOfKQvD/PE/kAn+cGIYQQQgghFqOBghBCCCGEEGIxGigIIYQQQgghFqOBwgI5OTnw8PCAnZ0dwsLCUFVVxS17z549mDZtGhQKBQRBwI4dO7hlA0B6ejrGjBkDe3t7uLq6IiYmBvX19Vxr5ObmIigoyPzHicaNG4evv/6aa43uVq9eDUEQkJKSwi1z+fLlEAShy8PPz49bPgCcP38ec+bMgbOzMyQSCQIDA3Hw4EFu+R4eHvdsgyAI0Gg03GoYjUakpaXB09MTEokE3t7eWLlyJXjf7+H69etISUmBSqWCRCJBeHg4Dhw4YHHew3qNMYa//vWvkMvlkEgkUKvVaGho4Fpj27ZtiIiIgLOzMwRBgE6n45ZvMBiwZMkSBAYGQiqVQqFQ4LnnnsOFCxe4bsPy5cvh5+cHqVQKR0dHqNVqVFZWcq1xtwULFkAQBLzzzjtcayQkJNzTJ5GRkdy3o66uDtOnT4eDgwOkUinGjBmDs2fPcsm/X68LgoA1a9Zw24bW1lYkJSXB3d0dEokE/v7+yMvL63F+T2pcunQJCQkJUCgUGDBgACIjI3vde4SQR4sGil767LPPsHjxYixbtgw1NTUIDg7GlClT0NTUxCW/ra0NwcHByMnJ4ZLXXUVFBTQaDfbv34+SkhIYDAZERESgra2NWw13d3esXr0a1dXVOHjwIJ588klER0fj2LFj3Grc7cCBA3j//fcRFBTEPTsgIAB6vd782Lt3L7fsK1euYPz48bCxscHXX3+Nf/7zn1i7di0cHR251Thw4ECX9S8pKQEAzJw5k1uNjIwM5Obm4t1330VdXR0yMjLw5ptvIjs7m1sNAHj++edRUlKCjRs3ora2FhEREVCr1Th//rxFeQ/rtTfffBNZWVnIy8tD
ZWUlpFIppkyZgps3b3Kr0dbWhgkTJiAjI4P7NrS3t6OmpgZpaWmoqanBtm3bUF9fj+nTp3OrAQDDhw/Hu+++i9raWuzduxceHh6IiIjAjz/+yK1Gp+3bt2P//v1QKBS92oae1oiMjOzSL5988gnXGidPnsSECRPg5+eH8vJyHDlyBGlpabCzs+OSf/e66/V6fPjhhxAEAc888wy3bVi8eDF27dqFTZs2oa6uDikpKUhKSoJWq+VSgzGGmJgYnDp1Cjt37sShQ4egUqmgVqu5/pwihHDGSK+MHTuWaTQa88dGo5EpFAqWnp7OvRYAtn37du65d2tqamIAWEVFRb/WcXR0ZB988AH33OvXr7Nhw4axkpISNnHiRJacnMwte9myZSw4OJhbXndLlixhEyZM6Lf8+0lOTmbe3t7MZDJxy5w6dSpLTEzssmzGjBksLi6OW4329nYmEolYUVFRl+WjR49mr776ap/zu/eayWRiMpmMrVmzxryspaWF2drask8++YRLjbudPn2aAWCHDh2yKPth+Z2qqqoYANbY2NhvNa5evcoAsNLSUq41zp07x9zc3NjRo0eZSqVib7/9tkX5/6lGfHw8i46OtjizJzViY2PZnDlz+i2/u+joaPbkk09yrREQEMBWrFjRZVlf+rB7jfr6egaAHT161LzMaDQyFxcXtmHDBotqEEL6Hx2h6IWOjg5UV1dDrVabl1lZWUGtVmPfvn2Pcc0sd/XqVQCAk5NTv+QbjUZ8+umnaGtrw7hx47jnazQaTJ06tcv/CU8NDQ1QKBTw8vJCXFxcj09N6AmtVovQ0FDMnDkTrq6uGDVqFDZs2MAtv7uOjg5s2rQJiYmJEASBW254eDjKyspw/PhxAMDhw4exd+9eREVFcatx+/ZtGI3Ge/bkSiQSrkeNOp0+fRoXL17s8nXl4OCAsLCwn2yvA3f6XRAEDBo0qF/yOzo6sH79ejg4OCA4OJhbrslkwty5c5GamoqAgABuud2Vl5fD1dUVvr6+WLhwIZqbm7llm0wmfPXVVxg+fDimTJkCV1dXhIWFcT+ttdOlS5fw1VdfYd68eVxzw8PDodVqcf78eTDGsHv3bhw/fhwRERFc8m/dugUAXXrdysoKtra2/dLrhBA+aKDohcuXL8NoNGLIkCFdlg8ZMgQXL158TGtlOZPJhJSUFIwfPx4jR47kml1bW4uBAwfC1tYWCxYswPbt2+Hv78+1xqeffoqamhqkp6dzze0UFhaGjz76CLt27UJubi5Onz6NX//617h+/TqX/FOnTiE3NxfDhg1DcXExFi5ciBdeeAGFhYVc8rvbsWMHWlpakJCQwDV36dKlePbZZ+Hn5wcbGxuMGjUKKSkpiIuL41bD3t4e48aNw8qVK3HhwgUYjUZs2rQJ+/btg16v51anU2c//1x6HQBu3ryJJUuWYPbs2fjFL37BNbuoqAgDBw6EnZ0d3n77bZSUlGDw4MHc8jMyMmBtbY0XXniBW2Z3kZGR+Pjjj1FWVoaMjAxUVFQgKioKRqORS35TUxNaW1uxevVqREZG4ptvvsHTTz+NGTNmoKKigkuNuxUWFsLe3h4zZszgmpudnQ1/f3+4u7tDLBYjMjISOTk5eOKJJ7jk+/n5QalU4pVXXsGVK1fQ0dGBjIwMnDt3rl96nRDCh/XjXgHy+Gg0Ghw9erRf9vr4+vpCp9Ph6tWr+Nvf/ob4+HhUVFRwGyp++OEHJCcno6SkpMfnH/fW3XvYg4KCEBYWBpVKha1bt3LZ62cymRAaGopVq1YBAEaNGoWjR48iLy8P8fHxfc7vLj8/H1FRURadf/4gW7duxebNm7FlyxYEBARAp9MhJSUFCoWC63Zs3LgRiYmJcHNzg0gkwujRozF79mxUV1dzq/FzZTAYMGvWLDDGkJubyz1/0qRJ0Ol0uHz5MjZs2IBZs2ahsrISrq6ufc6urq7GunXrUFNTw/XIWnfPPvus+d+BgYEICgqCt7c3ysvLMXny
5D7nm0wmAEB0dDQWLVoEAPjlL3+Jf/zjH8jLy8PEiRP7XONuH374IeLi4rh/f8zOzsb+/fuh1WqhUqmwZ88eaDQaKBQKLkeKbWxssG3bNsybNw9OTk4QiURQq9WIiorifqMHQgg/dISiFwYPHgyRSIRLly51WX7p0iXIZLLHtFaWSUpKQlFREXbv3g13d3fu+WKxGD4+PggJCUF6ejqCg4Oxbt06bvnV1dVoamrC6NGjYW1tDWtra1RUVCArKwvW1tbc9irebdCgQRg+fDhOnDjBJU8ul98zYI0YMYLraVWdGhsbUVpaiueff557dmpqqvkoRWBgIObOnYtFixZxP3Lk7e2NiooKtLa24ocffkBVVRUMBgO8vLy41gFg7uefQ693DhONjY0oKSnhfnQCAKRSKXx8fPCrX/0K+fn5sLa2Rn5+Ppfs7777Dk1NTVAqleZeb2xsxIsvvggPDw8uNe7Hy8sLgwcP5tbvgwcPhrW19SPp+e+++w719fXc+/3GjRv485//jMzMTEybNg1BQUFISkpCbGws3nrrLW51QkJCoNPp0NLSAr1ej127dqG5ublfep0QwgcNFL0gFosREhKCsrIy8zKTyYSysrJ+uT6gPzDGkJSUhO3bt+Pbb7+Fp6fnI6lrMpnM58byMHnyZNTW1kKn05kfoaGhiIuLg06ng0gk4larU2trK06ePAm5XM4lb/z48ffcsvf48eNQqVRc8u9WUFAAV1dXTJ06lXt2e3s7rKy6fisRiUTmPbK8SaVSyOVyXLlyBcXFxYiOjuZew9PTEzKZrEuvX7t2DZWVlT+ZXgf+f5hoaGhAaWkpnJ2dH0ldnv0+d+5cHDlypEuvKxQKpKamori4mEuN+zl37hyam5u59btYLMaYMWMeSc/n5+cjJCSE63UswJ2vJ4PB8Mj63cHBAS4uLmhoaMDBgwf7pdcJIXzQKU+9tHjxYsTHxyM0NBRjx47FO++8g7a2NvzhD3/gkt/a2tplj9jp06eh0+ng5OQEpVLZ53yNRoMtW7Zg586dsLe3N58P7uDgAIlE0ud8AHjllVcQFRUFpVKJ69evY8uWLSgvL+f6w9/e3v6e6z6kUimcnZ25XQ/y0ksvYdq0aVCpVLhw4QKWLVsGkUiE2bNnc8lftGgRwsPDsWrVKsyaNQtVVVVYv3491q9fzyW/k8lkQkFBAeLj42Ftzb/lp02bhjfeeANKpRIBAQE4dOgQMjMzkZiYyLVOcXExGGPw9fXFiRMnkJqaCj8/P4t772G9lpKSgtdffx3Dhg2Dp6cn0tLSoFAoEBMTw63Gv//9b5w9e9b8tyE6f9mUyWQ9OhLyoHy5XI7f/e53qKmpQVFREYxGo7nfnZycIBaL+7wNzs7OeOONNzB9+nTI5XJcvnwZOTk5OH/+fK9uTfyw96n7IGRjYwOZTAZfX18uNZycnPDaa6/hmWeegUwmw8mTJ/Hyyy/Dx8cHU6ZM4bYdqampiI2NxRNPPIFJkyZh165d+PLLL1FeXs4lH7gz+H7++edYu3Ztj9e7NzUmTpyI1NRUSCQSqFQqVFRU4OOPP0ZmZia3Gp9//jlcXFygVCpRW1uL5ORkxMTEcLvwmxDSDx7rPaZ+orKzs5lSqWRisZiNHTuW7d+/n1v27t27GYB7HvHx8Vzy75cNgBUUFHDJZ4yxxMREplKpmFgsZi4uLmzy5Mnsm2++4Zb/n/C+bWxsbCyTy+VMLBYzNzc3Fhsby06cOMEtnzHGvvzySzZy5Ehma2vL/Pz82Pr167nmM8ZYcXExA8Dq6+u5ZzPG2LVr11hycjJTKpXMzs6OeXl5sVdffZXdunWLa53PPvuMeXl5MbFYzGQyGdNoNKylpcXivIf1mslkYmlpaWzIkCHM1taWTZ48udfv4cNqFBQU3Pfzy5Yt63N+561o7/fYvXs3l224ceMGe/rpp5lCoWBisZjJ5XI2ffp0VlVVxfV96s6S28Y+qEZ7ezuLiIhgLi4uzMbGhqlU
KjZ//nx28eJF7tuRn5/PfHx8mJ2dHQsODmY7duzgmv/+++8ziURicW88rIZer2cJCQlMoVAwOzs75uvry9auXdurW1E/rMa6deuYu7s7s7GxYUqlkv3lL3/h/v2EEMKXwBhd5UQIIYQQQgixDF1DQQghhBBCCLEYDRSEEEIIIYQQi9FAQQghhBBCCLEYDRSEEEIIIYQQi9FAQQghhBBCCLEYDRSEEEIIIYQQi9FAQQghhBBCCLEYDRSEkJ+kM2fOQBAE6HS6Bz7vN7/5DVJSUh7JOhFCCCH/i2igIIRwk5CQAEEQIAgCxGIxfHx8sGLFCty+fbvPuTExMV2WDR06FHq9HiNHjgQAlJeXQxAEtLS0dHnetm3bsHLlyj7Vf5juw03nx50Pe3t7BAQEQKPRoKGhoV/XhRBCCHnUaKAghHAVGRkJvV6PhoYGvPjii1i+fDnWrFljUZbRaITJZLrv50QiEWQyGaytrR+Y4eTkBHt7e4vq91VpaSn0ej0OHz6MVatWoa6uDsHBwSgrK3ss60MIIYT0BxooCCFc2draQiaTQaVSYeHChVCr1dBqtQCAzMxMBAYGQiqVYujQofjjH/+I1tZW82s/+ugjDBo0CFqtFv7+/rC1tUViYiIKCwuxc+dO8x7/8vLyLkcFzpw5g0mTJgEAHB0dIQgCEhISANx7ytOVK1fw3HPPwdHREQMGDEBUVFSXowad61BcXIwRI0Zg4MCB5iGpt5ydnSGTyeDl5YXo6GiUlpYiLCwM8+bNg9FotODdJYQQQv770EBBCOlXEokEHR0dAAArKytkZWXh2LFjKCwsxLfffouXX365y/Pb29uRkZGBDz74AMeOHUNWVhZmzZpl/qVer9cjPDy8y2uGDh2KL774AgBQX18PvV6PdevW3Xd9EhIScPDgQWi1Wuzbtw+MMfz2t7+FwWDosg5vvfUWNm7ciD179uDs2bN46aWX+vxeWFlZITk5GY2Njaiuru5zHiGEEPLf4MHnChBCiIUYYygrK0NxcTH+9Kc/AUCXIwUeHh54/fXXsWDBArz33nvm5QaDAe+99x6Cg4PNyyQSCW7dugWZTHbfWiKRCE5OTgAAV1dXDBo06L7Pa2hogFarxffff28eSjZv3oyhQ4dix44dmDlzpnkd8vLy4O3tDQBISkrCihUrLHsjuvHz8wNw5zqLsWPHcskkhBBCHicaKAghXBUVFWHgwIEwGAwwmUz4/e9/j+XLlwO4c01Beno6/vWvf+HatWu4ffs2bt68ifb2dgwYMAAAIBaLERQU1C/rVldXB2tra4SFhZmXOTs7w9fXF3V1deZlAwYMMA8TACCXy9HU1MRlHRhjAABBELjkEUIIIY8bnfJECOFq0qRJ0Ol0aGhowI0bN1BYWAipVIozZ87gqaeeQlBQEL744gtUV1cjJycHAMynRAF3jkY87l+2bWxsunwsCIJ5EOirzsHF09OTSx4hhBDyuNERCkIIV1KpFD4+Pvcsr66uhslkwtq1a2FldWdfxtatW3uUKRaLH3oRs1gsBoAHPm/EiBG4ffs2Kisrzac8NTc3o76+Hv7+/j1al74wmUzIysqCp6cnRo0a1e/1CCGEkEeBjlAQQh4JHx8fGAwGZGdn49SpU9i4cSPy8vJ69FoPDw8cOXIE9fX1uHz5cpcLqDupVCoIgoCioiL8+OOPXe4e1WnYsGGIjo7G/PnzsXfvXhw+fBhz5syBm5sboqOj+7yN3TU3N+PixYs4deoUtFot1Go1qqqqkJ+fD5FIxL0eIYQQ8jjQQEEIeSSCg4ORmZmJjIwMjBw5Eps3b0Z6enqPXjt//nz4+voiNDQULi4u+P777+95jpubG1577TUsXboUQ4YMQVJS0n2zCgoKEBISgqeeegrjxo0DYwx///vf7znNiQe1Wg25XI7AwEAsXboUI0aMwJEjR8y3uCWEEEJ+DgTG68RgQgghhBBCyP8cOkJBCCGEEEIIsRgNFIQQQgghhBCL0UBB
CCGEEEIIsRgNFIQQQgghhBCL0UBBCCGEEEIIsRgNFIQQQgghhBCL0UBBCCGEEEIIsRgNFIQQQgghhBCL0UBBCCGEEEIIsRgNFIQQQgghhBCL0UBBCCGEEEIIsRgNFIQQQgghhBCL/R9Yw+Tn1pgx6AAAAABJRU5ErkJggg==",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from flwr_datasets import FederatedDataset\n",
+ "from flwr_datasets.partitioner import NaturalIdPartitioner\n",
+ "from flwr_datasets.visualization import plot_label_distributions\n",
+ "\n",
+ "\n",
+ "fds = FederatedDataset(\n",
+ " dataset=\"google/speech_commands\",\n",
+ " subset=\"v0.01\",\n",
+ " partitioners={\n",
+ " \"train\": NaturalIdPartitioner(\n",
+ " partition_by=\"speaker_id\",\n",
+ " ),\n",
+ " },\n",
+ ")\n",
+ "\n",
+ "partitioner = fds.partitioners[\"train\"]\n",
+ "\n",
+ "fix, ax, df = plot_label_distributions(\n",
+ " partitioner=partitioner,\n",
+ " label_name=\"label\",\n",
+ " max_num_partitions=20,\n",
+ " plot_type=\"bar\",\n",
+ " size_unit=\"percent\",\n",
+ " partition_id_axis=\"x\",\n",
+ " legend=True,\n",
+ " title=\"Per Partition Labels Distribution\",\n",
+ " verbose_labels=True,\n",
+ " legend_kwargs={\"ncols\": 2, \"bbox_to_anchor\": (1.25, 0.5)},\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4442c99c",
+ "metadata": {},
+ "source": [
+ "## More resources\n",
+ "\n",
+ "If you are looking for more resorces, feel free to check:\n",
+ "\n",
+ "* `flwr-dataset` documentation\n",
+ " * [plot_label_distributions](https://flower.ai/docs/datasets/ref-api/flwr_datasets.visualization.plot_label_distributions.html#flwr_datasets.visualization.plot_label_distributions)\n",
+ " * [plot_comparison_label_distribution](https://flower.ai/docs/datasets/ref-api/flwr_datasets.visualization.plot_comparison_label_distribution.html#flwr_datasets.visualization.plot_comparison_label_distribution)\n",
+ "* if you want to do any custom modification of the returned plots\n",
+ " * [matplotlib](https://matplotlib.org/)\n",
+ " * [seaborn](https://seaborn.pydata.org/)\n",
+ " * or plot directly using pandas object [pd.DataFrame.plot](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.plot.html)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "52655972",
+ "metadata": {},
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "flwr",
+ "language": "python",
+ "name": "python3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/datasets/doc/source/index.rst b/datasets/doc/source/index.rst
index 263aa4908d6d..fcc7920711bf 100644
--- a/datasets/doc/source/index.rst
+++ b/datasets/doc/source/index.rst
@@ -41,6 +41,7 @@ Problem-oriented how-to guides show step-by-step how to achieve a specific goal.
how-to-use-with-tensorflow
how-to-use-with-numpy
how-to-use-with-local-data
+ how-to-visualize-label-distribution
how-to-disable-enable-progress-bar
References
@@ -93,6 +94,7 @@ Here are a few of the ``Partitioner`` s that are available: (for a full list see
* IID partitioning ``IidPartitioner(num_partitions)``
* Dirichlet partitioning ``DirichletPartitioner(num_partitions, partition_by, alpha)``
* InnerDirichlet partitioning ``InnerDirichletPartitioner(partition_sizes, partition_by, alpha)``
+* PathologicalPartitioner ``PathologicalPartitioner(num_partitions, partition_by, num_classes_per_partition, class_assignment_mode)``
* Natural ID partitioner ``NaturalIdPartitioner(partition_by)``
* Size partitioner (the abstract base class for the partitioners dictating the division based the number of samples) ``SizePartitioner``
* Linear partitioner ``LinearPartitioner(num_partitions)``
diff --git a/datasets/flwr_datasets/mock_utils_test.py b/datasets/flwr_datasets/mock_utils_test.py
index 78aff1f1cdd7..bd49de8033de 100644
--- a/datasets/flwr_datasets/mock_utils_test.py
+++ b/datasets/flwr_datasets/mock_utils_test.py
@@ -190,7 +190,7 @@ def _generate_random_image_column(
pil_imgs = []
for np_image in np_images:
# Convert the NumPy array to a PIL image
- pil_img_beg = Image.fromarray(np_image) # type: ignore
+ pil_img_beg = Image.fromarray(np_image)
# Save the image to an in-memory bytes buffer
in_memory_file = io.BytesIO()
diff --git a/datasets/flwr_datasets/partitioner/__init__.py b/datasets/flwr_datasets/partitioner/__init__.py
index 1fc00ed90323..0c75dbce387a 100644
--- a/datasets/flwr_datasets/partitioner/__init__.py
+++ b/datasets/flwr_datasets/partitioner/__init__.py
@@ -22,6 +22,7 @@
from .linear_partitioner import LinearPartitioner
from .natural_id_partitioner import NaturalIdPartitioner
from .partitioner import Partitioner
+from .pathological_partitioner import PathologicalPartitioner
from .shard_partitioner import ShardPartitioner
from .size_partitioner import SizePartitioner
from .square_partitioner import SquarePartitioner
@@ -34,6 +35,7 @@
"LinearPartitioner",
"NaturalIdPartitioner",
"Partitioner",
+ "PathologicalPartitioner",
"ShardPartitioner",
"SizePartitioner",
"SquarePartitioner",
diff --git a/datasets/flwr_datasets/partitioner/pathological_partitioner.py b/datasets/flwr_datasets/partitioner/pathological_partitioner.py
new file mode 100644
index 000000000000..1ee60d283044
--- /dev/null
+++ b/datasets/flwr_datasets/partitioner/pathological_partitioner.py
@@ -0,0 +1,305 @@
+# Copyright 2024 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Pathological partitioner class that works with Hugging Face Datasets."""
+
+
+import warnings
+from typing import Any, Dict, List, Literal, Optional
+
+import numpy as np
+
+import datasets
+from flwr_datasets.common.typing import NDArray
+from flwr_datasets.partitioner.partitioner import Partitioner
+
+
+# pylint: disable=too-many-arguments, too-many-instance-attributes
+class PathologicalPartitioner(Partitioner):
+ """Partition dataset such that each partition has a chosen number of classes.
+
+ Implementation based on Federated Learning on Non-IID Data Silos: An Experimental
+ Study https://arxiv.org/pdf/2102.02079.
+
+    The algorithm first determines which classes will be assigned to which partitions.
+ For each partition `num_classes_per_partition` are sampled in a way chosen in
+ `class_assignment_mode`. Given the information about the required classes for each
+ partition, it is determined into how many parts the samples corresponding to this
+ label should be divided. Such division is performed for each class.
+
+ Parameters
+ ----------
+ num_partitions : int
+ The total number of partitions that the data will be divided into.
+ partition_by : str
+ Column name of the labels (targets) based on which partitioning works.
+ num_classes_per_partition: int
+ The (exact) number of unique classes that each partition will have.
+ class_assignment_mode: Literal["random", "deterministic", "first-deterministic"]
+        The way the classes are assigned to the partitions. The default is "random".
+ The possible values are:
+
+ - "random": Randomly assign classes to the partitions. For each partition choose
+ the `num_classes_per_partition` classes without replacement.
+ - "first-deterministic": Assign the first class for each partition in a
+ deterministic way (class id is the partition_id % num_unique_classes).
+ The rest of the classes are assigned randomly. In case the number of
+ partitions is smaller than the number of unique classes, not all classes will
+        be used in the first iteration, otherwise all the classes will be used (such
+        that each will be present in at least one partition).
+ - "deterministic": Assign all the classes to the partitions in a deterministic
+        way. Classes are assigned based on the formula: partition_id has classes
+ identified by the index: (partition_id + i) % num_unique_classes
+ where i in {0, ..., num_classes_per_partition}. So, partition 0 will have
+ classes 0, 1, 2, ..., `num_classes_per_partition`-1, partition 1 will have
+ classes 1, 2, 3, ...,`num_classes_per_partition`, ....
+
+        The list representing the unique labels is sorted in ascending order. In case
+ of numbers starting from zero the class id corresponds to the number itself.
+        `class_assignment_mode="first-deterministic"` was used in the original paper,
+ here we provide the option to use the other modes as well.
+ shuffle: bool
+        Whether to randomize the order of samples. Shuffling is applied after the
+        samples are assigned to partitions.
+ seed: int
+ Seed used for dataset shuffling. It has no effect if `shuffle` is False.
+
+ Examples
+ --------
+ In order to mimic the original behavior of the paper follow the setup below
+ (the `class_assignment_mode="first-deterministic"`):
+
+ >>> from flwr_datasets.partitioner import PathologicalPartitioner
+ >>> from flwr_datasets import FederatedDataset
+ >>>
+ >>> partitioner = PathologicalPartitioner(
+ >>> num_partitions=10,
+ >>> partition_by="label",
+ >>> num_classes_per_partition=2,
+ >>> class_assignment_mode="first-deterministic"
+ >>> )
+ >>> fds = FederatedDataset(dataset="mnist", partitioners={"train": partitioner})
+ >>> partition = fds.load_partition(0)
+ """
+
+ def __init__(
+ self,
+ num_partitions: int,
+ partition_by: str,
+ num_classes_per_partition: int,
+ class_assignment_mode: Literal[
+ "random", "deterministic", "first-deterministic"
+ ] = "random",
+ shuffle: bool = True,
+ seed: Optional[int] = 42,
+ ) -> None:
+ super().__init__()
+ self._num_partitions = num_partitions
+ self._partition_by = partition_by
+ self._num_classes_per_partition = num_classes_per_partition
+ self._class_assignment_mode = class_assignment_mode
+ self._shuffle = shuffle
+ self._seed = seed
+ self._rng = np.random.default_rng(seed=self._seed)
+
+ # Utility attributes
+ self._partition_id_to_indices: Dict[int, List[int]] = {}
+ self._partition_id_to_unique_labels: Dict[int, List[Any]] = {
+ pid: [] for pid in range(self._num_partitions)
+ }
+ self._unique_labels: List[Any] = []
+ # Count in how many partitions the label is used
+ self._unique_label_to_times_used_counter: Dict[Any, int] = {}
+ self._partition_id_to_indices_determined = False
+
+ def load_partition(self, partition_id: int) -> datasets.Dataset:
+ """Load a partition based on the partition index.
+
+ Parameters
+ ----------
+ partition_id : int
+ The index that corresponds to the requested partition.
+
+ Returns
+ -------
+ dataset_partition : Dataset
+ Single partition of a dataset.
+ """
+ # The partitioning is done lazily - only when the first partition is
+ # requested. Only the first call creates the indices assignments for all the
+ # partition indices.
+ self._check_num_partitions_correctness_if_needed()
+ self._determine_partition_id_to_indices_if_needed()
+ return self.dataset.select(self._partition_id_to_indices[partition_id])
+
+ @property
+ def num_partitions(self) -> int:
+ """Total number of partitions."""
+ self._check_num_partitions_correctness_if_needed()
+ self._determine_partition_id_to_indices_if_needed()
+ return self._num_partitions
+
+ def _determine_partition_id_to_indices_if_needed(self) -> None:
+ """Create an assignment of indices to the partition indices."""
+ if self._partition_id_to_indices_determined:
+ return
+ self._determine_partition_id_to_unique_labels()
+ assert self._unique_labels is not None
+ self._count_partitions_having_each_unique_label()
+
+ labels = np.asarray(self.dataset[self._partition_by])
+ self._check_correctness_of_unique_label_to_times_used_counter(labels)
+ for partition_id in range(self._num_partitions):
+ self._partition_id_to_indices[partition_id] = []
+
+ unused_labels = []
+ for unique_label in self._unique_labels:
+ if self._unique_label_to_times_used_counter[unique_label] == 0:
+ unused_labels.append(unique_label)
+ continue
+ # Get the indices in the original dataset where the y == unique_label
+ unique_label_to_indices = np.where(labels == unique_label)[0]
+
+ split_unique_labels_to_indices = np.array_split(
+ unique_label_to_indices,
+ self._unique_label_to_times_used_counter[unique_label],
+ )
+
+ split_index = 0
+ for partition_id in range(self._num_partitions):
+ if unique_label in self._partition_id_to_unique_labels[partition_id]:
+ self._partition_id_to_indices[partition_id].extend(
+ split_unique_labels_to_indices[split_index]
+ )
+ split_index += 1
+
+ if len(unused_labels) >= 1:
+ warnings.warn(
+ f"Classes: {unused_labels} will NOT be used due to the chosen "
+                f"configuration. If this is undesired behavior, consider setting"
+                f" class_assignment_mode='first-deterministic', which, when the"
+                f" number of classes is smaller than the number of partitions, will "
+                f"utilize all the classes for the created partitions.",
+ stacklevel=1,
+ )
+ if self._shuffle:
+ for indices in self._partition_id_to_indices.values():
+ # In place shuffling
+ self._rng.shuffle(indices)
+
+ self._partition_id_to_indices_determined = True
+
+ def _check_num_partitions_correctness_if_needed(self) -> None:
+ """Test num_partitions when the dataset is given (in load_partition)."""
+ if not self._partition_id_to_indices_determined:
+ if self._num_partitions > self.dataset.num_rows:
+ raise ValueError(
+ "The number of partitions needs to be smaller than the number of "
+ "samples in the dataset."
+ )
+
+ def _determine_partition_id_to_unique_labels(self) -> None:
+ """Determine the assignment of unique labels to the partitions."""
+ self._unique_labels = sorted(self.dataset.unique(self._partition_by))
+ num_unique_classes = len(self._unique_labels)
+
+ if self._num_classes_per_partition > num_unique_classes:
+ raise ValueError(
+ f"The specified `num_classes_per_partition`"
+ f"={self._num_classes_per_partition} is greater than the number "
+ f"of unique classes in the given dataset={num_unique_classes}. "
+ f"Reduce the `num_classes_per_partition` or make use different dataset "
+ f"to apply this partitioning."
+ )
+ if self._class_assignment_mode == "first-deterministic":
+            # First class per partition is deterministic; the rest are random.
+ for partition_id in range(self._num_partitions):
+ label = partition_id % num_unique_classes
+ self._partition_id_to_unique_labels[partition_id].append(label)
+
+ while (
+ len(self._partition_id_to_unique_labels[partition_id])
+ < self._num_classes_per_partition
+ ):
+ label = self._rng.choice(self._unique_labels, size=1)[0]
+ if label not in self._partition_id_to_unique_labels[partition_id]:
+ self._partition_id_to_unique_labels[partition_id].append(label)
+ elif self._class_assignment_mode == "deterministic":
+ for partition_id in range(self._num_partitions):
+ labels = []
+ for i in range(self._num_classes_per_partition):
+ label = self._unique_labels[
+ (partition_id + i) % len(self._unique_labels)
+ ]
+ labels.append(label)
+ self._partition_id_to_unique_labels[partition_id] = labels
+ elif self._class_assignment_mode == "random":
+ for partition_id in range(self._num_partitions):
+ labels = self._rng.choice(
+ self._unique_labels,
+ size=self._num_classes_per_partition,
+ replace=False,
+ ).tolist()
+ self._partition_id_to_unique_labels[partition_id] = labels
+ else:
+ raise ValueError(
+ f"The supported class_assignment_mode are: 'random', 'deterministic', "
+ f"'first-deterministic'. You provided: {self._class_assignment_mode}."
+ )
+
+ def _count_partitions_having_each_unique_label(self) -> None:
+ """Count the number of partitions that have each unique label.
+
+        This computation is based on the assignment of the label to the partition_id in
+ the `_determine_partition_id_to_unique_labels` method.
+ Given:
+        * partition 0 has only labels: 0, 1 (not necessarily just two samples; it can
+        have many samples, each labeled either 0 or 1)
+ * partition 1 has only labels: 1, 2 (same count note as above)
+ * and there are only two partitions then the following will be computed:
+ {
+ 0: 1,
+ 1: 2,
+ 2: 1
+ }
+ """
+ for unique_label in self._unique_labels:
+ self._unique_label_to_times_used_counter[unique_label] = 0
+ for unique_labels in self._partition_id_to_unique_labels.values():
+ for unique_label in unique_labels:
+ self._unique_label_to_times_used_counter[unique_label] += 1
+
+ def _check_correctness_of_unique_label_to_times_used_counter(
+ self, labels: NDArray
+ ) -> None:
+ """Check if partitioning is possible given the presence requirements.
+
+        The number of times a label can be used must not exceed the number
+ of times that the label is present in the dataset.
+ """
+ for unique_label in self._unique_labels:
+ num_unique = np.sum(labels == unique_label)
+ if self._unique_label_to_times_used_counter[unique_label] > num_unique:
+ raise ValueError(
+ f"Label: {unique_label} is needed to be assigned to more "
+ f"partitions "
+ f"({self._unique_label_to_times_used_counter[unique_label]})"
+ f" than there are samples (corresponding to this label) in the "
+ f"dataset ({num_unique}). Please decrease the `num_partitions`, "
+ f"`num_classes_per_partition` to avoid this situation, "
+ f"or try `class_assigment_mode='deterministic'` to create a more "
+ f"even distribution of classes along the partitions. "
+ f"Alternatively use a different dataset if you can not adjust"
+ f" the any of these parameters."
+ )
diff --git a/datasets/flwr_datasets/partitioner/pathological_partitioner_test.py b/datasets/flwr_datasets/partitioner/pathological_partitioner_test.py
new file mode 100644
index 000000000000..151b7e14659c
--- /dev/null
+++ b/datasets/flwr_datasets/partitioner/pathological_partitioner_test.py
@@ -0,0 +1,262 @@
+# Copyright 2024 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Test cases for PathologicalPartitioner."""
+
+
+import unittest
+from typing import Dict
+
+import numpy as np
+from parameterized import parameterized
+
+import datasets
+from datasets import Dataset
+from flwr_datasets.partitioner.pathological_partitioner import PathologicalPartitioner
+
+
+def _dummy_dataset_setup(
+ num_samples: int, partition_by: str, num_unique_classes: int
+) -> Dataset:
+ """Create a dummy dataset for testing."""
+ data = {
+ partition_by: np.tile(
+ np.arange(num_unique_classes), num_samples // num_unique_classes + 1
+ )[:num_samples],
+ "features": np.random.randn(num_samples),
+ }
+ return Dataset.from_dict(data)
+
+
+def _dummy_heterogeneous_dataset_setup(
+ num_samples: int, partition_by: str, num_unique_classes: int
+) -> Dataset:
+ """Create a dummy dataset for testing."""
+ data = {
+ partition_by: np.tile(
+ np.arange(num_unique_classes), num_samples // num_unique_classes + 1
+ )[:num_samples],
+ "features": np.random.randn(num_samples),
+ }
+ return Dataset.from_dict(data)
+
+
+class TestClassConstrainedPartitioner(unittest.TestCase):
+ """Unit tests for PathologicalPartitioner."""
+
+ @parameterized.expand( # type: ignore
+ [
+ # num_partition, num_classes_per_partition, num_samples, total_classes
+ (3, 1, 60, 3), # Single class per partition scenario
+ (5, 2, 100, 5),
+ (5, 2, 100, 10),
+ (4, 3, 120, 6),
+ ]
+ )
+ def test_correct_num_classes_when_partitioned(
+ self,
+ num_partitions: int,
+ num_classes_per_partition: int,
+ num_samples: int,
+ num_unique_classes: int,
+ ) -> None:
+ """Test correct number of unique classes."""
+ dataset = _dummy_dataset_setup(num_samples, "labels", num_unique_classes)
+ partitioner = PathologicalPartitioner(
+ num_partitions=num_partitions,
+ partition_by="labels",
+ num_classes_per_partition=num_classes_per_partition,
+ )
+ partitioner.dataset = dataset
+ partitions: Dict[int, Dataset] = {
+ pid: partitioner.load_partition(pid) for pid in range(num_partitions)
+ }
+ unique_classes_per_partition = {
+ pid: np.unique(partition["labels"]) for pid, partition in partitions.items()
+ }
+
+ for unique_classes in unique_classes_per_partition.values():
+ self.assertEqual(num_classes_per_partition, len(unique_classes))
+
+ def test_first_class_deterministic_assignment(self) -> None:
+ """Test deterministic assignment of first classes to partitions.
+
+ Test if all the classes are used (which has to be the case, given num_partitions
+        >= the number of unique classes).
+ """
+ dataset = _dummy_dataset_setup(100, "labels", 10)
+ partitioner = PathologicalPartitioner(
+ num_partitions=10,
+ partition_by="labels",
+ num_classes_per_partition=2,
+ class_assignment_mode="first-deterministic",
+ )
+ partitioner.dataset = dataset
+ partitioner.load_partition(0)
+ expected_classes = set(range(10))
+ actual_classes = set()
+ for pid in range(10):
+ partition = partitioner.load_partition(pid)
+ actual_classes.update(np.unique(partition["labels"]))
+ self.assertEqual(expected_classes, actual_classes)
+
+ @parameterized.expand(
+ [ # type: ignore
+ # num_partitions, num_classes_per_partition, num_samples, num_unique_classes
+ (4, 2, 80, 8),
+ (10, 2, 100, 10),
+ ]
+ )
+ def test_deterministic_class_assignment(
+ self, num_partitions, num_classes_per_partition, num_samples, num_unique_classes
+ ):
+ """Test deterministic assignment of classes to partitions."""
+ dataset = _dummy_dataset_setup(num_samples, "labels", num_unique_classes)
+ partitioner = PathologicalPartitioner(
+ num_partitions=num_partitions,
+ partition_by="labels",
+ num_classes_per_partition=num_classes_per_partition,
+ class_assignment_mode="deterministic",
+ )
+ partitioner.dataset = dataset
+ partitions = {
+ pid: partitioner.load_partition(pid) for pid in range(num_partitions)
+ }
+
+ # Verify each partition has the expected classes, order does not matter
+ for pid, partition in partitions.items():
+ expected_labels = sorted(
+ [
+ (pid + i) % num_unique_classes
+ for i in range(num_classes_per_partition)
+ ]
+ )
+ actual_labels = sorted(np.unique(partition["labels"]))
+ self.assertTrue(
+ np.array_equal(expected_labels, actual_labels),
+ f"Partition {pid} does not have the expected labels: "
+ f"{expected_labels} but instead {actual_labels}.",
+ )
+
+ @parameterized.expand(
+ [ # type: ignore
+ # num_partitions, num_classes_per_partition, num_samples, num_unique_classes
+ (10, 3, 20, 3),
+ ]
+ )
+ def test_too_many_partitions_for_a_class(
+ self, num_partitions, num_classes_per_partition, num_samples, num_unique_classes
+ ) -> None:
+ """Test too many partitions for the number of samples in a class."""
+ dataset_1 = _dummy_dataset_setup(
+ num_samples // 2, "labels", num_unique_classes - 1
+ )
+ # Create a skewed part of the dataset for the last label
+ data = {
+ "labels": np.array([num_unique_classes - 1] * (num_samples // 2)),
+ "features": np.random.randn(num_samples // 2),
+ }
+ dataset_2 = Dataset.from_dict(data)
+ dataset = datasets.concatenate_datasets([dataset_1, dataset_2])
+
+ partitioner = PathologicalPartitioner(
+ num_partitions=num_partitions,
+ partition_by="labels",
+ num_classes_per_partition=num_classes_per_partition,
+ class_assignment_mode="random",
+ )
+ partitioner.dataset = dataset
+
+ with self.assertRaises(ValueError) as context:
+ _ = partitioner.load_partition(0)
+ self.assertEqual(
+ str(context.exception),
+ "Label: 0 is needed to be assigned to more partitions (10) than there are "
+ "samples (corresponding to this label) in the dataset (5). "
+ "Please decrease the `num_partitions`, `num_classes_per_partition` to "
+ "avoid this situation, or try `class_assigment_mode='deterministic'` to "
+ "create a more even distribution of classes along the partitions. "
+ "Alternatively use a different dataset if you can not adjust the any of "
+ "these parameters.",
+ )
+
+ @parameterized.expand( # type: ignore
+ [
+ # num_partitions, num_classes_per_partition, num_samples, num_unique_classes
+ (10, 11, 100, 10), # 11 > 10
+ (5, 11, 100, 10), # 11 > 10
+ (10, 20, 100, 5), # 20 > 5
+ ]
+ )
+ def test_more_classes_per_partition_than_num_unique_classes_in_dataset_raises(
+ self,
+ num_partitions: int,
+ num_classes_per_partition: int,
+ num_samples: int,
+ num_unique_classes: int,
+ ) -> None:
+        """Test that num_classes_per_partition > num_unique_classes raises."""
+ dataset = _dummy_dataset_setup(num_samples, "labels", num_unique_classes)
+ with self.assertRaises(ValueError) as context:
+ partitioner = PathologicalPartitioner(
+ num_partitions=num_partitions,
+ partition_by="labels",
+ num_classes_per_partition=num_classes_per_partition,
+ )
+ partitioner.dataset = dataset
+ partitioner.load_partition(0)
+ self.assertEqual(
+ str(context.exception),
+ "The specified "
+ f"`num_classes_per_partition`={num_classes_per_partition} is "
+ f"greater than the number of unique classes in the given "
+ f"dataset={len(dataset.unique('labels'))}. Reduce the "
+ f"`num_classes_per_partition` or make use different dataset "
+ f"to apply this partitioning.",
+ )
+
+ @parameterized.expand( # type: ignore
+ [
+ # num_classes_per_partition should be irrelevant since the exception should
+ # be raised at the very beginning
+ # num_partitions, num_classes_per_partition, num_samples
+ (10, 2, 5),
+ (10, 10, 5),
+ (100, 10, 99),
+ ]
+ )
+ def test_more_partitions_than_samples_raises(
+ self, num_partitions: int, num_classes_per_partition: int, num_samples: int
+ ) -> None:
+        """Test if generation of more partitions than there are samples raises."""
+ # The number of unique classes in the dataset should be irrelevant since the
+ # exception should be raised at the very beginning
+ dataset = _dummy_dataset_setup(num_samples, "labels", num_unique_classes=5)
+ with self.assertRaises(ValueError) as context:
+ partitioner = PathologicalPartitioner(
+ num_partitions=num_partitions,
+ partition_by="labels",
+ num_classes_per_partition=num_classes_per_partition,
+ )
+ partitioner.dataset = dataset
+ partitioner.load_partition(0)
+ self.assertEqual(
+ str(context.exception),
+ "The number of partitions needs to be smaller than the number of "
+ "samples in the dataset.",
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/datasets/pyproject.toml b/datasets/pyproject.toml
index 017374181f59..e3afd8b87075 100644
--- a/datasets/pyproject.toml
+++ b/datasets/pyproject.toml
@@ -59,6 +59,7 @@ pillow = { version = ">=6.2.1", optional = true }
soundfile = { version = ">=0.12.1", optional = true }
librosa = { version = ">=0.10.0.post2", optional = true }
tqdm ="^4.66.1"
+pyarrow = "==16.1.0"
matplotlib = "^3.7.5"
seaborn = "^0.13.0"
diff --git a/dev/bootstrap.sh b/dev/bootstrap.sh
index 154fe0f1cbaf..bfcdc8a4369e 100755
--- a/dev/bootstrap.sh
+++ b/dev/bootstrap.sh
@@ -9,8 +9,8 @@ cd "$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"/../
./dev/rm-caches.sh
# Upgrade/install spcific versions of `pip`, `setuptools`, and `poetry`
-python -m pip install -U pip==24.0.0
-python -m pip install -U setuptools==69.5.1
+python -m pip install -U pip==24.1.2
+python -m pip install -U setuptools==70.3.0
python -m pip install -U poetry==1.7.1
# Use `poetry` to install project dependencies
diff --git a/dev/build-docker-image-matrix.py b/dev/build-docker-image-matrix.py
index 51d7fd0083d1..b7c4d2daaefd 100644
--- a/dev/build-docker-image-matrix.py
+++ b/dev/build-docker-image-matrix.py
@@ -168,6 +168,13 @@ def tag_latest_ubuntu_with_flwr_version(image: BaseImage) -> List[str]:
tag_latest_ubuntu_with_flwr_version,
lambda image: image.distro.name == DistroName.UBUNTU,
)
+ # ubuntu images for each supported python version
+ + generate_binary_images(
+ "superexec",
+ base_images,
+ tag_latest_ubuntu_with_flwr_version,
+ lambda image: image.distro.name == DistroName.UBUNTU,
+ )
)
print(
diff --git a/dev/build-docs.sh b/dev/build-docs.sh
index f8d4f91508de..f4bf958b0ebf 100755
--- a/dev/build-docs.sh
+++ b/dev/build-docs.sh
@@ -8,9 +8,7 @@ cd $ROOT
./dev/build-baseline-docs.sh
cd $ROOT
-./dev/update-examples.sh
-cd examples/doc
-make docs
+python dev/build-example-docs.py
cd $ROOT
./datasets/dev/build-flwr-datasets-docs.sh
diff --git a/dev/build-example-docs.py b/dev/build-example-docs.py
new file mode 100644
index 000000000000..367994708bf9
--- /dev/null
+++ b/dev/build-example-docs.py
@@ -0,0 +1,283 @@
+# Copyright 2024 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Build the Flower Example docs."""
+
+import os
+import shutil
+import re
+import subprocess
+from pathlib import Path
+
+ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+INDEX = os.path.join(ROOT, "examples", "doc", "source", "index.rst")
+
+initial_text = """
+Flower Examples Documentation
+-----------------------------
+
+Welcome to Flower Examples' documentation. `Flower <https://flower.ai/>`_ is
+a friendly federated learning framework.
+
+Join the Flower Community
+-------------------------
+
+The Flower Community is growing quickly - we're a friendly group of researchers,
+engineers, students, professionals, academics, and other enthusiasts.
+
+.. button-link:: https://flower.ai/join-slack
+ :color: primary
+ :shadow:
+
+ Join us on Slack
+
+Quickstart Examples
+-------------------
+
+Flower Quickstart Examples are a collection of demo projects that show how you
+can use Flower in combination with other existing frameworks or technologies.
+
+"""
+
+table_headers = (
+ "\n.. list-table::\n :widths: 50 15 15 15\n "
+ ":header-rows: 1\n\n * - Title\n - Framework\n - Dataset\n - Tags\n\n"
+)
+
+categories = {
+ "quickstart": {"table": table_headers, "list": ""},
+ "advanced": {"table": table_headers, "list": ""},
+ "other": {"table": table_headers, "list": ""},
+}
+
+urls = {
+ # Frameworks
+ "Android": "https://www.android.com/",
+ "C++": "https://isocpp.org/",
+ "Docker": "https://www.docker.com/",
+ "JAX": "https://jax.readthedocs.io/en/latest/",
+ "Java": "https://www.java.com/",
+ "Keras": "https://keras.io/",
+ "Kotlin": "https://kotlinlang.org/",
+ "mlcube": "https://docs.mlcommons.org/mlcube/",
+ "MLX": "https://ml-explore.github.io/mlx/build/html/index.html",
+ "MONAI": "https://monai.io/",
+ "PEFT": "https://huggingface.co/docs/peft/index",
+ "Swift": "https://www.swift.org/",
+ "TensorFlowLite": "https://www.tensorflow.org/lite",
+ "fastai": "https://fast.ai/",
+ "lifelines": "https://lifelines.readthedocs.io/en/latest/index.html",
+ "lightning": "https://lightning.ai/docs/pytorch/stable/",
+ "numpy": "https://numpy.org/",
+ "opacus": "https://opacus.ai/",
+ "pandas": "https://pandas.pydata.org/",
+ "scikit-learn": "https://scikit-learn.org/",
+ "tabnet": "https://github.com/titu1994/tf-TabNet",
+ "tensorboard": "https://www.tensorflow.org/tensorboard",
+ "tensorflow": "https://www.tensorflow.org/",
+ "torch": "https://pytorch.org/",
+ "torchvision": "https://pytorch.org/vision/stable/index.html",
+ "transformers": "https://huggingface.co/docs/transformers/index",
+ "wandb": "https://wandb.ai/home",
+ "whisper": "https://huggingface.co/openai/whisper-tiny",
+ "xgboost": "https://xgboost.readthedocs.io/en/stable/",
+ # Datasets
+ "Adult Census Income": "https://www.kaggle.com/datasets/uciml/adult-census-income/data",
+ "Alpaca-GPT4": "https://huggingface.co/datasets/vicgalle/alpaca-gpt4",
+ "CIFAR-10": "https://huggingface.co/datasets/uoft-cs/cifar10",
+ "HIGGS": "https://archive.ics.uci.edu/dataset/280/higgs",
+ "IMDB": "https://huggingface.co/datasets/stanfordnlp/imdb",
+ "Iris": "https://scikit-learn.org/stable/auto_examples/datasets/plot_iris_dataset.html",
+ "MNIST": "https://huggingface.co/datasets/ylecun/mnist",
+ "MedNIST": "https://medmnist.com/",
+ "Oxford Flower-102": "https://www.robots.ox.ac.uk/~vgg/data/flowers/102/",
+ "SpeechCommands": "https://huggingface.co/datasets/google/speech_commands",
+ "Titanic": "https://www.kaggle.com/competitions/titanic",
+ "Waltons": "https://lifelines.readthedocs.io/en/latest/lifelines.datasets.html#lifelines.datasets.load_waltons",
+}
+
+
+def _convert_to_link(search_result):
+ if "," in search_result:
+ result = ""
+ for part in search_result.split(","):
+ result += f"{_convert_to_link(part)}, "
+ return result[:-2]
+ else:
+ search_result = search_result.strip()
+ name, url = search_result, urls.get(search_result, None)
+ if url:
+ return f"`{name.strip()} <{url.strip()}>`_"
+ else:
+ return search_result
+
+
+def _read_metadata(example):
+ with open(os.path.join(example, "README.md")) as f:
+ content = f.read()
+
+ metadata_match = re.search(r"^---(.*?)^---", content, re.DOTALL | re.MULTILINE)
+ if not metadata_match:
+ raise ValueError("Metadata block not found")
+ metadata = metadata_match.group(1)
+
+ title_match = re.search(r"^# (.+)$", content, re.MULTILINE)
+ if not title_match:
+ raise ValueError("Title not found in metadata")
+ title = title_match.group(1).strip()
+
+ tags_match = re.search(r"^tags:\s*\[(.+?)\]$", metadata, re.MULTILINE)
+ if not tags_match:
+ raise ValueError("Tags not found in metadata")
+ tags = tags_match.group(1).strip()
+
+ dataset_match = re.search(
+ r"^dataset:\s*\[(.*?)\]$", metadata, re.DOTALL | re.MULTILINE
+ )
+ if not dataset_match:
+ raise ValueError("Dataset not found in metadata")
+ dataset = dataset_match.group(1).strip()
+
+ framework_match = re.search(
+ r"^framework:\s*\[(.*?|)\]$", metadata, re.DOTALL | re.MULTILINE
+ )
+ if not framework_match:
+ raise ValueError("Framework not found in metadata")
+ framework = framework_match.group(1).strip()
+
+ dataset = _convert_to_link(re.sub(r"\s+", " ", dataset).strip())
+ framework = _convert_to_link(re.sub(r"\s+", " ", framework).strip())
+ return title, tags, dataset, framework
+
+
+def _add_table_entry(example, tag, table_var):
+ title, tags, dataset, framework = _read_metadata(example)
+ example_name = Path(example).stem
+ table_entry = (
+ f" * - `{title} <{example_name}.html>`_ \n "
+ f"- {framework} \n - {dataset} \n - {tags}\n\n"
+ )
+ if tag in tags:
+ categories[table_var]["table"] += table_entry
+ categories[table_var]["list"] += f" {example_name}\n"
+ return True
+ return False
+
+
+def _copy_markdown_files(example):
+ for file in os.listdir(example):
+ if file.endswith(".md"):
+ src = os.path.join(example, file)
+ dest = os.path.join(
+ ROOT, "examples", "doc", "source", os.path.basename(example) + ".md"
+ )
+ shutil.copyfile(src, dest)
+
+
+def _add_gh_button(example):
+ gh_text = f'[](https://github.com/adap/flower/blob/main/examples/{example})'
+ readme_file = os.path.join(ROOT, "examples", "doc", "source", example + ".md")
+ with open(readme_file, "r+") as f:
+ content = f.read()
+ if gh_text not in content:
+ content = re.sub(
+ r"(^# .+$)", rf"\1\n\n{gh_text}", content, count=1, flags=re.MULTILINE
+ )
+ f.seek(0)
+ f.write(content)
+ f.truncate()
+
+
+def _copy_images(example):
+ static_dir = os.path.join(example, "_static")
+ dest_dir = os.path.join(ROOT, "examples", "doc", "source", "_static")
+ if os.path.isdir(static_dir):
+ for file in os.listdir(static_dir):
+ if file.endswith((".jpg", ".png", ".jpeg")):
+ shutil.copyfile(
+ os.path.join(static_dir, file), os.path.join(dest_dir, file)
+ )
+
+
+def _add_all_entries():
+ examples_dir = os.path.join(ROOT, "examples")
+ for example in sorted(os.listdir(examples_dir)):
+ example_path = os.path.join(examples_dir, example)
+ if os.path.isdir(example_path) and example != "doc":
+ _copy_markdown_files(example_path)
+ _add_gh_button(example)
+ _copy_images(example)
+
+
+def _main():
+ if os.path.exists(INDEX):
+ os.remove(INDEX)
+
+ with open(INDEX, "w") as index_file:
+ index_file.write(initial_text)
+
+ examples_dir = os.path.join(ROOT, "examples")
+ for example in sorted(os.listdir(examples_dir)):
+ example_path = os.path.join(examples_dir, example)
+ if os.path.isdir(example_path) and example != "doc":
+ _copy_markdown_files(example_path)
+ _add_gh_button(example)
+ _copy_images(example_path)
+ if not _add_table_entry(example_path, "quickstart", "quickstart"):
+ if not _add_table_entry(example_path, "comprehensive", "comprehensive"):
+ if not _add_table_entry(example_path, "advanced", "advanced"):
+ _add_table_entry(example_path, "", "other")
+
+ with open(INDEX, "a") as index_file:
+ index_file.write(categories["quickstart"]["table"])
+
+ index_file.write("\nAdvanced Examples\n-----------------\n")
+ index_file.write(
+ "Advanced Examples are mostly for users that are both familiar with "
+ "Federated Learning but also somewhat familiar with Flower's main "
+ "features.\n"
+ )
+ index_file.write(categories["advanced"]["table"])
+
+ index_file.write("\nOther Examples\n--------------\n")
+ index_file.write(
+ "Flower Examples are a collection of example projects written with "
+ "Flower that explore different domains and features. You can check "
+ "which examples already exist and/or contribute your own example.\n"
+ )
+ index_file.write(categories["other"]["table"])
+
+ _add_all_entries()
+
+ index_file.write(
+ "\n.. toctree::\n :maxdepth: 1\n :caption: Quickstart\n :hidden:\n\n"
+ )
+ index_file.write(categories["quickstart"]["list"])
+
+ index_file.write(
+ "\n.. toctree::\n :maxdepth: 1\n :caption: Advanced\n :hidden:\n\n"
+ )
+ index_file.write(categories["advanced"]["list"])
+
+ index_file.write(
+ "\n.. toctree::\n :maxdepth: 1\n :caption: Others\n :hidden:\n\n"
+ )
+ index_file.write(categories["other"]["list"])
+
+ index_file.write("\n")
+
+
+if __name__ == "__main__":
+ _main()
+ subprocess.call(f"cd {ROOT}/examples/doc && make html", shell=True)
diff --git a/dev/changelog_config.toml b/dev/changelog_config.toml
index c5ff1bcdd1c1..637ea9b4b2c6 100644
--- a/dev/changelog_config.toml
+++ b/dev/changelog_config.toml
@@ -3,7 +3,7 @@
type = ["ci", "docs", "feat", "fix", "refactor", "break"]
-project = ["framework", "baselines", "datasets", "examples"]
+project = ["framework", "baselines", "datasets", "examples", "benchmarks"]
scope = "skip"
diff --git a/dev/format.sh b/dev/format.sh
index 71edf9c6065a..e1e2abc307f1 100755
--- a/dev/format.sh
+++ b/dev/format.sh
@@ -18,6 +18,11 @@ find src/proto/flwr/proto -name *.proto | grep "\.proto" | xargs clang-format -i
python -m black -q examples
python -m docformatter -i -r examples
+# Benchmarks
+python -m isort benchmarks
+python -m black -q benchmarks
+python -m docformatter -i -r benchmarks
+
# E2E
python -m isort e2e
python -m black -q e2e
diff --git a/dev/test.sh b/dev/test.sh
index 8cbe88c9298b..58ac0b3d24cd 100755
--- a/dev/test.sh
+++ b/dev/test.sh
@@ -11,11 +11,11 @@ clang-format --Werror --dry-run src/proto/flwr/proto/*
echo "- clang-format: done"
echo "- isort: start"
-python -m isort --check-only --skip src/py/flwr/proto src/py/flwr e2e
+python -m isort --check-only --skip src/py/flwr/proto src/py/flwr benchmarks e2e
echo "- isort: done"
echo "- black: start"
-python -m black --exclude "src\/py\/flwr\/proto" --check src/py/flwr examples e2e
+python -m black --exclude "src\/py\/flwr\/proto" --check src/py/flwr benchmarks examples e2e
echo "- black: done"
echo "- init_py_check: start"
diff --git a/dev/update-examples.sh b/dev/update-examples.sh
deleted file mode 100755
index 1076b4621984..000000000000
--- a/dev/update-examples.sh
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/bin/bash
-set -e
-cd "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"/../
-
-ROOT=`pwd`
-INDEX=$ROOT/examples/doc/source/index.md
-INSERT_LINE=6
-
-copy_markdown_files () {
- for file in $1/*.md; do
- # Copy the README into the source of the Example docs as the name of the example
- if [[ $(basename "$file") = "README.md" ]]; then
- cp $file $ROOT/examples/doc/source/$1.md 2>&1 >/dev/null
- else
- # If the example contains other markdown files, copy them to the source of the Example docs
- cp $file $ROOT/examples/doc/source/$(basename "$file") 2>&1 >/dev/null
- fi
- done
-}
-
-add_gh_button () {
- gh_text="[](https://github.com/adap/flower/blob/main/examples/$1)"
- readme_file="$ROOT/examples/doc/source/$1.md"
-
- if ! grep -Fq "$gh_text" "$readme_file"; then
- awk -v text="$gh_text" '
- /^# / && !found {
- print $0 "\n" text;
- found=1;
- next;
- }
- { print }
- ' "$readme_file" > tmpfile && mv tmpfile "$readme_file"
- fi
-}
-
-copy_images () {
- if [ -d "$1/_static" ]; then
- cp $1/_static/**.{jpg,png,jpeg} $ROOT/examples/doc/source/_static/ 2>/dev/null || true
- fi
-}
-
-add_to_index () {
- (echo $INSERT_LINE; echo a; echo $1; echo .; echo wq) | ed $INDEX 2>&1 >/dev/null
-}
-
-add_single_entry () {
- # Copy markdown files to correct folder
- copy_markdown_files $1
-
- # Add button linked to GitHub
- add_gh_button $1
-
- # Copy all images of the _static folder into the examples
- # docs static folder
- copy_images $1
-
- # Insert the name of the example into the index file
- add_to_index $1
-}
-
-add_all_entries () {
- cd $ROOT/examples
- # Iterate through each folder in examples/
- for d in $(printf '%s\n' */ | sort -V); do
- # Add entry based on the name of the folder
- example=${d%/}
-
- if [[ $example != doc ]]; then
- add_single_entry $example
- fi
- done
-}
-
-# Clean up before starting
-rm -f $ROOT/examples/doc/source/*.md
-rm -f $INDEX
-
-# Create empty index file
-touch $INDEX
-
-echo "# Flower Examples Documentation" >> $INDEX
-echo "" >> $INDEX
-echo "\`\`\`{toctree}" >> $INDEX
-echo "---" >> $INDEX
-echo "maxdepth: 1" >> $INDEX
-echo "---" >> $INDEX
-
-add_all_entries
-
-echo "\`\`\`" >> $INDEX
diff --git a/doc/build-versioned-docs.sh b/doc/build-versioned-docs.sh
index 6c1b6dd9c5fc..4d3462718385 100755
--- a/doc/build-versioned-docs.sh
+++ b/doc/build-versioned-docs.sh
@@ -82,9 +82,9 @@ done
# Build the main version (main for GH CI, local branch for local)
if [ $GITHUB_ACTIONS ]
then
- git switch main
+ git checkout --force main
else
- git switch $current_branch
+ git checkout --force $current_branch
fi
current_version=main
diff --git a/doc/locales/ko/LC_MESSAGES/framework-docs.po b/doc/locales/ko/LC_MESSAGES/framework-docs.po
index 3c41a8647c35..f01f9eaf7bd9 100644
--- a/doc/locales/ko/LC_MESSAGES/framework-docs.po
+++ b/doc/locales/ko/LC_MESSAGES/framework-docs.po
@@ -8,8 +8,8 @@ msgstr ""
"Project-Id-Version: Flower main\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-06-17 16:09+0200\n"
-"PO-Revision-Date: 2024-06-23 14:41+0000\n"
-"Last-Translator: 박태현 \n"
+"PO-Revision-Date: 2024-06-25 10:43+0000\n"
+"Last-Translator: \"Young D. Kwon\" \n"
"Language-Team: Korean \n"
"Language: ko\n"
@@ -8575,7 +8575,7 @@ msgstr ":py:obj:`context `\\"
#: flwr.simulation.app.start_simulation
#: flwr.simulation.run_simulation.run_simulation of
msgid "Parameters"
-msgstr "매개변수"
+msgstr "파라미터"
#: flwr.client.client.Client.evaluate:3 of
msgid ""
@@ -22278,7 +22278,7 @@ msgid ""
msgstr ""
"이 튜토리얼에서 연합 학습이 무엇인지 배우고 Flower로 첫 번째 시스템을 "
"구축하고 점진적으로 확장해 나갈 것입니다. 본 튜토리얼의 모든 부분을 완성할 "
-"수 있다면, 당신은 고급 연방 학습 시스템을 구축하여 그 분야의 현재 기술 "
+"수 있다면, 당신은 고급 연합 학습 시스템을 구축하여 그 분야의 현재 최고 기술 "
"수준에 접근할 수 있을 것입니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:15
@@ -22287,8 +22287,9 @@ msgid ""
"learning. Only a basic understanding of data science and Python programming "
"is assumed."
msgstr ""
-"🧑🏫이 튜토리얼은 제로베이부터 시작되며 연방 학습에 상세히 아는 필요가 "
-"없습니다. 데이터 과학과 파이썬 프로그래밍에 대한 기본적인 이해만 가정합니다."
+"🧑🏫이 튜토리얼은 사전 지식을 많이 필요로 하지 않으며 연합 학습에 대해 "
+"상세히 알 필요는 없습니다. 데이터 과학과 파이썬 프로그래밍에 대한 기본적인 "
+"이해만 가정합니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:17
msgid ""
@@ -22306,13 +22307,13 @@ msgstr ""
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:31
msgid "Classic machine learning"
-msgstr "클래식 머신러닝"
+msgstr "전통적인 머신러닝(기계학습)"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:33
msgid ""
"Before we begin to discuss federated learning, let us quickly recap how most "
"machine learning works today."
-msgstr "연방 학습에 대해 논의하기 전에 현재 대부분의 머신러닝이 어떻게 작동하는지 "
+msgstr "연합 학습에 대해 논의하기 전에 현재 대부분의 머신러닝이 어떻게 작동하는지 "
"간략히 요약하겠습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:35
@@ -22321,8 +22322,8 @@ msgid ""
"neural network (as depicted here), or something else, like classical linear "
"regression."
msgstr ""
-"기계 학습에서 우리는 모델과 데이터를 가지고 있습니다.모델은 신경망((그림과 "
-"같이))일 수도 있고 고전적인 선형 회귀와 같은 다른 것일 수도 있습니다."
+"머신러닝에서 우리는 모델과 데이터를 가지고 있습니다. 모델은 신경망(그림과 "
+"같이)일 수도 있고 고전적인 선형 회귀와 같은 다른 것일 수도 있습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:41
msgid "|93b02017c78049bbbd5ae456dcb2c91b|"
@@ -22373,7 +22374,7 @@ msgstr "|9bc21c7dbd17444a8f070c60786e3484|"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:113
msgid "Data on a phone"
-msgstr "핸드푼에 있는 데이터"
+msgstr "핸드폰에 있는 데이터"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:73
msgid ""
@@ -22401,7 +22402,7 @@ msgid ""
"server can be somewhere in a data center, or somewhere in the cloud."
msgstr ""
"따라서 머신러닝이나 어떤 종류의 데이터 분석을 이용하려면 과거에는 중앙 "
-"서버에서 모든 데이터를 수집하는 방법이 사용되었습니다.이 서버는 데이터 센터 "
+"서버에서 모든 데이터를 수집하는 방법이 사용되었습니다. 이 서버는 데이터 센터 "
"어딘가에 있을 수도 있고 클라우드 어딘가에 있을 수도 있습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:91
@@ -22419,8 +22420,8 @@ msgid ""
"learning approach that we've basically always relied on."
msgstr ""
"모든 데이터가 한 곳에 모이면, 우리는 궁극적으로 머신러닝 알고리즘을 사용하여 "
-"데이터에서 모델을 훈련시킬 수 있습니다.이것이 바로 우리가 기본적으로 "
-"의지해왔던 머신러닝 방법입니다."
+"데이터에서 모델을 훈련시킬 수 있습니다. 이것이 바로 우리가 기본적으로 사용해 "
+"온 머신러닝 방법입니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:103
msgid "|c24c1478b30e4f74839208628a842d1e|"
@@ -22432,7 +22433,7 @@ msgstr "중앙 데이터 훈련"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:130
msgid "Challenges of classical machine learning"
-msgstr "클래식 머신러닝이 만난 도전"
+msgstr "클래식 머신러닝의 어려움"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:132
msgid ""
@@ -22441,9 +22442,9 @@ msgid ""
"traffic. Cases, where all the data is naturally available on a centralized "
"server."
msgstr ""
-"우리가 방금 본 클래식 머신러닝 접근 방식은 경우에 따라 사용될 수 있습니다. "
-"좋은 예로는 휴일 사진을 분류하거나 웹 트래픽을 분석하는 것이 있습니다. "
-"이러한 사례에서 모든 데이터는 자연스럽게 중앙 서버에서 사용할 수 있습니다."
+"우리가 방금 본 전통적 머신러닝의 접근 방식은 경우에 따라 다르게 사용될 수 "
+"있습니다. 좋은 예로는 휴일 사진을 분류하거나 웹 트래픽을 분석하는 것이 "
+"있습니다. 이러한 사례에서 모든 데이터는 자연스럽게 중앙 서버에 존재합니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:138
msgid "|1b3613d7a58847b59e1d3180802dbc09|"
@@ -22459,9 +22460,9 @@ msgid ""
"is not available on a centralized server, or cases where the data available "
"on one server is not enough to train a good model."
msgstr ""
-"그러나 이 방법은 다른 많은 경우에 적용되지 않습니다.예를 들어, 중앙 집중식 "
-"서버에 데이터가 없거나 서버의 데이터가 좋은 모델을 훈련하기에 충분하지 "
-"않습니다."
+"그러나 이 방법은 다른 많은 경우에 적용되지 않을 수 있습니다. 예를 들어, 중앙 "
+"집중식 서버에 데이터가 없거나 서버의 데이터가 좋은 모델을 훈련하기에 "
+"충분하지 않을 수 있습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:150
msgid "|9980b5213db547d0b8024a50992b9e3f|"
@@ -22469,7 +22470,7 @@ msgstr "|9980b5213db547d0b8024a50992b9e3f|"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:175
msgid "Centralized impossible"
-msgstr "집중화 가능"
+msgstr "집중화 불가능"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:156
msgid ""
@@ -22477,8 +22478,8 @@ msgid ""
"does not work for a large number of highly important real-world use cases. "
"Those reasons include:"
msgstr ""
-"클래식 중앙 집중식 머신러닝 방법이 현실 세계에서 매우 중요한 수많은 사용 "
-"사례를 충족시킬 수 없는 이유가 있습니다.이유는 다음과 같은 여러 가지가 "
+"전통적인 중앙 집중식 머신러닝 방법이 현실 세계에서 매우 중요한 수많은 사용 "
+"사례를 충족시킬 수 없는 이유가 있습니다. 이유는 다음과 같은 여러 가지가 "
"있습니다:"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:158
@@ -22497,9 +22498,9 @@ msgstr ""
"PDPL (아르헨티나), KVKK (터키), POPI (남아프리카공화국), FSS (러시아), CDPR "
"(중국), PDPB (인도), PIPA (한국), APPI (일본), PDP (인도네시아), PDPA "
"(싱가포르), APP (호주)등의 법규로 민감한 데이터가 이동하지 않도록 보호하고 "
-"있습니 다. 실제 로이러한 규정은 사용자가 세계의 다른 지역에 살고 데이터가 "
+"있습니다. 실제로 이러한 규정은 사용자가 세계의 다른 지역에 살고 데이터가 "
"다른 데이터 보호 규정에 의해 통제되기 때문에 단일 조직이 자체 사용자 "
-"데이터를 인공 지능 교육에 사용하는 것을 방지하기도 합니다."
+"데이터를 인공 지능 학습에 사용하는 것을 방지하기도 합니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:160
msgid ""
@@ -22510,6 +22511,10 @@ msgid ""
"developed that keyboard, do you? In fact, that use case was the reason "
"federated learning was invented in the first place."
msgstr ""
+"**사용자 선호도**: 규정 외에도 일부 사용 사례에서 사용자는 데이터가 자기 "
+"장치를 떠나지 않기를 예상합니다. 휴대폰의 디지털 키보드에 비밀번호와 "
+"신용카드 정보를 입력하면 비밀번호가 해당 키보드를 개발한 회사의 서버에 뜨길 "
+"원하지는 않겠죠? 사실, 이 사용 사례가 애당초 연합 학습이 발명된 이유였습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:161
msgid ""
@@ -22522,29 +22527,36 @@ msgid ""
"exceedingly expensive infrastructure to process and store. And most of the "
"data isn't even useful."
msgstr ""
+"**데이터 볼륨**: 일부 센서(예: 카메라)는 너무 많은 데이터 볼륨을 생성하여 "
+"모든 데이터를 수집하는 것이 실현 가능하지도 않고 경제적이지도 않습니다(예: "
+"대역폭 또는 통신 효율로 인해). 전국에 수백 개 기차역이 있는 국가 철도 "
+"서비스를 생각해 보세요. 각 기차역에 수 많은 보안 카메라가 설치되어 있다면, "
+"그들이 생산하는 대량의 미가공 된 온디바이스 데이터는 처리 및 저장을 위해 "
+"엄청나게 강력하고 매우 비싼 기반 구조를 필요로 합니다. 그런데 대부분의 "
+"데이터는 유용하지도 않습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:164
msgid "Examples where centralized machine learning does not work include:"
-msgstr ""
+msgstr "중앙 집중식 머신러닝이 작동하지 않는 예는 다음과 같습니다:"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:166
msgid ""
"Sensitive healthcare records from multiple hospitals to train cancer "
"detection models"
-msgstr ""
+msgstr "여러 병원의 민감한 의료기록으로 암 검진 모델 훈련"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:167
msgid ""
"Financial information from different organizations to detect financial fraud"
-msgstr ""
+msgstr "금융 사기를 탐지하기 위한 다양한 조직의 금융 정보"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:168
msgid "Location data from your electric car to make better range prediction"
-msgstr ""
+msgstr "더 나은 범위 예측을 위해 전기 자동차의 위치 데이터"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:169
msgid "End-to-end encrypted messages to train better auto-complete models"
-msgstr ""
+msgstr "더 나은 자동 완성 모델을 훈련시키기 위한 엔드 투 엔드 암호화 된 메시지"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:171
msgid ""
@@ -22556,10 +22568,17 @@ msgid ""
"private data? After all, these are all areas that would benefit "
"significantly from recent advances in AI."
msgstr ""
+"`Brave `__ 브라우저나 `Signal `__ "
+"메신저와 같은 개인 정보 보호 시스템의 인기는 사용자들이 개인 정보 보호에 "
+"신경 쓴다는 것을 보여줍니다. 실제로 그러한 대안이 존재하는 경우 다른 "
+"대안보다 개인 정보 보호 강화 버전을 선택합니다. 그런데 이러한 사례에 "
+"머신러닝 및 데이터 과학을 적용하여 프라이버시 데이터를 활용하려면 어떻게 "
+"해야 합니까? 이 모든 분야는 최근 AI의 발전으로 상당한 이익을 얻을 수 있는 "
+"분야입니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:186
msgid "Federated learning"
-msgstr ""
+msgstr "연합 학습"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:188
msgid ""
@@ -22567,14 +22586,17 @@ msgid ""
"learning on distributed data by moving the training to the data, instead of "
"moving the data to the training. Here's the single-sentence explanation:"
msgstr ""
+"연합 학습은 이 방법을 쉽게 뒤집었습니다. 데이터를 컴퓨팅 센터로 옮기는 대신 "
+"컴퓨팅 능력을 데이터가 생성되는 장소로 이동 시킴으로써 분산된 데이터에서 "
+"머신러닝을 실현합니다. 요약하자면:"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:190
msgid "Central machine learning: move the data to the computation"
-msgstr ""
+msgstr "중앙 집중식 머신러닝: 데이터를 컴퓨팅 센터로 이동"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:191
msgid "Federated (machine) learning: move the computation to the data"
-msgstr ""
+msgstr "연합(기계)학습: 컴퓨팅을 데이터로 옮김"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:193
msgid ""
@@ -22589,20 +22611,29 @@ msgid ""
"more and more areas that can suddenly be reinvented because they now have "
"access to vast amounts of previously inaccessible data."
msgstr ""
+"이를 통해 이전에는 불가능했던 분야에서 머신러닝(및 기타 데이터 과학 방법)을 "
+"사용할 수 있습니다. 이제 다양한 병원이 협력할 수 있도록 함으로써 우수한 의료 "
+"AI 모델을 훈련할 수 있습니다. 다양한 금융 기관의 데이터에 대한 AI 모델을 "
+"훈련하여 금융 사기를 해결할 수 있습니다. 개인 정보 보호를 강화하지 않는 "
+"대안보다 더 나은 AI가 내장된 새로운 개인 정보 보호 강화 애플리케이션(예: "
+"보안 메시징)을 구축할 수 있습니다. 그것들은 떠오르는 몇 가지 예에 "
+"불과합니다. 연합 학습을 구축함에 따라 이전에 액세스할 수 없었던 많은 "
+"데이터에 액세스할 수 있게 되었기 때문에 갑자기 재생될 수 있는 영역이 점점 더 "
+"많아지고 있습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:196
msgid ""
"So how does federated learning work, exactly? Let's start with an intuitive "
"explanation."
-msgstr ""
+msgstr "그렇다면 연합 학습은 어떻게 작동합니까? 직관적인 설명부터 시작하겠습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:199
msgid "Federated learning in five steps"
-msgstr ""
+msgstr "연합 학습의 5단계"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:202
msgid "Step 0: Initialize global model"
-msgstr ""
+msgstr "0단계: 글로벌 모델 초기화"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:204
msgid ""
@@ -22610,20 +22641,23 @@ msgid ""
"in classic centralized learning: we initialize the model parameters, either "
"randomly or from a previously saved checkpoint."
msgstr ""
+"서버에서 모델을 초기화하는 것으로 시작합니다. 이것은 전통적인 중앙 집중식 "
+"학습과도 동일합니다: 임의로 또는 이전에 저장된 체크포인트에서 모델 "
+"매개변수를 초기화합니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:210
msgid "|c7afb4c92d154bfaa5e8cb9a150e17f1|"
-msgstr ""
+msgstr "|c7afb4c92d154bfaa5e8cb9a150e17f1|"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:307
msgid "Initialize global model"
-msgstr ""
+msgstr "글로벌 모델 초기화"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:217
msgid ""
"Step 1: Send model to a number of connected organizations/devices (client "
"nodes)"
-msgstr ""
+msgstr "1단계: 연결된 여러 조직/장치(클라이언트 노드)에 모델 전송"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:219
msgid ""
@@ -22634,20 +22668,25 @@ msgid ""
"the connected nodes instead of all nodes. The reason for this is that "
"selecting more and more client nodes has diminishing returns."
msgstr ""
+"다음으로 글로벌 모델의 파라미터를 연결된 클라이언트 노드(예: 스마트폰과 같은 "
+"에지 디바이스 또는 조직에 속한 서버)로 보냅니다. 이것은 각 참여 노드가 "
+"동일한 모델 매개변수를 사용하여 로컬 훈련을 시작하도록 하기 위함입니다. "
+"일반적으로 모든 노드가 아닌 몇 개의 연결 노드만 사용합니다. 그 이유는 점점 "
+"더 많은 클라이언트 노드를 선택하면 학습의 효율성이 감소하기 때문입니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:225
msgid "|032eb6fed6924ac387b9f13854919196|"
-msgstr ""
+msgstr "|032eb6fed6924ac387b9f13854919196|"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:309
msgid "Send global model"
-msgstr ""
+msgstr "글로벌 모델 전송"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:232
msgid ""
"Step 2: Train model locally on the data of each organization/device (client "
"node)"
-msgstr ""
+msgstr "2단계: 각 조직/장치(클라이언트 노드)의 데이터에 대해 로컬로 모델 훈련"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:234
msgid ""
@@ -22658,18 +22697,23 @@ msgid ""
"little as one epoch on the local data, or even just a few steps (mini-"
"batches)."
msgstr ""
+"이제 모든(선택된) 클라이언트 노드에는 최신 버전의 글로벌 모델 파라미터가 "
+"있으며 로컬 훈련을 시작합니다. 그들은 자신의 로컬 데이터 세트를 사용하여 "
+"자신의 로컬 모델을 훈련합니다. 모델이 완전히 수렴할 때까지 훈련하지 않고 "
+"잠시만 훈련합니다. 이는 로컬 데이터에서 한 단계 정도로 짧거나 몇 단계(mini-"
+"batches)에 불과할 수 있습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:240
msgid "|fbf225add7fd4df5a9bf25a95597d954|"
-msgstr ""
+msgstr "|fbf225add7fd4df5a9bf25a95597d954|"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:311
msgid "Train on local data"
-msgstr ""
+msgstr "로컬 데이터에 대한 훈련"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:247
msgid "Step 3: Return model updates back to the server"
-msgstr ""
+msgstr "3단계: 모델 파라미터를 업데이트하여 서버로 되돌리기"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:249
msgid ""
@@ -22680,18 +22724,23 @@ msgid ""
"The model updates they send can either be the full model parameters or just "
"the gradients that were accumulated during local training."
msgstr ""
+"로컬 훈련 후에는 클라이언트 노드마다 원래 받은 모델 파라미터의 버전이 조금씩 "
+"다릅니다. 파라미터가 다른 이유는 각 클라이언트 노드의 로컬 데이터 세트에 "
+"다른 데이터가 있기 때문입니다. 그런 다음 클라이언트 노드는 이러한 모델 "
+"업데이트를 서버로 다시 보냅니다. 보내는 모델 업데이트는 전체 모델 "
+"파라미터거나 로컬 교육 중에 누적된 그레디언트(gradient)일 수 있습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:255
msgid "|7efbe3d29d8349b89594e8947e910525|"
-msgstr ""
+msgstr "|7efbe3d29d8349b89594e8947e910525|"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:313
msgid "Send model updates"
-msgstr ""
+msgstr "모델 업데이트 전송"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:262
msgid "Step 4: Aggregate model updates into a new global model"
-msgstr ""
+msgstr "4단계: 모델 업데이트를 새 글로벌 모델로 집계"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:264
msgid ""
@@ -22701,6 +22750,11 @@ msgid ""
"didn't we want to have one model that contains the learnings from the data "
"of all 100 client nodes?"
msgstr ""
+"서버는 선택된 클라이언트 노드들로부터 모델 업데이트들을 수신합니다. 서버가 "
+"100개의 클라이언트 노드를 선택했다면 이제 각각 클라이언트의 로컬 데이터를 "
+"기반으로 훈련된 100개의 조금씩 다른 원래 글로벌 모델 버전을 갖게 됩니다. "
+"하지만 우리는 100개의 모든 클라이언트 노드의 데이터에서 학습한 내용을 "
+"포함하는 모델을 하나만 갖고 싶지 않았습니까?"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:266
msgid ""
@@ -22717,18 +22771,29 @@ msgid ""
"examples, then - without weighting - each of the 10 examples would influence "
"the global model ten times as much as each of the 100 examples."
msgstr ""
+"단일 모델 하나를 얻으려면 클라이언트 노드에서 받은 모든 모델 업데이트를 "
+"결합해야 합니다. 이 과정을 *집합*이라고 하며 여러 가지 방법이 있습니다. 가장 "
+"기본적인 방법은 *Federated Averaging* (`McMahan et al., 2016 `__)이라고 하고 보통 줄여서 *FedAvg*로 표기합니다. "
+"*FedAvg* 는 100개의 모델 업데이트를 받아 이름에서 알 수 있듯이 모델 "
+"업데이트를 평균화합니다. 더 정확히 말하면, 모델 업데이트의 *가중 평균* 을 각 "
+"클라이언트가 훈련에 사용한 예제 수에 따라 가중치를 부여합니다. 가중치는 각 "
+"데이터 예제가 결과 글로벌 모델에 동일한 \"영향\" 을 미치는지 확인하는 데 "
+"중요합니다. 한 클라이언트에 10개의 데이터 포인트가 있고 다른 클라이언트에 "
+"100개의 데이터 포인트가 있다면 가중치를 부여하지 않고 10개의 예가 100개의 "
+"사례보다 글로벌 모델에 10배 더 많은 영향을 미칩니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:273
msgid "|329fb3c04c744eda83bb51fa444c2266|"
-msgstr ""
+msgstr "|329fb3c04c744eda83bb51fa444c2266|"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:315
msgid "Aggregate model updates"
-msgstr ""
+msgstr "모델 업데이트 집계"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:280
msgid "Step 5: Repeat steps 1 to 4 until the model converges"
-msgstr ""
+msgstr "5단계: 모델이 수렴할 때까지 1~4단계를 반복합니다"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:282
msgid ""
@@ -22738,6 +22803,11 @@ msgid ""
"models to the server (step 3), and the server then aggregates the model "
"updates to get a new version of the global model (step 4)."
msgstr ""
+"단계 1에서 4는 우리가 말하는 단일 라운드 연합 학습입니다. 글로벌 모델 "
+"파라미터는 참여하는 클라이언트 노드에 전송되고(1단계), 클라이언트 노드는 "
+"로컬 데이터에 대한 훈련을 받고(2단계), 업데이트된 모델을 서버에 "
+"전송하고(3단계), 서버는 모델 업데이트를 집계하여 글로벌 모델의 새로운 버전을 "
+"얻습니다(4단계)."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:284
msgid ""
@@ -22748,6 +22818,11 @@ msgid ""
"repeat this training process over and over again to eventually arrive at a "
"fully trained model that performs well across the data of all client nodes."
msgstr ""
+"한 라운드의 반복에서 해당 반복에 참여하는 각 클라이언트 노드는 짧은 시간 "
+"동안만 훈련합니다. 집계 단계(4단계) 이후 우리 모델이 관련된 모든 클라이언트 "
+"노드의 모든 데이터에 대해 잠시 동안만 훈련되었음을 의미합니다. 그런 다음 "
+"모든 클라이언트 노드의 데이터에서 잘 작동하는 완전히 훈련된 모델에 "
+"도달하려면 이 훈련 과정을 계속 반복해야 합니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:289
msgid ""
@@ -22758,6 +22833,12 @@ msgid ""
"should participate in the next round? What's the best way to aggregate model "
"updates? How can we handle failing client nodes (stragglers)?"
msgstr ""
+"축하합니다, 이제 연합 학습의 기초에 대해 알게 되었습니다. 물론 아직 논의해야 "
+"할 내용이 많지만 이는 연합 학습의 축소판일 뿐입니다. 본 튜토리얼의 "
+"후반부에는 좀 더 자세히 설명하겠습니다. 흥미로운 질문은 다음과 같습니다: "
+"다음 라운드에 참여해야 할 가장 좋은 클라이언트 노드를 어떻게 선택할 수 "
+"있을까요? 모델 업데이트를 집계하는 가장 좋은 방법은 무엇일까요? 실패한 "
+"클라이언트 노드(낙오자)를 어떻게 처리할 수 있을까요?"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:294
msgid ""
@@ -22767,10 +22848,14 @@ msgid ""
"In fact, federated evaluation is an integral part of most federated learning "
"systems."
msgstr ""
+"다양한 클라이언트 노드의 분산된 데이터에 대해 모델을 훈련할 수 있는 것처럼 "
+"해당 데이터에 대한 모델을 평가하여 가치 있는 메트릭(metrics)을 받을 수도 "
+"있습니다. 이를 연합 평가라고 하며 FE라고 약칭하기도 합니다. 사실 연합 평가는 "
+"대부분의 연합 학습 시스템에서 필수적인 부분입니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:297
msgid "Federated analytics"
-msgstr ""
+msgstr "연합 분석"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:299
msgid ""
@@ -22782,6 +22867,13 @@ msgid ""
"other privacy-enhancing technologies like secure aggregation to prevent the "
"server from seeing the results submitted by individual client nodes."
msgstr ""
+"많은 경우 머신러닝은 데이터로부터 가치를 얻기 위한 필수 조건이 아닙니다. "
+"데이터 분석을 통해 귀중한 통찰력을 얻을 수 있지만, 명확한 답변을 얻기에는 "
+"데이터가 충분하지 않은 경우가 많습니다. 특정 유형의 건강 상태가 발생하는 "
+"평균 연령은 몇 살입니까? 연합 분석을 사용하면 여러 클라이언트 노드에서 "
+"이러한 쿼리(query)를 실행할 수 있습니다. 서버가 단일 클라이언트 노드에서 "
+"제출한 결과를 보지 못하도록 보안을 강화한 집합 방식과 같은 다른 프라이버시 "
+"향상 기술과 함께 자주 사용됩니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:305
msgid ""
@@ -22793,10 +22885,16 @@ msgid ""
"identified. This technique can be considered an optimization that provides a "
"quantifiable privacy protection measure."
msgstr ""
+"차분 프라이버시(Differential Privacy)는 연합 학습의 맥락에서 종종 "
+"언급됩니다. 통계 데이터를 분석하고 공유할 때 사용하는 프라이버시 보호 "
+"방식으로, 참가자 개인의 프라이버시를 보장합니다. 차분 프라이버시는 모델 "
+"업데이트에 통계적 잡음(noise)를 추가하여 개별 참가자의 정보를 구별하거나 "
+"재식별할 수 없도록 함으로써 이를 달성합니다. 이 기술은 정량적 개인 정보 보호 "
+"조치를 제공하는 최적화라고 볼 수 있습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:326
msgid "Flower"
-msgstr ""
+msgstr "Flower"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:328
msgid ""
@@ -22808,28 +22906,38 @@ msgid ""
"learning, analytics, and evaluation. It allows the user to federate any "
"workload, any ML framework, and any programming language."
msgstr ""
+"연합 학습, 연합 평가 및 연합 분석은 머신러닝 모델을 앞뒤로 이동하고 로컬 "
+"데이터에 대해 훈련 및 평가한 다음 업데이트된 모델을 통합하기 위한 기본 "
+"프레임워크가 필요합니다. Flower가 제공하는 기반 구조는 간단하고 확장 "
+"가능하며 안전한 방식으로 이러한 목표를 달성합니다. 간단히 말해서, Flower는 "
+"연합 학습, 분석 및 평가를 위한 통합 접근 방식을 제공합니다. 이를 통해 "
+"사용자는 모든 워크로드, 머신러닝 프레임워크 및 모든 프로그래밍 언어를 통합할 "
+"수 있습니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:334
msgid "|c00bf2750bc24d229737a0fe1395f0fc|"
-msgstr ""
+msgstr "|c00bf2750bc24d229737a0fe1395f0fc|"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:340
msgid ""
"Flower federated learning server and client nodes (car, scooter, personal "
"computer, roomba, and phone)"
-msgstr ""
+msgstr "Flower 연합 학습 서버 및 클라이언트 노드(자동차, 스쿠터, 개인용 컴퓨터, "
+"룸바, 전화)"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:353
msgid ""
"Congratulations, you just learned the basics of federated learning and how "
"it relates to the classic (centralized) machine learning!"
-msgstr ""
+msgstr "축하합니다, 지금까지 당신은 연합 학습의 기본 지식과 그것이 어떻게 전통적 ("
+"중앙 집중식) 머신러닝과 관련되는지 배웠습니다!"
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:355
msgid ""
"In the next part of this tutorial, we are going to build a first federated "
"learning system with Flower."
-msgstr ""
+msgstr "이 튜토리얼의 다음 부분에서는 Flower와 함께 첫 번째 연합 학습 시스템을 "
+"구축할 것입니다."
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:373
msgid ""
@@ -22837,6 +22945,9 @@ msgid ""
"framework/tutorial-get-started-with-flower-pytorch.html>`__ shows how to "
"build a simple federated learning system with PyTorch and Flower."
msgstr ""
+"`Flower 연합 학습 튜토리얼- 1부 `__ PyTorch와 Flower를 사용하여 간단한 "
+"연합 학습 시스템을 구축하는 방법을 보여줍니다."
#~ msgid ""
#~ "Currently, Flower provides two images, a ``base`` image and a "
diff --git a/doc/source/_static/docker-ci-release.png b/doc/source/_static/docker-ci-release.png
deleted file mode 100644
index 6ec97ce9fb06..000000000000
Binary files a/doc/source/_static/docker-ci-release.png and /dev/null differ
diff --git a/doc/source/_templates/sidebar/lang.html b/doc/source/_templates/sidebar/lang.html
index b377a53f9c40..bbea57571838 100644
--- a/doc/source/_templates/sidebar/lang.html
+++ b/doc/source/_templates/sidebar/lang.html
@@ -1,9 +1,14 @@
{% if versions or lang %}
-
+
-
{% endif %}
diff --git a/doc/source/contributor-explanation-public-and-private-apis.rst b/doc/source/contributor-explanation-public-and-private-apis.rst
new file mode 100644
index 000000000000..1dfdf88f97d3
--- /dev/null
+++ b/doc/source/contributor-explanation-public-and-private-apis.rst
@@ -0,0 +1,118 @@
+Public and private APIs
+=======================
+
+In Python, everything is public.
+To enable developers to understand which components can be relied upon, Flower declares a public API.
+Components that are part of the public API can be relied upon.
+Changes to the public API are announced in the release notes and are subject to deprecation policies.
+
+Everything that is not part of the public API is part of the private API.
+Even though Python allows accessing them, user code should never use those components.
+Private APIs can change at any time, even in patch releases.
+
+How can you determine whether a component is part of the public API or not? Easy:
+
+- `Use the Flower API reference documentation `_
+- `Use the Flower CLI reference documentation `_
+
+Everything listed in the reference documentation is part of the public API.
+This document explains how Flower maintainers define the public API and how you can determine whether a component is part of the public API or not by reading the Flower source code.
+
+Flower public API
+-----------------
+
+Flower has a well-defined public API. Let's look at this in more detail.
+
+.. important::
+
+ Every component that is reachable by recursively following ``__init__.__all__`` starting from the root package (``flwr``) is part of the public API.
+
+If you want to determine whether a component (class/function/generator/...) is part of the public API or not, you need to start at the root of the ``flwr`` package.
+Let's use ``tree -L 1 -d src/py/flwr`` to look at the Python sub-packages contained in ``flwr``:
+
+.. code-block:: bash
+
+ flwr
+ ├── cli
+ ├── client
+ ├── common
+ ├── proto
+ ├── server
+ └── simulation
+
+Contrast this with the definition of ``__all__`` in the root ``src/py/flwr/__init__.py``:
+
+.. code-block:: python
+
+ # From `flwr/__init__.py`
+ __all__ = [
+ "client",
+ "common",
+ "server",
+ "simulation",
+ ]
+
+You can see that ``flwr`` has six subpackages (``cli``, ``client``, ``common``, ``proto``, ``server``, ``simulation``), but only four of them are "exported" via ``__all__`` (``client``, ``common``, ``server``, ``simulation``).
+
+What does this mean? It means that ``client``, ``common``, ``server`` and ``simulation`` are part of the public API, but ``cli`` and ``proto`` are not.
+The ``flwr`` subpackages ``cli`` and ``proto`` are private APIs.
+A private API can change completely from one release to the next (even in patch releases).
+It can change in a breaking way, it can be renamed (for example, ``flwr.cli`` could be renamed to ``flwr.command``) and it can even be removed completely.
+
+Therefore, as a Flower user:
+
+- ``from flwr import client`` ✅ Ok, you're importing a public API.
+- ``from flwr import proto`` ❌ Not recommended, you're importing a private API.
+
+What about components that are nested deeper in the hierarchy? Let's look at Flower strategies to see another typical pattern.
+Flower strategies like ``FedAvg`` are often imported using ``from flwr.server.strategy import FedAvg``.
+Let's look at ``src/py/flwr/server/strategy/__init__.py``:
+
+.. code-block:: python
+
+ from .fedavg import FedAvg as FedAvg
+ # ... more imports
+
+ __all__ = [
+ "FedAvg",
+ # ... more exports
+ ]
+
+What's notable here is that all strategies are implemented in dedicated modules (e.g., ``fedavg.py``).
+In ``__init__.py``, we *import* the components we want to make part of the public API and then *export* them via ``__all__``.
+Note that we export the component itself (for example, the ``FedAvg`` class), but not the module it is defined in (for example, ``fedavg.py``).
+This allows us to move the definition of ``FedAvg`` into a different module (or even a module in a subpackage) without breaking the public API (as long as we update the import path in ``__init__.py``).
+
+Therefore:
+
+- ``from flwr.server.strategy import FedAvg`` ✅ Ok, you're importing a class that is part of the public API.
+- ``from flwr.server.strategy import fedavg`` ❌ Not recommended, you're importing a private module.
+
+This approach is also implemented in the tooling that automatically builds API reference docs.
+
+Flower public API of private packages
+-------------------------------------
+
+We also use this to define the public API of private subpackages.
+Public, in this context, means the API that other ``flwr`` subpackages should use.
+For example, ``flwr.server.driver`` is a private subpackage (it's not exported via ``src/py/flwr/server/__init__.py``'s ``__all__``).
+
+Still, the private sub-package ``flwr.server.driver`` defines a "public" API using ``__all__`` in ``src/py/flwr/server/driver/__init__.py``:
+
+.. code-block:: python
+
+ from .driver import Driver
+ from .grpc_driver import GrpcDriver
+ from .inmemory_driver import InMemoryDriver
+
+ __all__ = [
+ "Driver",
+ "GrpcDriver",
+ "InMemoryDriver",
+ ]
+
+The interesting part is that both ``GrpcDriver`` and ``InMemoryDriver`` are never used by Flower framework users, only by other parts of the Flower framework codebase.
+Those other parts of the codebase import, for example, ``InMemoryDriver`` using ``from flwr.server.driver import InMemoryDriver`` (i.e., the ``InMemoryDriver`` exported via ``__all__``), not ``from flwr.server.driver.in_memory_driver import InMemoryDriver`` (``in_memory_driver.py`` is the module containing the actual ``InMemoryDriver`` class definition).
+
+This is because ``flwr.server.driver`` defines a public interface for other ``flwr`` subpackages.
+This allows codeowners of ``flwr.server.driver`` to refactor the package without breaking other ``flwr``-internal users.
diff --git a/doc/source/contributor-how-to-release-flower.rst b/doc/source/contributor-how-to-release-flower.rst
index fc4c2d436b05..4853d87bc4c1 100644
--- a/doc/source/contributor-how-to-release-flower.rst
+++ b/doc/source/contributor-how-to-release-flower.rst
@@ -12,24 +12,6 @@ The version number of a release is stated in ``pyproject.toml``. To release a ne
2. Once the changelog has been updated with all the changes, run ``./dev/prepare-release-changelog.sh v``, where ```` is the version stated in ``pyproject.toml`` (notice the ``v`` added before it). This will replace the ``Unreleased`` header of the changelog by the version and current date, and it will add a thanking message for the contributors. Open a pull request with those changes.
3. Once the pull request is merged, tag the release commit with the version number as soon as the PR is merged: ``git tag v`` (notice the ``v`` added before the version number), then ``git push --tags``. This will create a draft release on GitHub containing the correct artifacts and the relevant part of the changelog.
4. Check the draft release on GitHub, and if everything is good, publish it.
-5. Trigger the CI for building the Docker images.
-
-To trigger the workflow, a collaborator must create a ``workflow_dispatch`` event in the
-GitHub CI. This can be done either through the UI or via the GitHub CLI. The event requires only one
-input, the Flower version, to be released.
-
-**Via the UI**
-
-1. Go to the ``Build docker images`` workflow `page `_.
-2. Click on the ``Run workflow`` button and type the new version of Flower in the ``Version of Flower`` input field.
-3. Click on the **green** ``Run workflow`` button.
-
-.. image:: _static/docker-ci-release.png
-
-**Via the GitHub CI**
-
-1. Make sure you are logged in via ``gh auth login`` and that the current working directory is the root of the Flower repository.
-2. Trigger the workflow via ``gh workflow run docker-images.yml -f flwr-version=``.
After the release
-----------------
diff --git a/doc/source/explanation-differential-privacy.rst b/doc/source/explanation-differential-privacy.rst
index 69fd333f9b13..e488f5ccbd57 100644
--- a/doc/source/explanation-differential-privacy.rst
+++ b/doc/source/explanation-differential-privacy.rst
@@ -32,7 +32,7 @@ and for all possible outputs S ⊆ Range(A):
.. math::
\small
- P[M(D_{1} \in A)] \leq e^{\delta} P[M(D_{2} \in A)] + \delta
+ P[M(D_{1}) \in A] \leq e^{\epsilon} P[M(D_{2}) \in A] + \delta
The :math:`\epsilon` parameter, also known as the privacy budget, is a metric of privacy loss.
diff --git a/doc/source/index.rst b/doc/source/index.rst
index f62c5ebf4786..a0115620fce9 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -174,6 +174,7 @@ The Flower community welcomes contributions. The following docs are intended to
:caption: Contributor explanations
contributor-explanation-architecture
+ contributor-explanation-public-and-private-apis
.. toctree::
:maxdepth: 1
diff --git a/doc/source/tutorial-quickstart-xgboost.rst b/doc/source/tutorial-quickstart-xgboost.rst
index 7ac055138814..34ad5f6e99c0 100644
--- a/doc/source/tutorial-quickstart-xgboost.rst
+++ b/doc/source/tutorial-quickstart-xgboost.rst
@@ -96,26 +96,26 @@ Prior to local training, we require loading the HIGGS dataset from Flower Datase
fds = FederatedDataset(dataset="jxie/higgs", partitioners={"train": partitioner})
# Load the partition for this `node_id`
- partition = fds.load_partition(node_id=args.node_id, split="train")
+ partition = fds.load_partition(partition_id=args.partition_id, split="train")
partition.set_format("numpy")
-In this example, we split the dataset into two partitions with uniform distribution (:code:`IidPartitioner(num_partitions=2)`).
-Then, we load the partition for the given client based on :code:`node_id`:
+In this example, we split the dataset into 30 partitions with uniform distribution (:code:`IidPartitioner(num_partitions=30)`).
+Then, we load the partition for the given client based on :code:`partition_id`:
.. code-block:: python
- # We first define arguments parser for user to specify the client/node ID.
+ # We first define arguments parser for user to specify the client/partition ID.
parser = argparse.ArgumentParser()
parser.add_argument(
- "--node-id",
+ "--partition-id",
default=0,
type=int,
- help="Node ID used for the current client.",
+ help="Partition ID used for the current client.",
)
args = parser.parse_args()
- # Load the partition for this `node_id`.
- partition = fds.load_partition(idx=args.node_id, split="train")
+ # Load the partition for this `partition_id`.
+ partition = fds.load_partition(idx=args.partition_id, split="train")
partition.set_format("numpy")
After that, we do train/test splitting on the given partition (client's local data), and transform data format for :code:`xgboost` package.
@@ -186,12 +186,23 @@ We follow the general rule to define :code:`XgbClient` class inherited from :cod
.. code-block:: python
class XgbClient(fl.client.Client):
- def __init__(self):
- self.bst = None
- self.config = None
+ def __init__(
+ self,
+ train_dmatrix,
+ valid_dmatrix,
+ num_train,
+ num_val,
+ num_local_round,
+ params,
+ ):
+ self.train_dmatrix = train_dmatrix
+ self.valid_dmatrix = valid_dmatrix
+ self.num_train = num_train
+ self.num_val = num_val
+ self.num_local_round = num_local_round
+ self.params = params
-The :code:`self.bst` is used to keep the Booster objects that remain consistent across rounds,
-allowing them to store predictions from trees integrated in earlier rounds and maintain other essential data structures for training.
+All required parameters defined above are passed to :code:`XgbClient`'s constructor.
Then, we override :code:`get_parameters`, :code:`fit` and :code:`evaluate` methods insides :code:`XgbClient` class as follows.
@@ -214,27 +225,27 @@ As a result, let's return an empty tensor in :code:`get_parameters` when it is c
.. code-block:: python
def fit(self, ins: FitIns) -> FitRes:
- if not self.bst:
+ global_round = int(ins.config["global_round"])
+ if global_round == 1:
# First round local training
- log(INFO, "Start training at round 1")
bst = xgb.train(
- params,
- train_dmatrix,
- num_boost_round=num_local_round,
- evals=[(valid_dmatrix, "validate"), (train_dmatrix, "train")],
+ self.params,
+ self.train_dmatrix,
+ num_boost_round=self.num_local_round,
+ evals=[(self.valid_dmatrix, "validate"), (self.train_dmatrix, "train")],
)
- self.config = bst.save_config()
- self.bst = bst
else:
+ bst = xgb.Booster(params=self.params)
for item in ins.parameters.tensors:
global_model = bytearray(item)
# Load global model into booster
- self.bst.load_model(global_model)
- self.bst.load_config(self.config)
+ bst.load_model(global_model)
- bst = self._local_boost()
+ # Local training
+ bst = self._local_boost(bst)
+ # Save model
local_model = bst.save_raw("json")
local_model_bytes = bytes(local_model)
@@ -244,60 +255,81 @@ As a result, let's return an empty tensor in :code:`get_parameters` when it is c
message="OK",
),
parameters=Parameters(tensor_type="", tensors=[local_model_bytes]),
- num_examples=num_train,
+ num_examples=self.num_train,
metrics={},
)
In :code:`fit`, at the first round, we call :code:`xgb.train()` to build up the first set of trees.
-the returned Booster object and config are stored in :code:`self.bst` and :code:`self.config`, respectively.
-From the second round, we load the global model sent from server to :code:`self.bst`,
+From the second round, we load the global model sent from the server into a newly built Booster object,
and then update model weights on local training data with function :code:`local_boost` as follows:
.. code-block:: python
- def _local_boost(self):
+ def _local_boost(self, bst_input):
# Update trees based on local training data.
- for i in range(num_local_round):
- self.bst.update(train_dmatrix, self.bst.num_boosted_rounds())
+ for i in range(self.num_local_round):
+ bst_input.update(self.train_dmatrix, bst_input.num_boosted_rounds())
- # Extract the last N=num_local_round trees for sever aggregation
- bst = self.bst[
- self.bst.num_boosted_rounds()
- - num_local_round : self.bst.num_boosted_rounds()
+ # Bagging: extract the last N=num_local_round trees for server aggregation
+ bst = bst_input[
+ bst_input.num_boosted_rounds()
+ - self.num_local_round : bst_input.num_boosted_rounds()
]
-Given :code:`num_local_round`, we update trees by calling :code:`self.bst.update` method.
+ return bst
+
+Given :code:`num_local_round`, we update trees by calling the :code:`bst_input.update` method.
After training, the last :code:`N=num_local_round` trees will be extracted to send to the server.
.. code-block:: python
def evaluate(self, ins: EvaluateIns) -> EvaluateRes:
- eval_results = self.bst.eval_set(
- evals=[(valid_dmatrix, "valid")],
- iteration=self.bst.num_boosted_rounds() - 1,
+ # Load global model
+ bst = xgb.Booster(params=self.params)
+ for para in ins.parameters.tensors:
+ para_b = bytearray(para)
+ bst.load_model(para_b)
+
+ # Run evaluation
+ eval_results = bst.eval_set(
+ evals=[(self.valid_dmatrix, "valid")],
+ iteration=bst.num_boosted_rounds() - 1,
)
auc = round(float(eval_results.split("\t")[1].split(":")[1]), 4)
+ global_round = ins.config["global_round"]
+ log(INFO, f"AUC = {auc} at round {global_round}")
+
return EvaluateRes(
status=Status(
code=Code.OK,
message="OK",
),
loss=0.0,
- num_examples=num_val,
+ num_examples=self.num_val,
metrics={"AUC": auc},
)
-In :code:`evaluate`, we call :code:`self.bst.eval_set` function to conduct evaluation on valid set.
+In :code:`evaluate`, after loading the global model, we call :code:`bst.eval_set` function to conduct evaluation on valid set.
The AUC value will be returned.
Now, we can create an instance of our class :code:`XgbClient` and add one line to actually run this client:
.. code-block:: python
- fl.client.start_client(server_address="127.0.0.1:8080", client=XgbClient())
+ fl.client.start_client(
+ server_address="127.0.0.1:8080",
+ client=XgbClient(
+ train_dmatrix,
+ valid_dmatrix,
+ num_train,
+ num_val,
+ num_local_round,
+ params,
+ ).to_client(),
+ )
-That's it for the client. We only have to implement :code:`Client`and call :code:`fl.client.start_client()`.
+That's it for the client. We only have to implement :code:`Client` and call :code:`fl.client.start_client()`.
The string :code:`"[::]:8080"` tells the client which server to connect to.
In our case we can run the server and the client on the same machine, therefore we use
:code:`"[::]:8080"`. If we run a truly federated workload with the server and
@@ -325,6 +357,8 @@ We first define a strategy for XGBoost bagging aggregation.
min_evaluate_clients=2,
fraction_evaluate=1.0,
evaluate_metrics_aggregation_fn=evaluate_metrics_aggregation,
+ on_evaluate_config_fn=config_func,
+ on_fit_config_fn=config_func,
)
def evaluate_metrics_aggregation(eval_metrics):
@@ -336,8 +370,16 @@ We first define a strategy for XGBoost bagging aggregation.
metrics_aggregated = {"AUC": auc_aggregated}
return metrics_aggregated
+ def config_func(rnd: int) -> Dict[str, str]:
+ """Return a configuration with global epochs."""
+ config = {
+ "global_round": str(rnd),
+ }
+ return config
+
We use two clients for this example.
An :code:`evaluate_metrics_aggregation` function is defined to collect and wighted average the AUC values from clients.
+The :code:`config_func` function is used to return the current FL round number to the client's :code:`fit()` and :code:`evaluate()` methods.
Then, we start the server:
@@ -346,7 +388,7 @@ Then, we start the server:
# Start Flower server
fl.server.start_server(
server_address="0.0.0.0:8080",
- config=fl.server.ServerConfig(num_rounds=num_rounds),
+ config=fl.server.ServerConfig(num_rounds=5),
strategy=strategy,
)
@@ -535,52 +577,66 @@ Open a new terminal and start the first client:
.. code-block:: shell
- $ python3 client.py --node-id=0
+ $ python3 client.py --partition-id=0
Open another terminal and start the second client:
.. code-block:: shell
- $ python3 client.py --node-id=1
+ $ python3 client.py --partition-id=1
Each client will have its own dataset.
You should now see how the training does in the very first terminal (the one that started the server):
.. code-block:: shell
- INFO flwr 2023-11-20 11:21:56,454 | app.py:163 | Starting Flower server, config: ServerConfig(num_rounds=5, round_timeout=None)
- INFO flwr 2023-11-20 11:21:56,473 | app.py:176 | Flower ECE: gRPC server running (5 rounds), SSL is disabled
- INFO flwr 2023-11-20 11:21:56,473 | server.py:89 | Initializing global parameters
- INFO flwr 2023-11-20 11:21:56,473 | server.py:276 | Requesting initial parameters from one random client
- INFO flwr 2023-11-20 11:22:38,302 | server.py:280 | Received initial parameters from one random client
- INFO flwr 2023-11-20 11:22:38,302 | server.py:91 | Evaluating initial parameters
- INFO flwr 2023-11-20 11:22:38,302 | server.py:104 | FL starting
- DEBUG flwr 2023-11-20 11:22:38,302 | server.py:222 | fit_round 1: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:38,636 | server.py:236 | fit_round 1 received 2 results and 0 failures
- DEBUG flwr 2023-11-20 11:22:38,643 | server.py:173 | evaluate_round 1: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:38,653 | server.py:187 | evaluate_round 1 received 2 results and 0 failures
- DEBUG flwr 2023-11-20 11:22:38,653 | server.py:222 | fit_round 2: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:38,721 | server.py:236 | fit_round 2 received 2 results and 0 failures
- DEBUG flwr 2023-11-20 11:22:38,745 | server.py:173 | evaluate_round 2: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:38,756 | server.py:187 | evaluate_round 2 received 2 results and 0 failures
- DEBUG flwr 2023-11-20 11:22:38,756 | server.py:222 | fit_round 3: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:38,831 | server.py:236 | fit_round 3 received 2 results and 0 failures
- DEBUG flwr 2023-11-20 11:22:38,868 | server.py:173 | evaluate_round 3: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:38,881 | server.py:187 | evaluate_round 3 received 2 results and 0 failures
- DEBUG flwr 2023-11-20 11:22:38,881 | server.py:222 | fit_round 4: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:38,960 | server.py:236 | fit_round 4 received 2 results and 0 failures
- DEBUG flwr 2023-11-20 11:22:39,012 | server.py:173 | evaluate_round 4: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:39,026 | server.py:187 | evaluate_round 4 received 2 results and 0 failures
- DEBUG flwr 2023-11-20 11:22:39,026 | server.py:222 | fit_round 5: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:39,111 | server.py:236 | fit_round 5 received 2 results and 0 failures
- DEBUG flwr 2023-11-20 11:22:39,177 | server.py:173 | evaluate_round 5: strategy sampled 2 clients (out of 2)
- DEBUG flwr 2023-11-20 11:22:39,193 | server.py:187 | evaluate_round 5 received 2 results and 0 failures
- INFO flwr 2023-11-20 11:22:39,193 | server.py:153 | FL finished in 0.8905023969999988
- INFO flwr 2023-11-20 11:22:39,193 | app.py:226 | app_fit: losses_distributed [(1, 0), (2, 0), (3, 0), (4, 0), (5, 0)]
- INFO flwr 2023-11-20 11:22:39,193 | app.py:227 | app_fit: metrics_distributed_fit {}
- INFO flwr 2023-11-20 11:22:39,193 | app.py:228 | app_fit: metrics_distributed {'AUC': [(1, 0.7572), (2, 0.7705), (3, 0.77595), (4, 0.78), (5, 0.78385)]}
- INFO flwr 2023-11-20 11:22:39,193 | app.py:229 | app_fit: losses_centralized []
- INFO flwr 2023-11-20 11:22:39,193 | app.py:230 | app_fit: metrics_centralized {}
+ INFO : Starting Flower server, config: num_rounds=5, no round_timeout
+ INFO : Flower ECE: gRPC server running (5 rounds), SSL is disabled
+ INFO : [INIT]
+ INFO : Requesting initial parameters from one random client
+ INFO : Received initial parameters from one random client
+ INFO : Evaluating initial global parameters
+ INFO :
+ INFO : [ROUND 1]
+ INFO : configure_fit: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_fit: received 2 results and 0 failures
+ INFO : configure_evaluate: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_evaluate: received 2 results and 0 failures
+ INFO :
+ INFO : [ROUND 2]
+ INFO : configure_fit: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_fit: received 2 results and 0 failures
+ INFO : configure_evaluate: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_evaluate: received 2 results and 0 failures
+ INFO :
+ INFO : [ROUND 3]
+ INFO : configure_fit: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_fit: received 2 results and 0 failures
+ INFO : configure_evaluate: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_evaluate: received 2 results and 0 failures
+ INFO :
+ INFO : [ROUND 4]
+ INFO : configure_fit: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_fit: received 2 results and 0 failures
+ INFO : configure_evaluate: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_evaluate: received 2 results and 0 failures
+ INFO :
+ INFO : [ROUND 5]
+ INFO : configure_fit: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_fit: received 2 results and 0 failures
+ INFO : configure_evaluate: strategy sampled 2 clients (out of 2)
+ INFO : aggregate_evaluate: received 2 results and 0 failures
+ INFO :
+ INFO : [SUMMARY]
+ INFO : Run finished 5 round(s) in 1.67s
+ INFO : History (loss, distributed):
+ INFO : round 1: 0
+ INFO : round 2: 0
+ INFO : round 3: 0
+ INFO : round 4: 0
+ INFO : round 5: 0
+ INFO : History (metrics, distributed, evaluate):
+ INFO : {'AUC': [(1, 0.76755), (2, 0.775), (3, 0.77935), (4, 0.7836), (5, 0.7872)]}
Congratulations!
You've successfully built and run your first federated XGBoost system.
diff --git a/e2e/bare-client-auth/client.py b/e2e/bare-client-auth/client.py
index e82f17088bd9..c7b0d59b8ea5 100644
--- a/e2e/bare-client-auth/client.py
+++ b/e2e/bare-client-auth/client.py
@@ -1,13 +1,14 @@
import numpy as np
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient
+from flwr.common import Context
model_params = np.array([1])
objective = 5
# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def get_parameters(self, config):
return model_params
@@ -23,10 +24,10 @@ def evaluate(self, parameters, config):
return loss, 1, {"accuracy": accuracy}
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
diff --git a/e2e/bare-https/client.py b/e2e/bare-https/client.py
index 8f5c1412fd01..4a682af3aec3 100644
--- a/e2e/bare-https/client.py
+++ b/e2e/bare-https/client.py
@@ -2,14 +2,15 @@
import numpy as np
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import Context
model_params = np.array([1])
objective = 5
# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def get_parameters(self, config):
return model_params
@@ -25,17 +26,17 @@ def evaluate(self, parameters, config):
return loss, 1, {"accuracy": accuracy}
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
# Start Flower client
- fl.client.start_client(
+ start_client(
server_address="127.0.0.1:8080",
client=FlowerClient().to_client(),
root_certificates=Path("certificates/ca.crt").read_bytes(),
diff --git a/e2e/bare/client.py b/e2e/bare/client.py
index 402d775ac3a9..943e60d5db9f 100644
--- a/e2e/bare/client.py
+++ b/e2e/bare/client.py
@@ -2,8 +2,8 @@
import numpy as np
-import flwr as fl
-from flwr.common import ConfigsRecord
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import ConfigsRecord, Context
SUBSET_SIZE = 1000
STATE_VAR = "timestamp"
@@ -14,7 +14,7 @@
# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def get_parameters(self, config):
return model_params
@@ -51,16 +51,14 @@ def evaluate(self, parameters, config):
)
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
# Start Flower client
- fl.client.start_client(
- server_address="127.0.0.1:8080", client=FlowerClient().to_client()
- )
+ start_client(server_address="127.0.0.1:8080", client=FlowerClient().to_client())
diff --git a/e2e/docker/client.py b/e2e/docker/client.py
index 8451b810416b..44313c7c3af6 100644
--- a/e2e/docker/client.py
+++ b/e2e/docker/client.py
@@ -9,6 +9,7 @@
from torchvision.transforms import Compose, Normalize, ToTensor
from flwr.client import ClientApp, NumPyClient
+from flwr.common import Context
# #############################################################################
# 1. Regular PyTorch pipeline: nn.Module, train, test, and DataLoader
@@ -122,7 +123,7 @@ def evaluate(self, parameters, config):
return loss, len(testloader.dataset), {"accuracy": accuracy}
-def client_fn(cid: str):
+def client_fn(context: Context):
"""Create and return an instance of Flower `Client`."""
return FlowerClient().to_client()
diff --git a/e2e/fastai/README.md b/e2e/framework-fastai/README.md
similarity index 100%
rename from e2e/fastai/README.md
rename to e2e/framework-fastai/README.md
diff --git a/e2e/fastai/client.py b/e2e/framework-fastai/client.py
similarity index 90%
rename from e2e/fastai/client.py
rename to e2e/framework-fastai/client.py
index 1d98a1134941..161b27b5a548 100644
--- a/e2e/fastai/client.py
+++ b/e2e/framework-fastai/client.py
@@ -5,7 +5,8 @@
import torch
from fastai.vision.all import *
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import Context
warnings.filterwarnings("ignore", category=UserWarning)
@@ -29,7 +30,7 @@
# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def get_parameters(self, config):
return [val.cpu().numpy() for _, val in learn.model.state_dict().items()]
@@ -49,18 +50,18 @@ def evaluate(self, parameters, config):
return loss, len(dls.valid), {"accuracy": 1 - error_rate}
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
# Start Flower client
- fl.client.start_client(
+ start_client(
server_address="127.0.0.1:8080",
client=FlowerClient().to_client(),
)
diff --git a/e2e/fastai/pyproject.toml b/e2e/framework-fastai/pyproject.toml
similarity index 100%
rename from e2e/fastai/pyproject.toml
rename to e2e/framework-fastai/pyproject.toml
diff --git a/e2e/fastai/simulation.py b/e2e/framework-fastai/simulation.py
similarity index 100%
rename from e2e/fastai/simulation.py
rename to e2e/framework-fastai/simulation.py
diff --git a/e2e/jax/README.md b/e2e/framework-jax/README.md
similarity index 100%
rename from e2e/jax/README.md
rename to e2e/framework-jax/README.md
diff --git a/e2e/jax/client.py b/e2e/framework-jax/client.py
similarity index 86%
rename from e2e/jax/client.py
rename to e2e/framework-jax/client.py
index 347a005d923a..c9ff67b3e38e 100644
--- a/e2e/jax/client.py
+++ b/e2e/framework-jax/client.py
@@ -6,7 +6,8 @@
import jax_training
import numpy as np
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import Context
# Load data and determine model shape
train_x, train_y, test_x, test_y = jax_training.load_data()
@@ -14,7 +15,7 @@
model_shape = train_x.shape[1:]
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def __init__(self):
self.params = jax_training.load_model(model_shape)
@@ -48,16 +49,14 @@ def evaluate(
return float(loss), num_examples, {"loss": float(loss)}
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
# Start Flower client
- fl.client.start_client(
- server_address="127.0.0.1:8080", client=FlowerClient().to_client()
- )
+ start_client(server_address="127.0.0.1:8080", client=FlowerClient().to_client())
diff --git a/e2e/jax/jax_training.py b/e2e/framework-jax/jax_training.py
similarity index 100%
rename from e2e/jax/jax_training.py
rename to e2e/framework-jax/jax_training.py
diff --git a/e2e/jax/pyproject.toml b/e2e/framework-jax/pyproject.toml
similarity index 100%
rename from e2e/jax/pyproject.toml
rename to e2e/framework-jax/pyproject.toml
diff --git a/e2e/jax/simulation.py b/e2e/framework-jax/simulation.py
similarity index 100%
rename from e2e/jax/simulation.py
rename to e2e/framework-jax/simulation.py
diff --git a/e2e/opacus/.gitignore b/e2e/framework-opacus/.gitignore
similarity index 100%
rename from e2e/opacus/.gitignore
rename to e2e/framework-opacus/.gitignore
diff --git a/e2e/opacus/README.md b/e2e/framework-opacus/README.md
similarity index 100%
rename from e2e/opacus/README.md
rename to e2e/framework-opacus/README.md
diff --git a/e2e/opacus/client.py b/e2e/framework-opacus/client.py
similarity index 96%
rename from e2e/opacus/client.py
rename to e2e/framework-opacus/client.py
index c9ebe319063a..167fa4584e37 100644
--- a/e2e/opacus/client.py
+++ b/e2e/framework-opacus/client.py
@@ -9,7 +9,8 @@
from torch.utils.data import DataLoader
from torchvision.datasets import CIFAR10
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import Context
# Define parameters.
PARAMS = {
@@ -95,7 +96,7 @@ def load_data():
# Define Flower client.
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def __init__(self, model) -> None:
super().__init__()
# Create a privacy engine which will add DP and keep track of the privacy budget.
@@ -139,16 +140,16 @@ def evaluate(self, parameters, config):
return float(loss), len(testloader), {"accuracy": float(accuracy)}
-def client_fn(cid):
+def client_fn(context: Context):
model = Net()
return FlowerClient(model).to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
- fl.client.start_client(
+ start_client(
server_address="127.0.0.1:8080", client=FlowerClient(model).to_client()
)
diff --git a/e2e/opacus/pyproject.toml b/e2e/framework-opacus/pyproject.toml
similarity index 100%
rename from e2e/opacus/pyproject.toml
rename to e2e/framework-opacus/pyproject.toml
diff --git a/e2e/opacus/simulation.py b/e2e/framework-opacus/simulation.py
similarity index 100%
rename from e2e/opacus/simulation.py
rename to e2e/framework-opacus/simulation.py
diff --git a/e2e/pandas/README.md b/e2e/framework-pandas/README.md
similarity index 100%
rename from e2e/pandas/README.md
rename to e2e/framework-pandas/README.md
diff --git a/e2e/pandas/client.py b/e2e/framework-pandas/client.py
similarity index 82%
rename from e2e/pandas/client.py
rename to e2e/framework-pandas/client.py
index 19e15f5a3b11..0c3300e1dd3f 100644
--- a/e2e/pandas/client.py
+++ b/e2e/framework-pandas/client.py
@@ -3,7 +3,8 @@
import numpy as np
import pandas as pd
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import Context
df = pd.read_csv("./data/client.csv")
@@ -16,7 +17,7 @@ def compute_hist(df: pd.DataFrame, col_name: str) -> np.ndarray:
# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def fit(
self, parameters: List[np.ndarray], config: Dict[str, str]
) -> Tuple[List[np.ndarray], int, Dict]:
@@ -32,17 +33,17 @@ def fit(
)
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
# Start Flower client
- fl.client.start_client(
+ start_client(
server_address="127.0.0.1:8080",
client=FlowerClient().to_client(),
)
diff --git a/e2e/pandas/pyproject.toml b/e2e/framework-pandas/pyproject.toml
similarity index 100%
rename from e2e/pandas/pyproject.toml
rename to e2e/framework-pandas/pyproject.toml
diff --git a/e2e/pandas/server.py b/e2e/framework-pandas/server.py
similarity index 100%
rename from e2e/pandas/server.py
rename to e2e/framework-pandas/server.py
diff --git a/e2e/pandas/simulation.py b/e2e/framework-pandas/simulation.py
similarity index 100%
rename from e2e/pandas/simulation.py
rename to e2e/framework-pandas/simulation.py
diff --git a/e2e/pandas/strategy.py b/e2e/framework-pandas/strategy.py
similarity index 100%
rename from e2e/pandas/strategy.py
rename to e2e/framework-pandas/strategy.py
diff --git a/e2e/pytorch-lightning/README.md b/e2e/framework-pytorch-lightning/README.md
similarity index 100%
rename from e2e/pytorch-lightning/README.md
rename to e2e/framework-pytorch-lightning/README.md
diff --git a/e2e/pytorch-lightning/client.py b/e2e/framework-pytorch-lightning/client.py
similarity index 89%
rename from e2e/pytorch-lightning/client.py
rename to e2e/framework-pytorch-lightning/client.py
index fdd55b3dc344..bf291a1ca2c5 100644
--- a/e2e/pytorch-lightning/client.py
+++ b/e2e/framework-pytorch-lightning/client.py
@@ -4,10 +4,11 @@
import pytorch_lightning as pl
import torch
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import Context
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def __init__(self, model, train_loader, val_loader, test_loader):
self.model = model
self.train_loader = train_loader
@@ -51,7 +52,7 @@ def _set_parameters(model, parameters):
model.load_state_dict(state_dict, strict=True)
-def client_fn(cid):
+def client_fn(context: Context):
model = mnist.LitAutoEncoder()
train_loader, val_loader, test_loader = mnist.load_data()
@@ -59,7 +60,7 @@ def client_fn(cid):
return FlowerClient(model, train_loader, val_loader, test_loader).to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
@@ -71,7 +72,7 @@ def main() -> None:
# Flower client
client = FlowerClient(model, train_loader, val_loader, test_loader).to_client()
- fl.client.start_client(server_address="127.0.0.1:8080", client=client)
+ start_client(server_address="127.0.0.1:8080", client=client)
if __name__ == "__main__":
diff --git a/e2e/pytorch-lightning/mnist.py b/e2e/framework-pytorch-lightning/mnist.py
similarity index 100%
rename from e2e/pytorch-lightning/mnist.py
rename to e2e/framework-pytorch-lightning/mnist.py
diff --git a/e2e/pytorch-lightning/pyproject.toml b/e2e/framework-pytorch-lightning/pyproject.toml
similarity index 100%
rename from e2e/pytorch-lightning/pyproject.toml
rename to e2e/framework-pytorch-lightning/pyproject.toml
diff --git a/e2e/pytorch-lightning/simulation.py b/e2e/framework-pytorch-lightning/simulation.py
similarity index 100%
rename from e2e/pytorch-lightning/simulation.py
rename to e2e/framework-pytorch-lightning/simulation.py
diff --git a/e2e/pytorch/README.md b/e2e/framework-pytorch/README.md
similarity index 100%
rename from e2e/pytorch/README.md
rename to e2e/framework-pytorch/README.md
diff --git a/e2e/pytorch/client.py b/e2e/framework-pytorch/client.py
similarity index 95%
rename from e2e/pytorch/client.py
rename to e2e/framework-pytorch/client.py
index dbfbfed1ffa7..ab4bc7b5c5b9 100644
--- a/e2e/pytorch/client.py
+++ b/e2e/framework-pytorch/client.py
@@ -10,8 +10,8 @@
from torchvision.transforms import Compose, Normalize, ToTensor
from tqdm import tqdm
-import flwr as fl
-from flwr.common import ConfigsRecord
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import ConfigsRecord, Context
# #############################################################################
# 1. Regular PyTorch pipeline: nn.Module, train, test, and DataLoader
@@ -89,7 +89,7 @@ def load_data():
# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def get_parameters(self, config):
return [val.cpu().numpy() for _, val in net.state_dict().items()]
@@ -136,18 +136,18 @@ def set_parameters(model, parameters):
return
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
# Start Flower client
- fl.client.start_client(
+ start_client(
server_address="127.0.0.1:8080",
client=FlowerClient().to_client(),
)
diff --git a/e2e/pytorch/pyproject.toml b/e2e/framework-pytorch/pyproject.toml
similarity index 100%
rename from e2e/pytorch/pyproject.toml
rename to e2e/framework-pytorch/pyproject.toml
diff --git a/e2e/pytorch/simulation.py b/e2e/framework-pytorch/simulation.py
similarity index 100%
rename from e2e/pytorch/simulation.py
rename to e2e/framework-pytorch/simulation.py
diff --git a/e2e/pytorch/simulation_next.py b/e2e/framework-pytorch/simulation_next.py
similarity index 100%
rename from e2e/pytorch/simulation_next.py
rename to e2e/framework-pytorch/simulation_next.py
diff --git a/e2e/scikit-learn/README.md b/e2e/framework-scikit-learn/README.md
similarity index 100%
rename from e2e/scikit-learn/README.md
rename to e2e/framework-scikit-learn/README.md
diff --git a/e2e/scikit-learn/client.py b/e2e/framework-scikit-learn/client.py
similarity index 86%
rename from e2e/scikit-learn/client.py
rename to e2e/framework-scikit-learn/client.py
index b0691e75a79d..24c6617c1289 100644
--- a/e2e/scikit-learn/client.py
+++ b/e2e/framework-scikit-learn/client.py
@@ -5,7 +5,8 @@
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import log_loss
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import Context
# Load MNIST dataset from https://www.openml.org/d/554
(X_train, y_train), (X_test, y_test) = utils.load_mnist()
@@ -26,7 +27,7 @@
# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def get_parameters(self, config): # type: ignore
return utils.get_model_parameters(model)
@@ -45,16 +46,14 @@ def evaluate(self, parameters, config): # type: ignore
return loss, len(X_test), {"accuracy": accuracy}
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
# Start Flower client
- fl.client.start_client(
- server_address="0.0.0.0:8080", client=FlowerClient().to_client()
- )
+ start_client(server_address="0.0.0.0:8080", client=FlowerClient().to_client())
diff --git a/e2e/scikit-learn/pyproject.toml b/e2e/framework-scikit-learn/pyproject.toml
similarity index 100%
rename from e2e/scikit-learn/pyproject.toml
rename to e2e/framework-scikit-learn/pyproject.toml
diff --git a/e2e/scikit-learn/simulation.py b/e2e/framework-scikit-learn/simulation.py
similarity index 100%
rename from e2e/scikit-learn/simulation.py
rename to e2e/framework-scikit-learn/simulation.py
diff --git a/e2e/scikit-learn/utils.py b/e2e/framework-scikit-learn/utils.py
similarity index 100%
rename from e2e/scikit-learn/utils.py
rename to e2e/framework-scikit-learn/utils.py
diff --git a/e2e/tensorflow/README.md b/e2e/framework-tensorflow/README.md
similarity index 100%
rename from e2e/tensorflow/README.md
rename to e2e/framework-tensorflow/README.md
diff --git a/e2e/tensorflow/client.py b/e2e/framework-tensorflow/client.py
similarity index 81%
rename from e2e/tensorflow/client.py
rename to e2e/framework-tensorflow/client.py
index 779be0c3746d..351f495a3acb 100644
--- a/e2e/tensorflow/client.py
+++ b/e2e/framework-tensorflow/client.py
@@ -2,7 +2,8 @@
import tensorflow as tf
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import Context
SUBSET_SIZE = 1000
@@ -18,7 +19,7 @@
# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def get_parameters(self, config):
return model.get_weights()
@@ -33,16 +34,14 @@ def evaluate(self, parameters, config):
return loss, len(x_test), {"accuracy": accuracy}
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
# Start Flower client
- fl.client.start_client(
- server_address="127.0.0.1:8080", client=FlowerClient().to_client()
- )
+ start_client(server_address="127.0.0.1:8080", client=FlowerClient().to_client())
diff --git a/e2e/tensorflow/pyproject.toml b/e2e/framework-tensorflow/pyproject.toml
similarity index 100%
rename from e2e/tensorflow/pyproject.toml
rename to e2e/framework-tensorflow/pyproject.toml
diff --git a/e2e/tabnet/simulation.py b/e2e/framework-tensorflow/simulation.py
similarity index 100%
rename from e2e/tabnet/simulation.py
rename to e2e/framework-tensorflow/simulation.py
diff --git a/e2e/tensorflow/simulation_next.py b/e2e/framework-tensorflow/simulation_next.py
similarity index 100%
rename from e2e/tensorflow/simulation_next.py
rename to e2e/framework-tensorflow/simulation_next.py
diff --git a/e2e/strategies/client.py b/e2e/strategies/client.py
index 505340e013a5..0403416cc3b7 100644
--- a/e2e/strategies/client.py
+++ b/e2e/strategies/client.py
@@ -2,7 +2,8 @@
import tensorflow as tf
-import flwr as fl
+from flwr.client import ClientApp, NumPyClient, start_client
+from flwr.common import Context
SUBSET_SIZE = 1000
@@ -33,7 +34,7 @@ def get_model():
# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
+class FlowerClient(NumPyClient):
def get_parameters(self, config):
return model.get_weights()
@@ -48,17 +49,15 @@ def evaluate(self, parameters, config):
return loss, len(x_test), {"accuracy": accuracy}
-def client_fn(cid):
+def client_fn(context: Context):
return FlowerClient().to_client()
-app = fl.client.ClientApp(
+app = ClientApp(
client_fn=client_fn,
)
if __name__ == "__main__":
# Start Flower client
- fl.client.start_client(
- server_address="127.0.0.1:8080", client=FlowerClient().to_client()
- )
+ start_client(server_address="127.0.0.1:8080", client=FlowerClient().to_client())
diff --git a/e2e/strategies/test.py b/e2e/strategies/test.py
index abf9cdb5a5c7..c567f33b236b 100644
--- a/e2e/strategies/test.py
+++ b/e2e/strategies/test.py
@@ -3,8 +3,8 @@
import tensorflow as tf
from client import SUBSET_SIZE, FlowerClient, get_model
-import flwr as fl
-from flwr.common import ndarrays_to_parameters
+from flwr.common import Context, ndarrays_to_parameters
+from flwr.server import ServerConfig
from flwr.server.strategy import (
FaultTolerantFedAvg,
FedAdagrad,
@@ -15,6 +15,7 @@
FedYogi,
QFedAvg,
)
+from flwr.simulation import start_simulation
STRATEGY_LIST = [
FedMedian,
@@ -42,8 +43,7 @@ def get_strat(name):
init_model = get_model()
-def client_fn(cid):
- _ = cid
+def client_fn(context: Context):
return FlowerClient()
@@ -71,10 +71,10 @@ def evaluate(server_round, parameters, config):
if start_idx >= OPT_IDX:
strat_args["tau"] = 0.01
-hist = fl.simulation.start_simulation(
+hist = start_simulation(
client_fn=client_fn,
num_clients=2,
- config=fl.server.ServerConfig(num_rounds=3),
+ config=ServerConfig(num_rounds=3),
strategy=strategy(**strat_args),
)
diff --git a/e2e/tabnet/README.md b/e2e/tabnet/README.md
deleted file mode 100644
index 258043c3ffa8..000000000000
--- a/e2e/tabnet/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Flower with Tabnet testing
-
-This directory is used for testing Flower with Tabnet.
-
-It uses the `FedAvg` strategy.
diff --git a/e2e/tabnet/client.py b/e2e/tabnet/client.py
deleted file mode 100644
index 1a7ecfd68f73..000000000000
--- a/e2e/tabnet/client.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import os
-
-import tabnet
-import tensorflow as tf
-import tensorflow_datasets as tfds
-
-import flwr as fl
-
-train_size = 125
-BATCH_SIZE = 50
-col_names = ["sepal_length", "sepal_width", "petal_length", "petal_width"]
-
-
-def transform(ds):
- features = tf.unstack(ds["features"])
- labels = ds["label"]
-
- x = dict(zip(col_names, features))
- y = tf.one_hot(labels, 3)
- return x, y
-
-
-def prepare_iris_dataset():
- ds_full = tfds.load(name="iris", split=tfds.Split.TRAIN)
- ds_full = ds_full.shuffle(150, seed=0)
-
- ds_train = ds_full.take(train_size)
- ds_train = ds_train.map(transform)
- ds_train = ds_train.batch(BATCH_SIZE)
-
- ds_test = ds_full.skip(train_size)
- ds_test = ds_test.map(transform)
- ds_test = ds_test.batch(BATCH_SIZE)
-
- feature_columns = []
- for col_name in col_names:
- feature_columns.append(tf.feature_column.numeric_column(col_name))
-
- return ds_train, ds_test, feature_columns
-
-
-ds_train, ds_test, feature_columns = prepare_iris_dataset()
-# Make TensorFlow log less verbose
-os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
-
-# Load TabNet model
-model = tabnet.TabNetClassifier(
- feature_columns,
- num_classes=3,
- feature_dim=8,
- output_dim=4,
- num_decision_steps=4,
- relaxation_factor=1.0,
- sparsity_coefficient=1e-5,
- batch_momentum=0.98,
- virtual_batch_size=None,
- norm_type="group",
- num_groups=1,
-)
-lr = tf.keras.optimizers.schedules.ExponentialDecay(
- 0.01, decay_steps=100, decay_rate=0.9, staircase=False
-)
-optimizer = tf.keras.optimizers.Adam(lr)
-model.compile(optimizer, loss="categorical_crossentropy", metrics=["accuracy"])
-
-
-# Define Flower client
-class FlowerClient(fl.client.NumPyClient):
- def get_parameters(self, config):
- return model.get_weights()
-
- def fit(self, parameters, config):
- model.set_weights(parameters)
- model.fit(ds_train, epochs=25)
- return model.get_weights(), len(ds_train), {}
-
- def evaluate(self, parameters, config):
- model.set_weights(parameters)
- loss, accuracy = model.evaluate(ds_test)
- return loss, len(ds_train), {"accuracy": accuracy}
-
-
-def client_fn(cid):
- return FlowerClient().to_client()
-
-
-app = fl.client.ClientApp(
- client_fn=client_fn,
-)
-
-if __name__ == "__main__":
- # Start Flower client
- fl.client.start_client(
- server_address="127.0.0.1:8080", client=FlowerClient().to_client()
- )
diff --git a/e2e/tabnet/pyproject.toml b/e2e/tabnet/pyproject.toml
deleted file mode 100644
index 99379ddb607e..000000000000
--- a/e2e/tabnet/pyproject.toml
+++ /dev/null
@@ -1,25 +0,0 @@
-[build-system]
-requires = ["hatchling"]
-build-backend = "hatchling.build"
-
-[project]
-name = "quickstart-tabnet-test"
-version = "0.1.0"
-description = "Tabnet Federated Learning E2E test with Flower"
-authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
-]
-dependencies = [
- "flwr[simulation] @ {root:parent:parent:uri}",
- "tensorflow-cpu>=2.9.1,!=2.11.1; platform_machine == \"x86_64\"",
- "tensorflow-macos>=2.9.1,!=2.11.1; sys_platform == \"darwin\" and platform_machine == \"arm64\"",
- "tensorflow_datasets==4.9.2",
- "tensorflow-io-gcs-filesystem<0.35.0",
- "tabnet==0.1.6",
-]
-
-[tool.hatch.build.targets.wheel]
-packages = ["."]
-
-[tool.hatch.metadata]
-allow-direct-references = true
diff --git a/e2e/tensorflow/simulation.py b/e2e/tensorflow/simulation.py
deleted file mode 100644
index bf05a77cf32a..000000000000
--- a/e2e/tensorflow/simulation.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from client import client_fn
-
-import flwr as fl
-
-hist = fl.simulation.start_simulation(
- client_fn=client_fn,
- num_clients=2,
- config=fl.server.ServerConfig(num_rounds=3),
-)
-
-assert (
- hist.losses_distributed[-1][1] == 0
- or (hist.losses_distributed[0][1] / hist.losses_distributed[-1][1]) >= 0.98
-)
diff --git a/e2e/test.sh b/e2e/test_legacy.sh
similarity index 96%
rename from e2e/test.sh
rename to e2e/test_legacy.sh
index 4ea17a4f994b..dc17ca8c6378 100755
--- a/e2e/test.sh
+++ b/e2e/test_legacy.sh
@@ -2,7 +2,7 @@
set -e
case "$1" in
- pandas)
+ framework-pandas)
server_file="server.py"
;;
bare-https)
diff --git a/e2e/test_reconnection.sh b/e2e/test_reconnection.sh
new file mode 100755
index 000000000000..7f8eaa94bf27
--- /dev/null
+++ b/e2e/test_reconnection.sh
@@ -0,0 +1,88 @@
+#!/bin/bash
+set -e
+
+case "$1" in
+ rest)
+ rest_arg="--rest"
+ server_app_address="http://localhost:9091"
+ server_address="http://localhost:9093"
+ db_arg="--database :flwr-in-memory-state:"
+ ;;
+ sqlite)
+ rest_arg=""
+ server_address="127.0.0.1:9092"
+ server_app_address="127.0.0.1:9091"
+ db_arg="--database $(date +%s).db"
+ ;;
+ *)
+ rest_arg=""
+ server_address="127.0.0.1:9092"
+ server_app_address="127.0.0.1:9091"
+ db_arg="--database :flwr-in-memory-state:"
+ ;;
+esac
+
+dir_arg="--dir ./.."
+
+timeout 2m flower-superlink --insecure $db_arg $rest_arg &
+sl_pid=$!
+echo "Starting SuperLink"
+sleep 3
+
+timeout 2m flower-client-app client:app --insecure $rest_arg --server $server_address &
+cl1_pid=$!
+echo "Starting first client"
+sleep 3
+
+timeout 2m flower-client-app client:app --insecure $rest_arg --server $server_address &
+cl2_pid=$!
+echo "Starting second client"
+sleep 3
+
+# Kill superlink, this should send the clients into their retry loops
+kill $sl_pid
+echo "Killing Superlink"
+sleep 3
+
+# Restart superlink, the clients should now be able to reconnect to it
+timeout 2m flower-superlink --insecure $db_arg $rest_arg &
+sl_pid=$!
+echo "Restarting Superlink"
+sleep 20
+
+# Kill first client, this should send a DeleteNode message to the Superlink
+kill $cl1_pid
+echo "Killing first client"
+sleep 3
+
+# Starting new client, this is so we have enough clients to start the server-app
+timeout 2m flower-client-app client:app --insecure $rest_arg --server $server_address &
+cl1_pid=$!
+echo "Starting new client"
+sleep 5
+
+# We start the server-app to begin the training
+timeout 2m flower-server-app server:app --insecure $dir_arg $rest_arg --server $server_app_address &
+pid=$!
+echo "Starting server-app to start training"
+
+# Kill first client as soon as the training starts,
+# the server-app should just receive a failure in this case and continue the rounds
+# when enough clients are connected
+kill $cl1_pid
+echo "Killing first client"
+sleep 1
+
+# Restart first client so enough clients are connected to continue the FL rounds
+timeout 2m flower-client-app client:app --insecure $rest_arg --server $server_address &
+cl1_pid=$!
+echo "Starting new client"
+
+wait $pid
+res=$?
+
+if [[ "$res" = "0" ]];
+ then echo "Training worked correctly"; kill $cl1_pid; kill $cl2_pid; kill $sl_pid;
+ else echo "Training had an issue" && exit 1;
+fi
+
diff --git a/e2e/test_driver.sh b/e2e/test_superlink.sh
similarity index 90%
rename from e2e/test_driver.sh
rename to e2e/test_superlink.sh
index e177863bab78..1bb81cc47ea1 100755
--- a/e2e/test_driver.sh
+++ b/e2e/test_superlink.sh
@@ -2,7 +2,7 @@
set -e
case "$1" in
- pandas)
+ framework-pandas)
server_arg="--insecure"
client_arg="--insecure"
server_dir="./"
@@ -70,11 +70,11 @@ timeout 2m flower-superlink $server_arg $db_arg $rest_arg_superlink $server_auth
sl_pid=$!
sleep 3
-timeout 2m flower-client-app client:app $client_arg $rest_arg_supernode --superlink $server_address $client_auth_1 &
+timeout 2m flower-supernode client:app $client_arg $rest_arg_supernode --superlink $server_address $client_auth_1 &
cl1_pid=$!
sleep 3
-timeout 2m flower-client-app client:app $client_arg $rest_arg_supernode --superlink $server_address $client_auth_2 &
+timeout 2m flower-supernode client:app $client_arg $rest_arg_supernode --superlink $server_address $client_auth_2 &
cl2_pid=$!
sleep 3
diff --git a/examples/advanced-pytorch/README.md b/examples/advanced-pytorch/README.md
index c1ba85b95879..ac0737673407 100644
--- a/examples/advanced-pytorch/README.md
+++ b/examples/advanced-pytorch/README.md
@@ -1,3 +1,9 @@
+---
+tags: [advanced, vision, fds]
+dataset: [CIFAR-10]
+framework: [torch, torchvision]
+---
+
# Advanced Flower Example (PyTorch)
This example demonstrates an advanced federated learning setup using Flower with PyTorch. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) and it differs from the quickstart example in the following ways:
diff --git a/examples/advanced-tensorflow/README.md b/examples/advanced-tensorflow/README.md
index 94707b5cbc98..375c539d13dd 100644
--- a/examples/advanced-tensorflow/README.md
+++ b/examples/advanced-tensorflow/README.md
@@ -1,3 +1,9 @@
+---
+tags: [advanced, vision, fds]
+dataset: [CIFAR-10]
+framework: [tensorflow, Keras]
+---
+
# Advanced Flower Example (TensorFlow/Keras)
This example demonstrates an advanced federated learning setup using Flower with TensorFlow/Keras. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) and it differs from the quickstart example in the following ways:
diff --git a/examples/android-kotlin/README.md b/examples/android-kotlin/README.md
index 2d0f704fdc0e..6cadb8e436fe 100644
--- a/examples/android-kotlin/README.md
+++ b/examples/android-kotlin/README.md
@@ -1,3 +1,9 @@
+---
+tags: [mobile, vision, sdk]
+dataset: [CIFAR-10]
+framework: [Android, Kotlin, TensorFlowLite]
+---
+
# Flower Android Client Example with Kotlin and TensorFlow Lite 2022
This example is similar to the Flower Android Example in Java:
diff --git a/examples/android/README.md b/examples/android/README.md
index f9f2bb93b8dc..83519f15d04d 100644
--- a/examples/android/README.md
+++ b/examples/android/README.md
@@ -1,3 +1,9 @@
+---
+tags: [mobile, vision, sdk]
+dataset: [CIFAR-10]
+framework: [Android, Java, TensorFlowLite]
+---
+
# Flower Android Example (TensorFlowLite)
This example demonstrates a federated learning setup with Android clients in a background thread. The training on Android is done on a CIFAR10 dataset using TensorFlow Lite. The setup is as follows:
diff --git a/examples/app-pytorch/README.md b/examples/app-pytorch/README.md
index 14de3c7d632e..5cfae8440ed2 100644
--- a/examples/app-pytorch/README.md
+++ b/examples/app-pytorch/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, vision, fds]
+dataset: [CIFAR-10]
+framework: [torch, torchvision]
+---
+
# Flower App (PyTorch) 🧪
> 🧪 = This example covers experimental features that might change in future versions of Flower
diff --git a/examples/app-secure-aggregation/README.md b/examples/app-secure-aggregation/README.md
index d1ea7bdc893f..8e483fb2f6bd 100644
--- a/examples/app-secure-aggregation/README.md
+++ b/examples/app-secure-aggregation/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, vision, fds]
+dataset: []
+framework: [numpy]
+---
+
# Secure aggregation with Flower (the SecAgg+ protocol) 🧪
> 🧪 = This example covers experimental features that might change in future versions of Flower
diff --git a/examples/custom-metrics/README.md b/examples/custom-metrics/README.md
index 317fb6336106..dd6985070cef 100644
--- a/examples/custom-metrics/README.md
+++ b/examples/custom-metrics/README.md
@@ -1,3 +1,10 @@
+---
+title: Example Flower App with Custom Metrics
+tags: [basic, vision, fds]
+dataset: [CIFAR-10]
+framework: [tensorflow]
+---
+
# Flower Example using Custom Metrics
This simple example demonstrates how to calculate custom metrics over multiple clients beyond the traditional ones available in the ML frameworks. In this case, it demonstrates the use of ready-available `scikit-learn` metrics: accuracy, recall, precision, and f1-score.
diff --git a/examples/custom-mods/README.md b/examples/custom-mods/README.md
index 6b03abcfbfe0..c2007eb323ae 100644
--- a/examples/custom-mods/README.md
+++ b/examples/custom-mods/README.md
@@ -1,3 +1,9 @@
+---
+tags: [mods, monitoring, app]
+dataset: [CIFAR-10]
+framework: [wandb, tensorboard]
+---
+
# Using custom mods 🧪
> 🧪 = This example covers experimental features that might change in future versions of Flower
@@ -207,7 +213,7 @@ app = fl.client.ClientApp(
client_fn=client_fn,
mods=[
get_wandb_mod("Custom mods example"),
- ],
+ ],
)
```
diff --git a/examples/doc/source/.gitignore b/examples/doc/source/.gitignore
index dd449725e188..73ee14e96f68 100644
--- a/examples/doc/source/.gitignore
+++ b/examples/doc/source/.gitignore
@@ -1 +1,2 @@
*.md
+index.rst
diff --git a/examples/embedded-devices/README.md b/examples/embedded-devices/README.md
index f1c5931b823a..86f19399932d 100644
--- a/examples/embedded-devices/README.md
+++ b/examples/embedded-devices/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, vision, fds]
+dataset: [CIFAR-10, MNIST]
+framework: [torch, tensorflow]
+---
+
# Federated Learning on Embedded Devices with Flower
This example will show you how Flower makes it very easy to run Federated Learning workloads on edge devices. Here we'll be showing how to use NVIDIA Jetson devices and Raspberry Pi as Flower clients. You can run this example using either PyTorch or Tensorflow. The FL workload (i.e. model, dataset and training loop) is mostly borrowed from the [quickstart-pytorch](https://github.com/adap/flower/tree/main/examples/simulation-pytorch) and [quickstart-tensorflow](https://github.com/adap/flower/tree/main/examples/quickstart-tensorflow) examples.
@@ -65,7 +71,7 @@ If you are working on this tutorial on your laptop or desktop, it can host the F
- Install `pip`. In the terminal type: `sudo apt install python3-pip -y`
- Now clone this directory. You just need to execute the `git clone` command shown at the top of this README.md on your device.
- - Install Flower and your ML framework: We have prepared some convenient installation scripts that will install everything you need. You are free to install other versions of these ML frameworks to suit your needs.
+ - Install Flower and your ML framework of choice: We have prepared some convenient installation scripts that will install everything you need. You are free to install other versions of these ML frameworks to suit your needs.
- If you want your clients to use PyTorch: `pip3 install -r requirements_pytorch.txt`
- If you want your clients to use TensorFlow: `pip3 install -r requirements_tf.txt`
diff --git a/examples/federated-kaplan-meier-fitter/README.md b/examples/federated-kaplan-meier-fitter/README.md
index 1569467d6f82..20d4ca4c47af 100644
--- a/examples/federated-kaplan-meier-fitter/README.md
+++ b/examples/federated-kaplan-meier-fitter/README.md
@@ -1,3 +1,9 @@
+---
+tags: [estimator, medical]
+dataset: [Waltons]
+framework: [lifelines]
+---
+
# Flower Example using KaplanMeierFitter
This is an introductory example on **federated survival analysis** using [Flower](https://flower.ai/)
diff --git a/examples/fl-dp-sa/README.md b/examples/fl-dp-sa/README.md
index 47eedb70a2b8..65c8a5b18fa8 100644
--- a/examples/fl-dp-sa/README.md
+++ b/examples/fl-dp-sa/README.md
@@ -1,4 +1,10 @@
-# fl_dp_sa
+---
+tags: [basic, vision, fds]
+dataset: [MNIST]
+framework: [torch, torchvision]
+---
+
+# Example of Flower App with DP and SA
This is a simple example that utilizes central differential privacy with client-side fixed clipping and secure aggregation.
Note: This example is designed for a small number of rounds and is intended for demonstration purposes.
diff --git a/examples/fl-tabular/README.md b/examples/fl-tabular/README.md
index 58afd1080b70..ee6dd7d00ef0 100644
--- a/examples/fl-tabular/README.md
+++ b/examples/fl-tabular/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, tabular, fds]
+dataset: [Adult Census Income]
+framework: [scikit-learn, torch]
+---
+
# Flower Example on Adult Census Income Tabular Dataset
This code exemplifies a federated learning setup using the Flower framework on the ["Adult Census Income"](https://huggingface.co/datasets/scikit-learn/adult-census-income) tabular dataset. The "Adult Census Income" dataset contains demographic information such as age, education, occupation, etc., with the target attribute being income level (\<=50K or >50K). The dataset is partitioned into subsets, simulating a federated environment with 5 clients, each holding a distinct portion of the data. Categorical variables are one-hot encoded, and the data is split into training and testing sets. Federated learning is conducted using the FedAvg strategy for 5 rounds.
diff --git a/examples/flower-authentication/README.md b/examples/flower-authentication/README.md
index 589270e621c9..d10780eeae5d 100644
--- a/examples/flower-authentication/README.md
+++ b/examples/flower-authentication/README.md
@@ -1,3 +1,9 @@
+---
+tags: [advanced, vision, fds]
+dataset: [CIFAR-10]
+framework: [torch, torchvision]
+---
+
# Flower Authentication with PyTorch 🧪
> 🧪 = This example covers experimental features that might change in future versions of Flower
diff --git a/examples/flower-in-30-minutes/README.md b/examples/flower-in-30-minutes/README.md
index 5fd9b882413b..faec3d72dae2 100644
--- a/examples/flower-in-30-minutes/README.md
+++ b/examples/flower-in-30-minutes/README.md
@@ -1,3 +1,9 @@
+---
+tags: [colab, vision, simulation]
+dataset: [CIFAR-10]
+framework: [torch]
+---
+
# 30-minute tutorial running Flower simulation with PyTorch
This README links to a Jupyter notebook that you can either download and run locally or [](https://colab.research.google.com/github/adap/flower/blob/main/examples/flower-in-30-minutes/tutorial.ipynb). This is a short 30-minute (or less!) tutorial showcasing the basics of Flower federated learning simulations using PyTorch.
diff --git a/examples/flower-simulation-step-by-step-pytorch/README.md b/examples/flower-simulation-step-by-step-pytorch/README.md
index beb8dd7f6f95..b00afedbe80b 100644
--- a/examples/flower-simulation-step-by-step-pytorch/README.md
+++ b/examples/flower-simulation-step-by-step-pytorch/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, vision, simulation]
+dataset: [MNIST]
+framework: [torch]
+---
+
# Flower Simulation Step-by-Step
> Since this tutorial (and its video series) was put together, Flower has been updated a few times. As a result, some of the steps to construct the environment (see below) have been updated. Some parts of the code have also been updated. Overall, the content of this tutorial and how things work remains the same as in the video tutorials.
diff --git a/examples/flower-via-docker-compose/README.md b/examples/flower-via-docker-compose/README.md
index 3ef1ac37bcda..3325a731fecf 100644
--- a/examples/flower-via-docker-compose/README.md
+++ b/examples/flower-via-docker-compose/README.md
@@ -1,3 +1,10 @@
+---
+title: Leveraging Flower and Docker for Device Heterogeneity Management in FL
+tags: [deployment, vision, tutorial]
+dataset: [CIFAR-10]
+framework: [Docker, tensorflow]
+---
+
# Leveraging Flower and Docker for Device Heterogeneity Management in Federated Learning
diff --git a/examples/ios/README.md b/examples/ios/README.md
index 4e17e7a674f3..aef4177dddf7 100644
--- a/examples/ios/README.md
+++ b/examples/ios/README.md
@@ -1,3 +1,9 @@
+---
+tags: [mobile, vision, sdk]
+dataset: [MNIST]
+framework: [Swift]
+---
+
# FLiOS - A Flower SDK for iOS Devices with Example
FLiOS is a sample application for testing and benchmarking the Swift implementation of Flower. The default scenario uses the MNIST dataset and the associated digit recognition model. The app includes the Swift package in `./src/swift` and allows extension for other benchmarking scenarios. The app guides the user through the steps of the machine learning process that would be executed in a normal production environment as a background task of the application. The app is therefore aimed at researchers and research institutions to test their hypotheses and perform performance analyses.
diff --git a/examples/ios/scenarios.ipynb b/examples/ios/scenarios.ipynb
index de3e0e0c8c49..01da347cfee6 100644
--- a/examples/ios/scenarios.ipynb
+++ b/examples/ios/scenarios.ipynb
@@ -7,7 +7,7 @@
"source": [
"# Extending FLiOS Scenarios\n",
"\n",
- "This notebook demonstrates how to download and preprocess further benchmarking datasets and its associated machine learning models for the extenstion of the FLiOS application."
+ "This notebook demonstrates how to download and preprocess further benchmarking datasets and its associated machine learning models for the extension of the FLiOS application."
]
},
{
diff --git a/examples/llm-flowertune/README.md b/examples/llm-flowertune/README.md
index 4f98072f8c7f..46076e0b2078 100644
--- a/examples/llm-flowertune/README.md
+++ b/examples/llm-flowertune/README.md
@@ -1,3 +1,10 @@
+---
+title: Federated LLM Fine-tuning with Flower
+tags: [llm, nlp, LLama2]
+dataset: [Alpaca-GPT4]
+framework: [PEFT, torch]
+---
+
# LLM FlowerTune: Federated LLM Fine-tuning with Flower
Large language models (LLMs), which have been trained on vast amounts of publicly accessible data, have shown remarkable effectiveness in a wide range of areas.
diff --git a/examples/opacus/README.md b/examples/opacus/README.md
index 6fc0d2ff49a0..aea5d0f689fe 100644
--- a/examples/opacus/README.md
+++ b/examples/opacus/README.md
@@ -1,3 +1,9 @@
+---
+tags: [dp, security, fds]
+dataset: [CIFAR-10]
+framework: [opacus, torch]
+---
+
# Training with Sample-Level Differential Privacy using Opacus Privacy Engine
In this example, we demonstrate how to train a model with differential privacy (DP) using Flower. We employ PyTorch and integrate the Opacus Privacy Engine to achieve sample-level differential privacy. This setup ensures robust privacy guarantees during the client training phase. The code is adapted from the [PyTorch Quickstart example](https://github.com/adap/flower/tree/main/examples/quickstart-pytorch).
diff --git a/examples/pytorch-federated-variational-autoencoder/README.md b/examples/pytorch-federated-variational-autoencoder/README.md
index 00af7a6328b2..52f94a16307c 100644
--- a/examples/pytorch-federated-variational-autoencoder/README.md
+++ b/examples/pytorch-federated-variational-autoencoder/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, vision, fds]
+dataset: [CIFAR-10]
+framework: [torch, torchvision]
+---
+
# Flower Example for Federated Variational Autoencoder using Pytorch
This example demonstrates how a variational autoencoder (VAE) can be trained in a federated way using the Flower framework.
diff --git a/examples/pytorch-from-centralized-to-federated/README.md b/examples/pytorch-from-centralized-to-federated/README.md
index 06ee89dddcac..1bff7d02f52c 100644
--- a/examples/pytorch-from-centralized-to-federated/README.md
+++ b/examples/pytorch-from-centralized-to-federated/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, vision, fds]
+dataset: [CIFAR-10]
+framework: [torch]
+---
+
# PyTorch: From Centralized To Federated
This example demonstrates how an already existing centralized PyTorch-based machine learning project can be federated with Flower.
diff --git a/examples/quickstart-cpp/README.md b/examples/quickstart-cpp/README.md
index d6cbeebe1bc6..61b76ece52b0 100644
--- a/examples/quickstart-cpp/README.md
+++ b/examples/quickstart-cpp/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, linear regression, tabular]
+dataset: [Synthetic]
+framework: [C++]
+---
+
# Flower Clients in C++ (under development)
In this example you will train a linear model on synthetic data using C++ clients.
diff --git a/examples/quickstart-fastai/README.md b/examples/quickstart-fastai/README.md
index 38ef23c95a1e..d1bf97cd4203 100644
--- a/examples/quickstart-fastai/README.md
+++ b/examples/quickstart-fastai/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, vision]
+dataset: [MNIST]
+framework: [fastai]
+---
+
# Flower Example using fastai
This introductory example to Flower uses [fastai](https://www.fast.ai/), but deep knowledge of fastai is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case.
diff --git a/examples/quickstart-huggingface/README.md b/examples/quickstart-huggingface/README.md
index ce7790cd4af5..fa4330040ea7 100644
--- a/examples/quickstart-huggingface/README.md
+++ b/examples/quickstart-huggingface/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, llm, nlp, sentiment]
+dataset: [IMDB]
+framework: [transformers]
+---
+
# Federated HuggingFace Transformers using Flower and PyTorch
This introductory example to using [HuggingFace](https://huggingface.co) Transformers with Flower with PyTorch. This example has been extended from the [quickstart-pytorch](https://flower.ai/docs/examples/quickstart-pytorch.html) example. The training script closely follows the [HuggingFace course](https://huggingface.co/course/chapter3?fw=pt), so you are encouraged to check that out for a detailed explanation of the transformer pipeline.
diff --git a/examples/quickstart-jax/README.md b/examples/quickstart-jax/README.md
index 836adf558d88..b47f3a82e13b 100644
--- a/examples/quickstart-jax/README.md
+++ b/examples/quickstart-jax/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, linear regression]
+dataset: [Synthetic]
+framework: [JAX]
+---
+
# JAX: From Centralized To Federated
This example demonstrates how an already existing centralized JAX-based machine learning project can be federated with Flower.
diff --git a/examples/quickstart-mlcube/README.md b/examples/quickstart-mlcube/README.md
index 8e6fc29b3ad8..f0c6c5664a82 100644
--- a/examples/quickstart-mlcube/README.md
+++ b/examples/quickstart-mlcube/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, vision, deployment]
+dataset: [MNIST]
+framework: [mlcube, tensorflow, Keras]
+---
+
# Flower Example using TensorFlow/Keras + MLCube
This introductory example to Flower uses MLCube together with Keras, but deep knowledge of Keras is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use-cases with MLCube. Running this example in itself is quite easy.
diff --git a/examples/quickstart-mlx/README.md b/examples/quickstart-mlx/README.md
index cca55bcb946a..a4ac44bf8460 100644
--- a/examples/quickstart-mlx/README.md
+++ b/examples/quickstart-mlx/README.md
@@ -1,3 +1,10 @@
+---
+title: Simple Flower Example using MLX
+tags: [quickstart, vision]
+dataset: [MNIST]
+framework: [MLX]
+---
+
# Flower Example using MLX
This introductory example to Flower uses [MLX](https://ml-explore.github.io/mlx/build/html/index.html), but deep knowledge of MLX is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. Running this example in itself is quite easy.
diff --git a/examples/quickstart-monai/README.md b/examples/quickstart-monai/README.md
index 4a9afef4f86a..dc31f03e4b1b 100644
--- a/examples/quickstart-monai/README.md
+++ b/examples/quickstart-monai/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, medical, vision]
+dataset: [MedNIST]
+framework: [MONAI]
+---
+
# Flower Example using MONAI
This introductory example to Flower uses MONAI, but deep knowledge of MONAI is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case.
diff --git a/examples/quickstart-pandas/README.md b/examples/quickstart-pandas/README.md
index dd69f3ead3cb..0b4b3a6ac78a 100644
--- a/examples/quickstart-pandas/README.md
+++ b/examples/quickstart-pandas/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, tabular, federated analytics]
+dataset: [Iris]
+framework: [pandas]
+---
+
# Flower Example using Pandas
This introductory example to Flower uses Pandas, but deep knowledge of Pandas is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to
diff --git a/examples/quickstart-pytorch-lightning/README.md b/examples/quickstart-pytorch-lightning/README.md
index fb29c7e9e9ea..04eb911818fc 100644
--- a/examples/quickstart-pytorch-lightning/README.md
+++ b/examples/quickstart-pytorch-lightning/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, vision, fds]
+dataset: [MNIST]
+framework: [lightning]
+---
+
# Flower Example using PyTorch Lightning
This introductory example to Flower uses PyTorch, but deep knowledge of PyTorch Lightning is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. Running this example in itself is quite easy. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to download, partition and preprocess the MNIST dataset.
diff --git a/examples/quickstart-pytorch/README.md b/examples/quickstart-pytorch/README.md
index 93d6a593f362..8eace1ea6845 100644
--- a/examples/quickstart-pytorch/README.md
+++ b/examples/quickstart-pytorch/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, vision, fds]
+dataset: [CIFAR-10]
+framework: [torch, torchvision]
+---
+
# Flower Example using PyTorch
This introductory example to Flower uses PyTorch, but deep knowledge of PyTorch is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. Running this example in itself is quite easy. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to download, partition and preprocess the CIFAR-10 dataset.
diff --git a/examples/quickstart-sklearn-tabular/README.md b/examples/quickstart-sklearn-tabular/README.md
index a975a9392800..b0b4cd1b84c0 100644
--- a/examples/quickstart-sklearn-tabular/README.md
+++ b/examples/quickstart-sklearn-tabular/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, tabular, fds]
+dataset: [Iris]
+framework: [scikit-learn]
+---
+
# Flower Example using scikit-learn
This example of Flower uses `scikit-learn`'s `LogisticRegression` model to train a federated learning system on
diff --git a/examples/quickstart-tabnet/README.md b/examples/quickstart-tabnet/README.md
index 19a139f83064..e8be55eaacef 100644
--- a/examples/quickstart-tabnet/README.md
+++ b/examples/quickstart-tabnet/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, tabular]
+dataset: [Iris]
+framework: [tabnet]
+---
+
# Flower TabNet Example using TensorFlow
This introductory example to Flower uses Keras but deep knowledge of Keras is not necessarily required to run the example. However, it will help you understanding how to adapt Flower to your use-cases. You can learn more about TabNet from [paper](https://arxiv.org/abs/1908.07442) and its implementation using TensorFlow at [this repository](https://github.com/titu1994/tf-TabNet). Note also that the basis of this example using federated learning is the example from the repository above.
diff --git a/examples/quickstart-tensorflow/README.md b/examples/quickstart-tensorflow/README.md
index ae1fe19834a3..386f8bbd96f0 100644
--- a/examples/quickstart-tensorflow/README.md
+++ b/examples/quickstart-tensorflow/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, vision, fds]
+dataset: [CIFAR-10]
+framework: [tensorflow]
+---
+
# Flower Example using TensorFlow/Keras
This introductory example to Flower uses Keras but deep knowledge of Keras is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case.
diff --git a/examples/simulation-pytorch/README.md b/examples/simulation-pytorch/README.md
index 93f9e1acbac7..2dbfbc849ab7 100644
--- a/examples/simulation-pytorch/README.md
+++ b/examples/simulation-pytorch/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, vision, fds, simulation]
+dataset: [MNIST]
+framework: [torch, torchvision]
+---
+
# Flower Simulation example using PyTorch
This introductory example uses the simulation capabilities of Flower to simulate a large number of clients on a single machine. Take a look at the [Documentation](https://flower.ai/docs/framework/how-to-run-simulations.html) for a deep dive into how Flower simulation works. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to download, partition and preprocess the MNIST dataset. This examples uses 100 clients by default.
diff --git a/examples/simulation-pytorch/sim.py b/examples/simulation-pytorch/sim.py
index db68e75653fc..dcc0f39a79ef 100644
--- a/examples/simulation-pytorch/sim.py
+++ b/examples/simulation-pytorch/sim.py
@@ -87,11 +87,13 @@ def get_client_fn(dataset: FederatedDataset):
the strategy to participate.
"""
- def client_fn(cid: str) -> fl.client.Client:
+ def client_fn(context) -> fl.client.Client:
"""Construct a FlowerClient with its own dataset partition."""
# Let's get the partition corresponding to the i-th client
- client_dataset = dataset.load_partition(int(cid), "train")
+ client_dataset = dataset.load_partition(
+ int(context.node_config["partition-id"]), "train"
+ )
# Now let's split it into train (90%) and validation (10%)
client_dataset_splits = client_dataset.train_test_split(test_size=0.1, seed=42)
@@ -171,15 +173,23 @@ def evaluate(
mnist_fds = FederatedDataset(dataset="mnist", partitioners={"train": NUM_CLIENTS})
centralized_testset = mnist_fds.load_split("test")
-# Configure the strategy
-strategy = fl.server.strategy.FedAvg(
- fraction_fit=0.1, # Sample 10% of available clients for training
- fraction_evaluate=0.05, # Sample 5% of available clients for evaluation
- min_available_clients=10,
- on_fit_config_fn=fit_config,
- evaluate_metrics_aggregation_fn=weighted_average, # Aggregate federated metrics
- evaluate_fn=get_evaluate_fn(centralized_testset), # Global evaluation function
-)
+from flwr.server import ServerAppComponents
+
+
+def server_fn(context):
+ # Configure the strategy
+ strategy = fl.server.strategy.FedAvg(
+ fraction_fit=0.1, # Sample 10% of available clients for training
+ fraction_evaluate=0.05, # Sample 5% of available clients for evaluation
+ min_available_clients=10,
+ on_fit_config_fn=fit_config,
+ evaluate_metrics_aggregation_fn=weighted_average, # Aggregate federated metrics
+ evaluate_fn=get_evaluate_fn(centralized_testset), # Global evaluation function
+ )
+ return ServerAppComponents(
+ strategy=strategy, config=fl.server.ServerConfig(num_rounds=NUM_ROUNDS)
+ )
+
# ClientApp for Flower-Next
client = fl.client.ClientApp(
@@ -187,10 +197,7 @@ def evaluate(
)
# ServerApp for Flower-Next
-server = fl.server.ServerApp(
- config=fl.server.ServerConfig(num_rounds=NUM_ROUNDS),
- strategy=strategy,
-)
+server = fl.server.ServerApp(server_fn=server_fn)
def main():
diff --git a/examples/simulation-tensorflow/README.md b/examples/simulation-tensorflow/README.md
index 917d7b34c7af..047cb4379659 100644
--- a/examples/simulation-tensorflow/README.md
+++ b/examples/simulation-tensorflow/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, vision, fds, simulation]
+dataset: [MNIST]
+framework: [tensorflow, Keras]
+---
+
# Flower Simulation example using TensorFlow/Keras
This introductory example uses the simulation capabilities of Flower to simulate a large number of clients on a single machine. Take a look at the [Documentation](https://flower.ai/docs/framework/how-to-run-simulations.html) for a deep dive into how Flower simulation works. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to download, partition and preprocess the MNIST dataset. This examples uses 100 clients by default.
diff --git a/examples/sklearn-logreg-mnist/README.md b/examples/sklearn-logreg-mnist/README.md
index 12b1a5e3bc1a..b117c5452086 100644
--- a/examples/sklearn-logreg-mnist/README.md
+++ b/examples/sklearn-logreg-mnist/README.md
@@ -1,4 +1,10 @@
-# Flower Example using scikit-learn
+---
+tags: [basic, vision, logistic regression, fds]
+dataset: [MNIST]
+framework: [scikit-learn]
+---
+
+# Flower Logistic Regression Example using scikit-learn
This example of Flower uses `scikit-learn`'s `LogisticRegression` model to train a federated learning system. It will help you understand how to adapt Flower for use with `scikit-learn`.
Running this example in itself is quite easy. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to download, partition and preprocess the MNIST dataset.
diff --git a/examples/tensorflow-privacy/README.md b/examples/tensorflow-privacy/README.md
index a1f1be00f6b0..8156f92f60c9 100644
--- a/examples/tensorflow-privacy/README.md
+++ b/examples/tensorflow-privacy/README.md
@@ -1,3 +1,9 @@
+---
+tags: [basic, vision, fds, privacy, dp]
+dataset: [MNIST]
+framework: [tensorflow]
+---
+
# Training with Sample-Level Differential Privacy using TensorFlow-Privacy Engine
In this example, we demonstrate how to train a model with sample-level differential privacy (DP) using Flower. We employ TensorFlow and integrate the tensorflow-privacy Engine to achieve sample-level differential privacy. This setup ensures robust privacy guarantees during the client training phase.
diff --git a/examples/vertical-fl/README.md b/examples/vertical-fl/README.md
index ba8228a059f9..ab5d2210d8d5 100644
--- a/examples/vertical-fl/README.md
+++ b/examples/vertical-fl/README.md
@@ -1,3 +1,10 @@
+---
+title: Vertical FL Flower Example
+tags: [vertical, tabular, advanced]
+dataset: [Titanic]
+framework: [torch, pandas, scikit-learn]
+---
+
# Vertical Federated Learning example
This example will showcase how you can perform Vertical Federated Learning using
diff --git a/examples/vit-finetune/README.md b/examples/vit-finetune/README.md
index ac1652acf02d..957c0eda0b68 100644
--- a/examples/vit-finetune/README.md
+++ b/examples/vit-finetune/README.md
@@ -1,3 +1,10 @@
+---
+title: Federated finetuning of a ViT
+tags: [finetuning, vision, fds]
+dataset: [Oxford Flower-102]
+framework: [torch, torchvision]
+---
+
# Federated finetuning of a ViT
This example shows how to use Flower's Simulation Engine to federate the finetuning of a Vision Transformer ([ViT-Base-16](https://pytorch.org/vision/main/models/generated/torchvision.models.vit_b_16.html#torchvision.models.vit_b_16)) that has been pretrained on ImageNet. To keep things simple we'll be finetuning it to [Oxford Flower-102](https://www.robots.ox.ac.uk/~vgg/data/flowers/102/index.html) datasset, creating 20 partitions using [Flower Datasets](https://flower.ai/docs/datasets/). We'll be finetuning just the exit `head` of the ViT, this means that the training is not that costly and each client requires just ~1GB of VRAM (for a batch size of 32 images).
diff --git a/examples/whisper-federated-finetuning/README.md b/examples/whisper-federated-finetuning/README.md
index ddebe51247b2..cfd0db842bae 100644
--- a/examples/whisper-federated-finetuning/README.md
+++ b/examples/whisper-federated-finetuning/README.md
@@ -1,3 +1,9 @@
+---
+tags: [finetuning, speech, transformers]
+dataset: [SpeechCommands]
+framework: [transformers, whisper]
+---
+
# On-device Federated Finetuning for Speech Classification
This example demonstrates how to, from a pre-trained [Whisper](https://openai.com/research/whisper) model, finetune it for the downstream task of keyword spotting. We'll be implementing a federated downstream finetuning pipeline using Flower involving a total of 100 clients. As for the downstream dataset, we'll be using the [Google Speech Commands](https://huggingface.co/datasets/speech_commands) dataset for keyword spotting. We'll take the encoder part of the [Whisper-tiny](https://huggingface.co/openai/whisper-tiny) model, freeze its parameters, and learn a lightweight classification (\<800K parameters !!) head to correctly classify a spoken word.
diff --git a/examples/xgboost-comprehensive/README.md b/examples/xgboost-comprehensive/README.md
index dc6d7e3872d6..62fcba2bb06d 100644
--- a/examples/xgboost-comprehensive/README.md
+++ b/examples/xgboost-comprehensive/README.md
@@ -1,3 +1,9 @@
+---
+tags: [advanced, classification, tabular]
+dataset: [HIGGS]
+framework: [xgboost]
+---
+
# Flower Example using XGBoost (Comprehensive)
This example demonstrates a comprehensive federated learning setup using Flower with XGBoost.
diff --git a/examples/xgboost-comprehensive/client.py b/examples/xgboost-comprehensive/client.py
index 2d54c3fd63c7..08dd548a386b 100644
--- a/examples/xgboost-comprehensive/client.py
+++ b/examples/xgboost-comprehensive/client.py
@@ -32,7 +32,7 @@
fds = FederatedDataset(
dataset="jxie/higgs",
partitioners={"train": partitioner},
- resplitter=resplit,
+ preprocessor=resplit,
)
# Load the partition for this `partition_id`
diff --git a/examples/xgboost-comprehensive/pyproject.toml b/examples/xgboost-comprehensive/pyproject.toml
index 2d44c06d6e3f..c9259ffa1db4 100644
--- a/examples/xgboost-comprehensive/pyproject.toml
+++ b/examples/xgboost-comprehensive/pyproject.toml
@@ -11,5 +11,5 @@ authors = ["The Flower Authors "]
[tool.poetry.dependencies]
python = ">=3.8,<3.11"
flwr = { extras = ["simulation"], version = ">=1.7.0,<2.0" }
-flwr-datasets = ">=0.1.0,<1.0.0"
+flwr-datasets = ">=0.2.0,<1.0.0"
xgboost = ">=2.0.0,<3.0.0"
diff --git a/examples/xgboost-comprehensive/requirements.txt b/examples/xgboost-comprehensive/requirements.txt
index 16eb78f484e3..840e19529953 100644
--- a/examples/xgboost-comprehensive/requirements.txt
+++ b/examples/xgboost-comprehensive/requirements.txt
@@ -1,3 +1,3 @@
flwr[simulation]>=1.7.0, <2.0
-flwr-datasets>=0.1.0, <1.0.0
+flwr-datasets>=0.2.0, <1.0.0
xgboost>=2.0.0, <3.0.0
diff --git a/examples/xgboost-comprehensive/server.py b/examples/xgboost-comprehensive/server.py
index 939819641438..07dc4bed6db4 100644
--- a/examples/xgboost-comprehensive/server.py
+++ b/examples/xgboost-comprehensive/server.py
@@ -32,7 +32,7 @@
# Load centralised test set
if centralised_eval:
fds = FederatedDataset(
- dataset="jxie/higgs", partitioners={"train": 20}, resplitter=resplit
+ dataset="jxie/higgs", partitioners={"train": 20}, preprocessor=resplit
)
log(INFO, "Loading centralised test set...")
test_set = fds.load_split("test")
diff --git a/examples/xgboost-comprehensive/sim.py b/examples/xgboost-comprehensive/sim.py
index c9481f1cdd5d..09ebbb81fcb4 100644
--- a/examples/xgboost-comprehensive/sim.py
+++ b/examples/xgboost-comprehensive/sim.py
@@ -80,7 +80,7 @@ def main():
fds = FederatedDataset(
dataset="jxie/higgs",
partitioners={"train": partitioner},
- resplitter=resplit,
+ preprocessor=resplit,
)
# Load centralised test set
diff --git a/examples/xgboost-quickstart/README.md b/examples/xgboost-quickstart/README.md
index 713b6eab8bac..fa3e9d0dc6fb 100644
--- a/examples/xgboost-quickstart/README.md
+++ b/examples/xgboost-quickstart/README.md
@@ -1,3 +1,9 @@
+---
+tags: [quickstart, classification, tabular]
+dataset: [HIGGS]
+framework: [xgboost]
+---
+
# Flower Example using XGBoost
This example demonstrates how to perform EXtreme Gradient Boosting (XGBoost) within Flower using `xgboost` package.
diff --git a/pyproject.toml b/pyproject.toml
index dbab703c7671..7fe1ef7843d9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -64,13 +64,14 @@ flower-simulation = "flwr.simulation.run_simulation:run_simulation_from_cli"
python = "^3.8"
# Mandatory dependencies
numpy = "^1.21.0"
-grpcio = "^1.60.0"
+grpcio = "^1.60.0,!=1.64.2,!=1.65.0"
protobuf = "^4.25.2"
cryptography = "^42.0.4"
pycryptodome = "^3.18.0"
iterators = "^0.0.2"
typer = { version = "^0.9.0", extras=["all"] }
tomli = "^2.0.1"
+tomli-w = "^1.0.0"
pathspec = "^0.12.1"
# Optional dependencies (Simulation Engine)
ray = { version = "==2.10.0", optional = true, python = ">=3.8,<3.12" }
@@ -122,7 +123,7 @@ mdformat-gfm = "==0.3.5"
mdformat-frontmatter = "==2.0.1"
mdformat-beautysh = "==0.1.1"
mdformat-myst = "==0.1.5"
-twine = "==4.0.2"
+twine = "==5.1.1"
pyroma = "==4.2"
check-wheel-contents = "==0.4.0"
GitPython = "==3.1.32"
@@ -131,12 +132,7 @@ licensecheck = "==2024"
pre-commit = "==3.5.0"
[tool.isort]
-line_length = 88
-indent = " "
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
+profile = "black"
known_first_party = ["flwr", "flwr_tool"]
[tool.black]
diff --git a/src/docker/base/alpine/Dockerfile b/src/docker/base/alpine/Dockerfile
index 04864b525e2e..9e58d82e3bda 100644
--- a/src/docker/base/alpine/Dockerfile
+++ b/src/docker/base/alpine/Dockerfile
@@ -54,9 +54,11 @@ FROM python:${PYTHON_VERSION}-${DISTRO}${DISTRO_VERSION} as base
# required by the grpc package
RUN apk add --no-cache \
libstdc++ \
+ ca-certificates \
# add non-root user
&& adduser \
--no-create-home \
+ --home /app \
--disabled-password \
--gecos "" \
--uid 49999 app \
diff --git a/src/docker/base/ubuntu/Dockerfile b/src/docker/base/ubuntu/Dockerfile
index 4aeddc3f8d8d..960ed07edf96 100644
--- a/src/docker/base/ubuntu/Dockerfile
+++ b/src/docker/base/ubuntu/Dockerfile
@@ -48,29 +48,7 @@ RUN git clone https://github.com/pyenv/pyenv.git \
RUN LATEST=$(pyenv latest -k ${PYTHON_VERSION}) \
&& python-build "${LATEST}" /usr/local/bin/python
-FROM $DISTRO:$DISTRO_VERSION as base
-
-ENV DEBIAN_FRONTEND=noninteractive
-
-RUN apt-get update \
- && apt-get -y --no-install-recommends install \
- libsqlite3-0 \
- && rm -rf /var/lib/apt/lists/*
-
-COPY --from=python /usr/local/bin/python /usr/local/bin/python
-
-ENV PATH=/usr/local/bin/python/bin:$PATH \
- # Send stdout and stderr stream directly to the terminal. Ensures that no
- # output is retained in a buffer if the application crashes.
- PYTHONUNBUFFERED=1 \
- # Typically, bytecode is created on the first invocation to speed up following invocation.
- # However, in Docker we only make a single invocation (when we start the container).
- # Therefore, we can disable bytecode writing.
- PYTHONDONTWRITEBYTECODE=1 \
- # Ensure that python encoding is always UTF-8.
- PYTHONIOENCODING=UTF-8 \
- LANG=C.UTF-8 \
- LC_ALL=C.UTF-8
+ENV PATH=/usr/local/bin/python/bin:$PATH
# Use a virtual environment to ensure that Python packages are installed in the same location
# regardless of whether the subsequent image build is run with the app or the root user
@@ -86,16 +64,43 @@ RUN pip install -U --no-cache-dir \
setuptools==${SETUPTOOLS_VERSION} \
${FLWR_PACKAGE}==${FLWR_VERSION}
-# add non-root user
-RUN adduser \
+FROM $DISTRO:$DISTRO_VERSION as base
+
+COPY --from=python /usr/local/bin/python /usr/local/bin/python
+
+ENV DEBIAN_FRONTEND=noninteractive \
+ PATH=/usr/local/bin/python/bin:$PATH
+
+RUN apt-get update \
+ && apt-get -y --no-install-recommends install \
+ libsqlite3-0 \
+ ca-certificates \
+ && rm -rf /var/lib/apt/lists/* \
+ # add non-root user
+ && adduser \
--no-create-home \
+ --home /app \
--disabled-password \
--gecos "" \
--uid 49999 app \
&& mkdir -p /app \
- && chown -R app:app /python \
&& chown -R app:app /app
+COPY --from=python --chown=app:app /python/venv /python/venv
+
+ENV PATH=/python/venv/bin:$PATH \
+ # Send stdout and stderr stream directly to the terminal. Ensures that no
+ # output is retained in a buffer if the application crashes.
+ PYTHONUNBUFFERED=1 \
+ # Typically, bytecode is created on the first invocation to speed up following invocation.
+ # However, in Docker we only make a single invocation (when we start the container).
+ # Therefore, we can disable bytecode writing.
+ PYTHONDONTWRITEBYTECODE=1 \
+ # Ensure that python encoding is always UTF-8.
+ PYTHONIOENCODING=UTF-8 \
+ LANG=C.UTF-8 \
+ LC_ALL=C.UTF-8
+
WORKDIR /app
USER app
ENV HOME=/app
diff --git a/src/docker/superexec/Dockerfile b/src/docker/superexec/Dockerfile
new file mode 100644
index 000000000000..9e4cc722921e
--- /dev/null
+++ b/src/docker/superexec/Dockerfile
@@ -0,0 +1,20 @@
+# Copyright 2024 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+ARG BASE_REPOSITORY=flwr/base
+ARG BASE_IMAGE
+FROM $BASE_REPOSITORY:$BASE_IMAGE
+
+ENTRYPOINT ["flower-superexec"]
diff --git a/src/docker/supernode/Dockerfile b/src/docker/supernode/Dockerfile
index 8dce1c389a5b..8b78577b1201 100644
--- a/src/docker/supernode/Dockerfile
+++ b/src/docker/supernode/Dockerfile
@@ -17,4 +17,4 @@ ARG BASE_REPOSITORY=flwr/base
ARG BASE_IMAGE
FROM $BASE_REPOSITORY:$BASE_IMAGE
-ENTRYPOINT ["flower-client-app"]
+ENTRYPOINT ["flower-supernode"]
diff --git a/src/proto/flwr/proto/driver.proto b/src/proto/flwr/proto/driver.proto
index edbd5d91bb5b..77dc52b3258b 100644
--- a/src/proto/flwr/proto/driver.proto
+++ b/src/proto/flwr/proto/driver.proto
@@ -42,6 +42,7 @@ service Driver {
message CreateRunRequest {
string fab_id = 1;
string fab_version = 2;
+ map override_config = 3;
}
message CreateRunResponse { sint64 run_id = 1; }
diff --git a/src/proto/flwr/proto/exec.proto b/src/proto/flwr/proto/exec.proto
index 8e5f53b02ca8..d0d8dfcbb273 100644
--- a/src/proto/flwr/proto/exec.proto
+++ b/src/proto/flwr/proto/exec.proto
@@ -25,7 +25,10 @@ service Exec {
rpc StreamLogs(StreamLogsRequest) returns (stream StreamLogsResponse) {}
}
-message StartRunRequest { bytes fab_file = 1; }
+message StartRunRequest {
+ bytes fab_file = 1;
+ map override_config = 2;
+}
message StartRunResponse { sint64 run_id = 1; }
message StreamLogsRequest { sint64 run_id = 1; }
message StreamLogsResponse { string log_output = 1; }
diff --git a/src/proto/flwr/proto/run.proto b/src/proto/flwr/proto/run.proto
index 76a7fd91532f..e41748381cab 100644
--- a/src/proto/flwr/proto/run.proto
+++ b/src/proto/flwr/proto/run.proto
@@ -21,6 +21,7 @@ message Run {
sint64 run_id = 1;
string fab_id = 2;
string fab_version = 3;
+ map override_config = 4;
}
message GetRunRequest { sint64 run_id = 1; }
message GetRunResponse { Run run = 1; }
diff --git a/src/py/flwr/cli/build.py b/src/py/flwr/cli/build.py
index 4a9b54f9223f..1f7f75d36184 100644
--- a/src/py/flwr/cli/build.py
+++ b/src/py/flwr/cli/build.py
@@ -20,6 +20,7 @@
from typing import Optional
import pathspec
+import tomli_w
import typer
from typing_extensions import Annotated
@@ -31,7 +32,7 @@
def build(
directory: Annotated[
Optional[Path],
- typer.Option(help="The Flower project directory to bundle into a FAB"),
+ typer.Option(help="Path of the Flower project to bundle into a FAB"),
] = None,
) -> str:
"""Build a Flower project into a Flower App Bundle (FAB).
@@ -85,7 +86,7 @@ def build(
# Set the name of the zip file
fab_filename = (
- f"{conf['flower']['publisher']}"
+ f"{conf['tool']['flwr']['app']['publisher']}"
f".{directory.name}"
f".{conf['project']['version'].replace('.', '-')}.fab"
)
@@ -93,15 +94,28 @@ def build(
allowed_extensions = {".py", ".toml", ".md"}
+ # Remove the 'federations' field from 'tool.flwr' if it exists
+ if (
+ "tool" in conf
+ and "flwr" in conf["tool"]
+ and "federations" in conf["tool"]["flwr"]
+ ):
+ del conf["tool"]["flwr"]["federations"]
+
+ toml_contents = tomli_w.dumps(conf)
+
with zipfile.ZipFile(fab_filename, "w", zipfile.ZIP_DEFLATED) as fab_file:
+ fab_file.writestr("pyproject.toml", toml_contents)
+
+ # Continue with adding other files
for root, _, files in os.walk(directory, topdown=True):
- # Filter directories and files based on .gitignore
files = [
f
for f in files
if not ignore_spec.match_file(Path(root) / f)
and f != fab_filename
and Path(f).suffix in allowed_extensions
+ and f != "pyproject.toml" # Exclude the original pyproject.toml
]
for file in files:
@@ -118,7 +132,7 @@ def build(
fab_file.writestr(".info/CONTENT", list_file_content)
typer.secho(
- f"🎊 Successfully built {fab_filename}.", fg=typer.colors.GREEN, bold=True
+ f"🎊 Successfully built {fab_filename}", fg=typer.colors.GREEN, bold=True
)
return fab_filename
diff --git a/src/py/flwr/cli/config_utils.py b/src/py/flwr/cli/config_utils.py
index d06a1d6dba96..f46a53857dfc 100644
--- a/src/py/flwr/cli/config_utils.py
+++ b/src/py/flwr/cli/config_utils.py
@@ -60,7 +60,7 @@ def get_fab_metadata(fab_file: Union[Path, bytes]) -> Tuple[str, str]:
return (
conf["project"]["version"],
- f"{conf['flower']['publisher']}/{conf['project']['name']}",
+ f"{conf['tool']['flwr']['app']['publisher']}/{conf['project']['name']}",
)
@@ -108,6 +108,14 @@ def load(path: Optional[Path] = None) -> Optional[Dict[str, Any]]:
return load_from_string(toml_file.read())
+def _validate_run_config(config_dict: Dict[str, Any], errors: List[str]) -> None:
+ for key, value in config_dict.items():
+ if isinstance(value, dict):
+ _validate_run_config(config_dict[key], errors)
+ elif not isinstance(value, str):
+ errors.append(f"Config value of key {key} is not of type `str`.")
+
+
# pylint: disable=too-many-branches
def validate_fields(config: Dict[str, Any]) -> Tuple[bool, List[str], List[str]]:
"""Validate pyproject.toml fields."""
@@ -128,18 +136,28 @@ def validate_fields(config: Dict[str, Any]) -> Tuple[bool, List[str], List[str]]
if "authors" not in config["project"]:
warnings.append('Recommended property "authors" missing in [project]')
- if "flower" not in config:
- errors.append("Missing [flower] section")
+ if (
+ "tool" not in config
+ or "flwr" not in config["tool"]
+ or "app" not in config["tool"]["flwr"]
+ ):
+ errors.append("Missing [tool.flwr.app] section")
else:
- if "publisher" not in config["flower"]:
- errors.append('Property "publisher" missing in [flower]')
- if "components" not in config["flower"]:
- errors.append("Missing [flower.components] section")
+ if "publisher" not in config["tool"]["flwr"]["app"]:
+ errors.append('Property "publisher" missing in [tool.flwr.app]')
+ if "config" in config["tool"]["flwr"]["app"]:
+ _validate_run_config(config["tool"]["flwr"]["app"]["config"], errors)
+ if "components" not in config["tool"]["flwr"]["app"]:
+ errors.append("Missing [tool.flwr.app.components] section")
else:
- if "serverapp" not in config["flower"]["components"]:
- errors.append('Property "serverapp" missing in [flower.components]')
- if "clientapp" not in config["flower"]["components"]:
- errors.append('Property "clientapp" missing in [flower.components]')
+ if "serverapp" not in config["tool"]["flwr"]["app"]["components"]:
+ errors.append(
+ 'Property "serverapp" missing in [tool.flwr.app.components]'
+ )
+ if "clientapp" not in config["tool"]["flwr"]["app"]["components"]:
+ errors.append(
+ 'Property "clientapp" missing in [tool.flwr.app.components]'
+ )
return len(errors) == 0, errors, warnings
@@ -155,14 +173,14 @@ def validate(
# Validate serverapp
is_valid, reason = object_ref.validate(
- config["flower"]["components"]["serverapp"], check_module
+ config["tool"]["flwr"]["app"]["components"]["serverapp"], check_module
)
if not is_valid and isinstance(reason, str):
return False, [reason], []
# Validate clientapp
is_valid, reason = object_ref.validate(
- config["flower"]["components"]["clientapp"], check_module
+ config["tool"]["flwr"]["app"]["components"]["clientapp"], check_module
)
if not is_valid and isinstance(reason, str):
diff --git a/src/py/flwr/cli/config_utils_test.py b/src/py/flwr/cli/config_utils_test.py
index b24425cd08f4..077f254fb914 100644
--- a/src/py/flwr/cli/config_utils_test.py
+++ b/src/py/flwr/cli/config_utils_test.py
@@ -34,27 +34,18 @@ def test_load_pyproject_toml_load_from_cwd(tmp_path: Path) -> None:
name = "fedgpt"
version = "1.0.0"
description = ""
- authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
- ]
license = {text = "Apache License (2.0)"}
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"numpy>=1.21.0",
]
- [flower]
+ [tool.flwr.app]
publisher = "flwrlabs"
- [flower.components]
+ [tool.flwr.app.components]
serverapp = "fedgpt.server:app"
clientapp = "fedgpt.client:app"
-
- [flower.engine]
- name = "simulation" # optional
-
- [flower.engine.simulation.supernode]
- count = 10 # optional
"""
expected_config = {
"build-system": {"build-backend": "hatchling.build", "requires": ["hatchling"]},
@@ -62,19 +53,18 @@ def test_load_pyproject_toml_load_from_cwd(tmp_path: Path) -> None:
"name": "fedgpt",
"version": "1.0.0",
"description": "",
- "authors": [{"email": "hello@flower.ai", "name": "The Flower Authors"}],
"license": {"text": "Apache License (2.0)"},
"dependencies": ["flwr[simulation]>=1.9.0,<2.0", "numpy>=1.21.0"],
},
- "flower": {
- "publisher": "flwrlabs",
- "components": {
- "serverapp": "fedgpt.server:app",
- "clientapp": "fedgpt.client:app",
- },
- "engine": {
- "name": "simulation",
- "simulation": {"supernode": {"count": 10}},
+ "tool": {
+ "flwr": {
+ "app": {
+ "publisher": "flwrlabs",
+ "components": {
+ "serverapp": "fedgpt.server:app",
+ "clientapp": "fedgpt.client:app",
+ },
+ },
},
},
}
@@ -109,27 +99,18 @@ def test_load_pyproject_toml_from_path(tmp_path: Path) -> None:
name = "fedgpt"
version = "1.0.0"
description = ""
- authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
- ]
license = {text = "Apache License (2.0)"}
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"numpy>=1.21.0",
]
- [flower]
+ [tool.flwr.app]
publisher = "flwrlabs"
- [flower.components]
+ [tool.flwr.app.components]
serverapp = "fedgpt.server:app"
clientapp = "fedgpt.client:app"
-
- [flower.engine]
- name = "simulation" # optional
-
- [flower.engine.simulation.supernode]
- count = 10 # optional
"""
expected_config = {
"build-system": {"build-backend": "hatchling.build", "requires": ["hatchling"]},
@@ -137,19 +118,18 @@ def test_load_pyproject_toml_from_path(tmp_path: Path) -> None:
"name": "fedgpt",
"version": "1.0.0",
"description": "",
- "authors": [{"email": "hello@flower.ai", "name": "The Flower Authors"}],
"license": {"text": "Apache License (2.0)"},
"dependencies": ["flwr[simulation]>=1.9.0,<2.0", "numpy>=1.21.0"],
},
- "flower": {
- "publisher": "flwrlabs",
- "components": {
- "serverapp": "fedgpt.server:app",
- "clientapp": "fedgpt.client:app",
- },
- "engine": {
- "name": "simulation",
- "simulation": {"supernode": {"count": 10}},
+ "tool": {
+ "flwr": {
+ "app": {
+ "publisher": "flwrlabs",
+ "components": {
+ "serverapp": "fedgpt.server:app",
+ "clientapp": "fedgpt.client:app",
+ },
+ },
},
},
}
@@ -219,7 +199,7 @@ def test_validate_pyproject_toml_fields_no_flower_components() -> None:
"license": "",
"authors": [],
},
- "flower": {},
+ "tool": {"flwr": {"app": {}}},
}
# Execute
@@ -242,7 +222,7 @@ def test_validate_pyproject_toml_fields_no_server_and_client_app() -> None:
"license": "",
"authors": [],
},
- "flower": {"components": {}},
+ "tool": {"flwr": {"app": {"components": {}}}},
}
# Execute
@@ -265,9 +245,13 @@ def test_validate_pyproject_toml_fields() -> None:
"license": "",
"authors": [],
},
- "flower": {
- "publisher": "flwrlabs",
- "components": {"serverapp": "", "clientapp": ""},
+ "tool": {
+ "flwr": {
+ "app": {
+ "publisher": "flwrlabs",
+ "components": {"serverapp": "", "clientapp": ""},
+ },
+ },
},
}
@@ -291,11 +275,15 @@ def test_validate_pyproject_toml() -> None:
"license": "",
"authors": [],
},
- "flower": {
- "publisher": "flwrlabs",
- "components": {
- "serverapp": "flwr.cli.run:run",
- "clientapp": "flwr.cli.run:run",
+ "tool": {
+ "flwr": {
+ "app": {
+ "publisher": "flwrlabs",
+ "components": {
+ "serverapp": "flwr.cli.run:run",
+ "clientapp": "flwr.cli.run:run",
+ },
+ },
},
},
}
@@ -320,11 +308,15 @@ def test_validate_pyproject_toml_fail() -> None:
"license": "",
"authors": [],
},
- "flower": {
- "publisher": "flwrlabs",
- "components": {
- "serverapp": "flwr.cli.run:run",
- "clientapp": "flwr.cli.run:runa",
+ "tool": {
+ "flwr": {
+ "app": {
+ "publisher": "flwrlabs",
+ "components": {
+ "serverapp": "flwr.cli.run:run",
+ "clientapp": "flwr.cli.run:runa",
+ },
+ },
},
},
}
diff --git a/src/py/flwr/cli/install.py b/src/py/flwr/cli/install.py
index de9227bee450..749a4516f65c 100644
--- a/src/py/flwr/cli/install.py
+++ b/src/py/flwr/cli/install.py
@@ -16,6 +16,7 @@
import shutil
+import subprocess
import tempfile
import zipfile
from io import BytesIO
@@ -149,7 +150,7 @@ def validate_and_install(
)
raise typer.Exit(code=1)
- publisher = config["flower"]["publisher"]
+ publisher = config["tool"]["flwr"]["app"]["publisher"]
project_name = config["project"]["name"]
version = config["project"]["version"]
@@ -192,6 +193,21 @@ def validate_and_install(
else:
shutil.copy2(item, install_dir / item.name)
+ try:
+ subprocess.run(
+ ["pip", "install", "-e", install_dir, "--no-deps"],
+ capture_output=True,
+ text=True,
+ check=True,
+ )
+ except subprocess.CalledProcessError as e:
+ typer.secho(
+ f"❌ Failed to `pip install` package(s) from {install_dir}:\n{e.stderr}",
+ fg=typer.colors.RED,
+ bold=True,
+ )
+ raise typer.Exit(code=1) from e
+
typer.secho(
f"🎊 Successfully installed {project_name} to {install_dir}.",
fg=typer.colors.GREEN,
diff --git a/src/py/flwr/cli/new/new.py b/src/py/flwr/cli/new/new.py
index 9367cf6c9ffb..4bde009742f8 100644
--- a/src/py/flwr/cli/new/new.py
+++ b/src/py/flwr/cli/new/new.py
@@ -136,6 +136,7 @@ def new(
framework_str = framework_str.lower()
+ llm_challenge_str = None
if framework_str == "flowertune":
llm_challenge_value = prompt_options(
"Please select LLM challenge by typing in the number",
@@ -171,7 +172,7 @@ def new(
}
# List of files to render
- if framework_str == "flowertune":
+ if llm_challenge_str:
files = {
".gitignore": {"template": "app/.gitignore.tpl"},
"pyproject.toml": {"template": f"app/pyproject.{framework_str}.toml.tpl"},
@@ -228,10 +229,10 @@ def new(
"README.md": {"template": "app/README.md.tpl"},
"pyproject.toml": {"template": f"app/pyproject.{framework_str}.toml.tpl"},
f"{import_name}/__init__.py": {"template": "app/code/__init__.py.tpl"},
- f"{import_name}/server.py": {
+ f"{import_name}/server_app.py": {
"template": f"app/code/server.{framework_str}.py.tpl"
},
- f"{import_name}/client.py": {
+ f"{import_name}/client_app.py": {
"template": f"app/code/client.{framework_str}.py.tpl"
},
}
@@ -264,9 +265,11 @@ def new(
bold=True,
)
)
+
+ _add = " huggingface-cli login\n" if framework_str == "flowertune" else ""
print(
typer.style(
- f" cd {package_name}\n" + " pip install -e .\n flwr run\n",
+ f" cd {package_name}\n" + " pip install -e .\n" + _add + " flwr run\n",
fg=typer.colors.BRIGHT_CYAN,
bold=True,
)
diff --git a/src/py/flwr/cli/new/new_test.py b/src/py/flwr/cli/new/new_test.py
index 33ad745efa93..7f22bd5f9825 100644
--- a/src/py/flwr/cli/new/new_test.py
+++ b/src/py/flwr/cli/new/new_test.py
@@ -86,8 +86,8 @@ def test_new_correct_name(tmp_path: str) -> None:
}
expected_files_module = {
"__init__.py",
- "server.py",
- "client.py",
+ "server_app.py",
+ "client_app.py",
"task.py",
}
diff --git a/src/py/flwr/cli/new/templates/app/code/client.hf.py.tpl b/src/py/flwr/cli/new/templates/app/code/client.hf.py.tpl
index 314da2120c53..56bac8543c50 100644
--- a/src/py/flwr/cli/new/templates/app/code/client.hf.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/client.hf.py.tpl
@@ -1,6 +1,7 @@
"""$project_name: A Flower / HuggingFace Transformers app."""
from flwr.client import ClientApp, NumPyClient
+from flwr.common import Context
from transformers import AutoModelForSequenceClassification
from $import_name.task import (
@@ -38,12 +39,15 @@ class FlowerClient(NumPyClient):
return float(loss), len(self.testloader), {"accuracy": accuracy}
-def client_fn(cid):
+def client_fn(context: Context):
# Load model and data
net = AutoModelForSequenceClassification.from_pretrained(
CHECKPOINT, num_labels=2
).to(DEVICE)
- trainloader, valloader = load_data(int(cid), 2)
+
+ partition_id = int(context.node_config['partition-id'])
+ num_partitions = int(context.node_config['num-partitions'])
+ trainloader, valloader = load_data(partition_id, num_partitions)
# Return Client instance
return FlowerClient(net, trainloader, valloader).to_client()
diff --git a/src/py/flwr/cli/new/templates/app/code/client.jax.py.tpl b/src/py/flwr/cli/new/templates/app/code/client.jax.py.tpl
index 3c6d2f03637a..48b667665f3f 100644
--- a/src/py/flwr/cli/new/templates/app/code/client.jax.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/client.jax.py.tpl
@@ -2,6 +2,7 @@
import jax
from flwr.client import NumPyClient, ClientApp
+from flwr.common import Context
from $import_name.task import (
evaluation,
@@ -44,7 +45,7 @@ class FlowerClient(NumPyClient):
)
return float(loss), num_examples, {"loss": float(loss)}
-def client_fn(cid):
+def client_fn(context: Context):
# Return Client instance
return FlowerClient().to_client()
diff --git a/src/py/flwr/cli/new/templates/app/code/client.mlx.py.tpl b/src/py/flwr/cli/new/templates/app/code/client.mlx.py.tpl
index 1722561370a8..37207c940d83 100644
--- a/src/py/flwr/cli/new/templates/app/code/client.mlx.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/client.mlx.py.tpl
@@ -4,6 +4,7 @@ import mlx.core as mx
import mlx.nn as nn
import mlx.optimizers as optim
from flwr.client import NumPyClient, ClientApp
+from flwr.common import Context
from $import_name.task import (
batch_iterate,
@@ -57,8 +58,10 @@ class FlowerClient(NumPyClient):
return loss.item(), len(self.test_images), {"accuracy": accuracy.item()}
-def client_fn(cid):
- data = load_data(int(cid), 2)
+def client_fn(context: Context):
+ partition_id = int(context.node_config["partition-id"])
+ num_partitions = int(context.node_config["num-partitions"])
+ data = load_data(partition_id, num_partitions)
# Return Client instance
return FlowerClient(data).to_client()
diff --git a/src/py/flwr/cli/new/templates/app/code/client.numpy.py.tpl b/src/py/flwr/cli/new/templates/app/code/client.numpy.py.tpl
index 232c305fc2a9..1dd83e108bb5 100644
--- a/src/py/flwr/cli/new/templates/app/code/client.numpy.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/client.numpy.py.tpl
@@ -1,6 +1,7 @@
"""$project_name: A Flower / NumPy app."""
from flwr.client import NumPyClient, ClientApp
+from flwr.common import Context
import numpy as np
@@ -15,7 +16,7 @@ class FlowerClient(NumPyClient):
return float(0.0), 1, {"accuracy": float(1.0)}
-def client_fn(cid: str):
+def client_fn(context: Context):
return FlowerClient().to_client()
diff --git a/src/py/flwr/cli/new/templates/app/code/client.pytorch.py.tpl b/src/py/flwr/cli/new/templates/app/code/client.pytorch.py.tpl
index c68974efaadf..addc71023a09 100644
--- a/src/py/flwr/cli/new/templates/app/code/client.pytorch.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/client.pytorch.py.tpl
@@ -1,6 +1,7 @@
"""$project_name: A Flower / PyTorch app."""
from flwr.client import NumPyClient, ClientApp
+from flwr.common import Context
from $import_name.task import (
Net,
@@ -31,10 +32,12 @@ class FlowerClient(NumPyClient):
return loss, len(self.valloader.dataset), {"accuracy": accuracy}
-def client_fn(cid):
+def client_fn(context: Context):
# Load model and data
net = Net().to(DEVICE)
- trainloader, valloader = load_data(int(cid), 2)
+ partition_id = int(context.node_config["partition-id"])
+ num_partitions = int(context.node_config["num-partitions"])
+ trainloader, valloader = load_data(partition_id, num_partitions)
# Return Client instance
return FlowerClient(net, trainloader, valloader).to_client()
diff --git a/src/py/flwr/cli/new/templates/app/code/client.sklearn.py.tpl b/src/py/flwr/cli/new/templates/app/code/client.sklearn.py.tpl
index 9181389cad1c..a1eefa034e7b 100644
--- a/src/py/flwr/cli/new/templates/app/code/client.sklearn.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/client.sklearn.py.tpl
@@ -4,6 +4,7 @@ import warnings
import numpy as np
from flwr.client import NumPyClient, ClientApp
+from flwr.common import Context
from flwr_datasets import FederatedDataset
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import log_loss
@@ -68,8 +69,9 @@ class FlowerClient(NumPyClient):
fds = FederatedDataset(dataset="mnist", partitioners={"train": 2})
-def client_fn(cid: str):
- dataset = fds.load_partition(int(cid), "train").with_format("numpy")
+def client_fn(context: Context):
+ partition_id = int(context.node_config["partition-id"])
+ dataset = fds.load_partition(partition_id, "train").with_format("numpy")
X, y = dataset["image"].reshape((len(dataset), -1)), dataset["label"]
diff --git a/src/py/flwr/cli/new/templates/app/code/client.tensorflow.py.tpl b/src/py/flwr/cli/new/templates/app/code/client.tensorflow.py.tpl
index dc55d4ca6569..0fe1c405a110 100644
--- a/src/py/flwr/cli/new/templates/app/code/client.tensorflow.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/client.tensorflow.py.tpl
@@ -1,6 +1,7 @@
"""$project_name: A Flower / TensorFlow app."""
from flwr.client import NumPyClient, ClientApp
+from flwr.common import Context
from $import_name.task import load_data, load_model
@@ -28,10 +29,12 @@ class FlowerClient(NumPyClient):
return loss, len(self.x_test), {"accuracy": accuracy}
-def client_fn(cid):
+def client_fn(context: Context):
# Load model and data
net = load_model()
- x_train, y_train, x_test, y_test = load_data(int(cid), 2)
+
+ partition_id = int(context.node_config["partition-id"])
+ x_train, y_train, x_test, y_test = load_data(partition_id, 2)
# Return Client instance
return FlowerClient(net, x_train, y_train, x_test, y_test).to_client()
diff --git a/src/py/flwr/cli/new/templates/app/code/flwr_tune/app.py.tpl b/src/py/flwr/cli/new/templates/app/code/flwr_tune/app.py.tpl
index ecb87bd71e3f..a0f781df04a1 100644
--- a/src/py/flwr/cli/new/templates/app/code/flwr_tune/app.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/flwr_tune/app.py.tpl
@@ -12,10 +12,10 @@ from flwr.client import ClientApp
from flwr.common import ndarrays_to_parameters
from flwr.server import ServerApp, ServerConfig
-from $import_name.client import gen_client_fn, get_parameters
+from $import_name.client_app import gen_client_fn, get_parameters
from $import_name.dataset import get_tokenizer_and_data_collator_and_propt_formatting
from $import_name.models import get_model
-from $import_name.server import fit_weighted_average, get_evaluate_fn, get_on_fit_config
+from $import_name.server_app import fit_weighted_average, get_evaluate_fn, get_on_fit_config
# Avoid warnings
warnings.filterwarnings("ignore", category=UserWarning)
diff --git a/src/py/flwr/cli/new/templates/app/code/flwr_tune/server.py.tpl b/src/py/flwr/cli/new/templates/app/code/flwr_tune/server.py.tpl
index 19223148bca5..5dd4d881f2f1 100644
--- a/src/py/flwr/cli/new/templates/app/code/flwr_tune/server.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/flwr_tune/server.py.tpl
@@ -1,6 +1,6 @@
"""$project_name: A Flower / FlowerTune app."""
-from $import_name.client import set_parameters
+from $import_name.client_app import set_parameters
from $import_name.models import get_model
diff --git a/src/py/flwr/cli/new/templates/app/code/server.hf.py.tpl b/src/py/flwr/cli/new/templates/app/code/server.hf.py.tpl
index d7d86931335b..43fce9e481c6 100644
--- a/src/py/flwr/cli/new/templates/app/code/server.hf.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/server.hf.py.tpl
@@ -1,17 +1,22 @@
"""$project_name: A Flower / HuggingFace Transformers app."""
+from flwr.common import Context
from flwr.server.strategy import FedAvg
-from flwr.server import ServerApp, ServerConfig
+from flwr.server import ServerApp, ServerAppComponents, ServerConfig
-# Define strategy
-strategy = FedAvg(
- fraction_fit=1.0,
- fraction_evaluate=1.0,
-)
+def server_fn(context: Context):
+ # Read from config
+ num_rounds = int(context.run_config["num-server-rounds"])
-# Start server
-app = ServerApp(
- config=ServerConfig(num_rounds=3),
- strategy=strategy,
-)
+ # Define strategy
+ strategy = FedAvg(
+ fraction_fit=1.0,
+ fraction_evaluate=1.0,
+ )
+ config = ServerConfig(num_rounds=num_rounds)
+
+ return ServerAppComponents(strategy=strategy, config=config)
+
+# Create ServerApp
+app = ServerApp(server_fn=server_fn)
diff --git a/src/py/flwr/cli/new/templates/app/code/server.jax.py.tpl b/src/py/flwr/cli/new/templates/app/code/server.jax.py.tpl
index 53cff7b905f4..4eb7149de999 100644
--- a/src/py/flwr/cli/new/templates/app/code/server.jax.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/server.jax.py.tpl
@@ -1,12 +1,19 @@
"""$project_name: A Flower / JAX app."""
-import flwr as fl
+from flwr.common import Context
+from flwr.server.strategy import FedAvg
+from flwr.server import ServerApp, ServerAppComponents, ServerConfig
-# Configure the strategy
-strategy = fl.server.strategy.FedAvg()
-# Flower ServerApp
-app = fl.server.ServerApp(
- config=fl.server.ServerConfig(num_rounds=3),
- strategy=strategy,
-)
+def server_fn(context: Context):
+ # Read from config
+ num_rounds = int(context.run_config["num-server-rounds"])
+
+ # Define strategy
+ strategy = FedAvg()
+ config = ServerConfig(num_rounds=num_rounds)
+
+ return ServerAppComponents(strategy=strategy, config=config)
+
+# Create ServerApp
+app = ServerApp(server_fn=server_fn)
diff --git a/src/py/flwr/cli/new/templates/app/code/server.mlx.py.tpl b/src/py/flwr/cli/new/templates/app/code/server.mlx.py.tpl
index b475e0e7dc36..72aed878553d 100644
--- a/src/py/flwr/cli/new/templates/app/code/server.mlx.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/server.mlx.py.tpl
@@ -1,15 +1,19 @@
"""$project_name: A Flower / MLX app."""
-from flwr.server import ServerApp, ServerConfig
+from flwr.common import Context
+from flwr.server import ServerApp, ServerAppComponents, ServerConfig
from flwr.server.strategy import FedAvg
-# Define strategy
-strategy = FedAvg()
+def server_fn(context: Context):
+ # Read from config
+ num_rounds = int(context.run_config["num-server-rounds"])
+ # Define strategy
+ strategy = FedAvg()
+ config = ServerConfig(num_rounds=num_rounds)
+
+ return ServerAppComponents(strategy=strategy, config=config)
# Create ServerApp
-app = ServerApp(
- config=ServerConfig(num_rounds=3),
- strategy=strategy,
-)
+app = ServerApp(server_fn=server_fn)
diff --git a/src/py/flwr/cli/new/templates/app/code/server.numpy.py.tpl b/src/py/flwr/cli/new/templates/app/code/server.numpy.py.tpl
index 03f95ae35cfd..d324b4f24fed 100644
--- a/src/py/flwr/cli/new/templates/app/code/server.numpy.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/server.numpy.py.tpl
@@ -1,12 +1,19 @@
"""$project_name: A Flower / NumPy app."""
-import flwr as fl
+from flwr.common import Context
+from flwr.server import ServerApp, ServerAppComponents, ServerConfig
+from flwr.server.strategy import FedAvg
-# Configure the strategy
-strategy = fl.server.strategy.FedAvg()
-# Flower ServerApp
-app = fl.server.ServerApp(
- config=fl.server.ServerConfig(num_rounds=1),
- strategy=strategy,
-)
+def server_fn(context: Context):
+ # Read from config
+ num_rounds = int(context.run_config["num-server-rounds"])
+
+ # Define strategy
+ strategy = FedAvg()
+ config = ServerConfig(num_rounds=num_rounds)
+
+ return ServerAppComponents(strategy=strategy, config=config)
+
+# Create ServerApp
+app = ServerApp(server_fn=server_fn)
diff --git a/src/py/flwr/cli/new/templates/app/code/server.pytorch.py.tpl b/src/py/flwr/cli/new/templates/app/code/server.pytorch.py.tpl
index dc635f79a664..7ac9508f8a25 100644
--- a/src/py/flwr/cli/new/templates/app/code/server.pytorch.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/server.pytorch.py.tpl
@@ -1,7 +1,7 @@
"""$project_name: A Flower / PyTorch app."""
-from flwr.common import ndarrays_to_parameters
-from flwr.server import ServerApp, ServerConfig
+from flwr.common import Context, ndarrays_to_parameters
+from flwr.server import ServerApp, ServerAppComponents, ServerConfig
from flwr.server.strategy import FedAvg
from $import_name.task import Net, get_weights
@@ -11,18 +11,20 @@ from $import_name.task import Net, get_weights
ndarrays = get_weights(Net())
parameters = ndarrays_to_parameters(ndarrays)
+def server_fn(context: Context):
+ # Read from config
+ num_rounds = int(context.run_config["num-server-rounds"])
-# Define strategy
-strategy = FedAvg(
- fraction_fit=1.0,
- fraction_evaluate=1.0,
- min_available_clients=2,
- initial_parameters=parameters,
-)
+ # Define strategy
+ strategy = FedAvg(
+ fraction_fit=1.0,
+ fraction_evaluate=1.0,
+ min_available_clients=2,
+ initial_parameters=parameters,
+ )
+ config = ServerConfig(num_rounds=num_rounds)
+ return ServerAppComponents(strategy=strategy, config=config)
# Create ServerApp
-app = ServerApp(
- config=ServerConfig(num_rounds=3),
- strategy=strategy,
-)
+app = ServerApp(server_fn=server_fn)
diff --git a/src/py/flwr/cli/new/templates/app/code/server.sklearn.py.tpl b/src/py/flwr/cli/new/templates/app/code/server.sklearn.py.tpl
index 266a53ac5794..d8837798d5a6 100644
--- a/src/py/flwr/cli/new/templates/app/code/server.sklearn.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/server.sklearn.py.tpl
@@ -1,17 +1,23 @@
"""$project_name: A Flower / Scikit-Learn app."""
-from flwr.server import ServerApp, ServerConfig
+from flwr.common import Context
+from flwr.server import ServerApp, ServerAppComponents, ServerConfig
from flwr.server.strategy import FedAvg
-strategy = FedAvg(
- fraction_fit=1.0,
- fraction_evaluate=1.0,
- min_available_clients=2,
-)
+def server_fn(context: Context):
+ # Read from config
+ num_rounds = int(context.run_config["num-server-rounds"])
+
+ # Define strategy
+ strategy = FedAvg(
+ fraction_fit=1.0,
+ fraction_evaluate=1.0,
+ min_available_clients=2,
+ )
+ config = ServerConfig(num_rounds=num_rounds)
+
+ return ServerAppComponents(strategy=strategy, config=config)
# Create ServerApp
-app = ServerApp(
- config=ServerConfig(num_rounds=3),
- strategy=strategy,
-)
+app = ServerApp(server_fn=server_fn)
diff --git a/src/py/flwr/cli/new/templates/app/code/server.tensorflow.py.tpl b/src/py/flwr/cli/new/templates/app/code/server.tensorflow.py.tpl
index 8d092164a468..abd2a977b503 100644
--- a/src/py/flwr/cli/new/templates/app/code/server.tensorflow.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/server.tensorflow.py.tpl
@@ -1,7 +1,7 @@
"""$project_name: A Flower / TensorFlow app."""
-from flwr.common import ndarrays_to_parameters
-from flwr.server import ServerApp, ServerConfig
+from flwr.common import Context, ndarrays_to_parameters
+from flwr.server import ServerApp, ServerAppComponents, ServerConfig
from flwr.server.strategy import FedAvg
from $import_name.task import load_model
@@ -11,17 +11,20 @@ config = ServerConfig(num_rounds=3)
parameters = ndarrays_to_parameters(load_model().get_weights())
-# Define strategy
-strategy = FedAvg(
- fraction_fit=1.0,
- fraction_evaluate=1.0,
- min_available_clients=2,
- initial_parameters=parameters,
-)
+def server_fn(context: Context):
+ # Read from config
+ num_rounds = int(context.run_config["num-server-rounds"])
+ # Define strategy
+ strategy = FedAvg(
+ fraction_fit=1.0,
+ fraction_evaluate=1.0,
+ min_available_clients=2,
+ initial_parameters=parameters,
+ )
+ config = ServerConfig(num_rounds=num_rounds)
+
+ return ServerAppComponents(strategy=strategy, config=config)
# Create ServerApp
-app = ServerApp(
- config=config,
- strategy=strategy,
-)
+app = ServerApp(server_fn=server_fn)
diff --git a/src/py/flwr/cli/new/templates/app/code/task.hf.py.tpl b/src/py/flwr/cli/new/templates/app/code/task.hf.py.tpl
index 8e89add66835..eb43acfce976 100644
--- a/src/py/flwr/cli/new/templates/app/code/task.hf.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/task.hf.py.tpl
@@ -16,9 +16,9 @@ DEVICE = torch.device("cpu")
CHECKPOINT = "distilbert-base-uncased" # transformer model checkpoint
-def load_data(partition_id, num_clients):
+def load_data(partition_id: int, num_partitions: int):
"""Load IMDB data (training and eval)"""
- fds = FederatedDataset(dataset="imdb", partitioners={"train": num_clients})
+ fds = FederatedDataset(dataset="imdb", partitioners={"train": num_partitions})
partition = fds.load_partition(partition_id)
# Divide data: 80% train, 20% test
partition_train_test = partition.train_test_split(test_size=0.2, seed=42)
diff --git a/src/py/flwr/cli/new/templates/app/code/task.mlx.py.tpl b/src/py/flwr/cli/new/templates/app/code/task.mlx.py.tpl
index bcd4dde93310..88053b0cd590 100644
--- a/src/py/flwr/cli/new/templates/app/code/task.mlx.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/task.mlx.py.tpl
@@ -43,8 +43,8 @@ def batch_iterate(batch_size, X, y):
yield X[ids], y[ids]
-def load_data(partition_id, num_clients):
- fds = FederatedDataset(dataset="mnist", partitioners={"train": num_clients})
+def load_data(partition_id: int, num_partitions: int):
+ fds = FederatedDataset(dataset="mnist", partitioners={"train": num_partitions})
partition = fds.load_partition(partition_id)
partition_splits = partition.train_test_split(test_size=0.2, seed=42)
diff --git a/src/py/flwr/cli/new/templates/app/code/task.pytorch.py.tpl b/src/py/flwr/cli/new/templates/app/code/task.pytorch.py.tpl
index b30c65a285b5..d5971ffb6ce5 100644
--- a/src/py/flwr/cli/new/templates/app/code/task.pytorch.py.tpl
+++ b/src/py/flwr/cli/new/templates/app/code/task.pytorch.py.tpl
@@ -34,7 +34,7 @@ class Net(nn.Module):
return self.fc3(x)
-def load_data(partition_id, num_partitions):
+def load_data(partition_id: int, num_partitions: int):
"""Load partition CIFAR10 data."""
fds = FederatedDataset(dataset="cifar10", partitioners={"train": num_partitions})
partition = fds.load_partition(partition_id)
diff --git a/src/py/flwr/cli/new/templates/app/pyproject.flowertune.toml.tpl b/src/py/flwr/cli/new/templates/app/pyproject.flowertune.toml.tpl
index 2ed6bd36fd89..ca0b25f172fb 100644
--- a/src/py/flwr/cli/new/templates/app/pyproject.flowertune.toml.tpl
+++ b/src/py/flwr/cli/new/templates/app/pyproject.flowertune.toml.tpl
@@ -6,10 +6,7 @@ build-backend = "hatchling.build"
name = "$package_name"
version = "1.0.0"
description = ""
-authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
-]
-license = { text = "Apache License (2.0)" }
+license = "Apache-2.0"
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"flwr-datasets>=0.1.0,<1.0.0",
@@ -25,18 +22,18 @@ dependencies = [
[tool.hatch.build.targets.wheel]
packages = ["."]
-[flower]
+[tool.flwr.app]
publisher = "$username"
-[flower.components]
+[tool.flwr.app.components]
serverapp = "$import_name.app:server"
clientapp = "$import_name.app:client"
-[flower.engine]
-name = "simulation"
+[tool.flwr.app.config]
+num-server-rounds = "3"
-[flower.engine.simulation.supernode]
-num = $num_clients
+[tool.flwr.federations]
+default = "localhost"
-[flower.engine.simulation]
-backend_config = { client_resources = { num_cpus = 8, num_gpus = 1.0 } }
+[tool.flwr.federations.localhost]
+options.num-supernodes = 10
diff --git a/src/py/flwr/cli/new/templates/app/pyproject.hf.toml.tpl b/src/py/flwr/cli/new/templates/app/pyproject.hf.toml.tpl
index 71004f3421cd..92c954e754cf 100644
--- a/src/py/flwr/cli/new/templates/app/pyproject.hf.toml.tpl
+++ b/src/py/flwr/cli/new/templates/app/pyproject.hf.toml.tpl
@@ -6,10 +6,7 @@ build-backend = "hatchling.build"
name = "$package_name"
version = "1.0.0"
description = ""
-authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
-]
-license = { text = "Apache License (2.0)" }
+license = "Apache-2.0"
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"flwr-datasets>=0.0.2,<1.0.0",
@@ -23,15 +20,18 @@ dependencies = [
[tool.hatch.build.targets.wheel]
packages = ["."]
-[flower]
+[tool.flwr.app]
publisher = "$username"
-[flower.components]
-serverapp = "$import_name.server:app"
-clientapp = "$import_name.client:app"
+[tool.flwr.app.components]
+serverapp = "$import_name.server_app:app"
+clientapp = "$import_name.client_app:app"
+
+[tool.flwr.app.config]
+num-server-rounds = "3"
-[flower.engine]
-name = "simulation"
+[tool.flwr.federations]
+default = "localhost"
-[flower.engine.simulation.supernode]
-num = 2
+[tool.flwr.federations.localhost]
+options.num-supernodes = 10
diff --git a/src/py/flwr/cli/new/templates/app/pyproject.jax.toml.tpl b/src/py/flwr/cli/new/templates/app/pyproject.jax.toml.tpl
index c5463e08b92c..e899f48f4c5c 100644
--- a/src/py/flwr/cli/new/templates/app/pyproject.jax.toml.tpl
+++ b/src/py/flwr/cli/new/templates/app/pyproject.jax.toml.tpl
@@ -6,10 +6,7 @@ build-backend = "hatchling.build"
name = "$package_name"
version = "1.0.0"
description = ""
-authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
-]
-license = {text = "Apache License (2.0)"}
+license = "Apache-2.0"
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"jax==0.4.26",
@@ -20,9 +17,18 @@ dependencies = [
[tool.hatch.build.targets.wheel]
packages = ["."]
-[flower]
+[tool.flwr.app]
publisher = "$username"
-[flower.components]
-serverapp = "$import_name.server:app"
-clientapp = "$import_name.client:app"
+[tool.flwr.app.components]
+serverapp = "$import_name.server_app:app"
+clientapp = "$import_name.client_app:app"
+
+[tool.flwr.app.config]
+num-server-rounds = "3"
+
+[tool.flwr.federations]
+default = "localhost"
+
+[tool.flwr.federations.localhost]
+options.num-supernodes = 10
diff --git a/src/py/flwr/cli/new/templates/app/pyproject.mlx.toml.tpl b/src/py/flwr/cli/new/templates/app/pyproject.mlx.toml.tpl
index a850135a1fc5..6004c076cf87 100644
--- a/src/py/flwr/cli/new/templates/app/pyproject.mlx.toml.tpl
+++ b/src/py/flwr/cli/new/templates/app/pyproject.mlx.toml.tpl
@@ -6,10 +6,7 @@ build-backend = "hatchling.build"
name = "$package_name"
version = "1.0.0"
description = ""
-authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
-]
-license = { text = "Apache License (2.0)" }
+license = "Apache-2.0"
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"flwr-datasets[vision]>=0.0.2,<1.0.0",
@@ -20,15 +17,18 @@ dependencies = [
[tool.hatch.build.targets.wheel]
packages = ["."]
-[flower]
+[tool.flwr.app]
publisher = "$username"
-[flower.components]
-serverapp = "$import_name.server:app"
-clientapp = "$import_name.client:app"
+[tool.flwr.app.components]
+serverapp = "$import_name.server_app:app"
+clientapp = "$import_name.client_app:app"
+
+[tool.flwr.app.config]
+num-server-rounds = "3"
-[flower.engine]
-name = "simulation"
+[tool.flwr.federations]
+default = "localhost"
-[flower.engine.simulation.supernode]
-num = 2
+[tool.flwr.federations.localhost]
+options.num-supernodes = 10
diff --git a/src/py/flwr/cli/new/templates/app/pyproject.numpy.toml.tpl b/src/py/flwr/cli/new/templates/app/pyproject.numpy.toml.tpl
index d49015eb567f..543936ed4a89 100644
--- a/src/py/flwr/cli/new/templates/app/pyproject.numpy.toml.tpl
+++ b/src/py/flwr/cli/new/templates/app/pyproject.numpy.toml.tpl
@@ -6,10 +6,7 @@ build-backend = "hatchling.build"
name = "$package_name"
version = "1.0.0"
description = ""
-authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
-]
-license = { text = "Apache License (2.0)" }
+license = "Apache-2.0"
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"numpy>=1.21.0",
@@ -18,15 +15,18 @@ dependencies = [
[tool.hatch.build.targets.wheel]
packages = ["."]
-[flower]
+[tool.flwr.app]
publisher = "$username"
-[flower.components]
-serverapp = "$import_name.server:app"
-clientapp = "$import_name.client:app"
+[tool.flwr.app.components]
+serverapp = "$import_name.server_app:app"
+clientapp = "$import_name.client_app:app"
+
+[tool.flwr.app.config]
+num-server-rounds = "3"
-[flower.engine]
-name = "simulation"
+[tool.flwr.federations]
+default = "localhost"
-[flower.engine.simulation.supernode]
-num = 2
+[tool.flwr.federations.localhost]
+options.num-supernodes = 10
diff --git a/src/py/flwr/cli/new/templates/app/pyproject.pytorch.toml.tpl b/src/py/flwr/cli/new/templates/app/pyproject.pytorch.toml.tpl
index b56c0041b96c..8a92cf0eca9a 100644
--- a/src/py/flwr/cli/new/templates/app/pyproject.pytorch.toml.tpl
+++ b/src/py/flwr/cli/new/templates/app/pyproject.pytorch.toml.tpl
@@ -6,10 +6,7 @@ build-backend = "hatchling.build"
name = "$package_name"
version = "1.0.0"
description = ""
-authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
-]
-license = { text = "Apache License (2.0)" }
+license = "Apache-2.0"
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"flwr-datasets[vision]>=0.0.2,<1.0.0",
@@ -20,15 +17,18 @@ dependencies = [
[tool.hatch.build.targets.wheel]
packages = ["."]
-[flower]
+[tool.flwr.app]
publisher = "$username"
-[flower.components]
-serverapp = "$import_name.server:app"
-clientapp = "$import_name.client:app"
+[tool.flwr.app.components]
+serverapp = "$import_name.server_app:app"
+clientapp = "$import_name.client_app:app"
+
+[tool.flwr.app.config]
+num-server-rounds = "3"
-[flower.engine]
-name = "simulation"
+[tool.flwr.federations]
+default = "localhost"
-[flower.engine.simulation.supernode]
-num = 2
+[tool.flwr.federations.localhost]
+options.num-supernodes = 10
diff --git a/src/py/flwr/cli/new/templates/app/pyproject.sklearn.toml.tpl b/src/py/flwr/cli/new/templates/app/pyproject.sklearn.toml.tpl
index 6f914ae659b1..5c1ffa09aed2 100644
--- a/src/py/flwr/cli/new/templates/app/pyproject.sklearn.toml.tpl
+++ b/src/py/flwr/cli/new/templates/app/pyproject.sklearn.toml.tpl
@@ -6,10 +6,7 @@ build-backend = "hatchling.build"
name = "$package_name"
version = "1.0.0"
description = ""
-authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
-]
-license = { text = "Apache License (2.0)" }
+license = "Apache-2.0"
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"flwr-datasets[vision]>=0.0.2,<1.0.0",
@@ -19,15 +16,18 @@ dependencies = [
[tool.hatch.build.targets.wheel]
packages = ["."]
-[flower]
+[tool.flwr.app]
publisher = "$username"
-[flower.components]
-serverapp = "$import_name.server:app"
-clientapp = "$import_name.client:app"
+[tool.flwr.app.components]
+serverapp = "$import_name.server_app:app"
+clientapp = "$import_name.client_app:app"
+
+[tool.flwr.app.config]
+num-server-rounds = "3"
-[flower.engine]
-name = "simulation"
+[tool.flwr.federations]
+default = "localhost"
-[flower.engine.simulation.supernode]
-num = 2
+[tool.flwr.federations.localhost]
+options.num-supernodes = 10
diff --git a/src/py/flwr/cli/new/templates/app/pyproject.tensorflow.toml.tpl b/src/py/flwr/cli/new/templates/app/pyproject.tensorflow.toml.tpl
index 4ecd16143dcc..de1a445e33f9 100644
--- a/src/py/flwr/cli/new/templates/app/pyproject.tensorflow.toml.tpl
+++ b/src/py/flwr/cli/new/templates/app/pyproject.tensorflow.toml.tpl
@@ -6,10 +6,7 @@ build-backend = "hatchling.build"
name = "$package_name"
version = "1.0.0"
description = ""
-authors = [
- { name = "The Flower Authors", email = "hello@flower.ai" },
-]
-license = { text = "Apache License (2.0)" }
+license = "Apache-2.0"
dependencies = [
"flwr[simulation]>=1.9.0,<2.0",
"flwr-datasets[vision]>=0.0.2,<1.0.0",
@@ -19,15 +16,18 @@ dependencies = [
[tool.hatch.build.targets.wheel]
packages = ["."]
-[flower]
+[tool.flwr.app]
publisher = "$username"
-[flower.components]
-serverapp = "$import_name.server:app"
-clientapp = "$import_name.client:app"
+[tool.flwr.app.components]
+serverapp = "$import_name.server_app:app"
+clientapp = "$import_name.client_app:app"
+
+[tool.flwr.app.config]
+num-server-rounds = "3"
-[flower.engine]
-name = "simulation"
+[tool.flwr.federations]
+default = "localhost"
-[flower.engine.simulation.supernode]
-num = 2
+[tool.flwr.federations.localhost]
+options.num-supernodes = 10
diff --git a/src/py/flwr/cli/run/run.py b/src/py/flwr/cli/run/run.py
index 4c95a4041c05..5dedb701fea9 100644
--- a/src/py/flwr/cli/run/run.py
+++ b/src/py/flwr/cli/run/run.py
@@ -14,53 +14,48 @@
# ==============================================================================
"""Flower command line interface `run` command."""
+import subprocess
import sys
-from enum import Enum
from logging import DEBUG
-from typing import Optional
+from pathlib import Path
+from typing import Any, Dict, List, Optional
import typer
from typing_extensions import Annotated
-from flwr.cli import config_utils
-from flwr.common.constant import SUPEREXEC_DEFAULT_ADDRESS
+from flwr.cli.build import build
+from flwr.cli.config_utils import load_and_validate
+from flwr.common.config import parse_config_args
from flwr.common.grpc import GRPC_MAX_MESSAGE_LENGTH, create_channel
from flwr.common.logger import log
from flwr.proto.exec_pb2 import StartRunRequest # pylint: disable=E0611
from flwr.proto.exec_pb2_grpc import ExecStub
-from flwr.simulation.run_simulation import _run_simulation
-
-
-class Engine(str, Enum):
- """Enum defining the engine to run on."""
-
- SIMULATION = "simulation"
# pylint: disable-next=too-many-locals
def run(
- engine: Annotated[
- Optional[Engine],
- typer.Option(
- case_sensitive=False,
- help="The engine to run FL with (currently only simulation is supported).",
- ),
+ directory: Annotated[
+ Path,
+ typer.Argument(help="Path of the Flower project to run"),
+ ] = Path("."),
+ federation_name: Annotated[
+ Optional[str],
+ typer.Argument(help="Name of the federation to run the app on"),
] = None,
- use_superexec: Annotated[
- bool,
+ config_overrides: Annotated[
+ Optional[List[str]],
typer.Option(
- case_sensitive=False, help="Use this flag to use the new SuperExec API"
+ "--run-config",
+ "-c",
+ help="Override configuration key-value pairs",
),
- ] = False,
+ ] = None,
) -> None:
"""Run Flower project."""
- if use_superexec:
- _start_superexec_run()
- return
-
typer.secho("Loading project configuration... ", fg=typer.colors.BLUE)
- config, errors, warnings = config_utils.load_and_validate()
+ pyproject_path = directory / "pyproject.toml" if directory else None
+ config, errors, warnings = load_and_validate(path=pyproject_path)
if config is None:
typer.secho(
@@ -82,47 +77,133 @@ def run(
typer.secho("Success", fg=typer.colors.GREEN)
- server_app_ref = config["flower"]["components"]["serverapp"]
- client_app_ref = config["flower"]["components"]["clientapp"]
-
- if engine is None:
- engine = config["flower"]["engine"]["name"]
-
- if engine == Engine.SIMULATION:
- num_supernodes = config["flower"]["engine"]["simulation"]["supernode"]["num"]
- backend_config = config["flower"]["engine"]["simulation"].get(
- "backend_config", None
- )
+ federation_name = federation_name or config["tool"]["flwr"]["federations"].get(
+ "default"
+ )
- typer.secho("Starting run... ", fg=typer.colors.BLUE)
- _run_simulation(
- server_app_attr=server_app_ref,
- client_app_attr=client_app_ref,
- num_supernodes=num_supernodes,
- backend_config=backend_config,
+ if federation_name is None:
+ typer.secho(
+ "❌ No federation name was provided and the project's `pyproject.toml` "
+ "doesn't declare a default federation (with a SuperExec address or an "
+ "`options.num-supernodes` value).",
+ fg=typer.colors.RED,
+ bold=True,
)
- else:
+ raise typer.Exit(code=1)
+
+ # Validate the federation exists in the configuration
+ federation = config["tool"]["flwr"]["federations"].get(federation_name)
+ if federation is None:
+ available_feds = {
+ fed for fed in config["tool"]["flwr"]["federations"] if fed != "default"
+ }
typer.secho(
- f"Engine '{engine}' is not yet supported in `flwr run`",
+ f"❌ There is no `{federation_name}` federation declared in the "
+ "`pyproject.toml`.\n The following federations were found:\n\n"
+ + "\n".join(available_feds),
fg=typer.colors.RED,
bold=True,
)
+ raise typer.Exit(code=1)
+ if "address" in federation:
+ _run_with_superexec(federation, directory, config_overrides)
+ else:
+ _run_without_superexec(directory, federation, federation_name, config_overrides)
+
+
+def _run_with_superexec(
+ federation: Dict[str, str],
+ directory: Optional[Path],
+ config_overrides: Optional[List[str]],
+) -> None:
-def _start_superexec_run() -> None:
def on_channel_state_change(channel_connectivity: str) -> None:
"""Log channel connectivity."""
log(DEBUG, channel_connectivity)
+ insecure_str = federation.get("insecure")
+ if root_certificates := federation.get("root-certificates"):
+ root_certificates_bytes = Path(root_certificates).read_bytes()
+ if insecure := bool(insecure_str):
+ typer.secho(
+ "❌ `root_certificates` were provided but the `insecure` parameter"
+ " is set to `True`.",
+ fg=typer.colors.RED,
+ bold=True,
+ )
+ raise typer.Exit(code=1)
+ else:
+ root_certificates_bytes = None
+ if insecure_str is None:
+ typer.secho(
+ "❌ To disable TLS, set `insecure = true` in `pyproject.toml`.",
+ fg=typer.colors.RED,
+ bold=True,
+ )
+ raise typer.Exit(code=1)
+ if not (insecure := bool(insecure_str)):
+ typer.secho(
+ "❌ No certificates were given yet `insecure` is set to `False`.",
+ fg=typer.colors.RED,
+ bold=True,
+ )
+ raise typer.Exit(code=1)
+
channel = create_channel(
- server_address=SUPEREXEC_DEFAULT_ADDRESS,
- insecure=True,
- root_certificates=None,
+ server_address=federation["address"],
+ insecure=insecure,
+ root_certificates=root_certificates_bytes,
max_message_length=GRPC_MAX_MESSAGE_LENGTH,
interceptors=None,
)
channel.subscribe(on_channel_state_change)
stub = ExecStub(channel)
- req = StartRunRequest()
- stub.StartRun(req)
+ fab_path = build(directory)
+
+ req = StartRunRequest(
+ fab_file=Path(fab_path).read_bytes(),
+ override_config=parse_config_args(config_overrides, separator=","),
+ )
+ res = stub.StartRun(req)
+ typer.secho(f"🎊 Successfully started run {res.run_id}", fg=typer.colors.GREEN)
+
+
+def _run_without_superexec(
+ app_path: Optional[Path],
+ federation: Dict[str, Any],
+ federation_name: str,
+ config_overrides: Optional[List[str]],
+) -> None:
+ try:
+ num_supernodes = federation["options"]["num-supernodes"]
+ except KeyError as err:
+ typer.secho(
+ "❌ The project's `pyproject.toml` needs to declare the number of"
+ " SuperNodes in the simulation. To simulate 10 SuperNodes,"
+ " use the following notation:\n\n"
+ f"[tool.flwr.federations.{federation_name}]\n"
+ "options.num-supernodes = 10\n",
+ fg=typer.colors.RED,
+ bold=True,
+ )
+ raise typer.Exit(code=1) from err
+
+ command = [
+ "flower-simulation",
+ "--app",
+ f"{app_path}",
+ "--num-supernodes",
+ f"{num_supernodes}",
+ ]
+
+ if config_overrides:
+ command.extend(["--run-config", f"{config_overrides}"])
+
+ # Run the simulation
+ subprocess.run(
+ command,
+ check=True,
+ text=True,
+ )
diff --git a/src/py/flwr/client/__init__.py b/src/py/flwr/client/__init__.py
index 58fd94448586..218f2fe20d62 100644
--- a/src/py/flwr/client/__init__.py
+++ b/src/py/flwr/client/__init__.py
@@ -23,11 +23,13 @@
from .supernode import run_client_app as run_client_app
from .supernode import run_supernode as run_supernode
from .typing import ClientFn as ClientFn
+from .typing import ClientFnExt as ClientFnExt
__all__ = [
"Client",
"ClientApp",
"ClientFn",
+ "ClientFnExt",
"NumPyClient",
"mod",
"run_client_app",
diff --git a/src/py/flwr/client/app.py b/src/py/flwr/client/app.py
index 1226a0d7bc21..127bb423851f 100644
--- a/src/py/flwr/client/app.py
+++ b/src/py/flwr/client/app.py
@@ -18,7 +18,8 @@
import sys
import time
from dataclasses import dataclass
-from logging import DEBUG, ERROR, INFO, WARN
+from logging import ERROR, INFO, WARN
+from pathlib import Path
from typing import Callable, ContextManager, Dict, Optional, Tuple, Type, Union
from cryptography.hazmat.primitives.asymmetric import ec
@@ -26,8 +27,8 @@
from flwr.client.client import Client
from flwr.client.client_app import ClientApp, LoadClientAppError
-from flwr.client.typing import ClientFn
-from flwr.common import GRPC_MAX_MESSAGE_LENGTH, EventType, Message, event
+from flwr.client.typing import ClientFnExt
+from flwr.common import GRPC_MAX_MESSAGE_LENGTH, Context, EventType, Message, event
from flwr.common.address import parse_address
from flwr.common.constant import (
MISSING_EXTRA_REST,
@@ -41,6 +42,7 @@
from flwr.common.logger import log, warn_deprecated_feature
from flwr.common.message import Error
from flwr.common.retry_invoker import RetryInvoker, RetryState, exponential
+from flwr.common.typing import Run
from .grpc_adapter_client.connection import grpc_adapter
from .grpc_client.connection import grpc_connection
@@ -51,7 +53,7 @@
def _check_actionable_client(
- client: Optional[Client], client_fn: Optional[ClientFn]
+ client: Optional[Client], client_fn: Optional[ClientFnExt]
) -> None:
if client_fn is None and client is None:
raise ValueError(
@@ -72,7 +74,7 @@ def _check_actionable_client(
def start_client(
*,
server_address: str,
- client_fn: Optional[ClientFn] = None,
+ client_fn: Optional[ClientFnExt] = None,
client: Optional[Client] = None,
grpc_max_message_length: int = GRPC_MAX_MESSAGE_LENGTH,
root_certificates: Optional[Union[bytes, str]] = None,
@@ -92,7 +94,7 @@ def start_client(
The IPv4 or IPv6 address of the server. If the Flower
server runs on the same machine on port 8080, then `server_address`
would be `"[::]:8080"`.
- client_fn : Optional[ClientFn]
+ client_fn : Optional[ClientFnExt]
A callable that instantiates a Client. (default: None)
client : Optional[flwr.client.Client]
An implementation of the abstract base
@@ -136,8 +138,8 @@ class `flwr.client.Client` (default: None)
Starting an SSL-enabled gRPC client using system certificates:
- >>> def client_fn(cid: str):
- >>> return FlowerClient()
+ >>> def client_fn(context: Context):
+ >>> return FlowerClient().to_client()
>>>
>>> start_client(
>>> server_address=localhost:8080,
@@ -158,6 +160,7 @@ class `flwr.client.Client` (default: None)
event(EventType.START_CLIENT_ENTER)
_start_client_internal(
server_address=server_address,
+ node_config={},
load_client_app_fn=None,
client_fn=client_fn,
client=client,
@@ -179,8 +182,9 @@ class `flwr.client.Client` (default: None)
def _start_client_internal(
*,
server_address: str,
+ node_config: Dict[str, str],
load_client_app_fn: Optional[Callable[[str, str], ClientApp]] = None,
- client_fn: Optional[ClientFn] = None,
+ client_fn: Optional[ClientFnExt] = None,
client: Optional[Client] = None,
grpc_max_message_length: int = GRPC_MAX_MESSAGE_LENGTH,
root_certificates: Optional[Union[bytes, str]] = None,
@@ -191,6 +195,7 @@ def _start_client_internal(
] = None,
max_retries: Optional[int] = None,
max_wait_time: Optional[float] = None,
+ flwr_path: Optional[Path] = None,
) -> None:
"""Start a Flower client node which connects to a Flower server.
@@ -200,9 +205,11 @@ def _start_client_internal(
The IPv4 or IPv6 address of the server. If the Flower
server runs on the same machine on port 8080, then `server_address`
would be `"[::]:8080"`.
+ node_config: Dict[str, str]
+ The configuration of the node.
load_client_app_fn : Optional[Callable[[], ClientApp]] (default: None)
A function that can be used to load a `ClientApp` instance.
- client_fn : Optional[ClientFn]
+ client_fn : Optional[ClientFnExt]
A callable that instantiates a Client. (default: None)
client : Optional[flwr.client.Client]
An implementation of the abstract base
@@ -234,6 +241,8 @@ class `flwr.client.Client` (default: None)
The maximum duration before the client stops trying to
connect to the server in case of connection error.
If set to None, there is no limit to the total time.
+ flwr_path: Optional[Path] (default: None)
+ The fully resolved path containing installed Flower Apps.
"""
if insecure is None:
insecure = root_certificates is None
@@ -244,7 +253,7 @@ class `flwr.client.Client` (default: None)
if client_fn is None:
# Wrap `Client` instance in `client_fn`
def single_client_factory(
- cid: str, # pylint: disable=unused-argument
+ context: Context, # pylint: disable=unused-argument
) -> Client:
if client is None: # Added this to keep mypy happy
raise ValueError(
@@ -285,7 +294,7 @@ def _on_backoff(retry_state: RetryState) -> None:
log(WARN, "Connection attempt failed, retrying...")
else:
log(
- DEBUG,
+ WARN,
"Connection attempt failed, retrying in %.2f seconds",
retry_state.actual_wait,
)
@@ -293,7 +302,7 @@ def _on_backoff(retry_state: RetryState) -> None:
retry_invoker = RetryInvoker(
wait_gen_factory=exponential,
recoverable_exceptions=connection_error_type,
- max_tries=max_retries,
+ max_tries=max_retries + 1 if max_retries is not None else None,
max_time=max_wait_time,
on_giveup=lambda retry_state: (
log(
@@ -309,9 +318,10 @@ def _on_backoff(retry_state: RetryState) -> None:
on_backoff=_on_backoff,
)
- node_state = NodeState()
- # run_id -> (fab_id, fab_version)
- run_info: Dict[int, Tuple[str, str]] = {}
+ # NodeState gets initialized when the first connection is established
+ node_state: Optional[NodeState] = None
+
+ runs: Dict[int, Run] = {}
while not app_state_tracker.interrupt:
sleep_duration: int = 0
@@ -325,9 +335,31 @@ def _on_backoff(retry_state: RetryState) -> None:
) as conn:
receive, send, create_node, delete_node, get_run = conn
- # Register node
- if create_node is not None:
- create_node() # pylint: disable=not-callable
+ # Register node when connecting the first time
+ if node_state is None:
+ if create_node is None:
+ if transport not in ["grpc-bidi", None]:
+ raise NotImplementedError(
+ "All transports except `grpc-bidi` require "
+ "an implementation for `create_node()`."
+ )
+ # gRPC-bidi doesn't have the concept of node_id,
+ # so we set it to -1
+ node_state = NodeState(
+ node_id=-1,
+ node_config={},
+ )
+ else:
+ # Call create_node fn to register node
+ node_id: Optional[int] = ( # pylint: disable=assignment-from-none
+ create_node()
+ ) # pylint: disable=not-callable
+ if node_id is None:
+ raise ValueError("Node registration failed")
+ node_state = NodeState(
+ node_id=node_id,
+ node_config=node_config,
+ )
app_state_tracker.register_signal_handler()
while not app_state_tracker.interrupt:
@@ -361,15 +393,17 @@ def _on_backoff(retry_state: RetryState) -> None:
# Get run info
run_id = message.metadata.run_id
- if run_id not in run_info:
+ if run_id not in runs:
if get_run is not None:
- run_info[run_id] = get_run(run_id)
+ runs[run_id] = get_run(run_id)
# If get_run is None, i.e., in grpc-bidi mode
else:
- run_info[run_id] = ("", "")
+ runs[run_id] = Run(run_id, "", "", {})
# Register context for this run
- node_state.register_context(run_id=run_id)
+ node_state.register_context(
+ run_id=run_id, run=runs[run_id], flwr_path=flwr_path
+ )
# Retrieve context for this run
context = node_state.retrieve_context(run_id=run_id)
@@ -383,7 +417,10 @@ def _on_backoff(retry_state: RetryState) -> None:
# Handle app loading and task message
try:
# Load ClientApp instance
- client_app: ClientApp = load_client_app_fn(*run_info[run_id])
+ run: Run = runs[run_id]
+ client_app: ClientApp = load_client_app_fn(
+ run.fab_id, run.fab_version
+ )
# Execute ClientApp
reply_message = client_app(message=message, context=context)
@@ -566,9 +603,9 @@ def _init_connection(transport: Optional[str], server_address: str) -> Tuple[
Tuple[
Callable[[], Optional[Message]],
Callable[[Message], None],
+ Optional[Callable[[], Optional[int]]],
Optional[Callable[[], None]],
- Optional[Callable[[], None]],
- Optional[Callable[[int], Tuple[str, str]]],
+ Optional[Callable[[int], Run]],
]
],
],
diff --git a/src/py/flwr/client/client_app.py b/src/py/flwr/client/client_app.py
index 2e810f6560f2..2a913b3a248d 100644
--- a/src/py/flwr/client/client_app.py
+++ b/src/py/flwr/client/client_app.py
@@ -15,19 +15,62 @@
"""Flower ClientApp."""
+import inspect
from typing import Callable, List, Optional
+from flwr.client.client import Client
from flwr.client.message_handler.message_handler import (
handle_legacy_message_from_msgtype,
)
from flwr.client.mod.utils import make_ffn
-from flwr.client.typing import ClientFn, Mod
+from flwr.client.typing import ClientFnExt, Mod
from flwr.common import Context, Message, MessageType
-from flwr.common.logger import warn_preview_feature
+from flwr.common.logger import warn_deprecated_feature, warn_preview_feature
from .typing import ClientAppCallable
+def _alert_erroneous_client_fn() -> None:
+ raise ValueError(
+ "A `ClientApp` cannot make use of a `client_fn` that does "
+ "not have a signature in the form: `def client_fn(context: "
+ "Context)`. You can import the `Context` like this: "
+ "`from flwr.common import Context`"
+ )
+
+
+def _inspect_maybe_adapt_client_fn_signature(client_fn: ClientFnExt) -> ClientFnExt:
+ client_fn_args = inspect.signature(client_fn).parameters
+ first_arg = list(client_fn_args.keys())[0]
+
+ if len(client_fn_args) != 1:
+ _alert_erroneous_client_fn()
+
+ first_arg_type = client_fn_args[first_arg].annotation
+
+ if first_arg_type is str or first_arg == "cid":
+ # Warn previous signature for `client_fn` seems to be used
+ warn_deprecated_feature(
+ "`client_fn` now expects a signature `def client_fn(context: Context)`. "
+ "The provided `client_fn` has signature: "
+ f"{dict(client_fn_args.items())}. You can import the `Context` like this:"
+ " `from flwr.common import Context`"
+ )
+
+ # Wrap deprecated client_fn inside a function with the expected signature
+ def adaptor_fn(
+ context: Context,
+ ) -> Client: # pylint: disable=unused-argument
+ # if partition-id is defined, pass it. Else pass node_id, which should
+ # always be defined during Context init.
+ cid = context.node_config.get("partition-id", context.node_id)
+ return client_fn(str(cid)) # type: ignore
+
+ return adaptor_fn
+
+ return client_fn
+
+
class ClientAppException(Exception):
"""Exception raised when an exception is raised while executing a ClientApp."""
@@ -48,7 +91,7 @@ class ClientApp:
>>> class FlowerClient(NumPyClient):
>>> # ...
>>>
- >>> def client_fn(cid):
+ >>> def client_fn(context: Context):
>>> return FlowerClient().to_client()
>>>
>>> app = ClientApp(client_fn)
@@ -65,7 +108,7 @@ class ClientApp:
def __init__(
self,
- client_fn: Optional[ClientFn] = None, # Only for backward compatibility
+ client_fn: Optional[ClientFnExt] = None, # Only for backward compatibility
mods: Optional[List[Mod]] = None,
) -> None:
self._mods: List[Mod] = mods if mods is not None else []
@@ -74,6 +117,8 @@ def __init__(
self._call: Optional[ClientAppCallable] = None
if client_fn is not None:
+ client_fn = _inspect_maybe_adapt_client_fn_signature(client_fn)
+
def ffn(
message: Message,
context: Context,
diff --git a/src/py/flwr/client/grpc_adapter_client/connection.py b/src/py/flwr/client/grpc_adapter_client/connection.py
index e4e32b3accd0..80a5cf0b4656 100644
--- a/src/py/flwr/client/grpc_adapter_client/connection.py
+++ b/src/py/flwr/client/grpc_adapter_client/connection.py
@@ -27,6 +27,7 @@
from flwr.common.logger import log
from flwr.common.message import Message
from flwr.common.retry_invoker import RetryInvoker
+from flwr.common.typing import Run
@contextmanager
@@ -43,9 +44,9 @@ def grpc_adapter( # pylint: disable=R0913
Tuple[
Callable[[], Optional[Message]],
Callable[[Message], None],
+ Optional[Callable[[], Optional[int]]],
Optional[Callable[[], None]],
- Optional[Callable[[], None]],
- Optional[Callable[[int], Tuple[str, str]]],
+ Optional[Callable[[int], Run]],
]
]:
"""Primitives for request/response-based interaction with a server via GrpcAdapter.
diff --git a/src/py/flwr/client/grpc_client/connection.py b/src/py/flwr/client/grpc_client/connection.py
index 8c049861c672..a6417106d51b 100644
--- a/src/py/flwr/client/grpc_client/connection.py
+++ b/src/py/flwr/client/grpc_client/connection.py
@@ -38,6 +38,7 @@
from flwr.common.grpc import create_channel
from flwr.common.logger import log
from flwr.common.retry_invoker import RetryInvoker
+from flwr.common.typing import Run
from flwr.proto.transport_pb2 import ( # pylint: disable=E0611
ClientMessage,
Reason,
@@ -71,9 +72,9 @@ def grpc_connection( # pylint: disable=R0913, R0915
Tuple[
Callable[[], Optional[Message]],
Callable[[Message], None],
+ Optional[Callable[[], Optional[int]]],
Optional[Callable[[], None]],
- Optional[Callable[[], None]],
- Optional[Callable[[int], Tuple[str, str]]],
+ Optional[Callable[[int], Run]],
]
]:
"""Establish a gRPC connection to a gRPC server.
diff --git a/src/py/flwr/client/grpc_rere_client/connection.py b/src/py/flwr/client/grpc_rere_client/connection.py
index 34dc0e417383..e573df6854bc 100644
--- a/src/py/flwr/client/grpc_rere_client/connection.py
+++ b/src/py/flwr/client/grpc_rere_client/connection.py
@@ -41,6 +41,7 @@
from flwr.common.message import Message, Metadata
from flwr.common.retry_invoker import RetryInvoker
from flwr.common.serde import message_from_taskins, message_to_taskres
+from flwr.common.typing import Run
from flwr.proto.fleet_pb2 import ( # pylint: disable=E0611
CreateNodeRequest,
DeleteNodeRequest,
@@ -78,9 +79,9 @@ def grpc_request_response( # pylint: disable=R0913, R0914, R0915
Tuple[
Callable[[], Optional[Message]],
Callable[[Message], None],
+ Optional[Callable[[], Optional[int]]],
Optional[Callable[[], None]],
- Optional[Callable[[], None]],
- Optional[Callable[[int], Tuple[str, str]]],
+ Optional[Callable[[int], Run]],
]
]:
"""Primitives for request/response-based interaction with a server.
@@ -175,7 +176,7 @@ def ping() -> None:
if not ping_stop_event.is_set():
ping_stop_event.wait(next_interval)
- def create_node() -> None:
+ def create_node() -> Optional[int]:
"""Set create_node."""
# Call FleetAPI
create_node_request = CreateNodeRequest(ping_interval=PING_DEFAULT_INTERVAL)
@@ -188,6 +189,7 @@ def create_node() -> None:
nonlocal node, ping_thread
node = cast(Node, create_node_response.node)
ping_thread = start_ping_loop(ping, ping_stop_event)
+ return node.node_id
def delete_node() -> None:
"""Set delete_node."""
@@ -266,7 +268,7 @@ def send(message: Message) -> None:
# Cleanup
metadata = None
- def get_run(run_id: int) -> Tuple[str, str]:
+ def get_run(run_id: int) -> Run:
# Call FleetAPI
get_run_request = GetRunRequest(run_id=run_id)
get_run_response: GetRunResponse = retry_invoker.invoke(
@@ -275,7 +277,12 @@ def get_run(run_id: int) -> Tuple[str, str]:
)
# Return fab_id and fab_version
- return get_run_response.run.fab_id, get_run_response.run.fab_version
+ return Run(
+ run_id,
+ get_run_response.run.fab_id,
+ get_run_response.run.fab_version,
+ dict(get_run_response.run.override_config.items()),
+ )
try:
# Yield methods
diff --git a/src/py/flwr/client/message_handler/message_handler.py b/src/py/flwr/client/message_handler/message_handler.py
index 68326852970f..1ab84eb01468 100644
--- a/src/py/flwr/client/message_handler/message_handler.py
+++ b/src/py/flwr/client/message_handler/message_handler.py
@@ -14,7 +14,6 @@
# ==============================================================================
"""Client-side message handler."""
-
from logging import WARN
from typing import Optional, Tuple, cast
@@ -25,7 +24,7 @@
maybe_call_get_properties,
)
from flwr.client.numpy_client import NumPyClient
-from flwr.client.typing import ClientFn
+from flwr.client.typing import ClientFnExt
from flwr.common import ConfigsRecord, Context, Message, Metadata, RecordSet, log
from flwr.common.constant import MessageType, MessageTypeLegacy
from flwr.common.recordset_compat import (
@@ -90,10 +89,10 @@ def handle_control_message(message: Message) -> Tuple[Optional[Message], int]:
def handle_legacy_message_from_msgtype(
- client_fn: ClientFn, message: Message, context: Context
+ client_fn: ClientFnExt, message: Message, context: Context
) -> Message:
"""Handle legacy message in the inner most mod."""
- client = client_fn(str(message.metadata.partition_id))
+ client = client_fn(context)
# Check if NumPyClient is returend
if isinstance(client, NumPyClient):
diff --git a/src/py/flwr/client/message_handler/message_handler_test.py b/src/py/flwr/client/message_handler/message_handler_test.py
index 40907942513d..557d61ffb32a 100644
--- a/src/py/flwr/client/message_handler/message_handler_test.py
+++ b/src/py/flwr/client/message_handler/message_handler_test.py
@@ -22,7 +22,7 @@
from typing import List
from flwr.client import Client
-from flwr.client.typing import ClientFn
+from flwr.client.typing import ClientFnExt
from flwr.common import (
DEFAULT_TTL,
Code,
@@ -113,8 +113,8 @@ def evaluate(self, ins: EvaluateIns) -> EvaluateRes:
)
-def _get_client_fn(client: Client) -> ClientFn:
- def client_fn(cid: str) -> Client: # pylint: disable=unused-argument
+def _get_client_fn(client: Client) -> ClientFnExt:
+ def client_fn(contex: Context) -> Client: # pylint: disable=unused-argument
return client
return client_fn
@@ -143,7 +143,7 @@ def test_client_without_get_properties() -> None:
actual_msg = handle_legacy_message_from_msgtype(
client_fn=_get_client_fn(client),
message=message,
- context=Context(state=RecordSet()),
+ context=Context(node_id=1123, node_config={}, state=RecordSet(), run_config={}),
)
# Assert
@@ -207,7 +207,7 @@ def test_client_with_get_properties() -> None:
actual_msg = handle_legacy_message_from_msgtype(
client_fn=_get_client_fn(client),
message=message,
- context=Context(state=RecordSet()),
+ context=Context(node_id=1123, node_config={}, state=RecordSet(), run_config={}),
)
# Assert
diff --git a/src/py/flwr/client/mod/secure_aggregation/secaggplus_mod_test.py b/src/py/flwr/client/mod/secure_aggregation/secaggplus_mod_test.py
index 36844a2983a1..2832576fb4fc 100644
--- a/src/py/flwr/client/mod/secure_aggregation/secaggplus_mod_test.py
+++ b/src/py/flwr/client/mod/secure_aggregation/secaggplus_mod_test.py
@@ -73,7 +73,12 @@ def func(configs: Dict[str, ConfigsRecordValues]) -> ConfigsRecord:
def _make_ctxt() -> Context:
cfg = ConfigsRecord(SecAggPlusState().to_dict())
- return Context(RecordSet(configs_records={RECORD_KEY_STATE: cfg}))
+ return Context(
+ node_id=123,
+ node_config={},
+ state=RecordSet(configs_records={RECORD_KEY_STATE: cfg}),
+ run_config={},
+ )
def _make_set_state_fn(
diff --git a/src/py/flwr/client/mod/utils_test.py b/src/py/flwr/client/mod/utils_test.py
index 035e41639b10..a5bbd0a0bb4d 100644
--- a/src/py/flwr/client/mod/utils_test.py
+++ b/src/py/flwr/client/mod/utils_test.py
@@ -104,7 +104,7 @@ def test_multiple_mods(self) -> None:
state = RecordSet()
state.metrics_records[METRIC] = MetricsRecord({COUNTER: 0.0})
- context = Context(state=state)
+ context = Context(node_id=0, node_config={}, state=state, run_config={})
message = _get_dummy_flower_message()
# Execute
@@ -129,7 +129,7 @@ def test_filter(self) -> None:
# Prepare
footprint: List[str] = []
mock_app = make_mock_app("app", footprint)
- context = Context(state=RecordSet())
+ context = Context(node_id=0, node_config={}, state=RecordSet(), run_config={})
message = _get_dummy_flower_message()
def filter_mod(
diff --git a/src/py/flwr/client/node_state.py b/src/py/flwr/client/node_state.py
index 71681b783419..08c19967ea3d 100644
--- a/src/py/flwr/client/node_state.py
+++ b/src/py/flwr/client/node_state.py
@@ -15,27 +15,72 @@
"""Node state."""
-from typing import Any, Dict
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Dict, Optional
from flwr.common import Context, RecordSet
+from flwr.common.config import get_fused_config, get_fused_config_from_dir
+from flwr.common.typing import Run
+
+
+@dataclass()
+class RunInfo:
+ """Contains the Context and initial run_config of a Run."""
+
+ context: Context
+ initial_run_config: Dict[str, str]
class NodeState:
"""State of a node where client nodes execute runs."""
- def __init__(self) -> None:
- self._meta: Dict[str, Any] = {} # holds metadata about the node
- self.run_contexts: Dict[int, Context] = {}
+ def __init__(
+ self,
+ node_id: int,
+ node_config: Dict[str, str],
+ ) -> None:
+ self.node_id = node_id
+ self.node_config = node_config
+ self.run_infos: Dict[int, RunInfo] = {}
- def register_context(self, run_id: int) -> None:
+ def register_context(
+ self,
+ run_id: int,
+ run: Optional[Run] = None,
+ flwr_path: Optional[Path] = None,
+ app_dir: Optional[str] = None,
+ ) -> None:
"""Register new run context for this node."""
- if run_id not in self.run_contexts:
- self.run_contexts[run_id] = Context(state=RecordSet())
+ if run_id not in self.run_infos:
+ initial_run_config = {}
+ if app_dir:
+ # Load from app directory
+ app_path = Path(app_dir)
+ if app_path.is_dir():
+ override_config = run.override_config if run else {}
+ initial_run_config = get_fused_config_from_dir(
+ app_path, override_config
+ )
+ else:
+ raise ValueError("The specified `app_dir` must be a directory.")
+ else:
+ # Load from .fab
+ initial_run_config = get_fused_config(run, flwr_path) if run else {}
+ self.run_infos[run_id] = RunInfo(
+ initial_run_config=initial_run_config,
+ context=Context(
+ node_id=self.node_id,
+ node_config=self.node_config,
+ state=RecordSet(),
+ run_config=initial_run_config.copy(),
+ ),
+ )
def retrieve_context(self, run_id: int) -> Context:
"""Get run context given a run_id."""
- if run_id in self.run_contexts:
- return self.run_contexts[run_id]
+ if run_id in self.run_infos:
+ return self.run_infos[run_id].context
raise RuntimeError(
f"Context for run_id={run_id} doesn't exist."
@@ -45,4 +90,9 @@ def retrieve_context(self, run_id: int) -> Context:
def update_context(self, run_id: int, context: Context) -> None:
"""Update run context."""
- self.run_contexts[run_id] = context
+ if context.run_config != self.run_infos[run_id].initial_run_config:
+ raise ValueError(
+ "The `run_config` field of the `Context` object cannot be "
+ f"modified (run_id: {run_id})."
+ )
+ self.run_infos[run_id].context = context
diff --git a/src/py/flwr/client/node_state_tests.py b/src/py/flwr/client/node_state_tests.py
index 193f52661579..26ac4fea6855 100644
--- a/src/py/flwr/client/node_state_tests.py
+++ b/src/py/flwr/client/node_state_tests.py
@@ -41,7 +41,7 @@ def test_multirun_in_node_state() -> None:
expected_values = {0: "1", 1: "1" * 3, 2: "1" * 2, 3: "1", 5: "1"}
# NodeState
- node_state = NodeState()
+ node_state = NodeState(node_id=0, node_config={})
for task in tasks:
run_id = task.run_id
@@ -59,7 +59,8 @@ def test_multirun_in_node_state() -> None:
node_state.update_context(run_id=run_id, context=updated_state)
# Verify values
- for run_id, context in node_state.run_contexts.items():
+ for run_id, run_info in node_state.run_infos.items():
assert (
- context.state.configs_records["counter"]["count"] == expected_values[run_id]
+ run_info.context.state.configs_records["counter"]["count"]
+ == expected_values[run_id]
)
diff --git a/src/py/flwr/client/rest_client/connection.py b/src/py/flwr/client/rest_client/connection.py
index db5bd7eb6770..3e81969d898c 100644
--- a/src/py/flwr/client/rest_client/connection.py
+++ b/src/py/flwr/client/rest_client/connection.py
@@ -41,6 +41,7 @@
from flwr.common.message import Message, Metadata
from flwr.common.retry_invoker import RetryInvoker
from flwr.common.serde import message_from_taskins, message_to_taskres
+from flwr.common.typing import Run
from flwr.proto.fleet_pb2 import ( # pylint: disable=E0611
CreateNodeRequest,
CreateNodeResponse,
@@ -89,9 +90,9 @@ def http_request_response( # pylint: disable=,R0913, R0914, R0915
Tuple[
Callable[[], Optional[Message]],
Callable[[Message], None],
+ Optional[Callable[[], Optional[int]]],
Optional[Callable[[], None]],
- Optional[Callable[[], None]],
- Optional[Callable[[int], Tuple[str, str]]],
+ Optional[Callable[[int], Run]],
]
]:
"""Primitives for request/response-based interaction with a server.
@@ -236,19 +237,20 @@ def ping() -> None:
if not ping_stop_event.is_set():
ping_stop_event.wait(next_interval)
- def create_node() -> None:
+ def create_node() -> Optional[int]:
"""Set create_node."""
req = CreateNodeRequest(ping_interval=PING_DEFAULT_INTERVAL)
# Send the request
res = _request(req, CreateNodeResponse, PATH_CREATE_NODE)
if res is None:
- return
+ return None
# Remember the node and the ping-loop thread
nonlocal node, ping_thread
node = res.node
ping_thread = start_ping_loop(ping, ping_stop_event)
+ return node.node_id
def delete_node() -> None:
"""Set delete_node."""
@@ -344,16 +346,21 @@ def send(message: Message) -> None:
res.results, # pylint: disable=no-member
)
- def get_run(run_id: int) -> Tuple[str, str]:
+ def get_run(run_id: int) -> Run:
# Construct the request
req = GetRunRequest(run_id=run_id)
# Send the request
res = _request(req, GetRunResponse, PATH_GET_RUN)
if res is None:
- return "", ""
+ return Run(run_id, "", "", {})
- return res.run.fab_id, res.run.fab_version
+ return Run(
+ run_id,
+ res.run.fab_id,
+ res.run.fab_version,
+ dict(res.run.override_config.items()),
+ )
try:
# Yield methods
diff --git a/src/py/flwr/client/supernode/app.py b/src/py/flwr/client/supernode/app.py
index c9a16edeaf15..f3fb0e97805a 100644
--- a/src/py/flwr/client/supernode/app.py
+++ b/src/py/flwr/client/supernode/app.py
@@ -29,7 +29,12 @@
from flwr.client.client_app import ClientApp, LoadClientAppError
from flwr.common import EventType, event
-from flwr.common.config import get_flwr_dir, get_project_config, get_project_dir
+from flwr.common.config import (
+ get_flwr_dir,
+ get_project_config,
+ get_project_dir,
+ parse_config_args,
+)
from flwr.common.constant import (
TRANSPORT_TYPE_GRPC_ADAPTER,
TRANSPORT_TYPE_GRPC_RERE,
@@ -55,7 +60,12 @@ def run_supernode() -> None:
_warn_deprecated_server_arg(args)
root_certificates = _get_certificates(args)
- load_fn = _get_load_client_app_fn(args, multi_app=True)
+ load_fn = _get_load_client_app_fn(
+ default_app_ref=getattr(args, "client-app"),
+ dir_arg=args.dir,
+ flwr_dir_arg=args.flwr_dir,
+ multi_app=True,
+ )
authentication_keys = _try_setup_client_authentication(args)
_start_client_internal(
@@ -67,6 +77,8 @@ def run_supernode() -> None:
authentication_keys=authentication_keys,
max_retries=args.max_retries,
max_wait_time=args.max_wait_time,
+ node_config=parse_config_args([args.node_config]),
+ flwr_path=get_flwr_dir(args.flwr_dir),
)
# Graceful shutdown
@@ -86,11 +98,16 @@ def run_client_app() -> None:
_warn_deprecated_server_arg(args)
root_certificates = _get_certificates(args)
- load_fn = _get_load_client_app_fn(args, multi_app=False)
+ load_fn = _get_load_client_app_fn(
+ default_app_ref=getattr(args, "client-app"),
+ dir_arg=args.dir,
+ multi_app=False,
+ )
authentication_keys = _try_setup_client_authentication(args)
_start_client_internal(
server_address=args.superlink,
+ node_config=parse_config_args([args.node_config]),
load_client_app_fn=load_fn,
transport=args.transport,
root_certificates=root_certificates,
@@ -158,7 +175,10 @@ def _get_certificates(args: argparse.Namespace) -> Optional[bytes]:
def _get_load_client_app_fn(
- args: argparse.Namespace, multi_app: bool
+ default_app_ref: str,
+ dir_arg: str,
+ multi_app: bool,
+ flwr_dir_arg: Optional[str] = None,
) -> Callable[[str, str], ClientApp]:
"""Get the load_client_app_fn function.
@@ -170,23 +190,27 @@ def _get_load_client_app_fn(
loads a default ClientApp.
"""
# Find the Flower directory containing Flower Apps (only for multi-app)
- flwr_dir = Path("")
- if "flwr_dir" in args:
- if args.flwr_dir is None:
+ if not multi_app:
+ flwr_dir = Path("")
+ else:
+ if flwr_dir_arg is None:
flwr_dir = get_flwr_dir()
else:
- flwr_dir = Path(args.flwr_dir).absolute()
-
- sys.path.insert(0, str(flwr_dir.absolute()))
+ flwr_dir = Path(flwr_dir_arg).absolute()
- default_app_ref: str = getattr(args, "client-app")
+ inserted_path = None
if not multi_app:
log(
DEBUG,
"Flower SuperNode will load and validate ClientApp `%s`",
- getattr(args, "client-app"),
+ default_app_ref,
)
+ # Insert sys.path
+ dir_path = Path(dir_arg).absolute()
+ sys.path.insert(0, str(dir_path))
+ inserted_path = str(dir_path)
+
valid, error_msg = validate(default_app_ref)
if not valid and error_msg:
raise LoadClientAppError(error_msg) from None
@@ -195,7 +219,7 @@ def _load(fab_id: str, fab_version: str) -> ClientApp:
# If multi-app feature is disabled
if not multi_app:
# Get sys path to be inserted
- sys_path = Path(args.dir).absolute()
+ dir_path = Path(dir_arg).absolute()
# Set app reference
client_app_ref = default_app_ref
@@ -208,7 +232,7 @@ def _load(fab_id: str, fab_version: str) -> ClientApp:
log(WARN, "FAB ID is not provided; the default ClientApp will be loaded.")
# Get sys path to be inserted
- sys_path = Path(args.dir).absolute()
+ dir_path = Path(dir_arg).absolute()
# Set app reference
client_app_ref = default_app_ref
@@ -221,13 +245,21 @@ def _load(fab_id: str, fab_version: str) -> ClientApp:
raise LoadClientAppError("Failed to load ClientApp") from e
# Get sys path to be inserted
- sys_path = Path(project_dir).absolute()
+ dir_path = Path(project_dir).absolute()
# Set app reference
- client_app_ref = config["flower"]["components"]["clientapp"]
+ client_app_ref = config["tool"]["flwr"]["app"]["components"]["clientapp"]
# Set sys.path
- sys.path.insert(0, str(sys_path))
+ nonlocal inserted_path
+ if inserted_path != str(dir_path):
+ # Remove the previously inserted path
+ if inserted_path is not None:
+ sys.path.remove(inserted_path)
+ # Insert the new path
+ sys.path.insert(0, str(dir_path))
+
+ inserted_path = str(dir_path)
# Load ClientApp
log(
@@ -235,7 +267,7 @@ def _load(fab_id: str, fab_version: str) -> ClientApp:
"Loading ClientApp `%s`",
client_app_ref,
)
- client_app = load_app(client_app_ref, LoadClientAppError, sys_path)
+ client_app = load_app(client_app_ref, LoadClientAppError, dir_path)
if not isinstance(client_app, ClientApp):
raise LoadClientAppError(
@@ -344,8 +376,8 @@ def _parse_args_common(parser: argparse.ArgumentParser) -> None:
"--max-retries",
type=int,
default=None,
- help="The maximum number of times the client will try to connect to the"
- "server before giving up in case of a connection error. By default,"
+ help="The maximum number of times the client will try to reconnect to the"
+ "SuperLink before giving up in case of a connection error. By default,"
"it is set to None, meaning there is no limit to the number of tries.",
)
parser.add_argument(
@@ -353,7 +385,7 @@ def _parse_args_common(parser: argparse.ArgumentParser) -> None:
type=float,
default=None,
help="The maximum duration before the client stops trying to"
- "connect to the server in case of connection error. By default, it"
+ "connect to the SuperLink in case of connection error. By default, it"
"is set to None, meaning there is no limit to the total time.",
)
parser.add_argument(
@@ -373,6 +405,13 @@ def _parse_args_common(parser: argparse.ArgumentParser) -> None:
type=str,
help="The SuperNode's public key (as a path str) to enable authentication.",
)
+ parser.add_argument(
+ "--node-config",
+ type=str,
+ help="A comma separated list of key/value pairs (separated by `=`) to "
+ "configure the SuperNode. "
+ "E.g. --node-config 'key1=\"value1\",partition-id=0,num-partitions=100'",
+ )
def _try_setup_client_authentication(
diff --git a/src/py/flwr/client/typing.py b/src/py/flwr/client/typing.py
index 956ac7a15c05..9faed4bc7283 100644
--- a/src/py/flwr/client/typing.py
+++ b/src/py/flwr/client/typing.py
@@ -23,6 +23,7 @@
# Compatibility
ClientFn = Callable[[str], Client]
+ClientFnExt = Callable[[Context], Client]
ClientAppCallable = Callable[[Message, Context], Message]
Mod = Callable[[Message, Context, ClientAppCallable], Message]
diff --git a/src/py/flwr/common/config.py b/src/py/flwr/common/config.py
index 20de00a6fba9..789433a287e7 100644
--- a/src/py/flwr/common/config.py
+++ b/src/py/flwr/common/config.py
@@ -16,12 +16,13 @@
import os
from pathlib import Path
-from typing import Any, Dict, Optional, Union
+from typing import Any, Dict, List, Optional, Tuple, Union
import tomli
from flwr.cli.config_utils import validate_fields
from flwr.common.constant import APP_DIR, FAB_CONFIG_FILE, FLWR_HOME
+from flwr.common.typing import Run
def get_flwr_dir(provided_path: Optional[str] = None) -> Path:
@@ -30,7 +31,7 @@ def get_flwr_dir(provided_path: Optional[str] = None) -> Path:
return Path(
os.getenv(
FLWR_HOME,
- f"{os.getenv('XDG_DATA_HOME', os.getenv('HOME'))}/.flwr",
+ Path(f"{os.getenv('XDG_DATA_HOME', os.getenv('HOME'))}") / ".flwr",
)
)
return Path(provided_path).absolute()
@@ -71,3 +72,86 @@ def get_project_config(project_dir: Union[str, Path]) -> Dict[str, Any]:
)
return config
+
+
+def _fuse_dicts(
+ main_dict: Dict[str, str], override_dict: Dict[str, str]
+) -> Dict[str, str]:
+ fused_dict = main_dict.copy()
+
+ for key, value in override_dict.items():
+ if key in main_dict:
+ fused_dict[key] = value
+
+ return fused_dict
+
+
+def get_fused_config_from_dir(
+ project_dir: Path, override_config: Dict[str, str]
+) -> Dict[str, str]:
+ """Merge the overrides from a given dict with the config from a Flower App."""
+ default_config = get_project_config(project_dir)["tool"]["flwr"]["app"].get(
+ "config", {}
+ )
+ flat_default_config = flatten_dict(default_config)
+
+ return _fuse_dicts(flat_default_config, override_config)
+
+
+def get_fused_config(run: Run, flwr_dir: Optional[Path]) -> Dict[str, str]:
+ """Merge the overrides from a `Run` with the config from a FAB.
+
+ Get the config using the fab_id and the fab_version, remove the nesting by adding
+ the nested keys as prefixes separated by dots, and fuse it with the override dict.
+ """
+ if not run.fab_id or not run.fab_version:
+ return {}
+
+ project_dir = get_project_dir(run.fab_id, run.fab_version, flwr_dir)
+
+ return get_fused_config_from_dir(project_dir, run.override_config)
+
+
+def flatten_dict(raw_dict: Dict[str, Any], parent_key: str = "") -> Dict[str, str]:
+ """Flatten dict by joining nested keys with a given separator."""
+ items: List[Tuple[str, str]] = []
+ separator: str = "."
+ for k, v in raw_dict.items():
+ new_key = f"{parent_key}{separator}{k}" if parent_key else k
+ if isinstance(v, dict):
+ items.extend(flatten_dict(v, parent_key=new_key).items())
+ elif isinstance(v, str):
+ items.append((new_key, v))
+ else:
+ raise ValueError(
+ f"The value for key {k} needs to be a `str` or a `dict`.",
+ )
+ return dict(items)
+
+
+def parse_config_args(
+ config: Optional[List[str]],
+ separator: str = ",",
+) -> Dict[str, str]:
+ """Parse separator separated list of key-value pairs separated by '='."""
+ overrides: Dict[str, str] = {}
+
+ if config is None:
+ return overrides
+
+ for config_line in config:
+ if config_line:
+ overrides_list = config_line.split(separator)
+ if (
+ len(overrides_list) == 1
+ and "=" not in overrides_list
+ and overrides_list[0].endswith(".toml")
+ ):
+ with Path(overrides_list[0]).open("rb") as config_file:
+ overrides = flatten_dict(tomli.load(config_file))
+ else:
+ for kv_pair in overrides_list:
+ key, value = kv_pair.split("=")
+ overrides[key] = value
+
+ return overrides
diff --git a/src/py/flwr/common/config_test.py b/src/py/flwr/common/config_test.py
new file mode 100644
index 000000000000..e1597aa5a2ec
--- /dev/null
+++ b/src/py/flwr/common/config_test.py
@@ -0,0 +1,241 @@
+# Copyright 2024 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Test util functions handling Flower config."""
+
+import os
+import textwrap
+from pathlib import Path
+from unittest.mock import patch
+
+import pytest
+
+from .config import (
+ _fuse_dicts,
+ flatten_dict,
+ get_flwr_dir,
+ get_project_config,
+ get_project_dir,
+ parse_config_args,
+)
+
+# Mock constants
+FAB_CONFIG_FILE = "pyproject.toml"
+
+
+def test_get_flwr_dir_with_provided_path() -> None:
+ """Test get_flwr_dir with a provided valid path."""
+ provided_path = "."
+ assert get_flwr_dir(provided_path) == Path(provided_path).absolute()
+
+
+def test_get_flwr_dir_without_provided_path() -> None:
+ """Test get_flwr_dir without a provided path, using default home directory."""
+ with patch.dict(os.environ, {"HOME": "/home/user"}):
+ assert get_flwr_dir() == Path("/home/user/.flwr")
+
+
+def test_get_flwr_dir_with_flwr_home() -> None:
+ """Test get_flwr_dir with FLWR_HOME environment variable set."""
+ with patch.dict(os.environ, {"FLWR_HOME": "/custom/flwr/home"}):
+ assert get_flwr_dir() == Path("/custom/flwr/home")
+
+
+def test_get_flwr_dir_with_xdg_data_home() -> None:
+ """Test get_flwr_dir with FLWR_HOME environment variable set."""
+ with patch.dict(os.environ, {"XDG_DATA_HOME": "/custom/data/home"}):
+ assert get_flwr_dir() == Path("/custom/data/home/.flwr")
+
+
+def test_get_project_dir_invalid_fab_id() -> None:
+ """Test get_project_dir with an invalid fab_id."""
+ with pytest.raises(ValueError):
+ get_project_dir("invalid_fab_id", "1.0.0")
+
+
+def test_get_project_dir_valid() -> None:
+ """Test get_project_dir with an valid fab_id and version."""
+ app_path = get_project_dir("app_name/user", "1.0.0", flwr_dir=".")
+ assert app_path == Path("apps") / "app_name" / "user" / "1.0.0"
+
+
+def test_get_project_config_file_not_found() -> None:
+ """Test get_project_config when the configuration file is not found."""
+ with pytest.raises(FileNotFoundError):
+ get_project_config("/invalid/dir")
+
+
+def test_get_fused_config_valid(tmp_path: Path) -> None:
+ """Test get_project_config when the configuration file is not found."""
+ pyproject_toml_content = """
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [project]
+ name = "fedgpt"
+ version = "1.0.0"
+ description = ""
+ license = {text = "Apache License (2.0)"}
+ dependencies = [
+ "flwr[simulation]>=1.9.0,<2.0",
+ "numpy>=1.21.0",
+ ]
+
+ [tool.flwr.app]
+ publisher = "flwrlabs"
+
+ [tool.flwr.app.components]
+ serverapp = "fedgpt.server:app"
+ clientapp = "fedgpt.client:app"
+
+ [tool.flwr.app.config]
+ num_server_rounds = "10"
+ momentum = "0.1"
+ lr = "0.01"
+ serverapp.test = "key"
+
+ [tool.flwr.app.config.clientapp]
+ test = "key"
+ """
+ overrides = {
+ "num_server_rounds": "5",
+ "lr": "0.2",
+ "serverapp.test": "overriden",
+ }
+ expected_config = {
+ "num_server_rounds": "5",
+ "momentum": "0.1",
+ "lr": "0.2",
+ "serverapp.test": "overriden",
+ "clientapp.test": "key",
+ }
+ # Current directory
+ origin = Path.cwd()
+
+ try:
+ # Change into the temporary directory
+ os.chdir(tmp_path)
+ with open(FAB_CONFIG_FILE, "w", encoding="utf-8") as f:
+ f.write(textwrap.dedent(pyproject_toml_content))
+
+ # Execute
+ default_config = get_project_config(tmp_path)["tool"]["flwr"]["app"].get(
+ "config", {}
+ )
+
+ config = _fuse_dicts(flatten_dict(default_config), overrides)
+
+ # Assert
+ assert config == expected_config
+ finally:
+ os.chdir(origin)
+
+
+def test_get_project_config_file_valid(tmp_path: Path) -> None:
+ """Test get_project_config when the configuration file is not found."""
+ pyproject_toml_content = """
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [project]
+ name = "fedgpt"
+ version = "1.0.0"
+ description = ""
+ license = {text = "Apache License (2.0)"}
+ dependencies = [
+ "flwr[simulation]>=1.9.0,<2.0",
+ "numpy>=1.21.0",
+ ]
+
+ [tool.flwr.app]
+ publisher = "flwrlabs"
+
+ [tool.flwr.app.components]
+ serverapp = "fedgpt.server:app"
+ clientapp = "fedgpt.client:app"
+
+ [tool.flwr.app.config]
+ num_server_rounds = "10"
+ momentum = "0.1"
+ lr = "0.01"
+ """
+ expected_config = {
+ "build-system": {"build-backend": "hatchling.build", "requires": ["hatchling"]},
+ "project": {
+ "name": "fedgpt",
+ "version": "1.0.0",
+ "description": "",
+ "license": {"text": "Apache License (2.0)"},
+ "dependencies": ["flwr[simulation]>=1.9.0,<2.0", "numpy>=1.21.0"],
+ },
+ "tool": {
+ "flwr": {
+ "app": {
+ "publisher": "flwrlabs",
+ "components": {
+ "serverapp": "fedgpt.server:app",
+ "clientapp": "fedgpt.client:app",
+ },
+ "config": {
+ "num_server_rounds": "10",
+ "momentum": "0.1",
+ "lr": "0.01",
+ },
+ },
+ },
+ },
+ }
+ # Current directory
+ origin = Path.cwd()
+
+ try:
+ # Change into the temporary directory
+ os.chdir(tmp_path)
+ with open(FAB_CONFIG_FILE, "w", encoding="utf-8") as f:
+ f.write(textwrap.dedent(pyproject_toml_content))
+
+ # Execute
+ config = get_project_config(tmp_path)
+
+ # Assert
+ assert config == expected_config
+ finally:
+ os.chdir(origin)
+
+
+def test_flatten_dict() -> None:
+ """Test flatten_dict with a nested dictionary."""
+ raw_dict = {"a": {"b": {"c": "d"}}, "e": "f"}
+ expected = {"a.b.c": "d", "e": "f"}
+ assert flatten_dict(raw_dict) == expected
+
+
+def test_parse_config_args_none() -> None:
+ """Test parse_config_args with None as input."""
+ assert not parse_config_args(None)
+
+
+def test_parse_config_args_overrides() -> None:
+ """Test parse_config_args with key-value pairs."""
+ assert parse_config_args(
+ ["key1=value1,key2=value2", "key3=value3", "key4=value4,key5=value5"]
+ ) == {
+ "key1": "value1",
+ "key2": "value2",
+ "key3": "value3",
+ "key4": "value4",
+ "key5": "value5",
+ }
diff --git a/src/py/flwr/common/constant.py b/src/py/flwr/common/constant.py
index ce29b3edb30e..72256a62add7 100644
--- a/src/py/flwr/common/constant.py
+++ b/src/py/flwr/common/constant.py
@@ -46,6 +46,9 @@
PING_RANDOM_RANGE = (-0.1, 0.1)
PING_MAX_INTERVAL = 1e300
+# IDs
+RUN_ID_NUM_BYTES = 8
+NODE_ID_NUM_BYTES = 8
GRPC_ADAPTER_METADATA_FLOWER_VERSION_KEY = "flower-version"
GRPC_ADAPTER_METADATA_SHOULD_EXIT_KEY = "should-exit"
@@ -54,6 +57,9 @@
FAB_CONFIG_FILE = "pyproject.toml"
FLWR_HOME = "FLWR_HOME"
+# Constants entries in Node config for Simulation
+PARTITION_ID_KEY = "partition-id"
+NUM_PARTITIONS_KEY = "num-partitions"
GRPC_ADAPTER_METADATA_FLOWER_VERSION_KEY = "flower-version"
GRPC_ADAPTER_METADATA_SHOULD_EXIT_KEY = "should-exit"
diff --git a/src/py/flwr/common/context.py b/src/py/flwr/common/context.py
index b6349307d150..4da52ba44481 100644
--- a/src/py/flwr/common/context.py
+++ b/src/py/flwr/common/context.py
@@ -16,16 +16,22 @@
from dataclasses import dataclass
+from typing import Dict
from .record import RecordSet
@dataclass
class Context:
- """State of your run.
+ """Context of your run.
Parameters
----------
+ node_id : int
+ The ID that identifies the node.
+ node_config : Dict[str, str]
+ A config (key/value mapping) unique to the node and independent of the
+ `run_config`. This config persists across all runs this node participates in.
state : RecordSet
Holds records added by the entity in a given run and that will stay local.
This means that the data it holds will never leave the system it's running from.
@@ -33,6 +39,25 @@ class Context:
executing mods. It can also be used as a memory to access
at different points during the lifecycle of this entity (e.g. across
multiple rounds)
+ run_config : Dict[str, str]
+ A config (key/value mapping) held by the entity in a given run and that will
+ stay local. It can be used at any point during the lifecycle of this entity
+ (e.g. across multiple rounds)
"""
+ node_id: int
+ node_config: Dict[str, str]
state: RecordSet
+ run_config: Dict[str, str]
+
+ def __init__( # pylint: disable=too-many-arguments
+ self,
+ node_id: int,
+ node_config: Dict[str, str],
+ state: RecordSet,
+ run_config: Dict[str, str],
+ ) -> None:
+ self.node_id = node_id
+ self.node_config = node_config
+ self.state = state
+ self.run_config = run_config
diff --git a/src/py/flwr/common/logger.py b/src/py/flwr/common/logger.py
index 7225b0663ae7..2077f9beaca0 100644
--- a/src/py/flwr/common/logger.py
+++ b/src/py/flwr/common/logger.py
@@ -197,6 +197,44 @@ def warn_deprecated_feature(name: str) -> None:
)
+def warn_deprecated_feature_with_example(
+ deprecation_message: str, example_message: str, code_example: str
+) -> None:
+ """Warn if a feature is deprecated and show code example."""
+ log(
+ WARN,
+ """DEPRECATED FEATURE: %s
+
+ Check the following `FEATURE UPDATE` warning message for the preferred
+ new mechanism to use this feature in Flower.
+ """,
+ deprecation_message,
+ )
+ log(
+ WARN,
+ """FEATURE UPDATE: %s
+ ------------------------------------------------------------
+ %s
+ ------------------------------------------------------------
+ """,
+ example_message,
+ code_example,
+ )
+
+
+def warn_unsupported_feature(name: str) -> None:
+ """Warn the user when they use an unsupported feature."""
+ log(
+ WARN,
+ """UNSUPPORTED FEATURE: %s
+
+ This is an unsupported feature. It will be removed
+ entirely in future versions of Flower.
+ """,
+ name,
+ )
+
+
def set_logger_propagation(
child_logger: logging.Logger, value: bool = True
) -> logging.Logger:
diff --git a/src/py/flwr/common/message.py b/src/py/flwr/common/message.py
index 7f7a0e4dd995..4138fc95a591 100644
--- a/src/py/flwr/common/message.py
+++ b/src/py/flwr/common/message.py
@@ -48,10 +48,6 @@ class Metadata: # pylint: disable=too-many-instance-attributes
message_type : str
A string that encodes the action to be executed on
the receiving end.
- partition_id : Optional[int]
- An identifier that can be used when loading a particular
- data partition for a ClientApp. Making use of this identifier
- is more relevant when conducting simulations.
"""
def __init__( # pylint: disable=too-many-arguments
@@ -64,7 +60,6 @@ def __init__( # pylint: disable=too-many-arguments
group_id: str,
ttl: float,
message_type: str,
- partition_id: int | None = None,
) -> None:
var_dict = {
"_run_id": run_id,
@@ -75,7 +70,6 @@ def __init__( # pylint: disable=too-many-arguments
"_group_id": group_id,
"_ttl": ttl,
"_message_type": message_type,
- "_partition_id": partition_id,
}
self.__dict__.update(var_dict)
@@ -149,16 +143,6 @@ def message_type(self, value: str) -> None:
"""Set message_type."""
self.__dict__["_message_type"] = value
- @property
- def partition_id(self) -> int | None:
- """An identifier telling which data partition a ClientApp should use."""
- return cast(int, self.__dict__["_partition_id"])
-
- @partition_id.setter
- def partition_id(self, value: int) -> None:
- """Set partition_id."""
- self.__dict__["_partition_id"] = value
-
def __repr__(self) -> str:
"""Return a string representation of this instance."""
view = ", ".join([f"{k.lstrip('_')}={v!r}" for k, v in self.__dict__.items()])
@@ -398,5 +382,4 @@ def _create_reply_metadata(msg: Message, ttl: float) -> Metadata:
group_id=msg.metadata.group_id,
ttl=ttl,
message_type=msg.metadata.message_type,
- partition_id=msg.metadata.partition_id,
)
diff --git a/src/py/flwr/common/message_test.py b/src/py/flwr/common/message_test.py
index daee57896903..c6142cb18256 100644
--- a/src/py/flwr/common/message_test.py
+++ b/src/py/flwr/common/message_test.py
@@ -174,7 +174,6 @@ def test_create_reply(
"group_id": "group_xyz",
"ttl": 10.0,
"message_type": "request",
- "partition_id": None,
},
),
(Error, {"code": 1, "reason": "reason_098"}),
diff --git a/src/py/flwr/common/typing.py b/src/py/flwr/common/typing.py
index f51830955679..04d2cf5bbf7f 100644
--- a/src/py/flwr/common/typing.py
+++ b/src/py/flwr/common/typing.py
@@ -194,3 +194,4 @@ class Run:
run_id: int
fab_id: str
fab_version: str
+ override_config: Dict[str, str]
diff --git a/src/py/flwr/proto/common_pb2.py b/src/py/flwr/proto/common_pb2.py
new file mode 100644
index 000000000000..8a6430137f05
--- /dev/null
+++ b/src/py/flwr/proto/common_pb2.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: flwr/proto/common.proto
+# Protobuf Python Version: 4.25.0
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66lwr/proto/common.proto\x12\nflwr.protob\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'flwr.proto.common_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+ DESCRIPTOR._options = None
+# @@protoc_insertion_point(module_scope)
diff --git a/src/py/flwr/proto/common_pb2.pyi b/src/py/flwr/proto/common_pb2.pyi
new file mode 100644
index 000000000000..e08fa11c2caa
--- /dev/null
+++ b/src/py/flwr/proto/common_pb2.pyi
@@ -0,0 +1,7 @@
+"""
+@generated by mypy-protobuf. Do not edit manually!
+isort:skip_file
+"""
+import google.protobuf.descriptor
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
diff --git a/src/py/flwr/proto/common_pb2_grpc.py b/src/py/flwr/proto/common_pb2_grpc.py
new file mode 100644
index 000000000000..2daafffebfc8
--- /dev/null
+++ b/src/py/flwr/proto/common_pb2_grpc.py
@@ -0,0 +1,4 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
diff --git a/src/py/flwr/proto/common_pb2_grpc.pyi b/src/py/flwr/proto/common_pb2_grpc.pyi
new file mode 100644
index 000000000000..f3a5a087ef5d
--- /dev/null
+++ b/src/py/flwr/proto/common_pb2_grpc.pyi
@@ -0,0 +1,4 @@
+"""
+@generated by mypy-protobuf. Do not edit manually!
+isort:skip_file
+"""
diff --git a/src/py/flwr/proto/driver_pb2.py b/src/py/flwr/proto/driver_pb2.py
index a2458b445563..07975937328d 100644
--- a/src/py/flwr/proto/driver_pb2.py
+++ b/src/py/flwr/proto/driver_pb2.py
@@ -17,29 +17,33 @@
from flwr.proto import run_pb2 as flwr_dot_proto_dot_run__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66lwr/proto/driver.proto\x12\nflwr.proto\x1a\x15\x66lwr/proto/node.proto\x1a\x15\x66lwr/proto/task.proto\x1a\x14\x66lwr/proto/run.proto\"7\n\x10\x43reateRunRequest\x12\x0e\n\x06\x66\x61\x62_id\x18\x01 \x01(\t\x12\x13\n\x0b\x66\x61\x62_version\x18\x02 \x01(\t\"#\n\x11\x43reateRunResponse\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"!\n\x0fGetNodesRequest\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"3\n\x10GetNodesResponse\x12\x1f\n\x05nodes\x18\x01 \x03(\x0b\x32\x10.flwr.proto.Node\"@\n\x12PushTaskInsRequest\x12*\n\rtask_ins_list\x18\x01 \x03(\x0b\x32\x13.flwr.proto.TaskIns\"\'\n\x13PushTaskInsResponse\x12\x10\n\x08task_ids\x18\x02 \x03(\t\"F\n\x12PullTaskResRequest\x12\x1e\n\x04node\x18\x01 \x01(\x0b\x32\x10.flwr.proto.Node\x12\x10\n\x08task_ids\x18\x02 \x03(\t\"A\n\x13PullTaskResResponse\x12*\n\rtask_res_list\x18\x01 \x03(\x0b\x32\x13.flwr.proto.TaskRes2\x84\x03\n\x06\x44river\x12J\n\tCreateRun\x12\x1c.flwr.proto.CreateRunRequest\x1a\x1d.flwr.proto.CreateRunResponse\"\x00\x12G\n\x08GetNodes\x12\x1b.flwr.proto.GetNodesRequest\x1a\x1c.flwr.proto.GetNodesResponse\"\x00\x12P\n\x0bPushTaskIns\x12\x1e.flwr.proto.PushTaskInsRequest\x1a\x1f.flwr.proto.PushTaskInsResponse\"\x00\x12P\n\x0bPullTaskRes\x12\x1e.flwr.proto.PullTaskResRequest\x1a\x1f.flwr.proto.PullTaskResResponse\"\x00\x12\x41\n\x06GetRun\x12\x19.flwr.proto.GetRunRequest\x1a\x1a.flwr.proto.GetRunResponse\"\x00\x62\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17\x66lwr/proto/driver.proto\x12\nflwr.proto\x1a\x15\x66lwr/proto/node.proto\x1a\x15\x66lwr/proto/task.proto\x1a\x14\x66lwr/proto/run.proto\"\xb9\x01\n\x10\x43reateRunRequest\x12\x0e\n\x06\x66\x61\x62_id\x18\x01 \x01(\t\x12\x13\n\x0b\x66\x61\x62_version\x18\x02 \x01(\t\x12I\n\x0foverride_config\x18\x03 \x03(\x0b\x32\x30.flwr.proto.CreateRunRequest.OverrideConfigEntry\x1a\x35\n\x13OverrideConfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"#\n\x11\x43reateRunResponse\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"!\n\x0fGetNodesRequest\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"3\n\x10GetNodesResponse\x12\x1f\n\x05nodes\x18\x01 \x03(\x0b\x32\x10.flwr.proto.Node\"@\n\x12PushTaskInsRequest\x12*\n\rtask_ins_list\x18\x01 \x03(\x0b\x32\x13.flwr.proto.TaskIns\"\'\n\x13PushTaskInsResponse\x12\x10\n\x08task_ids\x18\x02 \x03(\t\"F\n\x12PullTaskResRequest\x12\x1e\n\x04node\x18\x01 \x01(\x0b\x32\x10.flwr.proto.Node\x12\x10\n\x08task_ids\x18\x02 \x03(\t\"A\n\x13PullTaskResResponse\x12*\n\rtask_res_list\x18\x01 \x03(\x0b\x32\x13.flwr.proto.TaskRes2\x84\x03\n\x06\x44river\x12J\n\tCreateRun\x12\x1c.flwr.proto.CreateRunRequest\x1a\x1d.flwr.proto.CreateRunResponse\"\x00\x12G\n\x08GetNodes\x12\x1b.flwr.proto.GetNodesRequest\x1a\x1c.flwr.proto.GetNodesResponse\"\x00\x12P\n\x0bPushTaskIns\x12\x1e.flwr.proto.PushTaskInsRequest\x1a\x1f.flwr.proto.PushTaskInsResponse\"\x00\x12P\n\x0bPullTaskRes\x12\x1e.flwr.proto.PullTaskResRequest\x1a\x1f.flwr.proto.PullTaskResResponse\"\x00\x12\x41\n\x06GetRun\x12\x19.flwr.proto.GetRunRequest\x1a\x1a.flwr.proto.GetRunResponse\"\x00\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'flwr.proto.driver_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
- _globals['_CREATERUNREQUEST']._serialized_start=107
- _globals['_CREATERUNREQUEST']._serialized_end=162
- _globals['_CREATERUNRESPONSE']._serialized_start=164
- _globals['_CREATERUNRESPONSE']._serialized_end=199
- _globals['_GETNODESREQUEST']._serialized_start=201
- _globals['_GETNODESREQUEST']._serialized_end=234
- _globals['_GETNODESRESPONSE']._serialized_start=236
- _globals['_GETNODESRESPONSE']._serialized_end=287
- _globals['_PUSHTASKINSREQUEST']._serialized_start=289
- _globals['_PUSHTASKINSREQUEST']._serialized_end=353
- _globals['_PUSHTASKINSRESPONSE']._serialized_start=355
- _globals['_PUSHTASKINSRESPONSE']._serialized_end=394
- _globals['_PULLTASKRESREQUEST']._serialized_start=396
- _globals['_PULLTASKRESREQUEST']._serialized_end=466
- _globals['_PULLTASKRESRESPONSE']._serialized_start=468
- _globals['_PULLTASKRESRESPONSE']._serialized_end=533
- _globals['_DRIVER']._serialized_start=536
- _globals['_DRIVER']._serialized_end=924
+ _globals['_CREATERUNREQUEST_OVERRIDECONFIGENTRY']._options = None
+ _globals['_CREATERUNREQUEST_OVERRIDECONFIGENTRY']._serialized_options = b'8\001'
+ _globals['_CREATERUNREQUEST']._serialized_start=108
+ _globals['_CREATERUNREQUEST']._serialized_end=293
+ _globals['_CREATERUNREQUEST_OVERRIDECONFIGENTRY']._serialized_start=240
+ _globals['_CREATERUNREQUEST_OVERRIDECONFIGENTRY']._serialized_end=293
+ _globals['_CREATERUNRESPONSE']._serialized_start=295
+ _globals['_CREATERUNRESPONSE']._serialized_end=330
+ _globals['_GETNODESREQUEST']._serialized_start=332
+ _globals['_GETNODESREQUEST']._serialized_end=365
+ _globals['_GETNODESRESPONSE']._serialized_start=367
+ _globals['_GETNODESRESPONSE']._serialized_end=418
+ _globals['_PUSHTASKINSREQUEST']._serialized_start=420
+ _globals['_PUSHTASKINSREQUEST']._serialized_end=484
+ _globals['_PUSHTASKINSRESPONSE']._serialized_start=486
+ _globals['_PUSHTASKINSRESPONSE']._serialized_end=525
+ _globals['_PULLTASKRESREQUEST']._serialized_start=527
+ _globals['_PULLTASKRESREQUEST']._serialized_end=597
+ _globals['_PULLTASKRESRESPONSE']._serialized_start=599
+ _globals['_PULLTASKRESRESPONSE']._serialized_end=664
+ _globals['_DRIVER']._serialized_start=667
+ _globals['_DRIVER']._serialized_end=1055
# @@protoc_insertion_point(module_scope)
diff --git a/src/py/flwr/proto/driver_pb2.pyi b/src/py/flwr/proto/driver_pb2.pyi
index 2d8d11fb59a3..95d4c9785ff1 100644
--- a/src/py/flwr/proto/driver_pb2.pyi
+++ b/src/py/flwr/proto/driver_pb2.pyi
@@ -16,16 +16,33 @@ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class CreateRunRequest(google.protobuf.message.Message):
"""CreateRun"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+ class OverrideConfigEntry(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+ KEY_FIELD_NUMBER: builtins.int
+ VALUE_FIELD_NUMBER: builtins.int
+ key: typing.Text
+ value: typing.Text
+ def __init__(self,
+ *,
+ key: typing.Text = ...,
+ value: typing.Text = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+
FAB_ID_FIELD_NUMBER: builtins.int
FAB_VERSION_FIELD_NUMBER: builtins.int
+ OVERRIDE_CONFIG_FIELD_NUMBER: builtins.int
fab_id: typing.Text
fab_version: typing.Text
+ @property
+ def override_config(self) -> google.protobuf.internal.containers.ScalarMap[typing.Text, typing.Text]: ...
def __init__(self,
*,
fab_id: typing.Text = ...,
fab_version: typing.Text = ...,
+ override_config: typing.Optional[typing.Mapping[typing.Text, typing.Text]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["fab_id",b"fab_id","fab_version",b"fab_version"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["fab_id",b"fab_id","fab_version",b"fab_version","override_config",b"override_config"]) -> None: ...
global___CreateRunRequest = CreateRunRequest
class CreateRunResponse(google.protobuf.message.Message):
diff --git a/src/py/flwr/proto/exec_pb2.py b/src/py/flwr/proto/exec_pb2.py
index 7b037a9454c0..4aee0f4a882f 100644
--- a/src/py/flwr/proto/exec_pb2.py
+++ b/src/py/flwr/proto/exec_pb2.py
@@ -14,21 +14,25 @@
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x66lwr/proto/exec.proto\x12\nflwr.proto\"#\n\x0fStartRunRequest\x12\x10\n\x08\x66\x61\x62_file\x18\x01 \x01(\x0c\"\"\n\x10StartRunResponse\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"#\n\x11StreamLogsRequest\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"(\n\x12StreamLogsResponse\x12\x12\n\nlog_output\x18\x01 \x01(\t2\xa0\x01\n\x04\x45xec\x12G\n\x08StartRun\x12\x1b.flwr.proto.StartRunRequest\x1a\x1c.flwr.proto.StartRunResponse\"\x00\x12O\n\nStreamLogs\x12\x1d.flwr.proto.StreamLogsRequest\x1a\x1e.flwr.proto.StreamLogsResponse\"\x00\x30\x01\x62\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15\x66lwr/proto/exec.proto\x12\nflwr.proto\"\xa4\x01\n\x0fStartRunRequest\x12\x10\n\x08\x66\x61\x62_file\x18\x01 \x01(\x0c\x12H\n\x0foverride_config\x18\x02 \x03(\x0b\x32/.flwr.proto.StartRunRequest.OverrideConfigEntry\x1a\x35\n\x13OverrideConfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\"\n\x10StartRunResponse\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"#\n\x11StreamLogsRequest\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\"(\n\x12StreamLogsResponse\x12\x12\n\nlog_output\x18\x01 \x01(\t2\xa0\x01\n\x04\x45xec\x12G\n\x08StartRun\x12\x1b.flwr.proto.StartRunRequest\x1a\x1c.flwr.proto.StartRunResponse\"\x00\x12O\n\nStreamLogs\x12\x1d.flwr.proto.StreamLogsRequest\x1a\x1e.flwr.proto.StreamLogsResponse\"\x00\x30\x01\x62\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'flwr.proto.exec_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
- _globals['_STARTRUNREQUEST']._serialized_start=37
- _globals['_STARTRUNREQUEST']._serialized_end=72
- _globals['_STARTRUNRESPONSE']._serialized_start=74
- _globals['_STARTRUNRESPONSE']._serialized_end=108
- _globals['_STREAMLOGSREQUEST']._serialized_start=110
- _globals['_STREAMLOGSREQUEST']._serialized_end=145
- _globals['_STREAMLOGSRESPONSE']._serialized_start=147
- _globals['_STREAMLOGSRESPONSE']._serialized_end=187
- _globals['_EXEC']._serialized_start=190
- _globals['_EXEC']._serialized_end=350
+ _globals['_STARTRUNREQUEST_OVERRIDECONFIGENTRY']._options = None
+ _globals['_STARTRUNREQUEST_OVERRIDECONFIGENTRY']._serialized_options = b'8\001'
+ _globals['_STARTRUNREQUEST']._serialized_start=38
+ _globals['_STARTRUNREQUEST']._serialized_end=202
+ _globals['_STARTRUNREQUEST_OVERRIDECONFIGENTRY']._serialized_start=149
+ _globals['_STARTRUNREQUEST_OVERRIDECONFIGENTRY']._serialized_end=202
+ _globals['_STARTRUNRESPONSE']._serialized_start=204
+ _globals['_STARTRUNRESPONSE']._serialized_end=238
+ _globals['_STREAMLOGSREQUEST']._serialized_start=240
+ _globals['_STREAMLOGSREQUEST']._serialized_end=275
+ _globals['_STREAMLOGSRESPONSE']._serialized_start=277
+ _globals['_STREAMLOGSRESPONSE']._serialized_end=317
+ _globals['_EXEC']._serialized_start=320
+ _globals['_EXEC']._serialized_end=480
# @@protoc_insertion_point(module_scope)
diff --git a/src/py/flwr/proto/exec_pb2.pyi b/src/py/flwr/proto/exec_pb2.pyi
index 466812808da8..8065fc1de1b4 100644
--- a/src/py/flwr/proto/exec_pb2.pyi
+++ b/src/py/flwr/proto/exec_pb2.pyi
@@ -4,6 +4,7 @@ isort:skip_file
"""
import builtins
import google.protobuf.descriptor
+import google.protobuf.internal.containers
import google.protobuf.message
import typing
import typing_extensions
@@ -12,13 +13,30 @@ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class StartRunRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+ class OverrideConfigEntry(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+ KEY_FIELD_NUMBER: builtins.int
+ VALUE_FIELD_NUMBER: builtins.int
+ key: typing.Text
+ value: typing.Text
+ def __init__(self,
+ *,
+ key: typing.Text = ...,
+ value: typing.Text = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+
FAB_FILE_FIELD_NUMBER: builtins.int
+ OVERRIDE_CONFIG_FIELD_NUMBER: builtins.int
fab_file: builtins.bytes
+ @property
+ def override_config(self) -> google.protobuf.internal.containers.ScalarMap[typing.Text, typing.Text]: ...
def __init__(self,
*,
fab_file: builtins.bytes = ...,
+ override_config: typing.Optional[typing.Mapping[typing.Text, typing.Text]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["fab_file",b"fab_file"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["fab_file",b"fab_file","override_config",b"override_config"]) -> None: ...
global___StartRunRequest = StartRunRequest
class StartRunResponse(google.protobuf.message.Message):
diff --git a/src/py/flwr/proto/run_pb2.py b/src/py/flwr/proto/run_pb2.py
index 13f06e7169aa..d6531201f647 100644
--- a/src/py/flwr/proto/run_pb2.py
+++ b/src/py/flwr/proto/run_pb2.py
@@ -14,17 +14,21 @@
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14\x66lwr/proto/run.proto\x12\nflwr.proto\":\n\x03Run\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\x12\x0e\n\x06\x66\x61\x62_id\x18\x02 \x01(\t\x12\x13\n\x0b\x66\x61\x62_version\x18\x03 \x01(\t\"\x1f\n\rGetRunRequest\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\".\n\x0eGetRunResponse\x12\x1c\n\x03run\x18\x01 \x01(\x0b\x32\x0f.flwr.proto.Runb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14\x66lwr/proto/run.proto\x12\nflwr.proto\"\xaf\x01\n\x03Run\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\x12\x0e\n\x06\x66\x61\x62_id\x18\x02 \x01(\t\x12\x13\n\x0b\x66\x61\x62_version\x18\x03 \x01(\t\x12<\n\x0foverride_config\x18\x04 \x03(\x0b\x32#.flwr.proto.Run.OverrideConfigEntry\x1a\x35\n\x13OverrideConfigEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x1f\n\rGetRunRequest\x12\x0e\n\x06run_id\x18\x01 \x01(\x12\".\n\x0eGetRunResponse\x12\x1c\n\x03run\x18\x01 \x01(\x0b\x32\x0f.flwr.proto.Runb\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'flwr.proto.run_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
- _globals['_RUN']._serialized_start=36
- _globals['_RUN']._serialized_end=94
- _globals['_GETRUNREQUEST']._serialized_start=96
- _globals['_GETRUNREQUEST']._serialized_end=127
- _globals['_GETRUNRESPONSE']._serialized_start=129
- _globals['_GETRUNRESPONSE']._serialized_end=175
+ _globals['_RUN_OVERRIDECONFIGENTRY']._options = None
+ _globals['_RUN_OVERRIDECONFIGENTRY']._serialized_options = b'8\001'
+ _globals['_RUN']._serialized_start=37
+ _globals['_RUN']._serialized_end=212
+ _globals['_RUN_OVERRIDECONFIGENTRY']._serialized_start=159
+ _globals['_RUN_OVERRIDECONFIGENTRY']._serialized_end=212
+ _globals['_GETRUNREQUEST']._serialized_start=214
+ _globals['_GETRUNREQUEST']._serialized_end=245
+ _globals['_GETRUNRESPONSE']._serialized_start=247
+ _globals['_GETRUNRESPONSE']._serialized_end=293
# @@protoc_insertion_point(module_scope)
diff --git a/src/py/flwr/proto/run_pb2.pyi b/src/py/flwr/proto/run_pb2.pyi
index 401d27855a41..3c58c04c1734 100644
--- a/src/py/flwr/proto/run_pb2.pyi
+++ b/src/py/flwr/proto/run_pb2.pyi
@@ -4,6 +4,7 @@ isort:skip_file
"""
import builtins
import google.protobuf.descriptor
+import google.protobuf.internal.containers
import google.protobuf.message
import typing
import typing_extensions
@@ -12,19 +13,36 @@ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class Run(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+ class OverrideConfigEntry(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+ KEY_FIELD_NUMBER: builtins.int
+ VALUE_FIELD_NUMBER: builtins.int
+ key: typing.Text
+ value: typing.Text
+ def __init__(self,
+ *,
+ key: typing.Text = ...,
+ value: typing.Text = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+
RUN_ID_FIELD_NUMBER: builtins.int
FAB_ID_FIELD_NUMBER: builtins.int
FAB_VERSION_FIELD_NUMBER: builtins.int
+ OVERRIDE_CONFIG_FIELD_NUMBER: builtins.int
run_id: builtins.int
fab_id: typing.Text
fab_version: typing.Text
+ @property
+ def override_config(self) -> google.protobuf.internal.containers.ScalarMap[typing.Text, typing.Text]: ...
def __init__(self,
*,
run_id: builtins.int = ...,
fab_id: typing.Text = ...,
fab_version: typing.Text = ...,
+ override_config: typing.Optional[typing.Mapping[typing.Text, typing.Text]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["fab_id",b"fab_id","fab_version",b"fab_version","run_id",b"run_id"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["fab_id",b"fab_id","fab_version",b"fab_version","override_config",b"override_config","run_id",b"run_id"]) -> None: ...
global___Run = Run
class GetRunRequest(google.protobuf.message.Message):
diff --git a/src/py/flwr/server/__init__.py b/src/py/flwr/server/__init__.py
index 546ce263e2d5..896b46298327 100644
--- a/src/py/flwr/server/__init__.py
+++ b/src/py/flwr/server/__init__.py
@@ -28,6 +28,7 @@
from .server import Server as Server
from .server_app import ServerApp as ServerApp
from .server_config import ServerConfig as ServerConfig
+from .serverapp_components import ServerAppComponents as ServerAppComponents
__all__ = [
"ClientManager",
@@ -36,6 +37,7 @@
"LegacyContext",
"Server",
"ServerApp",
+ "ServerAppComponents",
"ServerConfig",
"SimpleClientManager",
"run_server_app",
diff --git a/src/py/flwr/server/compat/legacy_context.py b/src/py/flwr/server/compat/legacy_context.py
index 0b00c98bb16d..ee09d79012dc 100644
--- a/src/py/flwr/server/compat/legacy_context.py
+++ b/src/py/flwr/server/compat/legacy_context.py
@@ -52,4 +52,4 @@ def __init__(
self.strategy = strategy
self.client_manager = client_manager
self.history = History()
- super().__init__(state)
+ super().__init__(node_id=0, node_config={}, state=state, run_config={})
diff --git a/src/py/flwr/server/driver/grpc_driver.py b/src/py/flwr/server/driver/grpc_driver.py
index e614df659e3f..84da5882eb73 100644
--- a/src/py/flwr/server/driver/grpc_driver.py
+++ b/src/py/flwr/server/driver/grpc_driver.py
@@ -16,8 +16,8 @@
import time
import warnings
-from logging import DEBUG, ERROR, WARNING
-from typing import Iterable, List, Optional, Tuple, cast
+from logging import DEBUG, WARNING
+from typing import Iterable, List, Optional, cast
import grpc
@@ -27,8 +27,6 @@
from flwr.common.serde import message_from_taskres, message_to_taskins
from flwr.common.typing import Run
from flwr.proto.driver_pb2 import ( # pylint: disable=E0611
- CreateRunRequest,
- CreateRunResponse,
GetNodesRequest,
GetNodesResponse,
PullTaskResRequest,
@@ -53,167 +51,103 @@
"""
-class GrpcDriverStub:
- """`GrpcDriverStub` provides access to the gRPC Driver API/service.
+class GrpcDriver(Driver):
+ """`GrpcDriver` provides an interface to the Driver API.
Parameters
----------
- driver_service_address : Optional[str]
- The IPv4 or IPv6 address of the Driver API server.
- Defaults to `"[::]:9091"`.
+ run_id : int
+ The identifier of the run.
+ driver_service_address : str (default: "[::]:9091")
+ The address (URL, IPv6, IPv4) of the SuperLink Driver API service.
root_certificates : Optional[bytes] (default: None)
The PEM-encoded root certificates as a byte string.
If provided, a secure connection using the certificates will be
established to an SSL-enabled Flower server.
"""
- def __init__(
+ def __init__( # pylint: disable=too-many-arguments
self,
+ run_id: int,
driver_service_address: str = DEFAULT_SERVER_ADDRESS_DRIVER,
root_certificates: Optional[bytes] = None,
) -> None:
- self.driver_service_address = driver_service_address
- self.root_certificates = root_certificates
- self.channel: Optional[grpc.Channel] = None
- self.stub: Optional[DriverStub] = None
+ self._run_id = run_id
+ self._addr = driver_service_address
+ self._cert = root_certificates
+ self._run: Optional[Run] = None
+ self._grpc_stub: Optional[DriverStub] = None
+ self._channel: Optional[grpc.Channel] = None
+ self.node = Node(node_id=0, anonymous=True)
+
+ @property
+ def _is_connected(self) -> bool:
+ """Check if connected to the Driver API server."""
+ return self._channel is not None
- def is_connected(self) -> bool:
- """Return True if connected to the Driver API server, otherwise False."""
- return self.channel is not None
+ def _connect(self) -> None:
+ """Connect to the Driver API.
- def connect(self) -> None:
- """Connect to the Driver API."""
+ This will not call GetRun.
+ """
event(EventType.DRIVER_CONNECT)
- if self.channel is not None or self.stub is not None:
+ if self._is_connected:
log(WARNING, "Already connected")
return
- self.channel = create_channel(
- server_address=self.driver_service_address,
- insecure=(self.root_certificates is None),
- root_certificates=self.root_certificates,
+ self._channel = create_channel(
+ server_address=self._addr,
+ insecure=(self._cert is None),
+ root_certificates=self._cert,
)
- self.stub = DriverStub(self.channel)
- log(DEBUG, "[Driver] Connected to %s", self.driver_service_address)
+ self._grpc_stub = DriverStub(self._channel)
+ log(DEBUG, "[Driver] Connected to %s", self._addr)
- def disconnect(self) -> None:
+ def _disconnect(self) -> None:
"""Disconnect from the Driver API."""
event(EventType.DRIVER_DISCONNECT)
- if self.channel is None or self.stub is None:
+ if not self._is_connected:
log(DEBUG, "Already disconnected")
return
- channel = self.channel
- self.channel = None
- self.stub = None
+ channel: grpc.Channel = self._channel
+ self._channel = None
+ self._grpc_stub = None
channel.close()
log(DEBUG, "[Driver] Disconnected")
- def create_run(self, req: CreateRunRequest) -> CreateRunResponse:
- """Request for run ID."""
- # Check if channel is open
- if self.stub is None:
- log(ERROR, ERROR_MESSAGE_DRIVER_NOT_CONNECTED)
- raise ConnectionError("`GrpcDriverStub` instance not connected")
-
- # Call Driver API
- res: CreateRunResponse = self.stub.CreateRun(request=req)
- return res
-
- def get_run(self, req: GetRunRequest) -> GetRunResponse:
- """Get run information."""
- # Check if channel is open
- if self.stub is None:
- log(ERROR, ERROR_MESSAGE_DRIVER_NOT_CONNECTED)
- raise ConnectionError("`GrpcDriverStub` instance not connected")
-
- # Call gRPC Driver API
- res: GetRunResponse = self.stub.GetRun(request=req)
- return res
-
- def get_nodes(self, req: GetNodesRequest) -> GetNodesResponse:
- """Get client IDs."""
- # Check if channel is open
- if self.stub is None:
- log(ERROR, ERROR_MESSAGE_DRIVER_NOT_CONNECTED)
- raise ConnectionError("`GrpcDriverStub` instance not connected")
-
- # Call gRPC Driver API
- res: GetNodesResponse = self.stub.GetNodes(request=req)
- return res
-
- def push_task_ins(self, req: PushTaskInsRequest) -> PushTaskInsResponse:
- """Schedule tasks."""
- # Check if channel is open
- if self.stub is None:
- log(ERROR, ERROR_MESSAGE_DRIVER_NOT_CONNECTED)
- raise ConnectionError("`GrpcDriverStub` instance not connected")
-
- # Call gRPC Driver API
- res: PushTaskInsResponse = self.stub.PushTaskIns(request=req)
- return res
-
- def pull_task_res(self, req: PullTaskResRequest) -> PullTaskResResponse:
- """Get task results."""
- # Check if channel is open
- if self.stub is None:
- log(ERROR, ERROR_MESSAGE_DRIVER_NOT_CONNECTED)
- raise ConnectionError("`GrpcDriverStub` instance not connected")
-
- # Call Driver API
- res: PullTaskResResponse = self.stub.PullTaskRes(request=req)
- return res
-
-
-class GrpcDriver(Driver):
- """`Driver` class provides an interface to the Driver API.
-
- Parameters
- ----------
- run_id : int
- The identifier of the run.
- stub : Optional[GrpcDriverStub] (default: None)
- The ``GrpcDriverStub`` instance used to communicate with the SuperLink.
- If None, an instance connected to "[::]:9091" will be created.
- """
-
- def __init__( # pylint: disable=too-many-arguments
- self,
- run_id: int,
- stub: Optional[GrpcDriverStub] = None,
- ) -> None:
- self._run_id = run_id
- self._run: Optional[Run] = None
- self.stub = stub if stub is not None else GrpcDriverStub()
- self.node = Node(node_id=0, anonymous=True)
+ def _init_run(self) -> None:
+ # Check if is initialized
+ if self._run is not None:
+ return
+ # Get the run info
+ req = GetRunRequest(run_id=self._run_id)
+ res: GetRunResponse = self._stub.GetRun(req)
+ if not res.HasField("run"):
+ raise RuntimeError(f"Cannot find the run with ID: {self._run_id}")
+ self._run = Run(
+ run_id=res.run.run_id,
+ fab_id=res.run.fab_id,
+ fab_version=res.run.fab_version,
+ override_config=dict(res.run.override_config.items()),
+ )
@property
def run(self) -> Run:
"""Run information."""
- self._get_stub_and_run_id()
- return Run(**vars(cast(Run, self._run)))
+ self._init_run()
+ return Run(**vars(self._run))
- def _get_stub_and_run_id(self) -> Tuple[GrpcDriverStub, int]:
- # Check if is initialized
- if self._run is None:
- # Connect
- if not self.stub.is_connected():
- self.stub.connect()
- # Get the run info
- req = GetRunRequest(run_id=self._run_id)
- res = self.stub.get_run(req)
- if not res.HasField("run"):
- raise RuntimeError(f"Cannot find the run with ID: {self._run_id}")
- self._run = Run(
- run_id=res.run.run_id,
- fab_id=res.run.fab_id,
- fab_version=res.run.fab_version,
- )
-
- return self.stub, self._run.run_id
+ @property
+ def _stub(self) -> DriverStub:
+ """Driver stub."""
+ if not self._is_connected:
+ self._connect()
+ return cast(DriverStub, self._grpc_stub)
def _check_message(self, message: Message) -> None:
# Check if the message is valid
if not (
- message.metadata.run_id == cast(Run, self._run).run_id
+ # Assume self._run being initialized
+ message.metadata.run_id == self._run_id
and message.metadata.src_node_id == self.node.node_id
and message.metadata.message_id == ""
and message.metadata.reply_to_message == ""
@@ -234,7 +168,7 @@ def create_message( # pylint: disable=too-many-arguments
This method constructs a new `Message` with given content and metadata.
The `run_id` and `src_node_id` will be set automatically.
"""
- _, run_id = self._get_stub_and_run_id()
+ self._init_run()
if ttl:
warnings.warn(
"A custom TTL was set, but note that the SuperLink does not enforce "
@@ -245,7 +179,7 @@ def create_message( # pylint: disable=too-many-arguments
ttl_ = DEFAULT_TTL if ttl is None else ttl
metadata = Metadata(
- run_id=run_id,
+ run_id=self._run_id,
message_id="", # Will be set by the server
src_node_id=self.node.node_id,
dst_node_id=dst_node_id,
@@ -258,9 +192,11 @@ def create_message( # pylint: disable=too-many-arguments
def get_node_ids(self) -> List[int]:
"""Get node IDs."""
- stub, run_id = self._get_stub_and_run_id()
+ self._init_run()
# Call GrpcDriverStub method
- res = stub.get_nodes(GetNodesRequest(run_id=run_id))
+ res: GetNodesResponse = self._stub.GetNodes(
+ GetNodesRequest(run_id=self._run_id)
+ )
return [node.node_id for node in res.nodes]
def push_messages(self, messages: Iterable[Message]) -> Iterable[str]:
@@ -269,7 +205,7 @@ def push_messages(self, messages: Iterable[Message]) -> Iterable[str]:
This method takes an iterable of messages and sends each message
to the node specified in `dst_node_id`.
"""
- stub, _ = self._get_stub_and_run_id()
+ self._init_run()
# Construct TaskIns
task_ins_list: List[TaskIns] = []
for msg in messages:
@@ -280,7 +216,9 @@ def push_messages(self, messages: Iterable[Message]) -> Iterable[str]:
# Add to list
task_ins_list.append(taskins)
# Call GrpcDriverStub method
- res = stub.push_task_ins(PushTaskInsRequest(task_ins_list=task_ins_list))
+ res: PushTaskInsResponse = self._stub.PushTaskIns(
+ PushTaskInsRequest(task_ins_list=task_ins_list)
+ )
return list(res.task_ids)
def pull_messages(self, message_ids: Iterable[str]) -> Iterable[Message]:
@@ -289,9 +227,9 @@ def pull_messages(self, message_ids: Iterable[str]) -> Iterable[Message]:
This method is used to collect messages from the SuperLink that correspond to a
set of given message IDs.
"""
- stub, _ = self._get_stub_and_run_id()
+ self._init_run()
# Pull TaskRes
- res = stub.pull_task_res(
+ res: PullTaskResResponse = self._stub.PullTaskRes(
PullTaskResRequest(node=self.node, task_ids=message_ids)
)
# Convert TaskRes to Message
@@ -331,7 +269,7 @@ def send_and_receive(
def close(self) -> None:
"""Disconnect from the SuperLink if connected."""
# Check if `connect` was called before
- if not self.stub.is_connected():
+ if not self._is_connected:
return
# Disconnect
- self.stub.disconnect()
+ self._disconnect()
diff --git a/src/py/flwr/server/driver/grpc_driver_test.py b/src/py/flwr/server/driver/grpc_driver_test.py
index 72efc5f8b2c6..fdf3c676190d 100644
--- a/src/py/flwr/server/driver/grpc_driver_test.py
+++ b/src/py/flwr/server/driver/grpc_driver_test.py
@@ -41,10 +41,13 @@ def setUp(self) -> None:
mock_response = Mock(
run=Run(run_id=61016, fab_id="mock/mock", fab_version="v1.0.0")
)
- self.mock_grpc_driver_stub = Mock()
- self.mock_grpc_driver_stub.get_run.return_value = mock_response
- self.mock_grpc_driver_stub.HasField.return_value = True
- self.driver = GrpcDriver(run_id=61016, stub=self.mock_grpc_driver_stub)
+ self.mock_stub = Mock()
+ self.mock_channel = Mock()
+ self.mock_stub.GetRun.return_value = mock_response
+ mock_response.HasField.return_value = True
+ self.driver = GrpcDriver(run_id=61016)
+ self.driver._grpc_stub = self.mock_stub # pylint: disable=protected-access
+ self.driver._channel = self.mock_channel # pylint: disable=protected-access
def test_init_grpc_driver(self) -> None:
"""Test GrpcDriverStub initialization."""
@@ -52,21 +55,21 @@ def test_init_grpc_driver(self) -> None:
self.assertEqual(self.driver.run.run_id, 61016)
self.assertEqual(self.driver.run.fab_id, "mock/mock")
self.assertEqual(self.driver.run.fab_version, "v1.0.0")
- self.mock_grpc_driver_stub.get_run.assert_called_once()
+ self.mock_stub.GetRun.assert_called_once()
def test_get_nodes(self) -> None:
"""Test retrieval of nodes."""
# Prepare
mock_response = Mock()
mock_response.nodes = [Mock(node_id=404), Mock(node_id=200)]
- self.mock_grpc_driver_stub.get_nodes.return_value = mock_response
+ self.mock_stub.GetNodes.return_value = mock_response
# Execute
node_ids = self.driver.get_node_ids()
- args, kwargs = self.mock_grpc_driver_stub.get_nodes.call_args
+ args, kwargs = self.mock_stub.GetNodes.call_args
# Assert
- self.mock_grpc_driver_stub.get_run.assert_called_once()
+ self.mock_stub.GetRun.assert_called_once()
self.assertEqual(len(args), 1)
self.assertEqual(len(kwargs), 0)
self.assertIsInstance(args[0], GetNodesRequest)
@@ -77,7 +80,7 @@ def test_push_messages_valid(self) -> None:
"""Test pushing valid messages."""
# Prepare
mock_response = Mock(task_ids=["id1", "id2"])
- self.mock_grpc_driver_stub.push_task_ins.return_value = mock_response
+ self.mock_stub.PushTaskIns.return_value = mock_response
msgs = [
self.driver.create_message(RecordSet(), "", 0, "", DEFAULT_TTL)
for _ in range(2)
@@ -85,10 +88,10 @@ def test_push_messages_valid(self) -> None:
# Execute
msg_ids = self.driver.push_messages(msgs)
- args, kwargs = self.mock_grpc_driver_stub.push_task_ins.call_args
+ args, kwargs = self.mock_stub.PushTaskIns.call_args
# Assert
- self.mock_grpc_driver_stub.get_run.assert_called_once()
+ self.mock_stub.GetRun.assert_called_once()
self.assertEqual(len(args), 1)
self.assertEqual(len(kwargs), 0)
self.assertIsInstance(args[0], PushTaskInsRequest)
@@ -100,7 +103,7 @@ def test_push_messages_invalid(self) -> None:
"""Test pushing invalid messages."""
# Prepare
mock_response = Mock(task_ids=["id1", "id2"])
- self.mock_grpc_driver_stub.push_task_ins.return_value = mock_response
+ self.mock_stub.PushTaskIns.return_value = mock_response
msgs = [
self.driver.create_message(RecordSet(), "", 0, "", DEFAULT_TTL)
for _ in range(2)
@@ -124,16 +127,16 @@ def test_pull_messages_with_given_message_ids(self) -> None:
),
TaskRes(task=Task(ancestry=["id3"], error=error_to_proto(Error(code=0)))),
]
- self.mock_grpc_driver_stub.pull_task_res.return_value = mock_response
+ self.mock_stub.PullTaskRes.return_value = mock_response
msg_ids = ["id1", "id2", "id3"]
# Execute
msgs = self.driver.pull_messages(msg_ids)
reply_tos = {msg.metadata.reply_to_message for msg in msgs}
- args, kwargs = self.mock_grpc_driver_stub.pull_task_res.call_args
+ args, kwargs = self.mock_stub.PullTaskRes.call_args
# Assert
- self.mock_grpc_driver_stub.get_run.assert_called_once()
+ self.mock_stub.GetRun.assert_called_once()
self.assertEqual(len(args), 1)
self.assertEqual(len(kwargs), 0)
self.assertIsInstance(args[0], PullTaskResRequest)
@@ -144,14 +147,14 @@ def test_send_and_receive_messages_complete(self) -> None:
"""Test send and receive all messages successfully."""
# Prepare
mock_response = Mock(task_ids=["id1"])
- self.mock_grpc_driver_stub.push_task_ins.return_value = mock_response
+ self.mock_stub.PushTaskIns.return_value = mock_response
# The response message must include either `content` (i.e. a recordset) or
# an `Error`. We choose the latter in this case
error_proto = error_to_proto(Error(code=0))
mock_response = Mock(
task_res_list=[TaskRes(task=Task(ancestry=["id1"], error=error_proto))]
)
- self.mock_grpc_driver_stub.pull_task_res.return_value = mock_response
+ self.mock_stub.PullTaskRes.return_value = mock_response
msgs = [self.driver.create_message(RecordSet(), "", 0, "", DEFAULT_TTL)]
# Execute
@@ -166,9 +169,9 @@ def test_send_and_receive_messages_timeout(self) -> None:
# Prepare
sleep_fn = time.sleep
mock_response = Mock(task_ids=["id1"])
- self.mock_grpc_driver_stub.push_task_ins.return_value = mock_response
+ self.mock_stub.PushTaskIns.return_value = mock_response
mock_response = Mock(task_res_list=[])
- self.mock_grpc_driver_stub.pull_task_res.return_value = mock_response
+ self.mock_stub.PullTaskRes.return_value = mock_response
msgs = [self.driver.create_message(RecordSet(), "", 0, "", DEFAULT_TTL)]
# Execute
@@ -182,22 +185,20 @@ def test_send_and_receive_messages_timeout(self) -> None:
def test_del_with_initialized_driver(self) -> None:
"""Test cleanup behavior when Driver is initialized."""
- # Prepare
- self.mock_grpc_driver_stub.is_connected.return_value = True
-
# Execute
self.driver.close()
# Assert
- self.mock_grpc_driver_stub.disconnect.assert_called_once()
+ self.mock_channel.close.assert_called_once()
def test_del_with_uninitialized_driver(self) -> None:
"""Test cleanup behavior when Driver is not initialized."""
# Prepare
- self.mock_grpc_driver_stub.is_connected.return_value = False
+ self.driver._grpc_stub = None # pylint: disable=protected-access
+ self.driver._channel = None # pylint: disable=protected-access
# Execute
self.driver.close()
# Assert
- self.mock_grpc_driver_stub.disconnect.assert_not_called()
+ self.mock_channel.close.assert_not_called()
diff --git a/src/py/flwr/server/driver/inmemory_driver_test.py b/src/py/flwr/server/driver/inmemory_driver_test.py
index eff38f548826..d0f32e830f7d 100644
--- a/src/py/flwr/server/driver/inmemory_driver_test.py
+++ b/src/py/flwr/server/driver/inmemory_driver_test.py
@@ -15,7 +15,6 @@
"""Tests for in-memory driver."""
-import os
import time
import unittest
from typing import Iterable, List, Tuple
@@ -23,7 +22,7 @@
from uuid import uuid4
from flwr.common import RecordSet
-from flwr.common.constant import PING_MAX_INTERVAL
+from flwr.common.constant import NODE_ID_NUM_BYTES, PING_MAX_INTERVAL
from flwr.common.message import Error
from flwr.common.serde import (
error_to_proto,
@@ -34,6 +33,7 @@
from flwr.common.typing import Run
from flwr.proto.task_pb2 import Task, TaskRes # pylint: disable=E0611
from flwr.server.superlink.state import InMemoryState, SqliteState, StateFactory
+from flwr.server.superlink.state.utils import generate_rand_int_from_bytes
from .inmemory_driver import InMemoryDriver
@@ -82,11 +82,14 @@ def setUp(self) -> None:
self.num_nodes = 42
self.state = MagicMock()
self.state.get_nodes.return_value = [
- int.from_bytes(os.urandom(8), "little", signed=True)
+ generate_rand_int_from_bytes(NODE_ID_NUM_BYTES)
for _ in range(self.num_nodes)
]
self.state.get_run.return_value = Run(
- run_id=61016, fab_id="mock/mock", fab_version="v1.0.0"
+ run_id=61016,
+ fab_id="mock/mock",
+ fab_version="v1.0.0",
+ override_config={"test_key": "test_value"},
)
state_factory = MagicMock(state=lambda: self.state)
self.driver = InMemoryDriver(run_id=61016, state_factory=state_factory)
@@ -98,6 +101,7 @@ def test_get_run(self) -> None:
self.assertEqual(self.driver.run.run_id, 61016)
self.assertEqual(self.driver.run.fab_id, "mock/mock")
self.assertEqual(self.driver.run.fab_version, "v1.0.0")
+ self.assertEqual(self.driver.run.override_config["test_key"], "test_value")
def test_get_nodes(self) -> None:
"""Test retrieval of nodes."""
@@ -223,7 +227,7 @@ def test_task_store_consistency_after_push_pull_sqlitestate(self) -> None:
# Prepare
state = StateFactory("").state()
self.driver = InMemoryDriver(
- state.create_run("", ""), MagicMock(state=lambda: state)
+ state.create_run("", "", {}), MagicMock(state=lambda: state)
)
msg_ids, node_id = push_messages(self.driver, self.num_nodes)
assert isinstance(state, SqliteState)
@@ -249,7 +253,7 @@ def test_task_store_consistency_after_push_pull_inmemory_state(self) -> None:
# Prepare
state_factory = StateFactory(":flwr-in-memory-state:")
state = state_factory.state()
- self.driver = InMemoryDriver(state.create_run("", ""), state_factory)
+ self.driver = InMemoryDriver(state.create_run("", "", {}), state_factory)
msg_ids, node_id = push_messages(self.driver, self.num_nodes)
assert isinstance(state, InMemoryState)
diff --git a/src/py/flwr/server/run_serverapp.py b/src/py/flwr/server/run_serverapp.py
index 3505ebfdb0a9..0169946e237d 100644
--- a/src/py/flwr/server/run_serverapp.py
+++ b/src/py/flwr/server/run_serverapp.py
@@ -19,16 +19,24 @@
import sys
from logging import DEBUG, INFO, WARN
from pathlib import Path
-from typing import Optional
+from typing import Dict, Optional
from flwr.common import Context, EventType, RecordSet, event
-from flwr.common.config import get_flwr_dir, get_project_config, get_project_dir
+from flwr.common.config import (
+ get_flwr_dir,
+ get_fused_config,
+ get_project_config,
+ get_project_dir,
+)
from flwr.common.logger import log, update_console_handler, warn_deprecated_feature
from flwr.common.object_ref import load_app
-from flwr.proto.driver_pb2 import CreateRunRequest # pylint: disable=E0611
+from flwr.proto.driver_pb2 import ( # pylint: disable=E0611
+ CreateRunRequest,
+ CreateRunResponse,
+)
from .driver import Driver
-from .driver.grpc_driver import GrpcDriver, GrpcDriverStub
+from .driver.grpc_driver import GrpcDriver
from .server_app import LoadServerAppError, ServerApp
ADDRESS_DRIVER_API = "0.0.0.0:9091"
@@ -37,6 +45,7 @@
def run(
driver: Driver,
server_app_dir: str,
+ server_app_run_config: Dict[str, str],
server_app_attr: Optional[str] = None,
loaded_server_app: Optional[ServerApp] = None,
) -> None:
@@ -69,7 +78,9 @@ def _load() -> ServerApp:
server_app = _load()
# Initialize Context
- context = Context(state=RecordSet())
+ context = Context(
+ node_id=0, node_config={}, state=RecordSet(), run_config=server_app_run_config
+ )
# Call ServerApp
server_app(driver=driver, context=context)
@@ -144,22 +155,29 @@ def run_server_app() -> None: # pylint: disable=too-many-branches
"For more details, use: ``flower-server-app -h``"
)
- stub = GrpcDriverStub(
- driver_service_address=args.superlink, root_certificates=root_certificates
- )
+ # Initialize GrpcDriver
if args.run_id is not None:
# User provided `--run-id`, but not `server-app`
- run_id = args.run_id
+ driver = GrpcDriver(
+ run_id=args.run_id,
+ driver_service_address=args.superlink,
+ root_certificates=root_certificates,
+ )
else:
# User provided `server-app`, but not `--run-id`
# Create run if run_id is not provided
- stub.connect()
+ driver = GrpcDriver(
+ run_id=0, # Will be overwritten
+ driver_service_address=args.superlink,
+ root_certificates=root_certificates,
+ )
+ # Create run
req = CreateRunRequest(fab_id=args.fab_id, fab_version=args.fab_version)
- res = stub.create_run(req)
- run_id = res.run_id
+ res: CreateRunResponse = driver._stub.CreateRun(req) # pylint: disable=W0212
+ # Overwrite driver._run_id
+ driver._run_id = res.run_id # pylint: disable=W0212
- # Initialize GrpcDriver
- driver = GrpcDriver(run_id=run_id, stub=stub)
+ server_app_run_config = {}
# Dynamically obtain ServerApp path based on run_id
if args.run_id is not None:
@@ -168,7 +186,8 @@ def run_server_app() -> None: # pylint: disable=too-many-branches
run_ = driver.run
server_app_dir = str(get_project_dir(run_.fab_id, run_.fab_version, flwr_dir))
config = get_project_config(server_app_dir)
- server_app_attr = config["flower"]["components"]["serverapp"]
+ server_app_attr = config["tool"]["flwr"]["app"]["components"]["serverapp"]
+ server_app_run_config = get_fused_config(run_, flwr_dir)
else:
# User provided `server-app`, but not `--run-id`
server_app_dir = str(Path(args.dir).absolute())
@@ -182,7 +201,12 @@ def run_server_app() -> None: # pylint: disable=too-many-branches
)
# Run the ServerApp with the Driver
- run(driver=driver, server_app_dir=server_app_dir, server_app_attr=server_app_attr)
+ run(
+ driver=driver,
+ server_app_dir=server_app_dir,
+ server_app_run_config=server_app_run_config,
+ server_app_attr=server_app_attr,
+ )
# Clean up
driver.close()
diff --git a/src/py/flwr/server/server_app.py b/src/py/flwr/server/server_app.py
index 43b3bcce3f36..e9cb4ddcaf0d 100644
--- a/src/py/flwr/server/server_app.py
+++ b/src/py/flwr/server/server_app.py
@@ -17,8 +17,11 @@
from typing import Callable, Optional
-from flwr.common import Context, RecordSet
-from flwr.common.logger import warn_preview_feature
+from flwr.common import Context
+from flwr.common.logger import (
+ warn_deprecated_feature_with_example,
+ warn_preview_feature,
+)
from flwr.server.strategy import Strategy
from .client_manager import ClientManager
@@ -26,7 +29,20 @@
from .driver import Driver
from .server import Server
from .server_config import ServerConfig
-from .typing import ServerAppCallable
+from .typing import ServerAppCallable, ServerFn
+
+SERVER_FN_USAGE_EXAMPLE = """
+
+ def server_fn(context: Context):
+ server_config = ServerConfig(num_rounds=3)
+ strategy = FedAvg()
+ return ServerAppComponents(
+ strategy=strategy,
+ server_config=server_config,
+ )
+
+ app = ServerApp(server_fn=server_fn)
+"""
class ServerApp:
@@ -36,13 +52,15 @@ class ServerApp:
--------
Use the `ServerApp` with an existing `Strategy`:
- >>> server_config = ServerConfig(num_rounds=3)
- >>> strategy = FedAvg()
+ >>> def server_fn(context: Context):
+ >>> server_config = ServerConfig(num_rounds=3)
+ >>> strategy = FedAvg()
+ >>> return ServerAppComponents(
+ >>> strategy=strategy,
+ >>> server_config=server_config,
+ >>> )
>>>
- >>> app = ServerApp(
- >>> server_config=server_config,
- >>> strategy=strategy,
- >>> )
+ >>> app = ServerApp(server_fn=server_fn)
Use the `ServerApp` with a custom main function:
@@ -53,23 +71,52 @@ class ServerApp:
>>> print("ServerApp running")
"""
+ # pylint: disable=too-many-arguments
def __init__(
self,
server: Optional[Server] = None,
config: Optional[ServerConfig] = None,
strategy: Optional[Strategy] = None,
client_manager: Optional[ClientManager] = None,
+ server_fn: Optional[ServerFn] = None,
) -> None:
+ if any([server, config, strategy, client_manager]):
+ warn_deprecated_feature_with_example(
+ deprecation_message="Passing either `server`, `config`, `strategy` or "
+ "`client_manager` directly to the ServerApp "
+ "constructor is deprecated.",
+ example_message="Pass `ServerApp` arguments wrapped "
+ "in a `flwr.server.ServerAppComponents` object that gets "
+ "returned by a function passed as the `server_fn` argument "
+ "to the `ServerApp` constructor. For example: ",
+ code_example=SERVER_FN_USAGE_EXAMPLE,
+ )
+
+ if server_fn:
+ raise ValueError(
+ "Passing `server_fn` is incompatible with passing the "
+ "other arguments (now deprecated) to ServerApp. "
+ "Use `server_fn` exclusively."
+ )
+
self._server = server
self._config = config
self._strategy = strategy
self._client_manager = client_manager
+ self._server_fn = server_fn
self._main: Optional[ServerAppCallable] = None
def __call__(self, driver: Driver, context: Context) -> None:
"""Execute `ServerApp`."""
# Compatibility mode
if not self._main:
+ if self._server_fn:
+ # Execute server_fn()
+ components = self._server_fn(context)
+ self._server = components.server
+ self._config = components.config
+ self._strategy = components.strategy
+ self._client_manager = components.client_manager
start_driver(
server=self._server,
config=self._config,
@@ -80,7 +127,6 @@ def __call__(self, driver: Driver, context: Context) -> None:
return
# New execution mode
- context = Context(state=RecordSet())
self._main(driver, context)
def main(self) -> Callable[[ServerAppCallable], ServerAppCallable]:
diff --git a/src/py/flwr/server/server_app_test.py b/src/py/flwr/server/server_app_test.py
index 0751a0cb2bc5..b0672b3202ed 100644
--- a/src/py/flwr/server/server_app_test.py
+++ b/src/py/flwr/server/server_app_test.py
@@ -29,7 +29,7 @@ def test_server_app_custom_mode() -> None:
# Prepare
app = ServerApp()
driver = MagicMock()
- context = Context(state=RecordSet())
+ context = Context(node_id=0, node_config={}, state=RecordSet(), run_config={})
called = {"called": False}
diff --git a/src/py/flwr/server/serverapp_components.py b/src/py/flwr/server/serverapp_components.py
new file mode 100644
index 000000000000..315f0a889a61
--- /dev/null
+++ b/src/py/flwr/server/serverapp_components.py
@@ -0,0 +1,52 @@
+# Copyright 2024 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""ServerAppComponents for the ServerApp."""
+
+
+from dataclasses import dataclass
+from typing import Optional
+
+from .client_manager import ClientManager
+from .server import Server
+from .server_config import ServerConfig
+from .strategy import Strategy
+
+
+@dataclass
+class ServerAppComponents: # pylint: disable=too-many-instance-attributes
+ """Components to construct a ServerApp.
+
+ Parameters
+ ----------
+ server : Optional[Server] (default: None)
+ A server implementation, either `flwr.server.Server` or a subclass
+ thereof. If no instance is provided, one will be created internally.
+ config : Optional[ServerConfig] (default: None)
+ Currently supported values are `num_rounds` (int, default: 1) and
+ `round_timeout` in seconds (float, default: None).
+ strategy : Optional[Strategy] (default: None)
+ An implementation of the abstract base class
+ `flwr.server.strategy.Strategy`. If no strategy is provided, then
+ `flwr.server.strategy.FedAvg` will be used.
+ client_manager : Optional[ClientManager] (default: None)
+ An implementation of the class `flwr.server.ClientManager`. If no
+ implementation is provided, then `flwr.server.SimpleClientManager`
+ will be used.
+ """
+
+ server: Optional[Server] = None
+ config: Optional[ServerConfig] = None
+ strategy: Optional[Strategy] = None
+ client_manager: Optional[ClientManager] = None
diff --git a/src/py/flwr/server/superlink/driver/driver_servicer.py b/src/py/flwr/server/superlink/driver/driver_servicer.py
index 03128f02158e..7f8ded3bdb85 100644
--- a/src/py/flwr/server/superlink/driver/driver_servicer.py
+++ b/src/py/flwr/server/superlink/driver/driver_servicer.py
@@ -69,7 +69,11 @@ def CreateRun(
"""Create run ID."""
log(DEBUG, "DriverServicer.CreateRun")
state: State = self.state_factory.state()
- run_id = state.create_run(request.fab_id, request.fab_version)
+ run_id = state.create_run(
+ request.fab_id,
+ request.fab_version,
+ dict(request.override_config.items()),
+ )
return CreateRunResponse(run_id=run_id)
def PushTaskIns(
diff --git a/src/py/flwr/server/superlink/fleet/grpc_rere/server_interceptor_test.py b/src/py/flwr/server/superlink/fleet/grpc_rere/server_interceptor_test.py
index 01499102b7d8..798e71435585 100644
--- a/src/py/flwr/server/superlink/fleet/grpc_rere/server_interceptor_test.py
+++ b/src/py/flwr/server/superlink/fleet/grpc_rere/server_interceptor_test.py
@@ -328,7 +328,7 @@ def test_successful_get_run_with_metadata(self) -> None:
self.state.create_node(
ping_interval=30, public_key=public_key_to_bytes(self._client_public_key)
)
- run_id = self.state.create_run("", "")
+ run_id = self.state.create_run("", "", {})
request = GetRunRequest(run_id=run_id)
shared_secret = generate_shared_key(
self._client_private_key, self._server_public_key
@@ -359,7 +359,7 @@ def test_unsuccessful_get_run_with_metadata(self) -> None:
self.state.create_node(
ping_interval=30, public_key=public_key_to_bytes(self._client_public_key)
)
- run_id = self.state.create_run("", "")
+ run_id = self.state.create_run("", "", {})
request = GetRunRequest(run_id=run_id)
client_private_key, _ = generate_key_pairs()
shared_secret = generate_shared_key(client_private_key, self._server_public_key)
diff --git a/src/py/flwr/server/superlink/fleet/vce/backend/backend.py b/src/py/flwr/server/superlink/fleet/vce/backend/backend.py
index 1d5e3a6a51ad..31c64bd3b233 100644
--- a/src/py/flwr/server/superlink/fleet/vce/backend/backend.py
+++ b/src/py/flwr/server/superlink/fleet/vce/backend/backend.py
@@ -33,8 +33,8 @@ def __init__(self, backend_config: BackendConfig, work_dir: str) -> None:
"""Construct a backend."""
@abstractmethod
- async def build(self) -> None:
- """Build backend asynchronously.
+ def build(self) -> None:
+ """Build backend.
Different components need to be in place before workers in a backend are ready
to accept jobs. When this method finishes executing, the backend should be fully
@@ -54,11 +54,11 @@ def is_worker_idle(self) -> bool:
"""Report whether a backend worker is idle and can therefore run a ClientApp."""
@abstractmethod
- async def terminate(self) -> None:
+ def terminate(self) -> None:
"""Terminate backend."""
@abstractmethod
- async def process_message(
+ def process_message(
self,
app: Callable[[], ClientApp],
message: Message,
diff --git a/src/py/flwr/server/superlink/fleet/vce/backend/raybackend.py b/src/py/flwr/server/superlink/fleet/vce/backend/raybackend.py
index 8a21393db590..0ab29a234f88 100644
--- a/src/py/flwr/server/superlink/fleet/vce/backend/raybackend.py
+++ b/src/py/flwr/server/superlink/fleet/vce/backend/raybackend.py
@@ -21,6 +21,7 @@
import ray
from flwr.client.client_app import ClientApp
+from flwr.common.constant import PARTITION_ID_KEY
from flwr.common.context import Context
from flwr.common.logger import log
from flwr.common.message import Message
@@ -153,12 +154,12 @@ def is_worker_idle(self) -> bool:
"""Report whether the pool has idle actors."""
return self.pool.is_actor_available()
- async def build(self) -> None:
+ def build(self) -> None:
"""Build pool of Ray actors that this backend will submit jobs to."""
- await self.pool.add_actors_to_pool(self.pool.actors_capacity)
+ self.pool.add_actors_to_pool(self.pool.actors_capacity)
log(DEBUG, "Constructed ActorPool with: %i actors", self.pool.num_actors)
- async def process_message(
+ def process_message(
self,
app: Callable[[], ClientApp],
message: Message,
@@ -168,21 +169,20 @@ async def process_message(
Return output message and updated context.
"""
- partition_id = message.metadata.partition_id
+ partition_id = context.node_config[PARTITION_ID_KEY]
try:
# Submit a task to the pool
- future = await self.pool.submit(
+ future = self.pool.submit(
lambda a, a_fn, mssg, cid, state: a.run.remote(a_fn, mssg, cid, state),
(app, message, str(partition_id), context),
)
- await future
# Fetch result
(
out_mssg,
updated_context,
- ) = await self.pool.fetch_result_and_return_actor_to_pool(future)
+ ) = self.pool.fetch_result_and_return_actor_to_pool(future)
return out_mssg, updated_context
@@ -193,11 +193,11 @@ async def process_message(
self.__class__.__name__,
)
# add actor back into pool
- await self.pool.add_actor_back_to_pool(future)
+ self.pool.add_actor_back_to_pool(future)
raise ex
- async def terminate(self) -> None:
+ def terminate(self) -> None:
"""Terminate all actors in actor pool."""
- await self.pool.terminate_all_actors()
+ self.pool.terminate_all_actors()
ray.shutdown()
log(DEBUG, "Terminated %s", self.__class__.__name__)
diff --git a/src/py/flwr/server/superlink/fleet/vce/backend/raybackend_test.py b/src/py/flwr/server/superlink/fleet/vce/backend/raybackend_test.py
index 57c952cc9310..a38cff96ceef 100644
--- a/src/py/flwr/server/superlink/fleet/vce/backend/raybackend_test.py
+++ b/src/py/flwr/server/superlink/fleet/vce/backend/raybackend_test.py
@@ -14,16 +14,16 @@
# ==============================================================================
"""Test for Ray backend for the Fleet API using the Simulation Engine."""
-import asyncio
from math import pi
from pathlib import Path
from typing import Callable, Dict, Optional, Tuple, Union
-from unittest import IsolatedAsyncioTestCase
+from unittest import TestCase
import ray
from flwr.client import Client, NumPyClient
from flwr.client.client_app import ClientApp, LoadClientAppError
+from flwr.client.node_state import NodeState
from flwr.common import (
DEFAULT_TTL,
Config,
@@ -33,9 +33,9 @@
Message,
MessageTypeLegacy,
Metadata,
- RecordSet,
Scalar,
)
+from flwr.common.constant import PARTITION_ID_KEY
from flwr.common.object_ref import load_app
from flwr.common.recordset_compat import getpropertiesins_to_recordset
from flwr.server.superlink.fleet.vce.backend.backend import BackendConfig
@@ -54,7 +54,7 @@ def get_properties(self, config: Config) -> Dict[str, Scalar]:
return {"result": result}
-def get_dummy_client(cid: str) -> Client: # pylint: disable=unused-argument
+def get_dummy_client(context: Context) -> Client: # pylint: disable=unused-argument
"""Return a DummyClient converted to Client type."""
return DummyClient().to_client()
@@ -82,18 +82,18 @@ def _load_app() -> ClientApp:
return _load_app
-async def backend_build_process_and_termination(
+def backend_build_process_and_termination(
backend: RayBackend,
process_args: Optional[Tuple[Callable[[], ClientApp], Message, Context]] = None,
) -> Union[Tuple[Message, Context], None]:
"""Build, process job and terminate RayBackend."""
- await backend.build()
+ backend.build()
to_return = None
if process_args:
- to_return = await backend.process_message(*process_args)
+ to_return = backend.process_message(*process_args)
- await backend.terminate()
+ backend.terminate()
return to_return
@@ -102,12 +102,13 @@ def _create_message_and_context() -> Tuple[Message, Context, float]:
# Construct a Message
mult_factor = 2024
+ run_id = 0
getproperties_ins = GetPropertiesIns(config={"factor": mult_factor})
recordset = getpropertiesins_to_recordset(getproperties_ins)
message = Message(
content=recordset,
metadata=Metadata(
- run_id=0,
+ run_id=run_id,
message_id="",
group_id="",
src_node_id=0,
@@ -118,8 +119,10 @@ def _create_message_and_context() -> Tuple[Message, Context, float]:
),
)
- # Construct emtpy Context
- context = Context(state=RecordSet())
+ # Construct NodeState and retrieve context
+ node_state = NodeState(node_id=run_id, node_config={PARTITION_ID_KEY: str(0)})
+ node_state.register_context(run_id=run_id)
+ context = node_state.retrieve_context(run_id=run_id)
# Expected output
expected_output = pi * mult_factor
@@ -127,10 +130,10 @@ def _create_message_and_context() -> Tuple[Message, Context, float]:
return message, context, expected_output
-class AsyncTestRayBackend(IsolatedAsyncioTestCase):
- """A basic class that allows runnig multliple asyncio tests."""
+class TestRayBackend(TestCase):
+ """A basic class that allows runnig multliple tests."""
- async def on_cleanup(self) -> None:
+ def doCleanups(self) -> None:
"""Ensure Ray has shutdown."""
if ray.is_initialized():
ray.shutdown()
@@ -138,9 +141,7 @@ async def on_cleanup(self) -> None:
def test_backend_creation_and_termination(self) -> None:
"""Test creation of RayBackend and its termination."""
backend = RayBackend(backend_config={}, work_dir="")
- asyncio.run(
- backend_build_process_and_termination(backend=backend, process_args=None)
- )
+ backend_build_process_and_termination(backend=backend, process_args=None)
def test_backend_creation_submit_and_termination(
self,
@@ -155,10 +156,8 @@ def test_backend_creation_submit_and_termination(
message, context, expected_output = _create_message_and_context()
- res = asyncio.run(
- backend_build_process_and_termination(
- backend=backend, process_args=(client_app_callable, message, context)
- )
+ res = backend_build_process_and_termination(
+ backend=backend, process_args=(client_app_callable, message, context)
)
if res is None:
@@ -187,7 +186,6 @@ def test_backend_creation_submit_and_termination_non_existing_client_app(
self.test_backend_creation_submit_and_termination(
client_app_loader=_load_from_module("a_non_existing_module:app")
)
- self.addAsyncCleanup(self.on_cleanup)
def test_backend_creation_submit_and_termination_existing_client_app(
self,
@@ -215,7 +213,6 @@ def test_backend_creation_submit_and_termination_existing_client_app_unsetworkdi
client_app_loader=_load_from_module("raybackend_test:client_app"),
workdir="/?&%$^#%@$!",
)
- self.addAsyncCleanup(self.on_cleanup)
def test_backend_creation_with_init_arguments(self) -> None:
"""Testing whether init args are properly parsed to Ray."""
@@ -246,5 +243,3 @@ def test_backend_creation_with_init_arguments(self) -> None:
nodes = ray.nodes()
assert nodes[0]["Resources"]["CPU"] == backend_config_2["init_args"]["num_cpus"]
-
- self.addAsyncCleanup(self.on_cleanup)
diff --git a/src/py/flwr/server/superlink/fleet/vce/vce_api.py b/src/py/flwr/server/superlink/fleet/vce/vce_api.py
index 3c9628a6d2a3..320f839e9e01 100644
--- a/src/py/flwr/server/superlink/fleet/vce/vce_api.py
+++ b/src/py/flwr/server/superlink/fleet/vce/vce_api.py
@@ -14,24 +14,33 @@
# ==============================================================================
"""Fleet Simulation Engine API."""
-import asyncio
+
import json
-import sys
+import threading
import time
import traceback
+from concurrent.futures import ThreadPoolExecutor
from logging import DEBUG, ERROR, INFO, WARN
from pathlib import Path
-from typing import Callable, Dict, List, Optional
+from queue import Empty, Queue
+from time import sleep
+from typing import Callable, Dict, Optional
from flwr.client.client_app import ClientApp, ClientAppException, LoadClientAppError
from flwr.client.node_state import NodeState
-from flwr.common.constant import PING_MAX_INTERVAL, ErrorCode
+from flwr.client.supernode.app import _get_load_client_app_fn
+from flwr.common.constant import (
+ NUM_PARTITIONS_KEY,
+ PARTITION_ID_KEY,
+ PING_MAX_INTERVAL,
+ ErrorCode,
+)
from flwr.common.logger import log
from flwr.common.message import Error
-from flwr.common.object_ref import load_app
from flwr.common.serde import message_from_taskins, message_to_taskres
-from flwr.proto.task_pb2 import TaskIns # pylint: disable=E0611
-from flwr.server.superlink.state import StateFactory
+from flwr.common.typing import Run
+from flwr.proto.task_pb2 import TaskIns, TaskRes # pylint: disable=E0611
+from flwr.server.superlink.state import State, StateFactory
from .backend import Backend, error_messages_backends, supported_backends
@@ -51,34 +60,57 @@ def _register_nodes(
return nodes_mapping
+def _register_node_states(
+ nodes_mapping: NodeToPartitionMapping,
+ run: Run,
+ app_dir: Optional[str] = None,
+) -> Dict[int, NodeState]:
+ """Create NodeState objects and pre-register the context for the run."""
+ node_states: Dict[int, NodeState] = {}
+ num_partitions = len(set(nodes_mapping.values()))
+ for node_id, partition_id in nodes_mapping.items():
+ node_states[node_id] = NodeState(
+ node_id=node_id,
+ node_config={
+ PARTITION_ID_KEY: str(partition_id),
+ NUM_PARTITIONS_KEY: str(num_partitions),
+ },
+ )
+
+ # Pre-register Context objects
+ node_states[node_id].register_context(
+ run_id=run.run_id, run=run, app_dir=app_dir
+ )
+
+ return node_states
+
+
# pylint: disable=too-many-arguments,too-many-locals
-async def worker(
+def worker(
app_fn: Callable[[], ClientApp],
- queue: "asyncio.Queue[TaskIns]",
+ taskins_queue: "Queue[TaskIns]",
+ taskres_queue: "Queue[TaskRes]",
node_states: Dict[int, NodeState],
- state_factory: StateFactory,
- nodes_mapping: NodeToPartitionMapping,
backend: Backend,
+ f_stop: threading.Event,
) -> None:
"""Get TaskIns from queue and pass it to an actor in the pool to execute it."""
- state = state_factory.state()
- while True:
+ while not f_stop.is_set():
out_mssg = None
try:
- task_ins: TaskIns = await queue.get()
+ # Fetch from queue with timeout. We use a timeout so
+ # the stopping event can be evaluated even when the queue is empty.
+ task_ins: TaskIns = taskins_queue.get(timeout=1.0)
node_id = task_ins.task.consumer.node_id
- # Register and retrieve runstate
- node_states[node_id].register_context(run_id=task_ins.run_id)
+ # Retrieve context
context = node_states[node_id].retrieve_context(run_id=task_ins.run_id)
# Convert TaskIns to Message
message = message_from_taskins(task_ins)
- # Set partition_id
- message.metadata.partition_id = nodes_mapping[node_id]
# Let backend process message
- out_mssg, updated_context = await backend.process_message(
+ out_mssg, updated_context = backend.process_message(
app_fn, message, context
)
@@ -86,11 +118,9 @@ async def worker(
node_states[node_id].update_context(
task_ins.run_id, context=updated_context
)
-
- except asyncio.CancelledError as e:
- log(DEBUG, "Terminating async worker: %s", e)
- break
-
+ except Empty:
+ # An exception raised if queue.get times out
+ pass
# Exceptions aren't raised but reported as an error message
except Exception as ex: # pylint: disable=broad-exception-caught
log(ERROR, ex)
@@ -114,67 +144,48 @@ async def worker(
task_res = message_to_taskres(out_mssg)
# Store TaskRes in state
task_res.task.pushed_at = time.time()
- state.store_task_res(task_res)
+ taskres_queue.put(task_res)
-async def add_taskins_to_queue(
- queue: "asyncio.Queue[TaskIns]",
- state_factory: StateFactory,
+def add_taskins_to_queue(
+ state: State,
+ queue: "Queue[TaskIns]",
nodes_mapping: NodeToPartitionMapping,
- backend: Backend,
- consumers: List["asyncio.Task[None]"],
- f_stop: asyncio.Event,
+ f_stop: threading.Event,
) -> None:
- """Retrieve TaskIns and add it to the queue."""
- state = state_factory.state()
- num_initial_consumers = len(consumers)
+ """Put TaskIns in a queue from State."""
while not f_stop.is_set():
for node_id in nodes_mapping.keys():
- task_ins = state.get_task_ins(node_id=node_id, limit=1)
- if task_ins:
- await queue.put(task_ins[0])
-
- # Count consumers that are running
- num_active = sum(not (cc.done()) for cc in consumers)
-
- # Alert if number of consumers decreased by half
- if num_active < num_initial_consumers // 2:
- log(
- WARN,
- "Number of active workers has more than halved: (%i/%i active)",
- num_active,
- num_initial_consumers,
- )
+ task_ins_list = state.get_task_ins(node_id=node_id, limit=1)
+ for task_ins in task_ins_list:
+ queue.put(task_ins)
+ sleep(0.1)
- # Break if consumers died
- if num_active == 0:
- raise RuntimeError("All workers have died. Ending Simulation.")
- # Log some stats
- log(
- DEBUG,
- "Simulation Engine stats: "
- "Active workers: (%i/%i) | %s (%i workers) | Tasks in queue: %i)",
- num_active,
- num_initial_consumers,
- backend.__class__.__name__,
- backend.num_workers,
- queue.qsize(),
- )
- await asyncio.sleep(1.0)
- log(DEBUG, "Async producer: Stopped pulling from StateFactory.")
+def put_taskres_into_state(
+ state: State, queue: "Queue[TaskRes]", f_stop: threading.Event
+) -> None:
+ """Put TaskRes into State from a queue."""
+ while not f_stop.is_set():
+ try:
+ taskres = queue.get(timeout=1.0)
+ state.store_task_res(taskres)
+ except Empty:
+ # queue is empty when timeout was triggered
+ pass
-async def run(
+def run_api(
app_fn: Callable[[], ClientApp],
backend_fn: Callable[[], Backend],
nodes_mapping: NodeToPartitionMapping,
state_factory: StateFactory,
node_states: Dict[int, NodeState],
- f_stop: asyncio.Event,
+ f_stop: threading.Event,
) -> None:
- """Run the VCE async."""
- queue: "asyncio.Queue[TaskIns]" = asyncio.Queue(128)
+ """Run the VCE."""
+ taskins_queue: "Queue[TaskIns]" = Queue()
+ taskres_queue: "Queue[TaskRes]" = Queue()
try:
@@ -182,29 +193,48 @@ async def run(
backend = backend_fn()
# Build backend
- await backend.build()
+ backend.build()
# Add workers (they submit Messages to Backend)
- worker_tasks = [
- asyncio.create_task(
- worker(
- app_fn, queue, node_states, state_factory, nodes_mapping, backend
- )
- )
- for _ in range(backend.num_workers)
- ]
- # Create producer (adds TaskIns into Queue)
- producer = asyncio.create_task(
- add_taskins_to_queue(
- queue, state_factory, nodes_mapping, backend, worker_tasks, f_stop
- )
+ state = state_factory.state()
+
+ extractor_th = threading.Thread(
+ target=add_taskins_to_queue,
+ args=(
+ state,
+ taskins_queue,
+ nodes_mapping,
+ f_stop,
+ ),
+ )
+ extractor_th.start()
+
+ injector_th = threading.Thread(
+ target=put_taskres_into_state,
+ args=(
+ state,
+ taskres_queue,
+ f_stop,
+ ),
)
+ injector_th.start()
+
+ with ThreadPoolExecutor() as executor:
+ _ = [
+ executor.submit(
+ worker,
+ app_fn,
+ taskins_queue,
+ taskres_queue,
+ node_states,
+ backend,
+ f_stop,
+ )
+ for _ in range(backend.num_workers)
+ ]
- # Wait for producer to finish
- # The producer runs forever until f_stop is set or until
- # all worker (consumer) coroutines are completed. Workers
- # also run forever and only end if an exception is raised.
- await asyncio.gather(producer)
+ extractor_th.join()
+ injector_th.join()
except Exception as ex:
@@ -219,18 +249,9 @@ async def run(
raise RuntimeError("Simulation Engine crashed.") from ex
finally:
- # Produced task terminated, now cancel worker tasks
- for w_t in worker_tasks:
- _ = w_t.cancel()
-
- while not all(w_t.done() for w_t in worker_tasks):
- log(DEBUG, "Terminating async workers...")
- await asyncio.sleep(0.5)
-
- await asyncio.gather(*[w_t for w_t in worker_tasks if not w_t.done()])
# Terminate backend
- await backend.terminate()
+ backend.terminate()
# pylint: disable=too-many-arguments,unused-argument,too-many-locals,too-many-branches
@@ -239,7 +260,10 @@ def start_vce(
backend_name: str,
backend_config_json_stream: str,
app_dir: str,
- f_stop: asyncio.Event,
+ is_app: bool,
+ f_stop: threading.Event,
+ run: Run,
+ flwr_dir: Optional[str] = None,
client_app: Optional[ClientApp] = None,
client_app_attr: Optional[str] = None,
num_supernodes: Optional[int] = None,
@@ -290,9 +314,9 @@ def start_vce(
)
# Construct mapping of NodeStates
- node_states: Dict[int, NodeState] = {}
- for node_id in nodes_mapping:
- node_states[node_id] = NodeState()
+ node_states = _register_node_states(
+ nodes_mapping=nodes_mapping, run=run, app_dir=app_dir if is_app else None
+ )
# Load backend config
log(DEBUG, "Supported backends: %s", list(supported_backends.keys()))
@@ -321,16 +345,12 @@ def backend_fn() -> Backend:
def _load() -> ClientApp:
if client_app_attr:
-
- if app_dir is not None:
- sys.path.insert(0, app_dir)
-
- app: ClientApp = load_app(client_app_attr, LoadClientAppError, app_dir)
-
- if not isinstance(app, ClientApp):
- raise LoadClientAppError(
- f"Attribute {client_app_attr} is not of type {ClientApp}",
- ) from None
+ app = _get_load_client_app_fn(
+ default_app_ref=client_app_attr,
+ dir_arg=app_dir,
+ flwr_dir_arg=flwr_dir,
+ multi_app=True,
+ )(run.fab_id, run.fab_version)
if client_app:
app = client_app
@@ -340,18 +360,25 @@ def _load() -> ClientApp:
try:
# Test if ClientApp can be loaded
- _ = app_fn()
+ client_app = app_fn()
+
+ # Cache `ClientApp`
+ if client_app_attr:
+ # Now wrap the loaded ClientApp in a dummy function
+ # this prevents unnecessary low-level loading of ClientApp
+ def _load_client_app() -> ClientApp:
+ return client_app
+
+ app_fn = _load_client_app
# Run main simulation loop
- asyncio.run(
- run(
- app_fn,
- backend_fn,
- nodes_mapping,
- state_factory,
- node_states,
- f_stop,
- )
+ run_api(
+ app_fn,
+ backend_fn,
+ nodes_mapping,
+ state_factory,
+ node_states,
+ f_stop,
)
except LoadClientAppError as loadapp_ex:
f_stop_delay = 10
diff --git a/src/py/flwr/server/superlink/fleet/vce/vce_api_test.py b/src/py/flwr/server/superlink/fleet/vce/vce_api_test.py
index df9f2cc96f95..33c359af5cc8 100644
--- a/src/py/flwr/server/superlink/fleet/vce/vce_api_test.py
+++ b/src/py/flwr/server/superlink/fleet/vce/vce_api_test.py
@@ -15,7 +15,6 @@
"""Test Fleet Simulation Engine API."""
-import asyncio
import threading
import time
from itertools import cycle
@@ -24,7 +23,7 @@
from pathlib import Path
from time import sleep
from typing import Dict, Optional, Set, Tuple
-from unittest import IsolatedAsyncioTestCase
+from unittest import TestCase
from uuid import UUID
from flwr.client.client_app import LoadClientAppError
@@ -46,7 +45,7 @@
from flwr.server.superlink.state import InMemoryState, StateFactory
-def terminate_simulation(f_stop: asyncio.Event, sleep_duration: int) -> None:
+def terminate_simulation(f_stop: threading.Event, sleep_duration: int) -> None:
"""Set event to terminate Simulation Engine after `sleep_duration` seconds."""
sleep(sleep_duration)
f_stop.set()
@@ -82,7 +81,9 @@ def register_messages_into_state(
) -> Dict[UUID, float]:
"""Register `num_messages` into the state factory."""
state: InMemoryState = state_factory.state() # type: ignore
- state.run_ids[run_id] = Run(run_id=run_id, fab_id="Mock/mock", fab_version="v1.0.0")
+ state.run_ids[run_id] = Run(
+ run_id=run_id, fab_id="Mock/mock", fab_version="v1.0.0", override_config={}
+ )
# Artificially add TaskIns to state so they can be processed
# by the Simulation Engine logic
nodes_cycle = cycle(nodes_mapping.keys()) # we have more messages than supernodes
@@ -146,15 +147,15 @@ def start_and_shutdown(
) -> None:
"""Start Simulation Engine and terminate after specified number of seconds.
- Some tests need to be terminated by triggering externally an asyncio.Event. This
- is enabled whtn passing `duration`>0.
+ Some tests need to be terminated by triggering externally a threading.Event. This
+ is enabled when passing `duration`>0.
"""
- f_stop = asyncio.Event()
+ f_stop = threading.Event()
if duration:
# Setup thread that will set the f_stop event, triggering the termination of all
- # asyncio logic in the Simulation Engine. It will also terminate the Backend.
+ # logic in the Simulation Engine. It will also terminate the Backend.
termination_th = threading.Thread(
target=terminate_simulation, args=(f_stop, duration)
)
@@ -164,6 +165,8 @@ def start_and_shutdown(
if not app_dir:
app_dir = _autoresolve_app_dir()
+ run = Run(run_id=1234, fab_id="", fab_version="", override_config={})
+
start_vce(
num_supernodes=num_supernodes,
client_app_attr=client_app_attr,
@@ -171,7 +174,9 @@ def start_and_shutdown(
backend_config_json_stream=backend_config,
state_factory=state_factory,
app_dir=app_dir,
+ is_app=False,
f_stop=f_stop,
+ run=run,
existing_nodes_mapping=nodes_mapping,
)
@@ -179,8 +184,8 @@ def start_and_shutdown(
termination_th.join()
-class AsyncTestFleetSimulationEngineRayBackend(IsolatedAsyncioTestCase):
- """A basic class that enables testing asyncio functionalities."""
+class TestFleetSimulationEngineRayBackend(TestCase):
+ """A basic class that enables testing functionalities."""
def test_erroneous_no_supernodes_client_mapping(self) -> None:
"""Test with unset arguments."""
diff --git a/src/py/flwr/server/superlink/state/in_memory_state.py b/src/py/flwr/server/superlink/state/in_memory_state.py
index da9c754c3115..bc4bd4478a23 100644
--- a/src/py/flwr/server/superlink/state/in_memory_state.py
+++ b/src/py/flwr/server/superlink/state/in_memory_state.py
@@ -15,7 +15,6 @@
"""In-memory State implementation."""
-import os
import threading
import time
from logging import ERROR
@@ -23,12 +22,13 @@
from uuid import UUID, uuid4
from flwr.common import log, now
+from flwr.common.constant import NODE_ID_NUM_BYTES, RUN_ID_NUM_BYTES
from flwr.common.typing import Run
from flwr.proto.task_pb2 import TaskIns, TaskRes # pylint: disable=E0611
from flwr.server.superlink.state.state import State
from flwr.server.utils import validate_task_ins_or_res
-from .utils import make_node_unavailable_taskres
+from .utils import generate_rand_int_from_bytes, make_node_unavailable_taskres
class InMemoryState(State): # pylint: disable=R0902,R0904
@@ -216,7 +216,7 @@ def create_node(
) -> int:
"""Create, store in state, and return `node_id`."""
# Sample a random int64 as node_id
- node_id: int = int.from_bytes(os.urandom(8), "little", signed=True)
+ node_id = generate_rand_int_from_bytes(NODE_ID_NUM_BYTES)
with self.lock:
if node_id in self.node_ids:
@@ -275,15 +275,23 @@ def get_node_id(self, client_public_key: bytes) -> Optional[int]:
"""Retrieve stored `node_id` filtered by `client_public_keys`."""
return self.public_key_to_node_id.get(client_public_key)
- def create_run(self, fab_id: str, fab_version: str) -> int:
+ def create_run(
+ self,
+ fab_id: str,
+ fab_version: str,
+ override_config: Dict[str, str],
+ ) -> int:
"""Create a new run for the specified `fab_id` and `fab_version`."""
# Sample a random int64 as run_id
with self.lock:
- run_id: int = int.from_bytes(os.urandom(8), "little", signed=True)
+ run_id = generate_rand_int_from_bytes(RUN_ID_NUM_BYTES)
if run_id not in self.run_ids:
self.run_ids[run_id] = Run(
- run_id=run_id, fab_id=fab_id, fab_version=fab_version
+ run_id=run_id,
+ fab_id=fab_id,
+ fab_version=fab_version,
+ override_config=override_config,
)
return run_id
log(ERROR, "Unexpected run creation failure.")
diff --git a/src/py/flwr/server/superlink/state/sqlite_state.py b/src/py/flwr/server/superlink/state/sqlite_state.py
index 4df9470ded62..ea6f349b9f9a 100644
--- a/src/py/flwr/server/superlink/state/sqlite_state.py
+++ b/src/py/flwr/server/superlink/state/sqlite_state.py
@@ -15,7 +15,7 @@
"""SQLite based implemenation of server state."""
-import os
+import json
import re
import sqlite3
import time
@@ -24,6 +24,7 @@
from uuid import UUID, uuid4
from flwr.common import log, now
+from flwr.common.constant import NODE_ID_NUM_BYTES, RUN_ID_NUM_BYTES
from flwr.common.typing import Run
from flwr.proto.node_pb2 import Node # pylint: disable=E0611
from flwr.proto.recordset_pb2 import RecordSet # pylint: disable=E0611
@@ -31,7 +32,7 @@
from flwr.server.utils.validator import validate_task_ins_or_res
from .state import State
-from .utils import make_node_unavailable_taskres
+from .utils import generate_rand_int_from_bytes, make_node_unavailable_taskres
SQL_CREATE_TABLE_NODE = """
CREATE TABLE IF NOT EXISTS node(
@@ -61,9 +62,10 @@
SQL_CREATE_TABLE_RUN = """
CREATE TABLE IF NOT EXISTS run(
- run_id INTEGER UNIQUE,
- fab_id TEXT,
- fab_version TEXT
+ run_id INTEGER UNIQUE,
+ fab_id TEXT,
+ fab_version TEXT,
+ override_config TEXT
);
"""
@@ -541,7 +543,7 @@ def create_node(
) -> int:
"""Create, store in state, and return `node_id`."""
# Sample a random int64 as node_id
- node_id: int = int.from_bytes(os.urandom(8), "little", signed=True)
+ node_id = generate_rand_int_from_bytes(NODE_ID_NUM_BYTES)
query = "SELECT node_id FROM node WHERE public_key = :public_key;"
row = self.query(query, {"public_key": public_key})
@@ -613,17 +615,27 @@ def get_node_id(self, client_public_key: bytes) -> Optional[int]:
return node_id
return None
- def create_run(self, fab_id: str, fab_version: str) -> int:
+ def create_run(
+ self,
+ fab_id: str,
+ fab_version: str,
+ override_config: Dict[str, str],
+ ) -> int:
"""Create a new run for the specified `fab_id` and `fab_version`."""
# Sample a random int64 as run_id
- run_id: int = int.from_bytes(os.urandom(8), "little", signed=True)
+ run_id = generate_rand_int_from_bytes(RUN_ID_NUM_BYTES)
# Check conflicts
query = "SELECT COUNT(*) FROM run WHERE run_id = ?;"
# If run_id does not exist
if self.query(query, (run_id,))[0]["COUNT(*)"] == 0:
- query = "INSERT INTO run (run_id, fab_id, fab_version) VALUES (?, ?, ?);"
- self.query(query, (run_id, fab_id, fab_version))
+ query = (
+ "INSERT INTO run (run_id, fab_id, fab_version, override_config)"
+ "VALUES (?, ?, ?, ?);"
+ )
+ self.query(
+ query, (run_id, fab_id, fab_version, json.dumps(override_config))
+ )
return run_id
log(ERROR, "Unexpected run creation failure.")
return 0
@@ -687,7 +699,10 @@ def get_run(self, run_id: int) -> Optional[Run]:
try:
row = self.query(query, (run_id,))[0]
return Run(
- run_id=run_id, fab_id=row["fab_id"], fab_version=row["fab_version"]
+ run_id=run_id,
+ fab_id=row["fab_id"],
+ fab_version=row["fab_version"],
+ override_config=json.loads(row["override_config"]),
)
except sqlite3.IntegrityError:
log(ERROR, "`run_id` does not exist.")
diff --git a/src/py/flwr/server/superlink/state/state.py b/src/py/flwr/server/superlink/state/state.py
index 65e2c63cab69..c93f6ba756b8 100644
--- a/src/py/flwr/server/superlink/state/state.py
+++ b/src/py/flwr/server/superlink/state/state.py
@@ -16,7 +16,7 @@
import abc
-from typing import List, Optional, Set
+from typing import Dict, List, Optional, Set
from uuid import UUID
from flwr.common.typing import Run
@@ -157,7 +157,12 @@ def get_node_id(self, client_public_key: bytes) -> Optional[int]:
"""Retrieve stored `node_id` filtered by `client_public_keys`."""
@abc.abstractmethod
- def create_run(self, fab_id: str, fab_version: str) -> int:
+ def create_run(
+ self,
+ fab_id: str,
+ fab_version: str,
+ override_config: Dict[str, str],
+ ) -> int:
"""Create a new run for the specified `fab_id` and `fab_version`."""
@abc.abstractmethod
diff --git a/src/py/flwr/server/superlink/state/state_test.py b/src/py/flwr/server/superlink/state/state_test.py
index 373202d5cde6..5f0d23ffc4d8 100644
--- a/src/py/flwr/server/superlink/state/state_test.py
+++ b/src/py/flwr/server/superlink/state/state_test.py
@@ -52,7 +52,7 @@ def test_create_and_get_run(self) -> None:
"""Test if create_run and get_run work correctly."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("Mock/mock", "v1.0.0")
+ run_id = state.create_run("Mock/mock", "v1.0.0", {"test_key": "test_value"})
# Execute
run = state.get_run(run_id)
@@ -62,6 +62,7 @@ def test_create_and_get_run(self) -> None:
assert run.run_id == run_id
assert run.fab_id == "Mock/mock"
assert run.fab_version == "v1.0.0"
+ assert run.override_config["test_key"] == "test_value"
def test_get_task_ins_empty(self) -> None:
"""Validate that a new state has no TaskIns."""
@@ -90,7 +91,7 @@ def test_store_task_ins_one(self) -> None:
# Prepare
consumer_node_id = 1
state = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_ins = create_task_ins(
consumer_node_id=consumer_node_id, anonymous=False, run_id=run_id
)
@@ -125,7 +126,7 @@ def test_store_and_delete_tasks(self) -> None:
# Prepare
consumer_node_id = 1
state = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_ins_0 = create_task_ins(
consumer_node_id=consumer_node_id, anonymous=False, run_id=run_id
)
@@ -199,7 +200,7 @@ def test_task_ins_store_anonymous_and_retrieve_anonymous(self) -> None:
"""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_ins = create_task_ins(consumer_node_id=0, anonymous=True, run_id=run_id)
# Execute
@@ -214,7 +215,7 @@ def test_task_ins_store_anonymous_and_fail_retrieving_identitiy(self) -> None:
"""Store anonymous TaskIns and fail to retrieve it."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_ins = create_task_ins(consumer_node_id=0, anonymous=True, run_id=run_id)
# Execute
@@ -228,7 +229,7 @@ def test_task_ins_store_identity_and_fail_retrieving_anonymous(self) -> None:
"""Store identity TaskIns and fail retrieving it as anonymous."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_ins = create_task_ins(consumer_node_id=1, anonymous=False, run_id=run_id)
# Execute
@@ -242,7 +243,7 @@ def test_task_ins_store_identity_and_retrieve_identity(self) -> None:
"""Store identity TaskIns and retrieve it."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_ins = create_task_ins(consumer_node_id=1, anonymous=False, run_id=run_id)
# Execute
@@ -259,7 +260,7 @@ def test_task_ins_store_delivered_and_fail_retrieving(self) -> None:
"""Fail retrieving delivered task."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_ins = create_task_ins(consumer_node_id=1, anonymous=False, run_id=run_id)
# Execute
@@ -302,7 +303,7 @@ def test_task_res_store_and_retrieve_by_task_ins_id(self) -> None:
"""Store TaskRes retrieve it by task_ins_id."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_ins_id = uuid4()
task_res = create_task_res(
producer_node_id=0,
@@ -323,7 +324,7 @@ def test_node_ids_initial_state(self) -> None:
"""Test retrieving all node_ids and empty initial state."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
# Execute
retrieved_node_ids = state.get_nodes(run_id)
@@ -335,7 +336,7 @@ def test_create_node_and_get_nodes(self) -> None:
"""Test creating a client node."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
node_ids = []
# Execute
@@ -352,7 +353,7 @@ def test_create_node_public_key(self) -> None:
# Prepare
state: State = self.state_factory()
public_key = b"mock"
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
# Execute
node_id = state.create_node(ping_interval=10, public_key=public_key)
@@ -368,7 +369,7 @@ def test_create_node_public_key_twice(self) -> None:
# Prepare
state: State = self.state_factory()
public_key = b"mock"
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
node_id = state.create_node(ping_interval=10, public_key=public_key)
# Execute
@@ -390,7 +391,7 @@ def test_delete_node(self) -> None:
"""Test deleting a client node."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
node_id = state.create_node(ping_interval=10)
# Execute
@@ -405,7 +406,7 @@ def test_delete_node_public_key(self) -> None:
# Prepare
state: State = self.state_factory()
public_key = b"mock"
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
node_id = state.create_node(ping_interval=10, public_key=public_key)
# Execute
@@ -422,7 +423,7 @@ def test_delete_node_public_key_none(self) -> None:
# Prepare
state: State = self.state_factory()
public_key = b"mock"
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
node_id = 0
# Execute & Assert
@@ -441,7 +442,7 @@ def test_delete_node_wrong_public_key(self) -> None:
state: State = self.state_factory()
public_key = b"mock"
wrong_public_key = b"mock_mock"
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
node_id = state.create_node(ping_interval=10, public_key=public_key)
# Execute & Assert
@@ -460,7 +461,7 @@ def test_get_node_id_wrong_public_key(self) -> None:
state: State = self.state_factory()
public_key = b"mock"
wrong_public_key = b"mock_mock"
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
# Execute
state.create_node(ping_interval=10, public_key=public_key)
@@ -475,7 +476,7 @@ def test_get_nodes_invalid_run_id(self) -> None:
"""Test retrieving all node_ids with invalid run_id."""
# Prepare
state: State = self.state_factory()
- state.create_run("mock/mock", "v1.0.0")
+ state.create_run("mock/mock", "v1.0.0", {})
invalid_run_id = 61016
state.create_node(ping_interval=10)
@@ -489,7 +490,7 @@ def test_num_task_ins(self) -> None:
"""Test if num_tasks returns correct number of not delivered task_ins."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_0 = create_task_ins(consumer_node_id=0, anonymous=True, run_id=run_id)
task_1 = create_task_ins(consumer_node_id=0, anonymous=True, run_id=run_id)
@@ -507,7 +508,7 @@ def test_num_task_res(self) -> None:
"""Test if num_tasks returns correct number of not delivered task_res."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
task_0 = create_task_res(
producer_node_id=0, anonymous=True, ancestry=["1"], run_id=run_id
)
@@ -608,7 +609,7 @@ def test_acknowledge_ping(self) -> None:
"""Test if acknowledge_ping works and if get_nodes return online nodes."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
node_ids = [state.create_node(ping_interval=10) for _ in range(100)]
for node_id in node_ids[:70]:
state.acknowledge_ping(node_id, ping_interval=30)
@@ -627,7 +628,7 @@ def test_node_unavailable_error(self) -> None:
"""Test if get_task_res return TaskRes containing node unavailable error."""
# Prepare
state: State = self.state_factory()
- run_id = state.create_run("mock/mock", "v1.0.0")
+ run_id = state.create_run("mock/mock", "v1.0.0", {})
node_id_0 = state.create_node(ping_interval=90)
node_id_1 = state.create_node(ping_interval=30)
# Create and store TaskIns
diff --git a/src/py/flwr/server/superlink/state/utils.py b/src/py/flwr/server/superlink/state/utils.py
index 233a90946cc7..b12a87ac998d 100644
--- a/src/py/flwr/server/superlink/state/utils.py
+++ b/src/py/flwr/server/superlink/state/utils.py
@@ -17,6 +17,7 @@
import time
from logging import ERROR
+from os import urandom
from uuid import uuid4
from flwr.common import log
@@ -31,6 +32,11 @@
)
+def generate_rand_int_from_bytes(num_bytes: int) -> int:
+ """Generate a random `num_bytes` integer."""
+ return int.from_bytes(urandom(num_bytes), "little", signed=True)
+
+
def make_node_unavailable_taskres(ref_taskins: TaskIns) -> TaskRes:
"""Generate a TaskRes with a node unavailable error from a TaskIns."""
current_time = time.time()
diff --git a/src/py/flwr/server/typing.py b/src/py/flwr/server/typing.py
index 01143af74392..cdb1c0db4fe7 100644
--- a/src/py/flwr/server/typing.py
+++ b/src/py/flwr/server/typing.py
@@ -20,6 +20,8 @@
from flwr.common import Context
from .driver import Driver
+from .serverapp_components import ServerAppComponents
ServerAppCallable = Callable[[Driver, Context], None]
Workflow = Callable[[Driver, Context], None]
+ServerFn = Callable[[Context], ServerAppComponents]
diff --git a/src/py/flwr/simulation/app.py b/src/py/flwr/simulation/app.py
index 856d6fc45e22..973a9a89e652 100644
--- a/src/py/flwr/simulation/app.py
+++ b/src/py/flwr/simulation/app.py
@@ -27,14 +27,16 @@
import ray
from ray.util.scheduling_strategies import NodeAffinitySchedulingStrategy
-from flwr.client import ClientFn
+from flwr.client import ClientFnExt
from flwr.common import EventType, event
-from flwr.common.logger import log, set_logger_propagation
+from flwr.common.constant import NODE_ID_NUM_BYTES
+from flwr.common.logger import log, set_logger_propagation, warn_unsupported_feature
from flwr.server.client_manager import ClientManager
from flwr.server.history import History
from flwr.server.server import Server, init_defaults, run_fl
from flwr.server.server_config import ServerConfig
from flwr.server.strategy import Strategy
+from flwr.server.superlink.state.utils import generate_rand_int_from_bytes
from flwr.simulation.ray_transport.ray_actor import (
ClientAppActor,
VirtualClientEngineActor,
@@ -51,7 +53,7 @@
`start_simulation(
*,
client_fn: ClientFn,
- num_clients: Optional[int] = None,
+ num_clients: int,
clients_ids: Optional[List[str]] = None,
client_resources: Optional[Dict[str, float]] = None,
server: Optional[Server] = None,
@@ -70,13 +72,29 @@
"""
+NodeToPartitionMapping = Dict[int, int]
+
+
+def _create_node_id_to_partition_mapping(
+ num_clients: int,
+) -> NodeToPartitionMapping:
+ """Generate a node_id:partition_id mapping."""
+ nodes_mapping: NodeToPartitionMapping = {} # {node-id: partition-id}
+ for i in range(num_clients):
+ while True:
+ node_id = generate_rand_int_from_bytes(NODE_ID_NUM_BYTES)
+ if node_id not in nodes_mapping:
+ break
+ nodes_mapping[node_id] = i
+ return nodes_mapping
+
# pylint: disable=too-many-arguments,too-many-statements,too-many-branches
def start_simulation(
*,
- client_fn: ClientFn,
- num_clients: Optional[int] = None,
- clients_ids: Optional[List[str]] = None,
+ client_fn: ClientFnExt,
+ num_clients: int,
+ clients_ids: Optional[List[str]] = None, # UNSUPPORTED, WILL BE REMOVED
client_resources: Optional[Dict[str, float]] = None,
server: Optional[Server] = None,
config: Optional[ServerConfig] = None,
@@ -92,23 +110,24 @@ def start_simulation(
Parameters
----------
- client_fn : ClientFn
- A function creating client instances. The function must take a single
- `str` argument called `cid`. It should return a single client instance
- of type Client. Note that the created client instances are ephemeral
- and will often be destroyed after a single method invocation. Since client
- instances are not long-lived, they should not attempt to carry state over
- method invocations. Any state required by the instance (model, dataset,
- hyperparameters, ...) should be (re-)created in either the call to `client_fn`
- or the call to any of the client methods (e.g., load evaluation data in the
- `evaluate` method itself).
- num_clients : Optional[int]
- The total number of clients in this simulation. This must be set if
- `clients_ids` is not set and vice-versa.
+ client_fn : ClientFnExt
+ A function creating `Client` instances. The function must have the signature
+ `client_fn(context: Context)`. It should return
+ a single client instance of type `Client`. Note that the created client
+ instances are ephemeral and will often be destroyed after a single method
+ invocation. Since client instances are not long-lived, they should not attempt
+ to carry state over method invocations. Any state required by the instance
+ (model, dataset, hyperparameters, ...) should be (re-)created in either the
+ call to `client_fn` or the call to any of the client methods (e.g., load
+ evaluation data in the `evaluate` method itself).
+ num_clients : int
+ The total number of clients in this simulation.
clients_ids : Optional[List[str]]
+ UNSUPPORTED, WILL BE REMOVED. USE `num_clients` INSTEAD.
List `client_id`s for each client. This is only required if
`num_clients` is not set. Setting both `num_clients` and `clients_ids`
with `len(clients_ids)` not equal to `num_clients` generates an error.
+ Using this argument will raise an error.
client_resources : Optional[Dict[str, float]] (default: `{"num_cpus": 1, "num_gpus": 0.0}`)
CPU and GPU resources for a single client. Supported keys
are `num_cpus` and `num_gpus`. To understand the GPU utilization caused by
@@ -158,7 +177,6 @@ def start_simulation(
is an advanced feature. For all details, please refer to the Ray documentation:
https://docs.ray.io/en/latest/ray-core/scheduling/index.html
-
Returns
-------
hist : flwr.server.history.History
@@ -170,6 +188,14 @@ def start_simulation(
{"num_clients": len(clients_ids) if clients_ids is not None else num_clients},
)
+ if clients_ids is not None:
+ warn_unsupported_feature(
+ "Passing `clients_ids` to `start_simulation` is deprecated and not longer "
+ "used by `start_simulation`. Use `num_clients` exclusively instead."
+ )
+ log(ERROR, "`clients_ids` argument used.")
+ sys.exit()
+
# Set logger propagation
loop: Optional[asyncio.AbstractEventLoop] = None
try:
@@ -196,20 +222,8 @@ def start_simulation(
initialized_config,
)
- # clients_ids takes precedence
- cids: List[str]
- if clients_ids is not None:
- if (num_clients is not None) and (len(clients_ids) != num_clients):
- log(ERROR, INVALID_ARGUMENTS_START_SIMULATION)
- sys.exit()
- else:
- cids = clients_ids
- else:
- if num_clients is None:
- log(ERROR, INVALID_ARGUMENTS_START_SIMULATION)
- sys.exit()
- else:
- cids = [str(x) for x in range(num_clients)]
+ # Create node-id to partition-id mapping
+ nodes_mapping = _create_node_id_to_partition_mapping(num_clients)
# Default arguments for Ray initialization
if not ray_init_args:
@@ -308,10 +322,12 @@ def update_resources(f_stop: threading.Event) -> None:
)
# Register one RayClientProxy object for each client with the ClientManager
- for cid in cids:
+ for node_id, partition_id in nodes_mapping.items():
client_proxy = RayActorClientProxy(
client_fn=client_fn,
- cid=cid,
+ node_id=node_id,
+ partition_id=partition_id,
+ num_partitions=num_clients,
actor_pool=pool,
)
initialized_server.client_manager().register(client=client_proxy)
diff --git a/src/py/flwr/simulation/ray_transport/ray_actor.py b/src/py/flwr/simulation/ray_transport/ray_actor.py
index 7afffb865334..b1c9d2b9c0c1 100644
--- a/src/py/flwr/simulation/ray_transport/ray_actor.py
+++ b/src/py/flwr/simulation/ray_transport/ray_actor.py
@@ -14,7 +14,6 @@
# ==============================================================================
"""Ray-based Flower Actor and ActorPool implementation."""
-import asyncio
import threading
from abc import ABC
from logging import DEBUG, ERROR, WARNING
@@ -411,9 +410,7 @@ def __init__(
self.client_resources = client_resources
# Queue of idle actors
- self.pool: "asyncio.Queue[Type[VirtualClientEngineActor]]" = asyncio.Queue(
- maxsize=1024
- )
+ self.pool: List[VirtualClientEngineActor] = []
self.num_actors = 0
# Resolve arguments to pass during actor init
@@ -427,38 +424,37 @@ def __init__(
# Figure out how many actors can be created given the cluster resources
# and the resources the user indicates each VirtualClient will need
self.actors_capacity = pool_size_from_resources(client_resources)
- self._future_to_actor: Dict[Any, Type[VirtualClientEngineActor]] = {}
+ self._future_to_actor: Dict[Any, VirtualClientEngineActor] = {}
def is_actor_available(self) -> bool:
"""Return true if there is an idle actor."""
- return self.pool.qsize() > 0
+ return len(self.pool) > 0
- async def add_actors_to_pool(self, num_actors: int) -> None:
+ def add_actors_to_pool(self, num_actors: int) -> None:
"""Add actors to the pool.
This method may be executed also if new resources are added to your Ray cluster
(e.g. you add a new node).
"""
for _ in range(num_actors):
- await self.pool.put(self.create_actor_fn()) # type: ignore
+ self.pool.append(self.create_actor_fn()) # type: ignore
self.num_actors += num_actors
- async def terminate_all_actors(self) -> None:
+ def terminate_all_actors(self) -> None:
"""Terminate actors in pool."""
num_terminated = 0
- while self.pool.qsize():
- actor = await self.pool.get()
+ for actor in self.pool:
actor.terminate.remote() # type: ignore
num_terminated += 1
log(DEBUG, "Terminated %i actors", num_terminated)
- async def submit(
+ def submit(
self, actor_fn: Any, job: Tuple[ClientAppFn, Message, str, Context]
) -> Any:
"""On idle actor, submit job and return future."""
# Remove idle actor from pool
- actor = await self.pool.get()
+ actor = self.pool.pop()
# Submit job to actor
app_fn, mssg, cid, context = job
future = actor_fn(actor, app_fn, mssg, cid, context)
@@ -467,18 +463,18 @@ async def submit(
self._future_to_actor[future] = actor
return future
- async def add_actor_back_to_pool(self, future: Any) -> None:
+ def add_actor_back_to_pool(self, future: Any) -> None:
"""Ad actor assigned to run future back into the pool."""
actor = self._future_to_actor.pop(future)
- await self.pool.put(actor)
+ self.pool.append(actor)
- async def fetch_result_and_return_actor_to_pool(
+ def fetch_result_and_return_actor_to_pool(
self, future: Any
) -> Tuple[Message, Context]:
"""Pull result given a future and add actor back to pool."""
- # Get actor that ran job
- await self.add_actor_back_to_pool(future)
# Retrieve result for object store
# Instead of doing ray.get(future) we await it
- _, out_mssg, updated_context = await future
+ _, out_mssg, updated_context = ray.get(future)
+ # Get actor that ran job
+ self.add_actor_back_to_pool(future)
return out_mssg, updated_context
diff --git a/src/py/flwr/simulation/ray_transport/ray_client_proxy.py b/src/py/flwr/simulation/ray_transport/ray_client_proxy.py
index d3d103bb377a..895272c2fd79 100644
--- a/src/py/flwr/simulation/ray_transport/ray_client_proxy.py
+++ b/src/py/flwr/simulation/ray_transport/ray_client_proxy.py
@@ -20,11 +20,16 @@
from typing import Optional
from flwr import common
-from flwr.client import ClientFn
+from flwr.client import ClientFnExt
from flwr.client.client_app import ClientApp
from flwr.client.node_state import NodeState
from flwr.common import DEFAULT_TTL, Message, Metadata, RecordSet
-from flwr.common.constant import MessageType, MessageTypeLegacy
+from flwr.common.constant import (
+ NUM_PARTITIONS_KEY,
+ PARTITION_ID_KEY,
+ MessageType,
+ MessageTypeLegacy,
+)
from flwr.common.logger import log
from flwr.common.recordset_compat import (
evaluateins_to_recordset,
@@ -43,17 +48,30 @@
class RayActorClientProxy(ClientProxy):
"""Flower client proxy which delegates work using Ray."""
- def __init__(
- self, client_fn: ClientFn, cid: str, actor_pool: VirtualClientEngineActorPool
+ def __init__( # pylint: disable=too-many-arguments
+ self,
+ client_fn: ClientFnExt,
+ node_id: int,
+ partition_id: int,
+ num_partitions: int,
+ actor_pool: VirtualClientEngineActorPool,
):
- super().__init__(cid)
+ super().__init__(cid=str(node_id))
+ self.node_id = node_id
+ self.partition_id = partition_id
def _load_app() -> ClientApp:
return ClientApp(client_fn=client_fn)
self.app_fn = _load_app
self.actor_pool = actor_pool
- self.proxy_state = NodeState()
+ self.proxy_state = NodeState(
+ node_id=node_id,
+ node_config={
+ PARTITION_ID_KEY: str(partition_id),
+ NUM_PARTITIONS_KEY: str(num_partitions),
+ },
+ )
def _submit_job(self, message: Message, timeout: Optional[float]) -> Message:
"""Sumbit a message to the ActorPool."""
@@ -62,16 +80,19 @@ def _submit_job(self, message: Message, timeout: Optional[float]) -> Message:
# Register state
self.proxy_state.register_context(run_id=run_id)
- # Retrieve state
- state = self.proxy_state.retrieve_context(run_id=run_id)
+ # Retrieve context
+ context = self.proxy_state.retrieve_context(run_id=run_id)
+ partition_id_str = context.node_config[PARTITION_ID_KEY]
try:
self.actor_pool.submit_client_job(
- lambda a, a_fn, mssg, cid, state: a.run.remote(a_fn, mssg, cid, state),
- (self.app_fn, message, self.cid, state),
+ lambda a, a_fn, mssg, partition_id, context: a.run.remote(
+ a_fn, mssg, partition_id, context
+ ),
+ (self.app_fn, message, partition_id_str, context),
)
out_mssg, updated_context = self.actor_pool.get_client_result(
- self.cid, timeout
+ partition_id_str, timeout
)
# Update state
@@ -103,11 +124,10 @@ def _wrap_recordset_in_message(
message_id="",
group_id=str(group_id) if group_id is not None else "",
src_node_id=0,
- dst_node_id=int(self.cid),
+ dst_node_id=self.node_id,
reply_to_message="",
ttl=timeout if timeout else DEFAULT_TTL,
message_type=message_type,
- partition_id=int(self.cid),
),
)
diff --git a/src/py/flwr/simulation/ray_transport/ray_client_proxy_test.py b/src/py/flwr/simulation/ray_transport/ray_client_proxy_test.py
index 9680b3846f1d..62e0cfd61c99 100644
--- a/src/py/flwr/simulation/ray_transport/ray_client_proxy_test.py
+++ b/src/py/flwr/simulation/ray_transport/ray_client_proxy_test.py
@@ -23,6 +23,7 @@
from flwr.client import Client, NumPyClient
from flwr.client.client_app import ClientApp
+from flwr.client.node_state import NodeState
from flwr.common import (
DEFAULT_TTL,
Config,
@@ -31,14 +32,18 @@
Message,
MessageTypeLegacy,
Metadata,
- RecordSet,
Scalar,
)
+from flwr.common.constant import NUM_PARTITIONS_KEY, PARTITION_ID_KEY
from flwr.common.recordset_compat import (
getpropertiesins_to_recordset,
recordset_to_getpropertiesres,
)
from flwr.common.recordset_compat_test import _get_valid_getpropertiesins
+from flwr.simulation.app import (
+ NodeToPartitionMapping,
+ _create_node_id_to_partition_mapping,
+)
from flwr.simulation.ray_transport.ray_actor import (
ClientAppActor,
VirtualClientEngineActor,
@@ -50,12 +55,12 @@
class DummyClient(NumPyClient):
"""A dummy NumPyClient for tests."""
- def __init__(self, cid: str) -> None:
- self.cid = int(cid)
+ def __init__(self, node_id: int) -> None:
+ self.node_id = node_id
def get_properties(self, config: Config) -> Dict[str, Scalar]:
"""Return properties by doing a simple calculation."""
- result = int(self.cid) * pi
+ result = self.node_id * pi
# store something in context
self.context.state.configs_records["result"] = ConfigsRecord(
@@ -64,14 +69,16 @@ def get_properties(self, config: Config) -> Dict[str, Scalar]:
return {"result": result}
-def get_dummy_client(cid: str) -> Client:
+def get_dummy_client(context: Context) -> Client:
"""Return a DummyClient converted to Client type."""
- return DummyClient(cid).to_client()
+ return DummyClient(context.node_id).to_client()
def prep(
actor_type: Type[VirtualClientEngineActor] = ClientAppActor,
-) -> Tuple[List[RayActorClientProxy], VirtualClientEngineActorPool]: # pragma: no cover
+) -> Tuple[
+ List[RayActorClientProxy], VirtualClientEngineActorPool, NodeToPartitionMapping
+]: # pragma: no cover
"""Prepare ClientProxies and pool for tests."""
client_resources = {"num_cpus": 1, "num_gpus": 0.0}
@@ -87,16 +94,19 @@ def create_actor_fn() -> Type[VirtualClientEngineActor]:
# Create 373 client proxies
num_proxies = 373 # a prime number
+ mapping = _create_node_id_to_partition_mapping(num_proxies)
proxies = [
RayActorClientProxy(
client_fn=get_dummy_client,
- cid=str(cid),
+ node_id=node_id,
+ partition_id=partition_id,
+ num_partitions=num_proxies,
actor_pool=pool,
)
- for cid in range(num_proxies)
+ for node_id, partition_id in mapping.items()
]
- return proxies, pool
+ return proxies, pool, mapping
def test_cid_consistency_one_at_a_time() -> None:
@@ -104,7 +114,7 @@ def test_cid_consistency_one_at_a_time() -> None:
Submit one job and waits for completion. Then submits the next and so on
"""
- proxies, _ = prep()
+ proxies, _, _ = prep()
getproperties_ins = _get_valid_getpropertiesins()
recordset = getpropertiesins_to_recordset(getproperties_ins)
@@ -123,7 +133,7 @@ def test_cid_consistency_one_at_a_time() -> None:
res = recordset_to_getpropertiesres(message_out.content)
- assert int(prox.cid) * pi == res.properties["result"]
+ assert int(prox.node_id) * pi == res.properties["result"]
ray.shutdown()
@@ -134,7 +144,7 @@ def test_cid_consistency_all_submit_first_run_consistency() -> None:
All jobs are submitted at the same time. Then fetched one at a time. This also tests
NodeState (at each Proxy) and RunState basic functionality.
"""
- proxies, _ = prep()
+ proxies, _, _ = prep()
run_id = 0
getproperties_ins = _get_valid_getpropertiesins()
@@ -156,21 +166,21 @@ def test_cid_consistency_all_submit_first_run_consistency() -> None:
)
prox.actor_pool.submit_client_job(
lambda a, a_fn, mssg, cid, state: a.run.remote(a_fn, mssg, cid, state),
- (prox.app_fn, message, prox.cid, state),
+ (prox.app_fn, message, str(prox.node_id), state),
)
# fetch results one at a time
shuffle(proxies)
for prox in proxies:
message_out, updated_context = prox.actor_pool.get_client_result(
- prox.cid, timeout=None
+ str(prox.node_id), timeout=None
)
prox.proxy_state.update_context(run_id, context=updated_context)
res = recordset_to_getpropertiesres(message_out.content)
- assert int(prox.cid) * pi == res.properties["result"]
+ assert prox.node_id * pi == res.properties["result"]
assert (
- str(int(prox.cid) * pi)
+ str(prox.node_id * pi)
== prox.proxy_state.retrieve_context(run_id).state.configs_records[
"result"
]["result"]
@@ -181,9 +191,19 @@ def test_cid_consistency_all_submit_first_run_consistency() -> None:
def test_cid_consistency_without_proxies() -> None:
"""Test cid consistency of jobs submitted/retrieved to/from pool w/o ClientProxy."""
- proxies, pool = prep()
- num_clients = len(proxies)
- cids = [str(cid) for cid in range(num_clients)]
+ _, pool, mapping = prep()
+ node_ids = list(mapping.keys())
+
+ # register node states
+ node_states: Dict[int, NodeState] = {}
+ for node_id, partition_id in mapping.items():
+ node_states[node_id] = NodeState(
+ node_id=node_id,
+ node_config={
+ PARTITION_ID_KEY: str(partition_id),
+ NUM_PARTITIONS_KEY: str(len(node_ids)),
+ },
+ )
getproperties_ins = _get_valid_getpropertiesins()
recordset = getpropertiesins_to_recordset(getproperties_ins)
@@ -192,32 +212,37 @@ def _load_app() -> ClientApp:
return ClientApp(client_fn=get_dummy_client)
# submit all jobs (collect later)
- shuffle(cids)
- for cid in cids:
+ shuffle(node_ids)
+ run_id = 0
+ for node_id in node_ids:
message = Message(
content=recordset,
metadata=Metadata(
- run_id=0,
+ run_id=run_id,
message_id="",
group_id=str(0),
src_node_id=0,
- dst_node_id=12345,
+ dst_node_id=node_id,
reply_to_message="",
ttl=DEFAULT_TTL,
message_type=MessageTypeLegacy.GET_PROPERTIES,
- partition_id=int(cid),
),
)
+ # register and retrieve context
+ node_states[node_id].register_context(run_id=run_id)
+ context = node_states[node_id].retrieve_context(run_id=run_id)
+ partition_id_str = context.node_config[PARTITION_ID_KEY]
pool.submit_client_job(
- lambda a, c_fn, j_fn, cid_, state: a.run.remote(c_fn, j_fn, cid_, state),
- (_load_app, message, cid, Context(state=RecordSet())),
+ lambda a, c_fn, j_fn, nid_, state: a.run.remote(c_fn, j_fn, nid_, state),
+ (_load_app, message, partition_id_str, context),
)
# fetch results one at a time
- shuffle(cids)
- for cid in cids:
- message_out, _ = pool.get_client_result(cid, timeout=None)
+ shuffle(node_ids)
+ for node_id in node_ids:
+ partition_id_str = str(mapping[node_id])
+ message_out, _ = pool.get_client_result(partition_id_str, timeout=None)
res = recordset_to_getpropertiesres(message_out.content)
- assert int(cid) * pi == res.properties["result"]
+ assert node_id * pi == res.properties["result"]
ray.shutdown()
diff --git a/src/py/flwr/simulation/run_simulation.py b/src/py/flwr/simulation/run_simulation.py
index 7c7a412a245b..e5d82207e08b 100644
--- a/src/py/flwr/simulation/run_simulation.py
+++ b/src/py/flwr/simulation/run_simulation.py
@@ -18,45 +18,157 @@
import asyncio
import json
import logging
+import sys
import threading
import traceback
+from argparse import Namespace
from logging import DEBUG, ERROR, INFO, WARNING
+from pathlib import Path
from time import sleep
-from typing import Optional
+from typing import Dict, List, Optional
+from flwr.cli.config_utils import load_and_validate
from flwr.client import ClientApp
from flwr.common import EventType, event, log
+from flwr.common.config import get_fused_config_from_dir, parse_config_args
+from flwr.common.constant import RUN_ID_NUM_BYTES
from flwr.common.logger import set_logger_propagation, update_console_handler
from flwr.common.typing import Run
from flwr.server.driver import Driver, InMemoryDriver
-from flwr.server.run_serverapp import run
+from flwr.server.run_serverapp import run as run_server_app
from flwr.server.server_app import ServerApp
from flwr.server.superlink.fleet import vce
from flwr.server.superlink.fleet.vce.backend.backend import BackendConfig
from flwr.server.superlink.state import StateFactory
+from flwr.server.superlink.state.utils import generate_rand_int_from_bytes
from flwr.simulation.ray_transport.utils import (
enable_tf_gpu_growth as enable_gpu_growth,
)
+def _check_args_do_not_interfere(args: Namespace) -> bool:
+ """Ensure decoupling of flags for different ways to start the simulation."""
+ mode_one_args = ["app", "run_config"]
+ mode_two_args = ["client_app", "server_app"]
+
+ def _resolve_message(conflict_keys: List[str]) -> str:
+ return ",".join([f"`--{key}`".replace("_", "-") for key in conflict_keys])
+
+ # When passing `--app`, `--app-dir` is ignored
+ if args.app and args.app_dir:
+ log(ERROR, "Either `--app` or `--app-dir` can be set, but not both.")
+ return False
+
+ if any(getattr(args, key) for key in mode_one_args):
+ if any(getattr(args, key) for key in mode_two_args):
+ log(
+ ERROR,
+ "Passing any of {%s} alongside with any of {%s}",
+ _resolve_message(mode_one_args),
+ _resolve_message(mode_two_args),
+ )
+ return False
+
+ if not args.app:
+ log(ERROR, "You need to pass --app")
+ return False
+
+ return True
+
+ # Ensure all args are set (required for the non-FAB mode of execution)
+ if not all(getattr(args, key) for key in mode_two_args):
+ log(
+ ERROR,
+ "Passing all of %s keys are required.",
+ _resolve_message(mode_two_args),
+ )
+ return False
+
+ return True
+
+
# Entry point from CLI
+# pylint: disable=too-many-locals
def run_simulation_from_cli() -> None:
"""Run Simulation Engine from the CLI."""
args = _parse_args_run_simulation().parse_args()
+ # We are supporting two modes for the CLI entrypoint:
+ # 1) Running an app dir containing a `pyproject.toml`
+ # 2) Running any ClientApp and ServerApp w/o pyproject.toml being present
+ # For 2), some CLI args are compulsory, but they are not required for 1)
+ # We first do these checks
+ args_check_pass = _check_args_do_not_interfere(args)
+ if not args_check_pass:
+ sys.exit("Simulation Engine cannot start.")
+
+ run_id = (
+ generate_rand_int_from_bytes(RUN_ID_NUM_BYTES)
+ if args.run_id is None
+ else args.run_id
+ )
+ if args.app:
+ # Mode 1
+ app_path = Path(args.app)
+ if not app_path.is_dir():
+ log(ERROR, "--app is not a directory")
+ sys.exit("Simulation Engine cannot start.")
+
+ # Load pyproject.toml
+ config, errors, warnings = load_and_validate(
+ app_path / "pyproject.toml", check_module=False
+ )
+ if errors:
+ raise ValueError(errors)
+
+ if warnings:
+ log(WARNING, warnings)
+
+ if config is None:
+ raise ValueError("Config extracted from FAB's pyproject.toml is not valid")
+
+ # Get ClientApp and ServerApp components
+ app_components = config["tool"]["flwr"]["app"]["components"]
+ client_app_attr = app_components["clientapp"]
+ server_app_attr = app_components["serverapp"]
+
+ override_config = parse_config_args([args.run_config])
+ fused_config = get_fused_config_from_dir(app_path, override_config)
+ app_dir = args.app
+ is_app = True
+
+ else:
+ # Mode 2
+ client_app_attr = args.client_app
+ server_app_attr = args.server_app
+ override_config = {}
+ fused_config = None
+ app_dir = args.app_dir
+ is_app = False
+
+ # Create run
+ run = Run(
+ run_id=run_id,
+ fab_id="",
+ fab_version="",
+ override_config=override_config,
+ )
+
# Load JSON config
backend_config_dict = json.loads(args.backend_config)
_run_simulation(
- server_app_attr=args.server_app,
- client_app_attr=args.client_app,
+ server_app_attr=server_app_attr,
+ client_app_attr=client_app_attr,
num_supernodes=args.num_supernodes,
backend_name=args.backend,
backend_config=backend_config_dict,
- app_dir=args.app_dir,
- run_id=args.run_id,
+ app_dir=app_dir,
+ run=run,
enable_tf_gpu_growth=args.enable_tf_gpu_growth,
verbose_logging=args.verbose,
+ server_app_run_config=fused_config,
+ is_app=is_app,
)
@@ -126,16 +238,25 @@ def run_simulation(
def run_serverapp_th(
server_app_attr: Optional[str],
server_app: Optional[ServerApp],
+ server_app_run_config: Dict[str, str],
driver: Driver,
app_dir: str,
- f_stop: asyncio.Event,
+ f_stop: threading.Event,
+ has_exception: threading.Event,
enable_tf_gpu_growth: bool,
delay_launch: int = 3,
) -> threading.Thread:
"""Run SeverApp in a thread."""
- def server_th_with_start_checks( # type: ignore
- tf_gpu_growth: bool, stop_event: asyncio.Event, **kwargs
+ def server_th_with_start_checks(
+ tf_gpu_growth: bool,
+ stop_event: threading.Event,
+ exception_event: threading.Event,
+ _driver: Driver,
+ _server_app_dir: str,
+ _server_app_run_config: Dict[str, str],
+ _server_app_attr: Optional[str],
+ _server_app: Optional[ServerApp],
) -> None:
"""Run SeverApp, after check if GPU memory growth has to be set.
@@ -147,10 +268,18 @@ def server_th_with_start_checks( # type: ignore
enable_gpu_growth()
# Run ServerApp
- run(**kwargs)
+ run_server_app(
+ driver=_driver,
+ server_app_dir=_server_app_dir,
+ server_app_run_config=_server_app_run_config,
+ server_app_attr=_server_app_attr,
+ loaded_server_app=_server_app,
+ )
except Exception as ex: # pylint: disable=broad-exception-caught
log(ERROR, "ServerApp thread raised an exception: %s", ex)
log(ERROR, traceback.format_exc())
+ exception_event.set()
+ raise
finally:
log(DEBUG, "ServerApp finished running.")
# Upon completion, trigger stop event if one was passed
@@ -160,71 +289,66 @@ def server_th_with_start_checks( # type: ignore
serverapp_th = threading.Thread(
target=server_th_with_start_checks,
- args=(enable_tf_gpu_growth, f_stop),
- kwargs={
- "server_app_attr": server_app_attr,
- "loaded_server_app": server_app,
- "driver": driver,
- "server_app_dir": app_dir,
- },
+ args=(
+ enable_tf_gpu_growth,
+ f_stop,
+ has_exception,
+ driver,
+ app_dir,
+ server_app_run_config,
+ server_app_attr,
+ server_app,
+ ),
)
sleep(delay_launch)
serverapp_th.start()
return serverapp_th
-def _override_run_id(state: StateFactory, run_id_to_replace: int, run_id: int) -> None:
- """Override the run_id of an existing Run."""
- log(DEBUG, "Pre-registering run with id %s", run_id)
- # Remove run
- run_info: Run = state.state().run_ids.pop(run_id_to_replace) # type: ignore
- # Update with new run_id and insert back in state
- run_info.run_id = run_id
- state.state().run_ids[run_id] = run_info # type: ignore
-
-
# pylint: disable=too-many-locals
def _main_loop(
num_supernodes: int,
backend_name: str,
backend_config_stream: str,
app_dir: str,
+ is_app: bool,
enable_tf_gpu_growth: bool,
- run_id: Optional[int] = None,
+ run: Run,
+ flwr_dir: Optional[str] = None,
client_app: Optional[ClientApp] = None,
client_app_attr: Optional[str] = None,
server_app: Optional[ServerApp] = None,
server_app_attr: Optional[str] = None,
+ server_app_run_config: Optional[Dict[str, str]] = None,
) -> None:
- """Launch SuperLink with Simulation Engine, then ServerApp on a separate thread.
-
- Everything runs on the main thread or a separate one, depending on whether the main
- thread already contains a running Asyncio event loop. This is the case if running
- the Simulation Engine on a Jupyter/Colab notebook.
- """
+ """Launch SuperLink with Simulation Engine, then ServerApp on a separate thread."""
# Initialize StateFactory
state_factory = StateFactory(":flwr-in-memory-state:")
- f_stop = asyncio.Event()
+ f_stop = threading.Event()
+ # A Threading event to indicate if an exception was raised in the ServerApp thread
+ server_app_thread_has_exception = threading.Event()
serverapp_th = None
try:
- # Create run (with empty fab_id and fab_version)
- run_id_ = state_factory.state().create_run("", "")
+ # Register run
+ log(DEBUG, "Pre-registering run with id %s", run.run_id)
+ state_factory.state().run_ids[run.run_id] = run # type: ignore
- if run_id:
- _override_run_id(state_factory, run_id_to_replace=run_id_, run_id=run_id)
- run_id_ = run_id
+ if server_app_run_config is None:
+ server_app_run_config = {}
# Initialize Driver
- driver = InMemoryDriver(run_id=run_id_, state_factory=state_factory)
+ driver = InMemoryDriver(run_id=run.run_id, state_factory=state_factory)
# Get and run ServerApp thread
serverapp_th = run_serverapp_th(
server_app_attr=server_app_attr,
server_app=server_app,
+ server_app_run_config=server_app_run_config,
driver=driver,
app_dir=app_dir,
f_stop=f_stop,
+ has_exception=server_app_thread_has_exception,
enable_tf_gpu_growth=enable_tf_gpu_growth,
)
@@ -237,8 +361,11 @@ def _main_loop(
backend_name=backend_name,
backend_config_json_stream=backend_config_stream,
app_dir=app_dir,
+ is_app=is_app,
state_factory=state_factory,
f_stop=f_stop,
+ run=run,
+ flwr_dir=flwr_dir,
)
except Exception as ex:
@@ -253,6 +380,8 @@ def _main_loop(
event(EventType.RUN_SUPERLINK_LEAVE)
if serverapp_th:
serverapp_th.join()
+ if server_app_thread_has_exception.is_set():
+ raise RuntimeError("Exception in ServerApp thread")
log(DEBUG, "Stopping Simulation Engine now.")
@@ -266,10 +395,13 @@ def _run_simulation(
backend_config: Optional[BackendConfig] = None,
client_app_attr: Optional[str] = None,
server_app_attr: Optional[str] = None,
+ server_app_run_config: Optional[Dict[str, str]] = None,
app_dir: str = "",
- run_id: Optional[int] = None,
+ flwr_dir: Optional[str] = None,
+ run: Optional[Run] = None,
enable_tf_gpu_growth: bool = False,
verbose_logging: bool = False,
+ is_app: bool = False,
) -> None:
r"""Launch the Simulation Engine.
@@ -298,20 +430,27 @@ def _run_simulation(
parameters. Values supported in are those included by
`flwr.common.typing.ConfigsRecordValues`.
- client_app_attr : str
+ client_app_attr : Optional[str]
A path to a `ClientApp` module to be loaded: For example: `client:app` or
`project.package.module:wrapper.app`."
- server_app_attr : str
+ server_app_attr : Optional[str]
A path to a `ServerApp` module to be loaded: For example: `server:app` or
`project.package.module:wrapper.app`."
+ server_app_run_config : Optional[Dict[str, str]]
+ Config dictionary that parameterizes the run config. It will be made accessible
+ to the ServerApp.
+
app_dir : str
Add specified directory to the PYTHONPATH and load `ClientApp` from there.
(Default: current working directory.)
- run_id : Optional[int]
- An integer specifying the ID of the run started when running this function.
+ flwr_dir : Optional[str]
+ The path containing installed Flower Apps.
+
+ run : Optional[Run]
+ An object carrying details about the run.
enable_tf_gpu_growth : bool (default: False)
A boolean to indicate whether to enable GPU growth on the main thread. This is
@@ -324,6 +463,11 @@ def _run_simulation(
verbose_logging : bool (default: False)
When disabled, only INFO, WARNING and ERROR log messages will be shown. If
enabled, DEBUG-level logs will be displayed.
+
+ is_app : bool (default: False)
+ A flag that indicates whether the simulation is running an app or not. This is
+ needed in order to attempt loading an app's pyproject.toml when nodes register
+ a context object.
"""
if backend_config is None:
backend_config = {}
@@ -349,45 +493,47 @@ def _run_simulation(
# Convert config to original JSON-stream format
backend_config_stream = json.dumps(backend_config)
- simulation_engine_th = None
+ # If no `Run` object is set, create one
+ if run is None:
+ run_id = generate_rand_int_from_bytes(RUN_ID_NUM_BYTES)
+ run = Run(run_id=run_id, fab_id="", fab_version="", override_config={})
+
args = (
num_supernodes,
backend_name,
backend_config_stream,
app_dir,
+ is_app,
enable_tf_gpu_growth,
- run_id,
+ run,
+ flwr_dir,
client_app,
client_app_attr,
server_app,
server_app_attr,
+ server_app_run_config,
)
# Detect if there is an Asyncio event loop already running.
- # If yes, run everything on a separate thread. In environments
- # like Jupyter/Colab notebooks, there is an event loop present.
- run_in_thread = False
+ # If yes, disable logger propagation. In environments
+ # like Jupyter/Colab notebooks, it's often better to do this.
+ asyncio_loop_running = False
try:
_ = (
asyncio.get_running_loop()
) # Raises RuntimeError if no event loop is present
log(DEBUG, "Asyncio event loop already running.")
- run_in_thread = True
+ asyncio_loop_running = True
except RuntimeError:
- log(DEBUG, "No asyncio event loop running")
+ pass
finally:
- if run_in_thread:
+ if asyncio_loop_running:
# Set logger propagation to False to prevent duplicated log output in Colab.
logger = set_logger_propagation(logger, False)
- log(DEBUG, "Starting Simulation Engine on a new thread.")
- simulation_engine_th = threading.Thread(target=_main_loop, args=args)
- simulation_engine_th.start()
- simulation_engine_th.join()
- else:
- log(DEBUG, "Starting Simulation Engine on the main thread.")
- _main_loop(*args)
+
+ _main_loop(*args)
def _parse_args_run_simulation() -> argparse.ArgumentParser:
@@ -397,12 +543,10 @@ def _parse_args_run_simulation() -> argparse.ArgumentParser:
)
parser.add_argument(
"--server-app",
- required=True,
help="For example: `server:app` or `project.package.module:wrapper.app`",
)
parser.add_argument(
"--client-app",
- required=True,
help="For example: `client:app` or `project.package.module:wrapper.app`",
)
parser.add_argument(
@@ -411,6 +555,18 @@ def _parse_args_run_simulation() -> argparse.ArgumentParser:
required=True,
help="Number of simulated SuperNodes.",
)
+ parser.add_argument(
+ "--app",
+ type=str,
+ default=None,
+ help="Path to a directory containing a FAB-like structure with a "
+ "pyproject.toml.",
+ )
+ parser.add_argument(
+ "--run-config",
+ default=None,
+ help="Override configuration key-value pairs.",
+ )
parser.add_argument(
"--backend",
default="ray",
@@ -449,6 +605,17 @@ def _parse_args_run_simulation() -> argparse.ArgumentParser:
"ClientApp and ServerApp from there."
" Default: current working directory.",
)
+ parser.add_argument(
+ "--flwr-dir",
+ default=None,
+ help="""The path containing installed Flower Apps.
+ By default, this value is equal to:
+
+ - `$FLWR_HOME/` if `$FLWR_HOME` is defined
+ - `$XDG_DATA_HOME/.flwr/` if `$XDG_DATA_HOME` is defined
+ - `$HOME/.flwr/` in all other cases
+ """,
+ )
parser.add_argument(
"--run-id",
type=int,
diff --git a/src/py/flwr/superexec/app.py b/src/py/flwr/superexec/app.py
index fa89e83b5e75..9f1753ce041b 100644
--- a/src/py/flwr/superexec/app.py
+++ b/src/py/flwr/superexec/app.py
@@ -24,6 +24,7 @@
from flwr.common import EventType, event, log
from flwr.common.address import parse_address
+from flwr.common.config import parse_config_args
from flwr.common.constant import SUPEREXEC_DEFAULT_ADDRESS
from flwr.common.exit_handlers import register_exit_handlers
from flwr.common.object_ref import load_app, validate
@@ -55,6 +56,7 @@ def run_superexec() -> None:
address=address,
executor=_load_executor(args),
certificates=certificates,
+ config=parse_config_args([args.executor_config]),
)
grpc_servers = [superexec_server]
@@ -74,20 +76,25 @@ def _parse_args_run_superexec() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(
description="Start a Flower SuperExec",
)
- parser.add_argument(
- "executor",
- help="For example: `deployment:exec` or `project.package.module:wrapper.exec`.",
- )
parser.add_argument(
"--address",
help="SuperExec (gRPC) server address (IPv4, IPv6, or a domain name)",
default=SUPEREXEC_DEFAULT_ADDRESS,
)
+ parser.add_argument(
+ "--executor",
+ help="For example: `deployment:exec` or `project.package.module:wrapper.exec`.",
+ default="flwr.superexec.deployment:executor",
+ )
parser.add_argument(
"--executor-dir",
help="The directory for the executor.",
default=".",
)
+ parser.add_argument(
+ "--executor-config",
+ help="Key-value pairs for the executor config, separated by commas.",
+ )
parser.add_argument(
"--insecure",
action="store_true",
@@ -126,11 +133,11 @@ def _try_obtain_certificates(
return None
# Check if certificates are provided
if args.ssl_certfile and args.ssl_keyfile and args.ssl_ca_certfile:
- if not Path.is_file(args.ssl_ca_certfile):
+ if not Path(args.ssl_ca_certfile).is_file():
sys.exit("Path argument `--ssl-ca-certfile` does not point to a file.")
- if not Path.is_file(args.ssl_certfile):
+ if not Path(args.ssl_certfile).is_file():
sys.exit("Path argument `--ssl-certfile` does not point to a file.")
- if not Path.is_file(args.ssl_keyfile):
+ if not Path(args.ssl_keyfile).is_file():
sys.exit("Path argument `--ssl-keyfile` does not point to a file.")
certificates = (
Path(args.ssl_ca_certfile).read_bytes(), # CA certificate
diff --git a/src/py/flwr/superexec/deployment.py b/src/py/flwr/superexec/deployment.py
new file mode 100644
index 000000000000..d012d408a9ff
--- /dev/null
+++ b/src/py/flwr/superexec/deployment.py
@@ -0,0 +1,176 @@
+# Copyright 2024 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Deployment engine executor."""
+
+import subprocess
+from logging import ERROR, INFO
+from pathlib import Path
+from typing import Dict, Optional
+
+from typing_extensions import override
+
+from flwr.cli.config_utils import get_fab_metadata
+from flwr.cli.install import install_from_fab
+from flwr.common.grpc import create_channel
+from flwr.common.logger import log
+from flwr.proto.driver_pb2 import CreateRunRequest # pylint: disable=E0611
+from flwr.proto.driver_pb2_grpc import DriverStub
+from flwr.server.driver.grpc_driver import DEFAULT_SERVER_ADDRESS_DRIVER
+
+from .executor import Executor, RunTracker
+
+
+class DeploymentEngine(Executor):
+ """Deployment engine executor.
+
+ Parameters
+ ----------
+ superlink: str (default: "0.0.0.0:9091")
+ Address of the SuperLink to connect to.
+ root_certificates: Optional[str] (default: None)
+ Specifies the path to the PEM-encoded root certificate file for
+ establishing secure HTTPS connections.
+ flwr_dir: Optional[str] (default: None)
+ The path containing installed Flower Apps.
+ """
+
+ def __init__(
+ self,
+ superlink: str = DEFAULT_SERVER_ADDRESS_DRIVER,
+ root_certificates: Optional[str] = None,
+ flwr_dir: Optional[str] = None,
+ ) -> None:
+ self.superlink = superlink
+ if root_certificates is None:
+ self.root_certificates = None
+ self.root_certificates_bytes = None
+ else:
+ self.root_certificates = root_certificates
+ self.root_certificates_bytes = Path(root_certificates).read_bytes()
+ self.flwr_dir = flwr_dir
+ self.stub: Optional[DriverStub] = None
+
+ @override
+ def set_config(
+ self,
+ config: Dict[str, str],
+ ) -> None:
+ """Set executor config arguments.
+
+ Parameters
+ ----------
+ config : Dict[str, str]
+ A dictionary for configuration values.
+ Supported configuration key/value pairs:
+ - "superlink": str
+ The address of the SuperLink Driver API.
+ - "root-certificates": str
+ The path to the root certificates.
+ - "flwr-dir": str
+ The path to the Flower directory.
+ """
+ if not config:
+ return
+ if superlink_address := config.get("superlink"):
+ self.superlink = superlink_address
+ if root_certificates := config.get("root-certificates"):
+ self.root_certificates = root_certificates
+ self.root_certificates_bytes = Path(root_certificates).read_bytes()
+ if flwr_dir := config.get("flwr-dir"):
+ self.flwr_dir = flwr_dir
+
+ def _connect(self) -> None:
+ if self.stub is not None:
+ return
+ channel = create_channel(
+ server_address=self.superlink,
+ insecure=(self.root_certificates_bytes is None),
+ root_certificates=self.root_certificates_bytes,
+ )
+ self.stub = DriverStub(channel)
+
+ def _create_run(
+ self,
+ fab_id: str,
+ fab_version: str,
+ override_config: Dict[str, str],
+ ) -> int:
+ if self.stub is None:
+ self._connect()
+
+ assert self.stub is not None
+
+ req = CreateRunRequest(
+ fab_id=fab_id,
+ fab_version=fab_version,
+ override_config=override_config,
+ )
+ res = self.stub.CreateRun(request=req)
+ return int(res.run_id)
+
+ @override
+ def start_run(
+ self,
+ fab_file: bytes,
+ override_config: Dict[str, str],
+ ) -> Optional[RunTracker]:
+ """Start run using the Flower Deployment Engine."""
+ try:
+ # Install FAB to flwr dir
+ fab_version, fab_id = get_fab_metadata(fab_file)
+ install_from_fab(fab_file, None, True)
+
+ # Call SuperLink to create run
+ run_id: int = self._create_run(fab_id, fab_version, override_config)
+ log(INFO, "Created run %s", str(run_id))
+
+ command = [
+ "flower-server-app",
+ "--run-id",
+ str(run_id),
+ "--superlink",
+ str(self.superlink),
+ ]
+
+ if self.flwr_dir:
+ command.append("--flwr-dir")
+ command.append(self.flwr_dir)
+
+ if self.root_certificates is None:
+ command.append("--insecure")
+ else:
+ command.append("--root-certificates")
+ command.append(self.root_certificates)
+
+ # Execute the command
+ proc = subprocess.Popen( # pylint: disable=consider-using-with
+ command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ text=True,
+ )
+ log(INFO, "Started run %s", str(run_id))
+
+ return RunTracker(
+ run_id=run_id,
+ proc=proc,
+ )
+ # pylint: disable-next=broad-except
+ except Exception as e:
+ log(ERROR, "Could not start run: %s", str(e))
+ return None
+
+
+executor = DeploymentEngine()
diff --git a/src/py/flwr/superexec/exec_grpc.py b/src/py/flwr/superexec/exec_grpc.py
index 127d5615dd84..d90cec3e47cd 100644
--- a/src/py/flwr/superexec/exec_grpc.py
+++ b/src/py/flwr/superexec/exec_grpc.py
@@ -15,7 +15,7 @@
"""SuperExec gRPC API."""
from logging import INFO
-from typing import Optional, Tuple
+from typing import Dict, Optional, Tuple
import grpc
@@ -32,8 +32,11 @@ def run_superexec_api_grpc(
address: str,
executor: Executor,
certificates: Optional[Tuple[bytes, bytes, bytes]],
+ config: Dict[str, str],
) -> grpc.Server:
"""Run SuperExec API (gRPC, request-response)."""
+ executor.set_config(config)
+
exec_servicer: grpc.Server = ExecServicer(
executor=executor,
)
@@ -45,7 +48,7 @@ def run_superexec_api_grpc(
certificates=certificates,
)
- log(INFO, "Flower ECE: Starting SuperExec API (gRPC-rere) on %s", address)
+ log(INFO, "Starting Flower SuperExec gRPC server on %s", address)
superexec_grpc_server.start()
return superexec_grpc_server
diff --git a/src/py/flwr/superexec/exec_servicer.py b/src/py/flwr/superexec/exec_servicer.py
index e5ef2bd59a79..61a7bc289af3 100644
--- a/src/py/flwr/superexec/exec_servicer.py
+++ b/src/py/flwr/superexec/exec_servicer.py
@@ -45,7 +45,10 @@ def StartRun(
"""Create run ID."""
log(INFO, "ExecServicer.StartRun")
- run = self.executor.start_run(request.fab_file)
+ run = self.executor.start_run(
+ request.fab_file,
+ dict(request.override_config.items()),
+ )
if run is None:
log(ERROR, "Executor failed to start run")
diff --git a/src/py/flwr/superexec/exec_servicer_test.py b/src/py/flwr/superexec/exec_servicer_test.py
index 41f67b74c48b..edc91df4530e 100644
--- a/src/py/flwr/superexec/exec_servicer_test.py
+++ b/src/py/flwr/superexec/exec_servicer_test.py
@@ -36,7 +36,7 @@ def test_start_run() -> None:
run_res.proc = proc
executor = MagicMock()
- executor.start_run = lambda _: run_res
+ executor.start_run = lambda _, __: run_res
context_mock = MagicMock()
diff --git a/src/py/flwr/superexec/executor.py b/src/py/flwr/superexec/executor.py
index f85ac4c157fc..62d64f366cec 100644
--- a/src/py/flwr/superexec/executor.py
+++ b/src/py/flwr/superexec/executor.py
@@ -17,7 +17,7 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from subprocess import Popen
-from typing import Optional
+from typing import Dict, Optional
@dataclass
@@ -31,10 +31,24 @@ class RunTracker:
class Executor(ABC):
"""Execute and monitor a Flower run."""
+ @abstractmethod
+ def set_config(
+ self,
+ config: Dict[str, str],
+ ) -> None:
+ """Register provided config as class attributes.
+
+ Parameters
+ ----------
+        config : Dict[str, str]
+ A dictionary for configuration values.
+ """
+
@abstractmethod
def start_run(
self,
fab_file: bytes,
+ override_config: Dict[str, str],
) -> Optional[RunTracker]:
"""Start a run using the given Flower FAB ID and version.
@@ -45,6 +59,8 @@ def start_run(
----------
fab_file : bytes
The Flower App Bundle file bytes.
+ override_config: Dict[str, str]
+ The config overrides dict sent by the user (using `flwr run`).
Returns
-------
diff --git a/src/py/flwr/superexec/simulation.py b/src/py/flwr/superexec/simulation.py
new file mode 100644
index 000000000000..58cc194a16d4
--- /dev/null
+++ b/src/py/flwr/superexec/simulation.py
@@ -0,0 +1,147 @@
+# Copyright 2024 Flower Labs GmbH. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Simulation engine executor."""
+
+
+import subprocess
+import sys
+from logging import ERROR, INFO, WARN
+from typing import Dict, Optional
+
+from typing_extensions import override
+
+from flwr.cli.config_utils import load_and_validate
+from flwr.cli.install import install_from_fab
+from flwr.common.constant import RUN_ID_NUM_BYTES
+from flwr.common.logger import log
+from flwr.server.superlink.state.utils import generate_rand_int_from_bytes
+
+from .executor import Executor, RunTracker
+
+
+class SimulationEngine(Executor):
+ """Simulation engine executor.
+
+ Parameters
+ ----------
+    num_supernodes: Optional[str] (default: None)
+ Total number of nodes to involve in the simulation.
+ """
+
+ def __init__(
+ self,
+ num_supernodes: Optional[str] = None,
+ ) -> None:
+ self.num_supernodes = num_supernodes
+
+ @override
+ def set_config(
+ self,
+ config: Dict[str, str],
+ ) -> None:
+ """Set executor config arguments.
+
+ Parameters
+ ----------
+ config : Dict[str, str]
+ A dictionary for configuration values.
+ Supported configuration key/value pairs:
+ - "num-supernodes": str
+ Number of nodes to register for the simulation.
+ """
+ if not config:
+ return
+ if num_supernodes := config.get("num-supernodes"):
+ self.num_supernodes = num_supernodes
+
+ # Validate config
+ if self.num_supernodes is None:
+ log(
+ ERROR,
+ "To start a run with the simulation plugin, please specify "
+ "the number of SuperNodes. This can be done by using the "
+ "`--executor-config` argument when launching the SuperExec.",
+ )
+ raise ValueError("`num-supernodes` must not be `None`")
+
+ @override
+ def start_run(
+ self, fab_file: bytes, override_config: Dict[str, str]
+ ) -> Optional[RunTracker]:
+ """Start run using the Flower Simulation Engine."""
+ try:
+
+ # Install FAB to flwr dir
+ fab_path = install_from_fab(fab_file, None, True)
+
+ # Install FAB Python package
+ subprocess.check_call(
+ [sys.executable, "-m", "pip", "install", "--no-deps", str(fab_path)],
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ )
+
+ # Load and validate config
+ config, errors, warnings = load_and_validate(fab_path / "pyproject.toml")
+ if errors:
+ raise ValueError(errors)
+
+ if warnings:
+ log(WARN, warnings)
+
+ if config is None:
+ raise ValueError(
+ "Config extracted from FAB's pyproject.toml is not valid"
+ )
+
+ # In Simulation there is no SuperLink, still we create a run_id
+ run_id = generate_rand_int_from_bytes(RUN_ID_NUM_BYTES)
+ log(INFO, "Created run %s", str(run_id))
+
+            # Prepare command
+ command = [
+ "flower-simulation",
+ "--app",
+ f"{str(fab_path)}",
+ "--num-supernodes",
+ f"{self.num_supernodes}",
+ "--run-id",
+ str(run_id),
+ ]
+
+ if override_config:
+ command.extend(["--run-config", f"{override_config}"])
+
+ # Start Simulation
+ proc = subprocess.run( # pylint: disable=consider-using-with
+ command,
+ check=True,
+ text=True,
+ )
+
+ log(INFO, "Started run %s", str(run_id))
+
+ return RunTracker(
+ run_id=run_id,
+ proc=proc, # type:ignore
+ )
+
+ # pylint: disable-next=broad-except
+ except Exception as e:
+ log(ERROR, "Could not start run: %s", str(e))
+ return None
+
+
+executor = SimulationEngine()