Commit 93beadf

Merge branch 'main' into update-poetry

danieljanes authored Jul 14, 2024
2 parents: b4ac78a + 79eb86c
Showing 213 changed files with 3,280 additions and 1,361 deletions.
10 changes: 5 additions & 5 deletions .github/workflows/_docker-build.yml
@@ -81,7 +81,7 @@ jobs:
- name: Set up QEMU
if: matrix.platform.qemu != ''
- uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3.0.0
+ uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee # v3.1.0
with:
platforms: ${{ matrix.platform.qemu }}

@@ -92,7 +92,7 @@ jobs:
images: ${{ inputs.namespace-repository }}

- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3.3.0
+ uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4 # v3.4.0

- name: Login to Docker Hub
uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3.2.0
@@ -122,7 +122,7 @@ jobs:
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
- uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
+ uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4.3.4
with:
name: digests-${{ steps.build-id.outputs.id }}-${{ matrix.platform.name }}
path: /tmp/digests/*
@@ -138,7 +138,7 @@
metadata: ${{ steps.meta.outputs.json }}
steps:
- name: Download digests
- uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
+ uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
pattern: digests-${{ needs.build.outputs.build-id }}-*
path: /tmp/digests
@@ -152,7 +152,7 @@ jobs:
tags: ${{ inputs.tags }}

- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3.3.0
+ uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4 # v3.4.0

- name: Login to Docker Hub
uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3.2.0
75 changes: 0 additions & 75 deletions .github/workflows/docker-images.yml

This file was deleted.

31 changes: 13 additions & 18 deletions .github/workflows/e2e.yml
@@ -66,44 +66,39 @@ jobs:

- directory: bare-client-auth

- - directory: jax
+ - directory: framework-jax

- - directory: pytorch
+ - directory: framework-pytorch
dataset: |
from torchvision.datasets import CIFAR10
CIFAR10('./data', download=True)
- - directory: tensorflow
+ - directory: framework-tensorflow
dataset: |
import tensorflow as tf
tf.keras.datasets.cifar10.load_data()
- - directory: tabnet
-   dataset: |
-     import tensorflow_datasets as tfds
-     tfds.load(name='iris', split=tfds.Split.TRAIN)
- - directory: opacus
+ - directory: framework-opacus
dataset: |
from torchvision.datasets import CIFAR10
CIFAR10('./data', download=True)
- - directory: pytorch-lightning
+ - directory: framework-pytorch-lightning
dataset: |
from torchvision.datasets import MNIST
MNIST('./data', download=True)
- - directory: scikit-learn
+ - directory: framework-scikit-learn
dataset: |
import openml
openml.datasets.get_dataset(554)
- - directory: fastai
+ - directory: framework-fastai
dataset: |
from fastai.vision.all import untar_data, URLs
untar_data(URLs.MNIST)
- - directory: pandas
+ - directory: framework-pandas
dataset: |
from pathlib import Path
from sklearn.datasets import load_iris
@@ -145,7 +140,7 @@ jobs:
run: python -c "${{ matrix.dataset }}"
- name: Run edge client test
if: ${{ matrix.directory != 'bare-client-auth' }}
- run: ./../test.sh "${{ matrix.directory }}"
+ run: ./../test_legacy.sh "${{ matrix.directory }}"
- name: Run virtual client test
if: ${{ matrix.directory != 'bare-client-auth' }}
run: python simulation.py
@@ -154,16 +149,16 @@
run: python simulation_next.py
- name: Run driver test
if: ${{ matrix.directory != 'bare-client-auth' }}
- run: ./../test_driver.sh "${{ matrix.directory }}"
+ run: ./../test_superlink.sh "${{ matrix.directory }}"
- name: Run driver test with REST
if: ${{ matrix.directory == 'bare' }}
- run: ./../test_driver.sh bare rest
+ run: ./../test_superlink.sh bare rest
- name: Run driver test with SQLite database
if: ${{ matrix.directory == 'bare' }}
- run: ./../test_driver.sh bare sqlite
+ run: ./../test_superlink.sh bare sqlite
- name: Run driver test with client authentication
if: ${{ matrix.directory == 'bare-client-auth' }}
- run: ./../test_driver.sh bare client-auth
+ run: ./../test_superlink.sh bare client-auth
- name: Run reconnection test with SQLite database
if: ${{ matrix.directory == 'bare' }}
run: ./../test_reconnection.sh sqlite
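Each `dataset` snippet in the matrix above is executed by the `run: python -c "${{ matrix.dataset }}"` step to pre-download data before the tests run. As a sketch of what that expands to, the `framework-pytorch` entry is equivalent to this standalone script:

```python
# Equivalent of `python -c "${{ matrix.dataset }}"` for the framework-pytorch
# matrix entry: pre-download CIFAR-10 into ./data before the e2e tests run.
from torchvision.datasets import CIFAR10

CIFAR10('./data', download=True)
```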
67 changes: 67 additions & 0 deletions .github/workflows/framework-release.yml
@@ -43,3 +43,70 @@ jobs:
curl $tar_url --output dist/$tar_name
python -m poetry publish -u __token__ -p ${{ secrets.PYPI_TOKEN_RELEASE_FLWR }}
+  parameters:
+    if: ${{ github.repository == 'adap/flower' }}
+    name: Collect docker build parameters
+    runs-on: ubuntu-22.04
+    timeout-minutes: 10
+    needs: publish
+    outputs:
+      pip-version: ${{ steps.versions.outputs.pip-version }}
+      setuptools-version: ${{ steps.versions.outputs.setuptools-version }}
+      matrix: ${{ steps.matrix.outputs.matrix }}
+    steps:
+      - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+
+      - uses: ./.github/actions/bootstrap
+        id: bootstrap
+
+      - id: versions
+        run: |
+          echo "pip-version=${{ steps.bootstrap.outputs.pip-version }}" >> "$GITHUB_OUTPUT"
+          echo "setuptools-version=${{ steps.bootstrap.outputs.setuptools-version }}" >> "$GITHUB_OUTPUT"
+      - id: matrix
+        run: |
+          FLWR_VERSION=$(poetry version -s)
+          python dev/build-docker-image-matrix.py --flwr-version "${FLWR_VERSION}" > matrix.json
+          echo "matrix=$(cat matrix.json)" >> $GITHUB_OUTPUT
+  build-base-images:
+    if: ${{ github.repository == 'adap/flower' }}
+    name: Build base images
+    uses: ./.github/workflows/_docker-build.yml
+    needs: parameters
+    strategy:
+      fail-fast: false
+      matrix: ${{ fromJson(needs.parameters.outputs.matrix).base }}
+    with:
+      namespace-repository: ${{ matrix.images.namespace_repository }}
+      file-dir: ${{ matrix.images.file_dir }}
+      build-args: |
+        PYTHON_VERSION=${{ matrix.images.python_version }}
+        PIP_VERSION=${{ needs.parameters.outputs.pip-version }}
+        SETUPTOOLS_VERSION=${{ needs.parameters.outputs.setuptools-version }}
+        DISTRO=${{ matrix.images.distro.name }}
+        DISTRO_VERSION=${{ matrix.images.distro.version }}
+        FLWR_VERSION=${{ matrix.images.flwr_version }}
+      tags: ${{ matrix.images.tag }}
+    secrets:
+      dockerhub-user: ${{ secrets.DOCKERHUB_USERNAME }}
+      dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+
+  build-binary-images:
+    if: ${{ github.repository == 'adap/flower' }}
+    name: Build binary images
+    uses: ./.github/workflows/_docker-build.yml
+    needs: [parameters, build-base-images]
+    strategy:
+      fail-fast: false
+      matrix: ${{ fromJson(needs.parameters.outputs.matrix).binary }}
+    with:
+      namespace-repository: ${{ matrix.images.namespace_repository }}
+      file-dir: ${{ matrix.images.file_dir }}
+      build-args: BASE_IMAGE=${{ matrix.images.base_image }}
+      tags: ${{ matrix.images.tags }}
+    secrets:
+      dockerhub-user: ${{ secrets.DOCKERHUB_USERNAME }}
+      dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
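The script `dev/build-docker-image-matrix.py` is not part of this diff, so the exact JSON it emits is unknown. As a hypothetical sketch inferred purely from the `matrix.images.*` fields the two jobs above consume, its output plausibly looks something like this (all concrete values are illustrative):

```python
# Hypothetical shape of matrix.json, inferred only from the matrix.images.*
# references in the workflow above -- the real script's output is not shown here.
matrix = {
    "base": {
        "images": [
            {
                "namespace_repository": "flwr/base",
                "file_dir": "src/docker/base/ubuntu",
                "python_version": "3.11",
                "distro": {"name": "ubuntu", "version": "22.04"},
                "flwr_version": "1.9.0",
                "tag": "1.9.0-py3.11-ubuntu22.04",
            }
        ]
    },
    "binary": {
        "images": [
            {
                "namespace_repository": "flwr/superlink",
                "file_dir": "src/docker/superlink",
                "base_image": "1.9.0-py3.11-ubuntu22.04",
                "tags": ["1.9.0", "latest"],
            }
        ]
    },
}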
3 changes: 2 additions & 1 deletion .github/workflows/release-nightly.yml
@@ -69,7 +69,8 @@ jobs:
images: [
{ repository: "flwr/superlink", file_dir: "src/docker/superlink" },
{ repository: "flwr/supernode", file_dir: "src/docker/supernode" },
{ repository: "flwr/serverapp", file_dir: "src/docker/serverapp" }
{ repository: "flwr/serverapp", file_dir: "src/docker/serverapp" },
{ repository: "flwr/superexec", file_dir: "src/docker/superexec" }
]
with:
namespace-repository: ${{ matrix.images.repository }}
11 changes: 6 additions & 5 deletions datasets/README.md
@@ -42,11 +42,12 @@ Create **custom partitioning schemes** or choose from the **implemented [partiti
* IID partitioning `IidPartitioner(num_partitions)`
* Dirichlet partitioning `DirichletPartitioner(num_partitions, partition_by, alpha)`
* InnerDirichlet partitioning `InnerDirichletPartitioner(partition_sizes, partition_by, alpha)`
- * Natural ID partitioner `NaturalIdPartitioner(partition_by)`
- * Size partitioner (the abstract base class for the partitioners dictating the division based the number of samples) `SizePartitioner`
- * Linear partitioner `LinearPartitioner(num_partitions)`
- * Square partitioner `SquarePartitioner(num_partitions)`
- * Exponential partitioner `ExponentialPartitioner(num_partitions)`
+ * Pathological partitioning `PathologicalPartitioner(num_partitions, partition_by, num_classes_per_partition, class_assignment_mode)`
+ * Natural ID partitioning `NaturalIdPartitioner(partition_by)`
+ * Size-based partitioning (the abstract base class for the partitioners dictating the division based on the number of samples) `SizePartitioner`
+ * Linear partitioning `LinearPartitioner(num_partitions)`
+ * Square partitioning `SquarePartitioner(num_partitions)`
+ * Exponential partitioning `ExponentialPartitioner(num_partitions)`
* more to come in the future releases (contributions are welcome).
<p align="center">
<img src="./doc/source/_static/readme/comparison_of_partitioning_schemes.png" alt="Comparison of partitioning schemes."/>
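To illustrate how the `PathologicalPartitioner` added in this commit slots into the list above, here is a minimal sketch. The `FederatedDataset` pattern follows the library's usual usage; the `"deterministic"` value for `class_assignment_mode` is an assumption, as the valid mode names are not shown in this diff:

```python
# A minimal sketch of using the PathologicalPartitioner added in this commit.
# The class_assignment_mode value "deterministic" is assumed, not verified here.
from flwr_datasets import FederatedDataset
from flwr_datasets.partitioner import PathologicalPartitioner

partitioner = PathologicalPartitioner(
    num_partitions=10,
    partition_by="label",                   # column holding the class labels
    num_classes_per_partition=2,            # each client sees only 2 classes
    class_assignment_mode="deterministic",  # assumed mode name
)
fds = FederatedDataset(dataset="mnist", partitioners={"train": partitioner})
partition = fds.load_partition(0)  # the dataset shard for client 0
```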
2 changes: 1 addition & 1 deletion datasets/doc/source/conf.py
@@ -162,7 +162,7 @@ def find_test_modules(package_path):
.. raw:: html
<br/>
<a href="https://colab.research.google.com/github/adap/flower/blob/main/doc/source/{{ env.doc2path(env.docname, base=None) }}">
<a href="https://colab.research.google.com/github/adap/flower/blob/main/datasets/doc/source/{{ env.doc2path(env.docname, base=None) }}">
<img alt="Open in Colab" src="https://colab.research.google.com/assets/colab-badge.svg"/>
</a>
"""
1 change: 1 addition & 0 deletions datasets/doc/source/index.rst
@@ -94,6 +94,7 @@ Here are a few of the ``Partitioner`` s that are available: (for a full list see
* IID partitioning ``IidPartitioner(num_partitions)``
* Dirichlet partitioning ``DirichletPartitioner(num_partitions, partition_by, alpha)``
* InnerDirichlet partitioning ``InnerDirichletPartitioner(partition_sizes, partition_by, alpha)``
+ * Pathological partitioner ``PathologicalPartitioner(num_partitions, partition_by, num_classes_per_partition, class_assignment_mode)``
* Natural ID partitioner ``NaturalIdPartitioner(partition_by)``
* Size partitioner (the abstract base class for the partitioners dictating the division based on the number of samples) ``SizePartitioner``
* Linear partitioner ``LinearPartitioner(num_partitions)``
2 changes: 1 addition & 1 deletion datasets/flwr_datasets/mock_utils_test.py
@@ -190,7 +190,7 @@ def _generate_random_image_column(
pil_imgs = []
for np_image in np_images:
# Convert the NumPy array to a PIL image
- pil_img_beg = Image.fromarray(np_image)  # type: ignore
+ pil_img_beg = Image.fromarray(np_image)

# Save the image to an in-memory bytes buffer
in_memory_file = io.BytesIO()
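The dropped `# type: ignore` suggests the call now type-checks cleanly against current Pillow stubs. A self-contained sketch of the same array-to-buffer round-trip the helper performs (the array size and values here are illustrative, not taken from the test):

```python
import io

import numpy as np
from PIL import Image

# Illustrative stand-in for the helper's input: a random uint8 RGB image.
np_image = np.random.randint(0, 256, size=(32, 32, 3), dtype=np.uint8)

# Convert the NumPy array to a PIL image; this now passes type checking
# without a "# type: ignore" comment.
pil_img = Image.fromarray(np_image)

# Save the image to an in-memory bytes buffer, as the helper above does.
in_memory_file = io.BytesIO()
pil_img.save(in_memory_file, format="PNG")
```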
2 changes: 2 additions & 0 deletions datasets/flwr_datasets/partitioner/__init__.py
@@ -22,6 +22,7 @@
from .linear_partitioner import LinearPartitioner
from .natural_id_partitioner import NaturalIdPartitioner
from .partitioner import Partitioner
+ from .pathological_partitioner import PathologicalPartitioner
from .shard_partitioner import ShardPartitioner
from .size_partitioner import SizePartitioner
from .square_partitioner import SquarePartitioner
@@ -34,6 +35,7 @@
"LinearPartitioner",
"NaturalIdPartitioner",
"Partitioner",
"PathologicalPartitioner",
"ShardPartitioner",
"SizePartitioner",
"SquarePartitioner",
(Diff truncated: the remaining changed files are not shown.)
