diff --git a/dev/build-docs.sh b/dev/build-docs.sh index f8d4f91508de..f4bf958b0ebf 100755 --- a/dev/build-docs.sh +++ b/dev/build-docs.sh @@ -8,9 +8,7 @@ cd $ROOT ./dev/build-baseline-docs.sh cd $ROOT -./dev/update-examples.sh -cd examples/doc -make docs +python dev/build-example-docs.py cd $ROOT ./datasets/dev/build-flwr-datasets-docs.sh diff --git a/dev/build-example-docs.py b/dev/build-example-docs.py new file mode 100644 index 000000000000..204380f312ac --- /dev/null +++ b/dev/build-example-docs.py @@ -0,0 +1,236 @@ +# Copyright 2024 Flower Labs GmbH. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Build the Flower Example docs.""" + +import os +import shutil +import re +import subprocess +from pathlib import Path + +ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) +INDEX = os.path.join(ROOT, "examples", "doc", "source", "index.rst") + +initial_text = """ +Flower Examples Documentation +----------------------------- + +Welcome to Flower Examples' documentation. `Flower `_ is +a friendly federated learning framework. + +Join the Flower Community +------------------------- + +The Flower Community is growing quickly - we're a friendly group of researchers, +engineers, students, professionals, academics, and other enthusiasts. + +.. button-link:: https://flower.ai/join-slack + :color: primary + :shadow: + + Join us on Slack + +Quickstart Examples +------------------- + +Flower Quickstart Examples are a collection of demo projects that show how you +can use Flower in combination with other existing frameworks or technologies. + +""" + +table_headers = ( + "\n.. 
list-table::\n :widths: 50 15 15 15\n " + ":header-rows: 1\n\n * - Title\n - Framework\n - Dataset\n - Tags\n\n" +) + +categories = { + "quickstart": {"table": table_headers, "list": ""}, + "advanced": {"table": table_headers, "list": ""}, + "other": {"table": table_headers, "list": ""}, +} + + +def _convert_to_link(search_result): + if "|" in search_result: + if "," in search_result: + result = "" + for part in search_result.split(","): + result += f"{_convert_to_link(part)}, " + return result[:-2] + + name, url = search_result.replace('"', "").split("|") + return f"`{name.strip()} <{url.strip()}>`_" + + return search_result + + +def _read_metadata(example): + with open(os.path.join(example, "README.md")) as f: + content = f.read() + + metadata_match = re.search(r"^---(.*?)^---", content, re.DOTALL | re.MULTILINE) + if not metadata_match: + raise ValueError("Metadata block not found") + metadata = metadata_match.group(1) + + title_match = re.search(r"^title:\s*(.+)$", metadata, re.MULTILINE) + if not title_match: + raise ValueError("Title not found in metadata") + title = title_match.group(1).strip() + + labels_match = re.search(r"^labels:\s*\[(.+?)\]$", metadata, re.MULTILINE) + if not labels_match: + raise ValueError("Labels not found in metadata") + labels = labels_match.group(1).strip() + + dataset_match = re.search( + r"^dataset:\s*\[(.*?)\]$", metadata, re.DOTALL | re.MULTILINE + ) + if not dataset_match: + raise ValueError("Dataset not found in metadata") + dataset = dataset_match.group(1).strip() + + framework_match = re.search( + r"^framework:\s*\[(.*?|)\]$", metadata, re.DOTALL | re.MULTILINE + ) + if not framework_match: + raise ValueError("Framework not found in metadata") + framework = framework_match.group(1).strip() + + dataset = _convert_to_link(re.sub(r"\s+", " ", dataset).strip()) + framework = _convert_to_link(re.sub(r"\s+", " ", framework).strip()) + return title, labels, dataset, framework + + +def _add_table_entry(example, label, table_var): + title, labels, dataset, framework = _read_metadata(example) + example_name = Path(example).stem + table_entry = ( + f" * - `{title} <{example_name}.html>`_ \n " + f"- {framework} \n - {dataset} \n - {labels}\n\n" + ) + if label in labels: + categories[table_var]["table"] += table_entry + categories[table_var]["list"] += f" {example_name}\n" + return True + return False + + +def _copy_markdown_files(example): + for file in os.listdir(example): + if file.endswith(".md"): + src = os.path.join(example, file) + dest = os.path.join( + ROOT, "examples", "doc", "source", os.path.basename(example) + ".md" + ) + shutil.copyfile(src, dest) + + +def _add_gh_button(example): + gh_text = f'[View on GitHub](https://github.com/adap/flower/blob/main/examples/{example})' + readme_file = os.path.join(ROOT, "examples", "doc", "source", example + ".md") + with open(readme_file, "r+") as f: + content = f.read() + if gh_text not in content: + content = re.sub( + r"(^# .+$)", rf"\1\n\n{gh_text}", content, count=1, flags=re.MULTILINE + ) + f.seek(0) + f.write(content) + f.truncate() + + +def _copy_images(example): + static_dir = os.path.join(example, "_static") + dest_dir = os.path.join(ROOT, "examples", "doc", "source", "_static") + if os.path.isdir(static_dir): + for file in os.listdir(static_dir): + if file.endswith((".jpg", ".png", ".jpeg")): + shutil.copyfile( + os.path.join(static_dir, file), os.path.join(dest_dir, file) + ) + + +def _add_all_entries(index_file): + examples_dir = os.path.join(ROOT, "examples") + for example in 
sorted(os.listdir(examples_dir)): + example_path = os.path.join(examples_dir, example) + if os.path.isdir(example_path) and example != "doc": + _copy_markdown_files(example_path) + _add_gh_button(example) + _copy_images(example) + + +def _main(): + if os.path.exists(INDEX): + os.remove(INDEX) + + with open(INDEX, "w") as index_file: + index_file.write(initial_text) + + examples_dir = os.path.join(ROOT, "examples") + for example in sorted(os.listdir(examples_dir)): + example_path = os.path.join(examples_dir, example) + if os.path.isdir(example_path) and example != "doc": + _copy_markdown_files(example_path) + _add_gh_button(example) + _copy_images(example_path) + if not _add_table_entry(example_path, "quickstart", "quickstart"): + if not _add_table_entry(example_path, "comprehensive", "comprehensive"): + if not _add_table_entry(example_path, "advanced", "advanced"): + _add_table_entry(example_path, "", "other") + + with open(INDEX, "a") as index_file: + index_file.write(categories["quickstart"]["table"]) + + index_file.write("\nAdvanced Examples\n-----------------\n") + index_file.write( + "Advanced Examples are mostly for users that are both familiar with " + "Federated Learning but also somewhat familiar with Flower's main " + "features.\n" + ) + index_file.write(categories["advanced"]["table"]) + + index_file.write("\nOther Examples\n--------------\n") + index_file.write( + "Flower Examples are a collection of example projects written with " + "Flower that explore different domains and features. You can check " + "which examples already exist and/or contribute your own example.\n" + ) + index_file.write(categories["other"]["table"]) + + _add_all_entries(index_file) + + index_file.write( + "\n.. toctree::\n :maxdepth: 1\n :caption: Quickstart\n :hidden:\n\n" + ) + index_file.write(categories["quickstart"]["list"]) + + index_file.write( + "\n.. toctree::\n :maxdepth: 1\n :caption: Advanced\n :hidden:\n\n" + ) + index_file.write(categories["advanced"]["list"]) + + index_file.write( + "\n.. toctree::\n :maxdepth: 1\n :caption: Others\n :hidden:\n\n" + ) + index_file.write(categories["other"]["list"]) + + index_file.write("\n") + + +if __name__ == "__main__": + _main() + subprocess.call(f"cd {ROOT}/examples/doc && make html", shell=True) diff --git a/dev/update-examples.sh b/dev/update-examples.sh deleted file mode 100755 index 1076b4621984..000000000000 --- a/dev/update-examples.sh +++ /dev/null @@ -1,91 +0,0 @@ -#!/bin/bash -set -e -cd "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"/../ - -ROOT=`pwd` -INDEX=$ROOT/examples/doc/source/index.md -INSERT_LINE=6 - -copy_markdown_files () { - for file in $1/*.md; do - # Copy the README into the source of the Example docs as the name of the example - if [[ $(basename "$file") = "README.md" ]]; then - cp $file $ROOT/examples/doc/source/$1.md 2>&1 >/dev/null - else - # If the example contains other markdown files, copy them to the source of the Example docs - cp $file $ROOT/examples/doc/source/$(basename "$file") 2>&1 >/dev/null - fi - done -} - -add_gh_button () { - gh_text="[\"View](https://github.com/adap/flower/blob/main/examples/$1)" - readme_file="$ROOT/examples/doc/source/$1.md" - - if ! 
grep -Fq "$gh_text" "$readme_file"; then - awk -v text="$gh_text" ' - /^# / && !found { - print $0 "\n" text; - found=1; - next; - } - { print } - ' "$readme_file" > tmpfile && mv tmpfile "$readme_file" - fi -} - -copy_images () { - if [ -d "$1/_static" ]; then - cp $1/_static/**.{jpg,png,jpeg} $ROOT/examples/doc/source/_static/ 2>/dev/null || true - fi -} - -add_to_index () { - (echo $INSERT_LINE; echo a; echo $1; echo .; echo wq) | ed $INDEX 2>&1 >/dev/null -} - -add_single_entry () { - # Copy markdown files to correct folder - copy_markdown_files $1 - - # Add button linked to GitHub - add_gh_button $1 - - # Copy all images of the _static folder into the examples - # docs static folder - copy_images $1 - - # Insert the name of the example into the index file - add_to_index $1 -} - -add_all_entries () { - cd $ROOT/examples - # Iterate through each folder in examples/ - for d in $(printf '%s\n' */ | sort -V); do - # Add entry based on the name of the folder - example=${d%/} - - if [[ $example != doc ]]; then - add_single_entry $example - fi - done -} - -# Clean up before starting -rm -f $ROOT/examples/doc/source/*.md -rm -f $INDEX - -# Create empty index file -touch $INDEX - -echo "# Flower Examples Documentation" >> $INDEX -echo "" >> $INDEX -echo "\`\`\`{toctree}" >> $INDEX -echo "---" >> $INDEX -echo "maxdepth: 1" >> $INDEX -echo "---" >> $INDEX - -add_all_entries - -echo "\`\`\`" >> $INDEX diff --git a/examples/advanced-pytorch/README.md b/examples/advanced-pytorch/README.md index c1ba85b95879..bce80ab2f53d 100644 --- a/examples/advanced-pytorch/README.md +++ b/examples/advanced-pytorch/README.md @@ -1,3 +1,10 @@ +--- +title: Advanced Flower Example using PyTorch +labels: [advanced, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [torch | https://pytorch.org/, torchvision | https://pytorch.org/vision/stable/index.html] +--- + # Advanced Flower Example (PyTorch) This example demonstrates an advanced federated learning setup using Flower with PyTorch. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) and it differs from the quickstart example in the following ways: diff --git a/examples/advanced-tensorflow/README.md b/examples/advanced-tensorflow/README.md index 94707b5cbc98..2b44baded43e 100644 --- a/examples/advanced-tensorflow/README.md +++ b/examples/advanced-tensorflow/README.md @@ -1,3 +1,10 @@ +--- +title: Advanced Flower Example using TensorFlow/Keras +labels: [advanced, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [tensorflow | https://www.tensorflow.org/, Keras | https://keras.io/] +--- + # Advanced Flower Example (TensorFlow/Keras) This example demonstrates an advanced federated learning setup using Flower with TensorFlow/Keras. 
This example uses [Flower Datasets](https://flower.ai/docs/datasets/) and it differs from the quickstart example in the following ways: diff --git a/examples/android-kotlin/README.md b/examples/android-kotlin/README.md index 2d0f704fdc0e..28ca5565b64c 100644 --- a/examples/android-kotlin/README.md +++ b/examples/android-kotlin/README.md @@ -1,3 +1,11 @@ +--- +title: Flower Android Example using Kotlin and TF Lite +labels: [basic, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [Android | https://www.android.com/, Kotlin | https://kotlinlang.org/, + TensorFlowLite | https://www.tensorflow.org/lite] +--- + # Flower Android Client Example with Kotlin and TensorFlow Lite 2022 This example is similar to the Flower Android Example in Java: diff --git a/examples/android/README.md b/examples/android/README.md index f9f2bb93b8dc..78c3d3a2c243 100644 --- a/examples/android/README.md +++ b/examples/android/README.md @@ -1,3 +1,11 @@ +--- +title: Flower Android Example using Java and TF Lite +labels: [basic, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [Android | https://www.android.com/, Java | https://www.java.com/, TensorFlowLite + | https://www.tensorflow.org/lite] +--- + # Flower Android Example (TensorFlowLite) This example demonstrates a federated learning setup with Android clients in a background thread. The training on Android is done on a CIFAR10 dataset using TensorFlow Lite. The setup is as follows: diff --git a/examples/app-pytorch/README.md b/examples/app-pytorch/README.md index 14de3c7d632e..cb41b371ed22 100644 --- a/examples/app-pytorch/README.md +++ b/examples/app-pytorch/README.md @@ -1,3 +1,10 @@ +--- +title: Example Flower App using PyTorch +labels: [basic, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [torch | https://pytorch.org/, torchvision | https://pytorch.org/vision/stable/index.html] +--- + # Flower App (PyTorch) 🧪 > 🧪 = This example covers experimental features that might change in future versions of Flower diff --git a/examples/app-secure-aggregation/README.md b/examples/app-secure-aggregation/README.md index d1ea7bdc893f..b21674b052eb 100644 --- a/examples/app-secure-aggregation/README.md +++ b/examples/app-secure-aggregation/README.md @@ -1,3 +1,10 @@ +--- +title: Example Flower App with Secure Aggregation +labels: [basic, vision, fds] +dataset: [] +framework: [numpy | https://numpy.org/] +--- + # Secure aggregation with Flower (the SecAgg+ protocol) 🧪 > 🧪 = This example covers experimental features that might change in future versions of Flower diff --git a/examples/custom-metrics/README.md b/examples/custom-metrics/README.md index 317fb6336106..e173d7d32703 100644 --- a/examples/custom-metrics/README.md +++ b/examples/custom-metrics/README.md @@ -1,3 +1,10 @@ +--- +title: Example Flower App with Custom Metrics +labels: [basic, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [tensorflow | https://www.tensorflow.org/] +--- + # Flower Example using Custom Metrics This simple example demonstrates how to calculate custom metrics over multiple clients beyond the traditional ones available in the ML frameworks. In this case, it demonstrates the use of ready-available `scikit-learn` metrics: accuracy, recall, precision, and f1-score. 
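Each README in this change now opens with a metadata block delimited by `---` markers, and `dev/build-example-docs.py` locates that block and pulls out the `title`, `labels`, `dataset`, and `framework` fields with the regular expressions shown in `_read_metadata`. Below is a minimal, self-contained sketch of that extraction step, using the custom-metrics front matter above as sample input (the variable names are illustrative):

import re

# Illustrative metadata block in the same format the READMEs above now use.
sample = """---
title: Example Flower App with Custom Metrics
labels: [basic, vision, fds]
dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10]
framework: [tensorflow | https://www.tensorflow.org/]
---

# Flower Example using Custom Metrics
"""

# Same patterns as _read_metadata in dev/build-example-docs.py.
metadata = re.search(r"^---(.*?)^---", sample, re.DOTALL | re.MULTILINE).group(1)
title = re.search(r"^title:\s*(.+)$", metadata, re.MULTILINE).group(1).strip()
labels = re.search(r"^labels:\s*\[(.+?)\]$", metadata, re.MULTILINE).group(1).strip()
dataset = re.search(r"^dataset:\s*\[(.*?)\]$", metadata, re.DOTALL | re.MULTILINE).group(1).strip()
framework = re.search(r"^framework:\s*\[(.*?|)\]$", metadata, re.DOTALL | re.MULTILINE).group(1).strip()

print(title)      # Example Flower App with Custom Metrics
print(labels)     # basic, vision, fds
print(dataset)    # CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10
print(framework)  # tensorflow | https://www.tensorflow.org/

A README missing any of these fields makes the build fail with a ValueError, which is what enforces the metadata convention across all examples.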
diff --git a/examples/custom-mods/README.md b/examples/custom-mods/README.md index 6b03abcfbfe0..7127a27975c5 100644 --- a/examples/custom-mods/README.md +++ b/examples/custom-mods/README.md @@ -1,3 +1,10 @@ +--- +title: Example Flower App with Custom Mods +labels: [mods, monitoring, app] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [wandb | https://wandb.ai/home, tensorboard | https://www.tensorflow.org/tensorboard] +--- + # Using custom mods 🧪 > 🧪 = This example covers experimental features that might change in future versions of Flower diff --git a/examples/doc/source/.gitignore b/examples/doc/source/.gitignore index dd449725e188..73ee14e96f68 100644 --- a/examples/doc/source/.gitignore +++ b/examples/doc/source/.gitignore @@ -1 +1,2 @@ *.md +index.rst diff --git a/examples/embedded-devices/README.md b/examples/embedded-devices/README.md index f1c5931b823a..e98570e95c69 100644 --- a/examples/embedded-devices/README.md +++ b/examples/embedded-devices/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Embedded Devices Example +labels: [basic, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10, MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [torch | https://pytorch.org/, tensorflow | https://www.tensorflow.org/] +--- + # Federated Learning on Embedded Devices with Flower This example will show you how Flower makes it very easy to run Federated Learning workloads on edge devices. Here we'll be showing how to use NVIDIA Jetson devices and Raspberry Pi as Flower clients. You can run this example using either PyTorch or Tensorflow. The FL workload (i.e. model, dataset and training loop) is mostly borrowed from the [quickstart-pytorch](https://github.com/adap/flower/tree/main/examples/simulation-pytorch) and [quickstart-tensorflow](https://github.com/adap/flower/tree/main/examples/quickstart-tensorflow) examples. @@ -65,7 +72,7 @@ If you are working on this tutorial on your laptop or desktop, it can host the F - Install `pip`. In the terminal type: `sudo apt install python3-pip -y` - Now clone this directory. You just need to execute the `git clone` command shown at the top of this README.md on your device. - - Install Flower and your ML framework: We have prepared some convenient installation scripts that will install everything you need. You are free to install other versions of these ML frameworks to suit your needs. + - Install Flower and your ML framework of choice: We have prepared some convenient installation scripts that will install everything you need. You are free to install other versions of these ML frameworks to suit your needs. - If you want your clients to use PyTorch: `pip3 install -r requirements_pytorch.txt` - If you want your clients to use TensorFlow: `pip3 install -r requirements_tf.txt` @@ -180,7 +187,7 @@ If you are working on this tutorial on your laptop or desktop, it can host the F ## Running Embedded FL with Flower -For this demo, we'll be using [CIFAR-10](https://www.cs.toronto.edu/~kriz/cifar.html), a popular dataset for image classification comprised of 10 classes (e.g. car, bird, airplane) and a total of 60K `32x32` RGB images. The training set contains 50K images. The server will automatically download the dataset should it not be found in `./data`. The clients do the same. The dataset is by default split into 50 partitions (each to be assigned to a different client). This can be controlled with the `NUM_CLIENTS` global variable in the client scripts. 
In this example, each device will play the role of a specific user (specified via `--cid` -- we'll show this later) and therefore only do local training with that portion of the data. For CIFAR-10, clients will be training a MobileNet-v2/3 model. +For this demo, we'll be using [CIFAR-10](https://huggingface.co/datasets/uoft-cs/cifar10), a popular dataset for image classification comprised of 10 classes (e.g. car, bird, airplane) and a total of 60K `32x32` RGB images. The training set contains 50K images. The server will automatically download the dataset should it not be found in `./data`. The clients do the same. The dataset is by default split into 50 partitions (each to be assigned to a different client). This can be controlled with the `NUM_CLIENTS` global variable in the client scripts. In this example, each device will play the role of a specific user (specified via `--cid` -- we'll show this later) and therefore only do local training with that portion of the data. For CIFAR-10, clients will be training a MobileNet-v2/3 model. You can run this example using MNIST and a smaller CNN model by passing flag `--mnist`. This is useful if you are using devices with a very limited amount of memory (e.g. RaspberryPi Zero) or if you want the training taking place on the embedded devices to be much faster (specially if these are CPU-only). The partitioning of the dataset is done in the same way. diff --git a/examples/federated-kaplan-meier-fitter/README.md b/examples/federated-kaplan-meier-fitter/README.md index 1569467d6f82..5d0768d42ea6 100644 --- a/examples/federated-kaplan-meier-fitter/README.md +++ b/examples/federated-kaplan-meier-fitter/README.md @@ -1,3 +1,11 @@ +--- +title: Flower Example using KaplanMeierFitter +labels: [estimator, medical] +dataset: [Waltons | + https://lifelines.readthedocs.io/en/latest/lifelines.datasets.html#lifelines.datasets.load_waltons] +framework: [lifelines | https://lifelines.readthedocs.io/en/latest/index.html] +--- + # Flower Example using KaplanMeierFitter This is an introductory example on **federated survival analysis** using [Flower](https://flower.ai/) diff --git a/examples/fl-dp-sa/README.md b/examples/fl-dp-sa/README.md index 47eedb70a2b8..7269503af57f 100644 --- a/examples/fl-dp-sa/README.md +++ b/examples/fl-dp-sa/README.md @@ -1,4 +1,11 @@ -# fl_dp_sa +--- +title: Example of Flower App with DP and SA +labels: [basic, vision, fds] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [torch | https://pytorch.org/, torchvision | https://pytorch.org/vision/stable/index.html] +--- + +# Example of Flower App with DP and SA This is a simple example that utilizes central differential privacy with client-side fixed clipping and secure aggregation. Note: This example is designed for a small number of rounds and is intended for demonstration purposes. 
diff --git a/examples/fl-tabular/README.md b/examples/fl-tabular/README.md index 58afd1080b70..2cc2a1481d60 100644 --- a/examples/fl-tabular/README.md +++ b/examples/fl-tabular/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Example on Adult Census Income Tabular Dataset +labels: [basic, tabular, fds] +dataset: [Adult Census Income | https://www.kaggle.com/datasets/uciml/adult-census-income/data] +framework: [scikit-learn | https://scikit-learn.org/, torch | https://pytorch.org/] +--- + # Flower Example on Adult Census Income Tabular Dataset This code exemplifies a federated learning setup using the Flower framework on the ["Adult Census Income"](https://huggingface.co/datasets/scikit-learn/adult-census-income) tabular dataset. The "Adult Census Income" dataset contains demographic information such as age, education, occupation, etc., with the target attribute being income level (\<=50K or >50K). The dataset is partitioned into subsets, simulating a federated environment with 5 clients, each holding a distinct portion of the data. Categorical variables are one-hot encoded, and the data is split into training and testing sets. Federated learning is conducted using the FedAvg strategy for 5 rounds. diff --git a/examples/flower-authentication/README.md b/examples/flower-authentication/README.md index 589270e621c9..3709ab4139c1 100644 --- a/examples/flower-authentication/README.md +++ b/examples/flower-authentication/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Example with Authentication +labels: [advanced, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [torch | https://pytorch.org/, torchvision | https://pytorch.org/vision/stable/index.html] +--- + # Flower Authentication with PyTorch 🧪 > 🧪 = This example covers experimental features that might change in future versions of Flower diff --git a/examples/flower-in-30-minutes/README.md b/examples/flower-in-30-minutes/README.md index 5fd9b882413b..d598694c7b56 100644 --- a/examples/flower-in-30-minutes/README.md +++ b/examples/flower-in-30-minutes/README.md @@ -1,3 +1,10 @@ +--- +title: 30-minute tutorial running Flower simulation with PyTorch +labels: [colab, vision, simulation] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [torch | https://pytorch.org/] +--- + # 30-minute tutorial running Flower simulation with PyTorch This README links to a Jupyter notebook that you can either download and run locally or [![open it in Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/adap/flower/blob/main/examples/flower-in-30-minutes/tutorial.ipynb). This is a short 30-minute (or less!) tutorial showcasing the basics of Flower federated learning simulations using PyTorch. diff --git a/examples/flower-simulation-step-by-step-pytorch/README.md b/examples/flower-simulation-step-by-step-pytorch/README.md index beb8dd7f6f95..28f99ab5fa6e 100644 --- a/examples/flower-simulation-step-by-step-pytorch/README.md +++ b/examples/flower-simulation-step-by-step-pytorch/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Simulation Step-by-Step +labels: [basic, vision, simulation] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [torch | https://pytorch.org/] +--- + # Flower Simulation Step-by-Step > Since this tutorial (and its video series) was put together, Flower has been updated a few times. As a result, some of the steps to construct the environment (see below) have been updated. 
Some parts of the code have also been updated. Overall, the content of this tutorial and how things work remains the same as in the video tutorials. diff --git a/examples/flower-via-docker-compose/README.md b/examples/flower-via-docker-compose/README.md index 3ef1ac37bcda..07bb8a2c00a3 100644 --- a/examples/flower-via-docker-compose/README.md +++ b/examples/flower-via-docker-compose/README.md @@ -1,3 +1,10 @@ +--- +title: Leveraging Flower and Docker for Device Heterogeneity Management in FL +labels: [deployment, vision, tutorial] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [Docker | https://www.docker.com/, tensorflow | https://www.tensorflow.org/] +--- + # Leveraging Flower and Docker for Device Heterogeneity Management in Federated Learning

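Before the index is generated, the new script copies every example README into the Example docs source tree and collects any images shipped with it. A short sketch of where the files for this example would land, mirroring `_copy_markdown_files` and `_copy_images` (the repository root path shown is illustrative):

import os

ROOT = "/path/to/flower"  # repository root; the script resolves this from its own location
example_path = os.path.join(ROOT, "examples", "flower-via-docker-compose")

# Each Markdown file of the example is copied to examples/doc/source/<example>.md
readme_dest = os.path.join(
    ROOT, "examples", "doc", "source", os.path.basename(example_path) + ".md"
)
# Images under <example>/_static are copied into the shared docs _static folder
static_dest = os.path.join(ROOT, "examples", "doc", "source", "_static")

print(readme_dest)  # /path/to/flower/examples/doc/source/flower-via-docker-compose.md
print(static_dest)  # /path/to/flower/examples/doc/source/_static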
diff --git a/examples/ios/README.md b/examples/ios/README.md index 4e17e7a674f3..bb36a3f1729f 100644 --- a/examples/ios/README.md +++ b/examples/ios/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example on iOS +labels: [mobile, vision, fds] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [Swift | https://www.swift.org/] +--- + # FLiOS - A Flower SDK for iOS Devices with Example FLiOS is a sample application for testing and benchmarking the Swift implementation of Flower. The default scenario uses the MNIST dataset and the associated digit recognition model. The app includes the Swift package in `./src/swift` and allows extension for other benchmarking scenarios. The app guides the user through the steps of the machine learning process that would be executed in a normal production environment as a background task of the application. The app is therefore aimed at researchers and research institutions to test their hypotheses and perform performance analyses. diff --git a/examples/llm-flowertune/README.md b/examples/llm-flowertune/README.md index 4f98072f8c7f..2507dd1bb546 100644 --- a/examples/llm-flowertune/README.md +++ b/examples/llm-flowertune/README.md @@ -1,3 +1,10 @@ +--- +title: Federated LLM Fine-tuning with Flower +labels: [llm, nlp, LLama2] +dataset: [Alpaca-GPT4 | https://huggingface.co/datasets/vicgalle/alpaca-gpt4] +framework: [PEFT | https://huggingface.co/docs/peft/index, torch | https://pytorch.org/] +--- + # LLM FlowerTune: Federated LLM Fine-tuning with Flower Large language models (LLMs), which have been trained on vast amounts of publicly accessible data, have shown remarkable effectiveness in a wide range of areas. diff --git a/examples/opacus/README.md b/examples/opacus/README.md index 6fc0d2ff49a0..2c586ccabaff 100644 --- a/examples/opacus/README.md +++ b/examples/opacus/README.md @@ -1,3 +1,10 @@ +--- +title: Sample-Level Differential Privacy using Opacus +labels: [dp, security, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [opacus | https://opacus.ai/, torch | https://pytorch.org/] +--- + # Training with Sample-Level Differential Privacy using Opacus Privacy Engine In this example, we demonstrate how to train a model with differential privacy (DP) using Flower. We employ PyTorch and integrate the Opacus Privacy Engine to achieve sample-level differential privacy. This setup ensures robust privacy guarantees during the client training phase. The code is adapted from the [PyTorch Quickstart example](https://github.com/adap/flower/tree/main/examples/quickstart-pytorch). diff --git a/examples/pytorch-federated-variational-autoencoder/README.md b/examples/pytorch-federated-variational-autoencoder/README.md index 00af7a6328b2..539ab6904a1c 100644 --- a/examples/pytorch-federated-variational-autoencoder/README.md +++ b/examples/pytorch-federated-variational-autoencoder/README.md @@ -1,3 +1,10 @@ +--- +title: Federated Variational Autoencoder using Pytorch +labels: [basic, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [torch | https://pytorch.org/, torchvision | https://pytorch.org/vision/stable/index.html] +--- + # Flower Example for Federated Variational Autoencoder using Pytorch This example demonstrates how a variational autoencoder (VAE) can be trained in a federated way using the Flower framework. 
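The `Name | URL` pairs used in the `dataset` and `framework` fields above are rendered as reStructuredText links when the index tables are built. The following sketch mirrors `_convert_to_link` from `dev/build-example-docs.py` (restated here for illustration; the sample inputs are taken from the front matter in this change):

def convert_to_link(search_result: str) -> str:
    # Comma-separated entries are converted one by one.
    if "|" in search_result:
        if "," in search_result:
            return ", ".join(
                convert_to_link(part) for part in search_result.split(",")
            )
        name, url = search_result.replace('"', "").split("|")
        return f"`{name.strip()} <{url.strip()}>`_"
    # Plain values such as "Synthetic" pass through unchanged.
    return search_result


print(convert_to_link("MNIST | https://huggingface.co/datasets/ylecun/mnist"))
# `MNIST <https://huggingface.co/datasets/ylecun/mnist>`_
print(convert_to_link(
    "torch | https://pytorch.org/, torchvision | https://pytorch.org/vision/stable/index.html"
))
# `torch <https://pytorch.org/>`_, `torchvision <https://pytorch.org/vision/stable/index.html>`_
print(convert_to_link("Synthetic"))
# Synthetic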
diff --git a/examples/pytorch-from-centralized-to-federated/README.md b/examples/pytorch-from-centralized-to-federated/README.md index 06ee89dddcac..683604ec4eb9 100644 --- a/examples/pytorch-from-centralized-to-federated/README.md +++ b/examples/pytorch-from-centralized-to-federated/README.md @@ -1,3 +1,10 @@ +--- +title: PyTorch, From Centralized To Federated +labels: [basic, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [torch | https://pytorch.org/] +--- + # PyTorch: From Centralized To Federated This example demonstrates how an already existing centralized PyTorch-based machine learning project can be federated with Flower. diff --git a/examples/quickstart-cpp/README.md b/examples/quickstart-cpp/README.md index d6cbeebe1bc6..7e79aa63b851 100644 --- a/examples/quickstart-cpp/README.md +++ b/examples/quickstart-cpp/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example using C++ +labels: [quickstart, linear regression, tabular] +dataset: [Synthetic] +framework: [C++ | https://isocpp.org/] +--- + # Flower Clients in C++ (under development) In this example you will train a linear model on synthetic data using C++ clients. diff --git a/examples/quickstart-fastai/README.md b/examples/quickstart-fastai/README.md index 38ef23c95a1e..8a8c41a49c22 100644 --- a/examples/quickstart-fastai/README.md +++ b/examples/quickstart-fastai/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example using fastai +labels: [quickstart, vision] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [fastai | https://fast.ai] +--- + # Flower Example using fastai This introductory example to Flower uses [fastai](https://www.fast.ai/), but deep knowledge of fastai is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. diff --git a/examples/quickstart-huggingface/README.md b/examples/quickstart-huggingface/README.md index ce7790cd4af5..7235b12d7c8e 100644 --- a/examples/quickstart-huggingface/README.md +++ b/examples/quickstart-huggingface/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Transformers Example using HuggingFace +labels: [quickstart, llm, nlp, sentiment] +dataset: [IMDB | https://huggingface.co/datasets/stanfordnlp/imdb] +framework: [transformers | https://huggingface.co/docs/transformers/index] +--- + # Federated HuggingFace Transformers using Flower and PyTorch This introductory example to using [HuggingFace](https://huggingface.co) Transformers with Flower with PyTorch. This example has been extended from the [quickstart-pytorch](https://flower.ai/docs/examples/quickstart-pytorch.html) example. The training script closely follows the [HuggingFace course](https://huggingface.co/course/chapter3?fw=pt), so you are encouraged to check that out for a detailed explanation of the transformer pipeline. diff --git a/examples/quickstart-jax/README.md b/examples/quickstart-jax/README.md index 836adf558d88..5d9939a2b4fb 100644 --- a/examples/quickstart-jax/README.md +++ b/examples/quickstart-jax/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example using Jax +labels: [quickstart, linear regression] +dataset: [Synthetic] +framework: [JAX | https://jax.readthedocs.io/en/latest/] +--- + # JAX: From Centralized To Federated This example demonstrates how an already existing centralized JAX-based machine learning project can be federated with Flower. 
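When `_main` builds the index, each example is filed under exactly one section according to its `labels` field: the `quickstart` label is checked first, then `comprehensive`, then `advanced`, and anything left over goes into the Other table (note that only `quickstart`, `advanced`, and `other` tables are defined in `categories`). A reduced sketch of that selection order, with label strings taken from the front matter added in this change (the helper below is illustrative, not part of the script):

def pick_category(labels: str) -> str:
    # Mirrors the nested _add_table_entry calls in _main: the first matching
    # label wins, everything else ends up in the "other" table.
    for label in ("quickstart", "comprehensive", "advanced"):
        if label in labels:
            return label
    return "other"


print(pick_category("quickstart, linear regression"))  # quickstart
print(pick_category("advanced, vision, fds"))          # advanced
print(pick_category("mods, monitoring, app"))          # other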
diff --git a/examples/quickstart-mlcube/README.md b/examples/quickstart-mlcube/README.md index 8e6fc29b3ad8..a2e989e6804b 100644 --- a/examples/quickstart-mlcube/README.md +++ b/examples/quickstart-mlcube/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Example using TensorFlow/Keras + MLCube +labels: [quickstart, vision, deployment] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [tensorflow | https://www.tensorflow.org/, Keras | https://keras.io/] +--- + # Flower Example using TensorFlow/Keras + MLCube This introductory example to Flower uses MLCube together with Keras, but deep knowledge of Keras is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use-cases with MLCube. Running this example in itself is quite easy. diff --git a/examples/quickstart-mlx/README.md b/examples/quickstart-mlx/README.md index cca55bcb946a..633b32f7bde7 100644 --- a/examples/quickstart-mlx/README.md +++ b/examples/quickstart-mlx/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example using MLX +labels: [quickstart, vision] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [MLX | https://ml-explore.github.io/mlx/build/html/index.html] +--- + # Flower Example using MLX This introductory example to Flower uses [MLX](https://ml-explore.github.io/mlx/build/html/index.html), but deep knowledge of MLX is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. Running this example in itself is quite easy. diff --git a/examples/quickstart-monai/README.md b/examples/quickstart-monai/README.md index 4a9afef4f86a..43f8c1cacbc7 100644 --- a/examples/quickstart-monai/README.md +++ b/examples/quickstart-monai/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Example using MONAI +labels: [quickstart, medical, vision] +dataset: [MedNIST | https://medmnist.com/] +framework: [MONAI | https://monai.io/] +--- + # Flower Example using MONAI This introductory example to Flower uses MONAI, but deep knowledge of MONAI is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. diff --git a/examples/quickstart-pandas/README.md b/examples/quickstart-pandas/README.md index dd69f3ead3cb..d2cc7abbacd1 100644 --- a/examples/quickstart-pandas/README.md +++ b/examples/quickstart-pandas/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example using Pandas +labels: [quickstart, tabular, federated analytics] +dataset: [Iris | https://scikit-learn.org/stable/auto_examples/datasets/plot_iris_dataset.html] +framework: [pandas | https://pandas.pydata.org/] +--- + # Flower Example using Pandas This introductory example to Flower uses Pandas, but deep knowledge of Pandas is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. 
This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to diff --git a/examples/quickstart-pytorch-lightning/README.md b/examples/quickstart-pytorch-lightning/README.md index fb29c7e9e9ea..ec968a1d8d0a 100644 --- a/examples/quickstart-pytorch-lightning/README.md +++ b/examples/quickstart-pytorch-lightning/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example using PyTorch-Lightning +labels: [quickstart, vision, fds] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [lightning | https://lightning.ai/docs/pytorch/stable/] +--- + # Flower Example using PyTorch Lightning This introductory example to Flower uses PyTorch, but deep knowledge of PyTorch Lightning is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. Running this example in itself is quite easy. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to download, partition and preprocess the MNIST dataset. diff --git a/examples/quickstart-pytorch/README.md b/examples/quickstart-pytorch/README.md index 93d6a593f362..63a357b37e58 100644 --- a/examples/quickstart-pytorch/README.md +++ b/examples/quickstart-pytorch/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example using PyTorch +labels: [quickstart, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [torch | https://pytorch.org/, torchvision | https://pytorch.org/vision/stable/index.html] +--- + # Flower Example using PyTorch This introductory example to Flower uses PyTorch, but deep knowledge of PyTorch is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. Running this example in itself is quite easy. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to download, partition and preprocess the CIFAR-10 dataset. diff --git a/examples/quickstart-sklearn-tabular/README.md b/examples/quickstart-sklearn-tabular/README.md index a975a9392800..bb9aac58a0ed 100644 --- a/examples/quickstart-sklearn-tabular/README.md +++ b/examples/quickstart-sklearn-tabular/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Example using Scikit-Learn +labels: [quickstart, tabular, fds] +dataset: [Iris | https://scikit-learn.org/stable/auto_examples/datasets/plot_iris_dataset.html] +framework: [scikit-learn | https://scikit-learn.org/] +--- + # Flower Example using scikit-learn This example of Flower uses `scikit-learn`'s `LogisticRegression` model to train a federated learning system on diff --git a/examples/quickstart-tabnet/README.md b/examples/quickstart-tabnet/README.md index 19a139f83064..13cddc2bc737 100644 --- a/examples/quickstart-tabnet/README.md +++ b/examples/quickstart-tabnet/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example using Tabnet +labels: [quickstart, tabular] +dataset: [Iris | https://scikit-learn.org/stable/auto_examples/datasets/plot_iris_dataset.html] +framework: [tabnet | https://github.com/titu1994/tf-TabNet] +--- + # Flower TabNet Example using TensorFlow This introductory example to Flower uses Keras but deep knowledge of Keras is not necessarily required to run the example. However, it will help you understanding how to adapt Flower to your use-cases. You can learn more about TabNet from [paper](https://arxiv.org/abs/1908.07442) and its implementation using TensorFlow at [this repository](https://github.com/titu1994/tf-TabNet). 
Note also that the basis of this example using federated learning is the example from the repository above. diff --git a/examples/quickstart-tensorflow/README.md b/examples/quickstart-tensorflow/README.md index ae1fe19834a3..ce7190b35ef9 100644 --- a/examples/quickstart-tensorflow/README.md +++ b/examples/quickstart-tensorflow/README.md @@ -1,3 +1,10 @@ +--- +title: Simple Flower Example using TensorFlow +labels: [quickstart, vision, fds] +dataset: [CIFAR-10 | https://huggingface.co/datasets/uoft-cs/cifar10] +framework: [tensorflow | https://www.tensorflow.org/] +--- + # Flower Example using TensorFlow/Keras This introductory example to Flower uses Keras but deep knowledge of Keras is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. diff --git a/examples/simulation-pytorch/README.md b/examples/simulation-pytorch/README.md index 93f9e1acbac7..85b9e136e6dc 100644 --- a/examples/simulation-pytorch/README.md +++ b/examples/simulation-pytorch/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Simulation Example using PyTorch +labels: [basic, vision, fds, simulation] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [torch | https://pytorch.org/, torchvision | https://pytorch.org/vision/stable/index.html] +--- + # Flower Simulation example using PyTorch This introductory example uses the simulation capabilities of Flower to simulate a large number of clients on a single machine. Take a look at the [Documentation](https://flower.ai/docs/framework/how-to-run-simulations.html) for a deep dive into how Flower simulation works. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to download, partition and preprocess the MNIST dataset. This examples uses 100 clients by default. diff --git a/examples/simulation-tensorflow/README.md b/examples/simulation-tensorflow/README.md index 917d7b34c7af..2dc9a41cb959 100644 --- a/examples/simulation-tensorflow/README.md +++ b/examples/simulation-tensorflow/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Simulation Example using TensorFlow/Keras +labels: [basic, vision, fds, simulation] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [tensorflow | https://www.tensorflow.org/, Keras | https://keras.io/] +--- + # Flower Simulation example using TensorFlow/Keras This introductory example uses the simulation capabilities of Flower to simulate a large number of clients on a single machine. Take a look at the [Documentation](https://flower.ai/docs/framework/how-to-run-simulations.html) for a deep dive into how Flower simulation works. This example uses [Flower Datasets](https://flower.ai/docs/datasets/) to download, partition and preprocess the MNIST dataset. This examples uses 100 clients by default. diff --git a/examples/sklearn-logreg-mnist/README.md b/examples/sklearn-logreg-mnist/README.md index 12b1a5e3bc1a..edea3d7b28e8 100644 --- a/examples/sklearn-logreg-mnist/README.md +++ b/examples/sklearn-logreg-mnist/README.md @@ -1,3 +1,10 @@ +--- +title: Flower LogReg Example using Scikit-Learn +labels: [basic, vision, logistic regression, fds] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [scikit-learn | https://scikit-learn.org/] +--- + # Flower Example using scikit-learn This example of Flower uses `scikit-learn`'s `LogisticRegression` model to train a federated learning system. It will help you understand how to adapt Flower for use with `scikit-learn`. 
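After a README is copied to `examples/doc/source/`, the script inserts a link back to the example's folder on GitHub directly below the first level-one heading, and only if it is not already present. A minimal sketch of that substitution, reusing the regex from `_add_gh_button` (the README snippet and example name are illustrative):

import re

content = "# Flower Example using scikit-learn\n\nThis example of Flower uses ..."
gh_text = (
    "[View on GitHub]"
    "(https://github.com/adap/flower/blob/main/examples/sklearn-logreg-mnist)"
)

# Insert the link once, right after the first level-one heading.
if gh_text not in content:
    content = re.sub(
        r"(^# .+$)", rf"\1\n\n{gh_text}", content, count=1, flags=re.MULTILINE
    )

print(content)
# # Flower Example using scikit-learn
#
# [View on GitHub](https://github.com/adap/flower/blob/main/examples/sklearn-logreg-mnist)
#
# This example of Flower uses ...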
diff --git a/examples/tensorflow-privacy/README.md b/examples/tensorflow-privacy/README.md index a1f1be00f6b0..48ed4594edac 100644 --- a/examples/tensorflow-privacy/README.md +++ b/examples/tensorflow-privacy/README.md @@ -1,3 +1,10 @@ +--- +title: Sample-Level DP using TensorFlow-Privacy Engine +labels: [basic, vision, fds, privacy, dp] +dataset: [MNIST | https://huggingface.co/datasets/ylecun/mnist] +framework: [tensorflow | https://www.tensorflow.org/] +--- + # Training with Sample-Level Differential Privacy using TensorFlow-Privacy Engine In this example, we demonstrate how to train a model with sample-level differential privacy (DP) using Flower. We employ TensorFlow and integrate the tensorflow-privacy Engine to achieve sample-level differential privacy. This setup ensures robust privacy guarantees during the client training phase. diff --git a/examples/vertical-fl/README.md b/examples/vertical-fl/README.md index ba8228a059f9..30b38e59c619 100644 --- a/examples/vertical-fl/README.md +++ b/examples/vertical-fl/README.md @@ -1,3 +1,11 @@ +--- +title: Vertical FL Flower Example +labels: [vertical, tabular, advanced] +dataset: [Titanic | https://www.kaggle.com/competitions/titanic] +framework: [torch | https://pytorch.org/, pandas | https://pandas.pydata.org/, scikit-learn + | https://scikit-learn.org/] +--- + # Vertical Federated Learning example This example will showcase how you can perform Vertical Federated Learning using diff --git a/examples/vit-finetune/README.md b/examples/vit-finetune/README.md index ac1652acf02d..3ef38aca5773 100644 --- a/examples/vit-finetune/README.md +++ b/examples/vit-finetune/README.md @@ -1,3 +1,10 @@ +--- +title: Federated finetuning of a ViT +labels: [finetuning, vision, fds] +dataset: [Oxford Flower-102 | https://www.robots.ox.ac.uk/~vgg/data/flowers/102/] +framework: [torch | https://pytorch.org/, torchvision | https://pytorch.org/vision/stable/index.html] +--- + # Federated finetuning of a ViT This example shows how to use Flower's Simulation Engine to federate the finetuning of a Vision Transformer ([ViT-Base-16](https://pytorch.org/vision/main/models/generated/torchvision.models.vit_b_16.html#torchvision.models.vit_b_16)) that has been pretrained on ImageNet. To keep things simple we'll be finetuning it to [Oxford Flower-102](https://www.robots.ox.ac.uk/~vgg/data/flowers/102/index.html) datasset, creating 20 partitions using [Flower Datasets](https://flower.ai/docs/datasets/). We'll be finetuning just the exit `head` of the ViT, this means that the training is not that costly and each client requires just ~1GB of VRAM (for a batch size of 32 images). diff --git a/examples/whisper-federated-finetuning/README.md b/examples/whisper-federated-finetuning/README.md index ddebe51247b2..1a79e581016f 100644 --- a/examples/whisper-federated-finetuning/README.md +++ b/examples/whisper-federated-finetuning/README.md @@ -1,3 +1,11 @@ +--- +title: On-device Federated Finetuning for Speech Classification +labels: [finetuning, speech, transformers] +dataset: [SpeechCommands | https://huggingface.co/datasets/google/speech_commands] +framework: [transformers | https://huggingface.co/docs/transformers/index, whisper + | https://huggingface.co/openai/whisper-tiny] +--- + # On-device Federated Finetuning for Speech Classification This example demonstrates how to, from a pre-trained [Whisper](https://openai.com/research/whisper) model, finetune it for the downstream task of keyword spotting. 
We'll be implementing a federated downstream finetuning pipeline using Flower involving a total of 100 clients. As for the downstream dataset, we'll be using the [Google Speech Commands](https://huggingface.co/datasets/speech_commands) dataset for keyword spotting. We'll take the encoder part of the [Whisper-tiny](https://huggingface.co/openai/whisper-tiny) model, freeze its parameters, and learn a lightweight classification (\<800K parameters !!) head to correctly classify a spoken word. diff --git a/examples/xgboost-comprehensive/README.md b/examples/xgboost-comprehensive/README.md index dc6d7e3872d6..ea06febe43a7 100644 --- a/examples/xgboost-comprehensive/README.md +++ b/examples/xgboost-comprehensive/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Example using XGBoost +labels: [advanced, classification, tabular] +dataset: [HIGGS | https://archive.ics.uci.edu/dataset/280/higgs] +framework: [xgboost | https://xgboost.readthedocs.io/en/stable/] +--- + # Flower Example using XGBoost (Comprehensive) This example demonstrates a comprehensive federated learning setup using Flower with XGBoost. diff --git a/examples/xgboost-quickstart/README.md b/examples/xgboost-quickstart/README.md index 713b6eab8bac..40edfd0c1870 100644 --- a/examples/xgboost-quickstart/README.md +++ b/examples/xgboost-quickstart/README.md @@ -1,3 +1,10 @@ +--- +title: Flower Example using XGBoost +labels: [quickstart, classification, tabular] +dataset: [HIGGS | https://archive.ics.uci.edu/dataset/280/higgs] +framework: [xgboost | https://xgboost.readthedocs.io/en/stable/] +--- + # Flower Example using XGBoost This example demonstrates how to perform EXtreme Gradient Boosting (XGBoost) within Flower using `xgboost` package.
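For reference, this is roughly the reStructuredText row that `_add_table_entry` appends to `examples/doc/source/index.rst` for one of the quickstart examples above. The values come from the quickstart-pytorch front matter added in this change; the row layout follows the `table_entry` format string, with indentation approximated:

# Values taken from the quickstart-pytorch front matter added in this change.
example_name = "quickstart-pytorch"
title = "Simple Flower Example using PyTorch"
labels = "quickstart, vision, fds"
dataset = "`CIFAR-10 <https://huggingface.co/datasets/uoft-cs/cifar10>`_"
framework = (
    "`torch <https://pytorch.org/>`_, "
    "`torchvision <https://pytorch.org/vision/stable/index.html>`_"
)

table_entry = (
    f"  * - `{title} <{example_name}.html>`_\n"
    f"    - {framework}\n"
    f"    - {dataset}\n"
    f"    - {labels}\n\n"
)
print(table_entry)
#   * - `Simple Flower Example using PyTorch <quickstart-pytorch.html>`_
#     - `torch <https://pytorch.org/>`_, `torchvision <https://pytorch.org/vision/stable/index.html>`_
#     - `CIFAR-10 <https://huggingface.co/datasets/uoft-cs/cifar10>`_
#     - quickstart, vision, fds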