From 3b112af614fc0fd728000bd80c41fbfa0a4adcbd Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Thu, 12 Oct 2023 16:19:30 +0200 Subject: [PATCH 01/18] First draft --- examples/quickstart-monai/.gitignore | 1 + examples/quickstart-monai/client.py | 56 ++++++++++ examples/quickstart-monai/data.py | 120 +++++++++++++++++++++ examples/quickstart-monai/model.py | 29 +++++ examples/quickstart-monai/pyproject.toml | 18 ++++ examples/quickstart-monai/requirements.txt | 4 + examples/quickstart-monai/run.sh | 20 ++++ examples/quickstart-monai/server.py | 25 +++++ 8 files changed, 273 insertions(+) create mode 100644 examples/quickstart-monai/.gitignore create mode 100644 examples/quickstart-monai/client.py create mode 100644 examples/quickstart-monai/data.py create mode 100644 examples/quickstart-monai/model.py create mode 100644 examples/quickstart-monai/pyproject.toml create mode 100644 examples/quickstart-monai/requirements.txt create mode 100755 examples/quickstart-monai/run.sh create mode 100644 examples/quickstart-monai/server.py diff --git a/examples/quickstart-monai/.gitignore b/examples/quickstart-monai/.gitignore new file mode 100644 index 000000000000..a218cab9669e --- /dev/null +++ b/examples/quickstart-monai/.gitignore @@ -0,0 +1 @@ +MedNIST* diff --git a/examples/quickstart-monai/client.py b/examples/quickstart-monai/client.py new file mode 100644 index 000000000000..206d13cf4239 --- /dev/null +++ b/examples/quickstart-monai/client.py @@ -0,0 +1,56 @@ +import warnings +from collections import OrderedDict + +import flwr as fl +import torch +from monai.networks.nets import DenseNet121 + +from data import load_data +from model import train, evaluate + +# ############################################################################# +# 1. Regular PyTorch pipeline: nn.Module, train, test, and DataLoader +# ############################################################################# + +warnings.filterwarnings("ignore", category=UserWarning) +DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + +# ############################################################################# +# 2. 
Federation of the pipeline with Flower +# ############################################################################# + +# Load model and data (simple CNN, CIFAR-10) +trainloader, _, testloader, num_class = load_data() +net = DenseNet121( + spatial_dims=2, + in_channels=1, + out_channels=num_class +).to(DEVICE) + + +# Define Flower client +class FlowerClient(fl.client.NumPyClient): + def get_parameters(self, config): + return [val.cpu().numpy() for _, val in net.state_dict().items()] + + def set_parameters(self, parameters): + params_dict = zip(net.state_dict().keys(), parameters) + state_dict = OrderedDict({k: torch.tensor(v) for k, v in params_dict}) + net.load_state_dict(state_dict, strict=True) + + def fit(self, parameters, config): + self.set_parameters(parameters) + train(net, trainloader, epoch_num=1, device=DEVICE) + return self.get_parameters(config={}), len(trainloader), {} + + def evaluate(self, parameters, config): + self.set_parameters(parameters) + loss, accuracy = evaluate(net, testloader, DEVICE) + return loss, len(testloader), {"accuracy": accuracy} + + +# Start Flower client +fl.client.start_numpy_client( + server_address="127.0.0.1:8080", + client=FlowerClient(), +) diff --git a/examples/quickstart-monai/data.py b/examples/quickstart-monai/data.py new file mode 100644 index 000000000000..b2f9d5d33551 --- /dev/null +++ b/examples/quickstart-monai/data.py @@ -0,0 +1,120 @@ +import os +import numpy as np +from monai.transforms import ( + EnsureChannelFirst, + Compose, + LoadImage, + RandFlip, + RandRotate, + RandZoom, + ScaleIntensity, + ToTensor, +) + +from monai.data import Dataset, DataLoader +from urllib import request +import tarfile + + +def load_data(): + image_file_list, image_label_list, num_total, num_class = _download_data() + trainX, trainY, valX, valY, testX, testY = _split_data(image_file_list, image_label_list, num_total) + train_transforms, val_transforms = _get_transforms() + + train_ds = MedNISTDataset(trainX, trainY, train_transforms) + train_loader = DataLoader(train_ds, batch_size=300, shuffle=True, num_workers=2) + + val_ds = MedNISTDataset(valX, valY, val_transforms) + val_loader = DataLoader(val_ds, batch_size=300, num_workers=2) + + test_ds = MedNISTDataset(testX, testY, val_transforms) + test_loader = DataLoader(test_ds, batch_size=300, num_workers=2) + + return train_loader, val_loader, test_loader, num_class + + +class MedNISTDataset(Dataset): + + def __init__(self, image_files, labels, transforms): + self.image_files = image_files + self.labels = labels + self.transforms = transforms + + def __len__(self): + return len(self.image_files) + + def __getitem__(self, index): + return self.transforms(self.image_files[index]), self.labels[index] + + +def _download_data(): + data_dir = './MedNIST/' + _download_and_extract("https://dl.dropboxusercontent.com/s/5wwskxctvcxiuea/MedNIST.tar.gz", os.path.join(data_dir)) + + class_names = sorted([x for x in os.listdir(data_dir) if os.path.isdir(os.path.join(data_dir, x))]) + num_class = len(class_names) + image_files = [[os.path.join(data_dir, class_name, x) + for x in os.listdir(os.path.join(data_dir, class_name))] + for class_name in class_names] + image_file_list = [] + image_label_list = [] + for i, class_name in enumerate(class_names): + image_file_list.extend(image_files[i]) + image_label_list.extend([i] * len(image_files[i])) + num_total = len(image_label_list) + return image_file_list, image_label_list, num_total, num_class + + +def _split_data(image_file_list, image_label_list, num_total): + 
valid_frac, test_frac = 0.1, 0.1 + trainX, trainY = [], [] + valX, valY = [], [] + testX, testY = [], [] + + for i in range(num_total): + rann = np.random.random() + if rann < valid_frac: + valX.append(image_file_list[i]) + valY.append(image_label_list[i]) + elif rann < test_frac + valid_frac: + testX.append(image_file_list[i]) + testY.append(image_label_list[i]) + else: + trainX.append(image_file_list[i]) + trainY.append(image_label_list[i]) + + + return trainX, trainY, valX, valY, testX, testY + +def _get_transforms(): + train_transforms = Compose([ + LoadImage(image_only=True), + EnsureChannelFirst(), + ScaleIntensity(), + RandRotate(range_x=15, prob=0.5, keep_size=True), + RandFlip(spatial_axis=0, prob=0.5), + RandZoom(min_zoom=0.9, max_zoom=1.1, prob=0.5, keep_size=True), + ToTensor() + ]) + + val_transforms = Compose([ + LoadImage(image_only=True), + EnsureChannelFirst(), + ScaleIntensity(), + ToTensor() + ]) + + return train_transforms, val_transforms + +def _download_and_extract(url, dest_folder): + if not os.path.isdir(dest_folder): + # Download the tar.gz file + tar_gz_filename = url.split("/")[-1] + if not os.path.isfile(tar_gz_filename ): + with request.urlopen(url) as response, open(tar_gz_filename, 'wb') as out_file: + out_file.write(response.read()) + + # Extract the tar.gz file + with tarfile.open(tar_gz_filename, 'r:gz') as tar_ref: + tar_ref.extractall() + diff --git a/examples/quickstart-monai/model.py b/examples/quickstart-monai/model.py new file mode 100644 index 000000000000..ed64a41c2ffd --- /dev/null +++ b/examples/quickstart-monai/model.py @@ -0,0 +1,29 @@ +import torch + +def train(model, train_loader, epoch_num, device): + loss_function = torch.nn.CrossEntropyLoss() + optimizer = torch.optim.Adam(model.parameters(), 1e-5) + for _ in range(epoch_num): + model.train() + for inputs, labels in train_loader: + optimizer.zero_grad() + loss_function(model(inputs.to(device)), labels.to(device)).backward() + optimizer.step() + +def evaluate(model, test_loader, device): + model.eval() + loss = 0.0 + y_true = list() + y_pred = list() + loss_function = torch.nn.CrossEntropyLoss() + with torch.no_grad(): + for test_images, test_labels in test_loader: + out = model(test_images.to(device)) + test_labels = test_labels.to(device) + loss += loss_function(out, test_labels) + pred = out.argmax(dim=1) + for i in range(len(pred)): + y_true.append(test_labels[i].item()) + y_pred.append(pred[i].item()) + accuracy = sum([1 if t == p else 0 for t, p in zip(y_true, y_pred)]) / len(test_loader.dataset) + return loss, accuracy diff --git a/examples/quickstart-monai/pyproject.toml b/examples/quickstart-monai/pyproject.toml new file mode 100644 index 000000000000..797f2728bb8c --- /dev/null +++ b/examples/quickstart-monai/pyproject.toml @@ -0,0 +1,18 @@ +[build-system] +requires = ["poetry-core>=1.4.0"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry] +name = "quickstart-pytorch" +version = "0.1.0" +description = "PyTorch Federated Learning Quickstart with Flower" +authors = ["The Flower Authors "] + +[tool.poetry.dependencies] +python = ">=3.8,<3.11" +flwr = ">=1.0,<2.0" +torch = "1.13.1" +tqdm = "4.65.0" +scikit-learn = "1.3.1" +monai = { version = "1.2.0", extras=["gdown", "nibabel", "tqdm", "itk"] } +numpy = "1.21.3" diff --git a/examples/quickstart-monai/requirements.txt b/examples/quickstart-monai/requirements.txt new file mode 100644 index 000000000000..797ca6db6244 --- /dev/null +++ b/examples/quickstart-monai/requirements.txt @@ -0,0 +1,4 @@ +flwr>=1.0, <2.0 +torch==1.13.1 
+torchvision==0.14.1 +tqdm==4.65.0 diff --git a/examples/quickstart-monai/run.sh b/examples/quickstart-monai/run.sh new file mode 100755 index 000000000000..d2bf34f834b1 --- /dev/null +++ b/examples/quickstart-monai/run.sh @@ -0,0 +1,20 @@ +#!/bin/bash +set -e +cd "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"/ + +# Download the CIFAR-10 dataset +python -c "from torchvision.datasets import CIFAR10; CIFAR10('./data', download=True)" + +echo "Starting server" +python server.py & +sleep 3 # Sleep for 3s to give the server enough time to start + +for i in `seq 0 1`; do + echo "Starting client $i" + python client.py & +done + +# Enable CTRL+C to stop all background processes +trap "trap - SIGTERM && kill -- -$$" SIGINT SIGTERM +# Wait for all background processes to complete +wait diff --git a/examples/quickstart-monai/server.py b/examples/quickstart-monai/server.py new file mode 100644 index 000000000000..fe691a88aba0 --- /dev/null +++ b/examples/quickstart-monai/server.py @@ -0,0 +1,25 @@ +from typing import List, Tuple + +import flwr as fl +from flwr.common import Metrics + + +# Define metric aggregation function +def weighted_average(metrics: List[Tuple[int, Metrics]]) -> Metrics: + # Multiply accuracy of each client by number of examples used + accuracies = [num_examples * m["accuracy"] for num_examples, m in metrics] + examples = [num_examples for num_examples, _ in metrics] + + # Aggregate and return custom metric (weighted average) + return {"accuracy": sum(accuracies) / sum(examples)} + + +# Define strategy +strategy = fl.server.strategy.FedAvg(evaluate_metrics_aggregation_fn=weighted_average) + +# Start Flower server +fl.server.start_server( + server_address="0.0.0.0:8080", + config=fl.server.ServerConfig(num_rounds=3), + strategy=strategy, +) From 364678dea7503aec3964f3412db11c9f3201eaa8 Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Fri, 13 Oct 2023 09:28:31 +0200 Subject: [PATCH 02/18] Working version --- examples/quickstart-monai/client.py | 55 ++++++++++----------- examples/quickstart-monai/data.py | 75 +++++++++++++++++------------ examples/quickstart-monai/model.py | 10 ++-- 3 files changed, 77 insertions(+), 63 deletions(-) diff --git a/examples/quickstart-monai/client.py b/examples/quickstart-monai/client.py index 206d13cf4239..b611ace541ac 100644 --- a/examples/quickstart-monai/client.py +++ b/examples/quickstart-monai/client.py @@ -6,51 +6,48 @@ from monai.networks.nets import DenseNet121 from data import load_data -from model import train, evaluate - -# ############################################################################# -# 1. Regular PyTorch pipeline: nn.Module, train, test, and DataLoader -# ############################################################################# +from model import train, test warnings.filterwarnings("ignore", category=UserWarning) DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") - -# ############################################################################# -# 2. 
Federation of the pipeline with Flower -# ############################################################################# - -# Load model and data (simple CNN, CIFAR-10) -trainloader, _, testloader, num_class = load_data() -net = DenseNet121( - spatial_dims=2, - in_channels=1, - out_channels=num_class -).to(DEVICE) +# DEVICE = torch.device("mps") # Define Flower client class FlowerClient(fl.client.NumPyClient): + def __init__(self, net, trainloader, testloader, device): + self.net = net + self.trainloader = trainloader + self.testloader = testloader + self.device = device + print(len(self.testloader)) + def get_parameters(self, config): - return [val.cpu().numpy() for _, val in net.state_dict().items()] + return [val.cpu().numpy() for _, val in self.net.state_dict().items()] def set_parameters(self, parameters): - params_dict = zip(net.state_dict().keys(), parameters) + params_dict = zip(self.net.state_dict().keys(), parameters) state_dict = OrderedDict({k: torch.tensor(v) for k, v in params_dict}) - net.load_state_dict(state_dict, strict=True) + self.net.load_state_dict(state_dict, strict=True) def fit(self, parameters, config): self.set_parameters(parameters) - train(net, trainloader, epoch_num=1, device=DEVICE) - return self.get_parameters(config={}), len(trainloader), {} + train(self.net, self.trainloader, epoch_num=1, device=self.device) + return self.get_parameters(config={}), len(self.trainloader), {} def evaluate(self, parameters, config): self.set_parameters(parameters) - loss, accuracy = evaluate(net, testloader, DEVICE) - return loss, len(testloader), {"accuracy": accuracy} + loss, accuracy = test(self.net, self.testloader, self.device) + return loss, len(self.testloader), {"accuracy": accuracy} + +if __name__ == "__main__": + # Load model and data (simple CNN, CIFAR-10) + trainloader, _, testloader, num_class = load_data() + net = DenseNet121(spatial_dims=2, in_channels=1, out_channels=num_class).to(DEVICE) -# Start Flower client -fl.client.start_numpy_client( - server_address="127.0.0.1:8080", - client=FlowerClient(), -) + # Start Flower client + fl.client.start_numpy_client( + server_address="127.0.0.1:8080", + client=FlowerClient(net, trainloader, testloader, DEVICE), + ) diff --git a/examples/quickstart-monai/data.py b/examples/quickstart-monai/data.py index b2f9d5d33551..0e55abd5234e 100644 --- a/examples/quickstart-monai/data.py +++ b/examples/quickstart-monai/data.py @@ -1,5 +1,6 @@ import os import numpy as np +from PIL import Image from monai.transforms import ( EnsureChannelFirst, Compose, @@ -18,7 +19,9 @@ def load_data(): image_file_list, image_label_list, num_total, num_class = _download_data() - trainX, trainY, valX, valY, testX, testY = _split_data(image_file_list, image_label_list, num_total) + trainX, trainY, valX, valY, testX, testY = _split_data( + image_file_list, image_label_list, num_total + ) train_transforms, val_transforms = _get_transforms() train_ds = MedNISTDataset(trainX, trainY, train_transforms) @@ -34,7 +37,6 @@ def load_data(): class MedNISTDataset(Dataset): - def __init__(self, image_files, labels, transforms): self.image_files = image_files self.labels = labels @@ -48,14 +50,23 @@ def __getitem__(self, index): def _download_data(): - data_dir = './MedNIST/' - _download_and_extract("https://dl.dropboxusercontent.com/s/5wwskxctvcxiuea/MedNIST.tar.gz", os.path.join(data_dir)) - - class_names = sorted([x for x in os.listdir(data_dir) if os.path.isdir(os.path.join(data_dir, x))]) + data_dir = "./MedNIST/" + _download_and_extract( + 
"https://dl.dropboxusercontent.com/s/5wwskxctvcxiuea/MedNIST.tar.gz", + os.path.join(data_dir), + ) + + class_names = sorted( + [x for x in os.listdir(data_dir) if os.path.isdir(os.path.join(data_dir, x))] + ) num_class = len(class_names) - image_files = [[os.path.join(data_dir, class_name, x) - for x in os.listdir(os.path.join(data_dir, class_name))] - for class_name in class_names] + image_files = [ + [ + os.path.join(data_dir, class_name, x) + for x in os.listdir(os.path.join(data_dir, class_name)) + ] + for class_name in class_names + ] image_file_list = [] image_label_list = [] for i, class_name in enumerate(class_names): @@ -83,38 +94,40 @@ def _split_data(image_file_list, image_label_list, num_total): trainX.append(image_file_list[i]) trainY.append(image_label_list[i]) - return trainX, trainY, valX, valY, testX, testY + def _get_transforms(): - train_transforms = Compose([ - LoadImage(image_only=True), - EnsureChannelFirst(), - ScaleIntensity(), - RandRotate(range_x=15, prob=0.5, keep_size=True), - RandFlip(spatial_axis=0, prob=0.5), - RandZoom(min_zoom=0.9, max_zoom=1.1, prob=0.5, keep_size=True), - ToTensor() - ]) - - val_transforms = Compose([ - LoadImage(image_only=True), - EnsureChannelFirst(), - ScaleIntensity(), - ToTensor() - ]) - + train_transforms = Compose( + [ + LoadImage(image_only=True), + EnsureChannelFirst(), + ScaleIntensity(), + RandRotate(range_x=15, prob=0.5, keep_size=True), + RandFlip(spatial_axis=0, prob=0.5), + RandZoom(min_zoom=0.9, max_zoom=1.1, prob=0.5, keep_size=True), + ToTensor(), + ] + ) + + val_transforms = Compose( + [LoadImage(image_only=True), EnsureChannelFirst(), ScaleIntensity(), ToTensor()] + ) + return train_transforms, val_transforms + def _download_and_extract(url, dest_folder): if not os.path.isdir(dest_folder): # Download the tar.gz file tar_gz_filename = url.split("/")[-1] - if not os.path.isfile(tar_gz_filename ): - with request.urlopen(url) as response, open(tar_gz_filename, 'wb') as out_file: + if not os.path.isfile(tar_gz_filename): + with request.urlopen(url) as response, open( + tar_gz_filename, "wb" + ) as out_file: out_file.write(response.read()) - + # Extract the tar.gz file - with tarfile.open(tar_gz_filename, 'r:gz') as tar_ref: + with tarfile.open(tar_gz_filename, "r:gz") as tar_ref: tar_ref.extractall() diff --git a/examples/quickstart-monai/model.py b/examples/quickstart-monai/model.py index ed64a41c2ffd..4c74d50553e4 100644 --- a/examples/quickstart-monai/model.py +++ b/examples/quickstart-monai/model.py @@ -1,5 +1,6 @@ import torch + def train(model, train_loader, epoch_num, device): loss_function = torch.nn.CrossEntropyLoss() optimizer = torch.optim.Adam(model.parameters(), 1e-5) @@ -10,7 +11,8 @@ def train(model, train_loader, epoch_num, device): loss_function(model(inputs.to(device)), labels.to(device)).backward() optimizer.step() -def evaluate(model, test_loader, device): + +def test(model, test_loader, device): model.eval() loss = 0.0 y_true = list() @@ -20,10 +22,12 @@ def evaluate(model, test_loader, device): for test_images, test_labels in test_loader: out = model(test_images.to(device)) test_labels = test_labels.to(device) - loss += loss_function(out, test_labels) + loss += loss_function(out, test_labels).item() pred = out.argmax(dim=1) for i in range(len(pred)): y_true.append(test_labels[i].item()) y_pred.append(pred[i].item()) - accuracy = sum([1 if t == p else 0 for t, p in zip(y_true, y_pred)]) / len(test_loader.dataset) + accuracy = sum([1 if t == p else 0 for t, p in zip(y_true, y_pred)]) / len( + 
test_loader.dataset + ) return loss, accuracy From 7ebbd8b9ef49a671306014d1b181edf3718ed358 Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Wed, 20 Dec 2023 17:31:49 +0100 Subject: [PATCH 03/18] Fix imports --- examples/quickstart-monai/client.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/quickstart-monai/client.py b/examples/quickstart-monai/client.py index b611ace541ac..f1b514a2529f 100644 --- a/examples/quickstart-monai/client.py +++ b/examples/quickstart-monai/client.py @@ -3,7 +3,7 @@ import flwr as fl import torch -from monai.networks.nets import DenseNet121 +from monai.networks.nets.densenet import DenseNet121 from data import load_data from model import train, test @@ -20,7 +20,6 @@ def __init__(self, net, trainloader, testloader, device): self.trainloader = trainloader self.testloader = testloader self.device = device - print(len(self.testloader)) def get_parameters(self, config): return [val.cpu().numpy() for _, val in self.net.state_dict().items()] From 6245f64ff37c4b1451dfba98d7ba8e667b0c8e4c Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Wed, 20 Dec 2023 17:32:44 +0100 Subject: [PATCH 04/18] Formatting --- examples/quickstart-monai/client.py | 6 +++--- examples/quickstart-monai/data.py | 13 ++++++------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/examples/quickstart-monai/client.py b/examples/quickstart-monai/client.py index f1b514a2529f..6742971c093e 100644 --- a/examples/quickstart-monai/client.py +++ b/examples/quickstart-monai/client.py @@ -1,12 +1,12 @@ import warnings from collections import OrderedDict -import flwr as fl import torch +from data import load_data +from model import test, train from monai.networks.nets.densenet import DenseNet121 -from data import load_data -from model import train, test +import flwr as fl warnings.filterwarnings("ignore", category=UserWarning) DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") diff --git a/examples/quickstart-monai/data.py b/examples/quickstart-monai/data.py index 0e55abd5234e..82a052d8639e 100644 --- a/examples/quickstart-monai/data.py +++ b/examples/quickstart-monai/data.py @@ -1,9 +1,12 @@ import os +import tarfile +from urllib import request + import numpy as np -from PIL import Image +from monai.data import DataLoader, Dataset from monai.transforms import ( - EnsureChannelFirst, Compose, + EnsureChannelFirst, LoadImage, RandFlip, RandRotate, @@ -11,10 +14,7 @@ ScaleIntensity, ToTensor, ) - -from monai.data import Dataset, DataLoader -from urllib import request -import tarfile +from PIL import Image def load_data(): @@ -130,4 +130,3 @@ def _download_and_extract(url, dest_folder): # Extract the tar.gz file with tarfile.open(tar_gz_filename, "r:gz") as tar_ref: tar_ref.extractall() - From 423e51e122ee3507b1c5116429a62c538bfdb765 Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Wed, 20 Dec 2023 17:39:49 +0100 Subject: [PATCH 05/18] Update README --- examples/quickstart-monai/pyproject.toml | 4 ++-- examples/quickstart-monai/requirements.txt | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/examples/quickstart-monai/pyproject.toml b/examples/quickstart-monai/pyproject.toml index 797f2728bb8c..345260ff78ae 100644 --- a/examples/quickstart-monai/pyproject.toml +++ b/examples/quickstart-monai/pyproject.toml @@ -3,9 +3,9 @@ requires = ["poetry-core>=1.4.0"] build-backend = "poetry.core.masonry.api" [tool.poetry] -name = "quickstart-pytorch" +name = "quickstart-monai" version = "0.1.0" -description = "PyTorch 
Federated Learning Quickstart with Flower" +description = "MONAI Federated Learning Quickstart with Flower" authors = ["The Flower Authors "] [tool.poetry.dependencies] diff --git a/examples/quickstart-monai/requirements.txt b/examples/quickstart-monai/requirements.txt index 797ca6db6244..1cf72f020f92 100644 --- a/examples/quickstart-monai/requirements.txt +++ b/examples/quickstart-monai/requirements.txt @@ -1,4 +1,6 @@ flwr>=1.0, <2.0 torch==1.13.1 -torchvision==0.14.1 tqdm==4.65.0 +scikit-learn==1.3.1 +monai["gdown", "nibabel", "tqdm", "itk"]==1.2.0 +numpy==1.21.3 From 0b127de5c30b8c11d521b5006ef1895439759876 Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Wed, 20 Dec 2023 18:24:06 +0100 Subject: [PATCH 06/18] Update requirements --- examples/quickstart-monai/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/quickstart-monai/requirements.txt b/examples/quickstart-monai/requirements.txt index 1cf72f020f92..1427e5cd6e9b 100644 --- a/examples/quickstart-monai/requirements.txt +++ b/examples/quickstart-monai/requirements.txt @@ -2,5 +2,5 @@ flwr>=1.0, <2.0 torch==1.13.1 tqdm==4.65.0 scikit-learn==1.3.1 -monai["gdown", "nibabel", "tqdm", "itk"]==1.2.0 -numpy==1.21.3 +monai[gdown,nibabel,tqdm,itk]==1.3.0 +numpy==1.24.4 From e6248b0964a785593ec6352a866210faa7137427 Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Fri, 22 Dec 2023 12:08:58 +0100 Subject: [PATCH 07/18] Add README --- examples/quickstart-monai/README.md | 85 +++++++++++++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 examples/quickstart-monai/README.md diff --git a/examples/quickstart-monai/README.md b/examples/quickstart-monai/README.md new file mode 100644 index 000000000000..b59dcfb99277 --- /dev/null +++ b/examples/quickstart-monai/README.md @@ -0,0 +1,85 @@ +# Flower Example using MONAI + +This introductory example to Flower uses MONAI, but deep knowledge of MONAI is not necessarily required to run the example. However, it will help you understand how to adapt Flower to your use case. +Running this example in itself is quite easy. + +[MONAI](https://docs.monai.io/en/latest/index.html)(Medical Open Network for AI) is a PyTorch-based, open-source framework for deep learning in healthcare imaging, part of the PyTorch Ecosystem. + +Its ambitions are: + +- developing a community of academic, industrial and clinical researchers collaborating on a common foundation; + +- creating state-of-the-art, end-to-end training workflows for healthcare imaging; + +- providing researchers with an optimized and standardized way to create and evaluate deep learning models. + +## Project Setup + +Start by cloning the example project. We prepared a single-line command that you can copy into your shell which will checkout the example for you: + +```shell +git clone --depth=1 https://github.com/adap/flower.git _tmp && mv _tmp/examples/quickstart-monai . && rm -rf _tmp && cd quickstart-monai +``` + +This will create a new directory called `quickstart-monai` containing the following files: + +```shell +-- pyproject.toml +-- requirements.txt +-- client.py +-- data.py +-- model.py +-- server.py +-- README.md +``` + +### Installing Dependencies + +Project dependencies (such as `monai` and `flwr`) are defined in `pyproject.toml` and `requirements.txt`. 
We recommend [Poetry](https://python-poetry.org/docs/) to install those dependencies and manage your virtual environment ([Poetry installation](https://python-poetry.org/docs/#installation)) or [pip](https://pip.pypa.io/en/latest/development/), but feel free to use a different way of installing dependencies and managing virtual environments if you have other preferences. + +#### Poetry + +```shell +poetry install +poetry shell +``` + +Poetry will install all your dependencies in a newly created virtual environment. To verify that everything works correctly you can run the following command: + +```shell +poetry run python3 -c "import flwr" +``` + +If you don't see any errors you're good to go! + +#### pip + +Write the command below in your terminal to install the dependencies according to the configuration file requirements.txt. + +```shell +pip install -r requirements.txt +``` + +## Run Federated Learning with MONAI and Flower + +Afterwards you are ready to start the Flower server as well as the clients. You can simply start the server in a terminal as follows: + +```shell +python3 server.py +``` + +Now you are ready to start the Flower clients which will participate in the learning. To do so simply open two more terminal windows and run the following commands. + +Start client 1 in the first terminal: + +```shell +python3 client.py +``` + +Start client 2 in the second terminal: + +```shell +python3 client.py +``` + +You will see that the federated training is starting. Look at the [code](https://github.com/adap/flower/tree/main/examples/quickstart-monai) for a detailed explanation. From 1d94f2543e8332033185fa3cbd1ceea9077dd21f Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Fri, 22 Dec 2023 17:01:22 +0100 Subject: [PATCH 08/18] Add partitioning --- examples/quickstart-monai/README.md | 4 ++-- examples/quickstart-monai/client.py | 10 +++++++- examples/quickstart-monai/data.py | 30 ++++++++++++++++++++---- examples/quickstart-monai/pyproject.toml | 2 +- examples/quickstart-monai/run.sh | 5 ++-- 5 files changed, 40 insertions(+), 11 deletions(-) diff --git a/examples/quickstart-monai/README.md b/examples/quickstart-monai/README.md index b59dcfb99277..38fa2fd5f88b 100644 --- a/examples/quickstart-monai/README.md +++ b/examples/quickstart-monai/README.md @@ -73,13 +73,13 @@ Now you are ready to start the Flower clients which will participate in the lear Start client 1 in the first terminal: ```shell -python3 client.py +python3 client.py --node_id 0 ``` Start client 2 in the second terminal: ```shell -python3 client.py +python3 client.py --node_id 1 ``` You will see that the federated training is starting. Look at the [code](https://github.com/adap/flower/tree/main/examples/quickstart-monai) for a detailed explanation. 
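The hunks that follow shard the MedNIST dataset across the participating clients by index. As a rough illustration of the indexing scheme this commit implements (a sketch only; at this stage the patch wraps the dataset in `torch.utils.data.Subset`, and the numbers below are hypothetical):

```python
def shard_bounds(total_size: int, num_shards: int, index: int) -> range:
    # Each shard receives total_size // num_shards items;
    # the last shard absorbs the remainder.
    shard_size = total_size // num_shards
    start = index * shard_size
    end = total_size if index == num_shards - 1 else start + shard_size
    return range(start, end)

# e.g. 10 shards over 47 items: shard 9 covers indices 36..46 (11 items)
print(list(shard_bounds(47, 10, 9)))
```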
diff --git a/examples/quickstart-monai/client.py b/examples/quickstart-monai/client.py index 6742971c093e..c6468dee83a9 100644 --- a/examples/quickstart-monai/client.py +++ b/examples/quickstart-monai/client.py @@ -1,3 +1,4 @@ +import argparse import warnings from collections import OrderedDict @@ -41,8 +42,15 @@ def evaluate(self, parameters, config): if __name__ == "__main__": + total_partitions = 10 + parser = argparse.ArgumentParser() + parser.add_argument( + "--node_id", type=int, choices=range(total_partitions), required=True + ) + args = parser.parse_args() + # Load model and data (simple CNN, CIFAR-10) - trainloader, _, testloader, num_class = load_data() + trainloader, _, testloader, num_class = load_data(total_partitions, args.node_id) net = DenseNet121(spatial_dims=2, in_channels=1, out_channels=num_class).to(DEVICE) # Start Flower client diff --git a/examples/quickstart-monai/data.py b/examples/quickstart-monai/data.py index 82a052d8639e..0aa9443de9fd 100644 --- a/examples/quickstart-monai/data.py +++ b/examples/quickstart-monai/data.py @@ -15,22 +15,44 @@ ToTensor, ) from PIL import Image +from torch.utils.data import Subset -def load_data(): +def _partition(dataset, num_shards, index): + total_size = len(dataset) + shard_size = total_size // num_shards + + # Calculate start and end indices for the shard + start_idx = index * shard_size + if index == num_shards - 1: + # Last shard takes the remainder + end_idx = total_size + else: + end_idx = start_idx + shard_size + + # Create a subset for the shard + shard = Subset(dataset, range(start_idx, end_idx)) + return shard + + +def load_data(num_shards, index): image_file_list, image_label_list, num_total, num_class = _download_data() trainX, trainY, valX, valY, testX, testY = _split_data( image_file_list, image_label_list, num_total ) train_transforms, val_transforms = _get_transforms() - train_ds = MedNISTDataset(trainX, trainY, train_transforms) + train_ds = _partition( + MedNISTDataset(trainX, trainY, train_transforms), num_shards, index + ) train_loader = DataLoader(train_ds, batch_size=300, shuffle=True, num_workers=2) - val_ds = MedNISTDataset(valX, valY, val_transforms) + val_ds = _partition(MedNISTDataset(valX, valY, val_transforms), num_shards, index) val_loader = DataLoader(val_ds, batch_size=300, num_workers=2) - test_ds = MedNISTDataset(testX, testY, val_transforms) + test_ds = _partition( + MedNISTDataset(testX, testY, val_transforms), num_shards, index + ) test_loader = DataLoader(test_ds, batch_size=300, num_workers=2) return train_loader, val_loader, test_loader, num_class diff --git a/examples/quickstart-monai/pyproject.toml b/examples/quickstart-monai/pyproject.toml index 345260ff78ae..15d7af5befc6 100644 --- a/examples/quickstart-monai/pyproject.toml +++ b/examples/quickstart-monai/pyproject.toml @@ -14,5 +14,5 @@ flwr = ">=1.0,<2.0" torch = "1.13.1" tqdm = "4.65.0" scikit-learn = "1.3.1" -monai = { version = "1.2.0", extras=["gdown", "nibabel", "tqdm", "itk"] } +monai = { version = "1.3.0", extras=["gdown", "nibabel", "tqdm", "itk"] } numpy = "1.21.3" diff --git a/examples/quickstart-monai/run.sh b/examples/quickstart-monai/run.sh index d2bf34f834b1..6ba0c8f9af39 100755 --- a/examples/quickstart-monai/run.sh +++ b/examples/quickstart-monai/run.sh @@ -2,8 +2,7 @@ set -e cd "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"/ -# Download the CIFAR-10 dataset -python -c "from torchvision.datasets import CIFAR10; CIFAR10('./data', download=True)" +python -c "from data import _download_data; 
_download_data()" echo "Starting server" python server.py & @@ -11,7 +10,7 @@ sleep 3 # Sleep for 3s to give the server enough time to start for i in `seq 0 1`; do echo "Starting client $i" - python client.py & + python client.py --node_id $i & done # Enable CTRL+C to stop all background processes From 550e817c9e14b0f39186041eca292471ae8a0b4d Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Fri, 5 Jan 2024 09:12:51 +0100 Subject: [PATCH 09/18] Update examples/quickstart-monai/requirements.txt Co-authored-by: Javier --- examples/quickstart-monai/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/quickstart-monai/requirements.txt b/examples/quickstart-monai/requirements.txt index 1427e5cd6e9b..e3f1e463c629 100644 --- a/examples/quickstart-monai/requirements.txt +++ b/examples/quickstart-monai/requirements.txt @@ -4,3 +4,4 @@ tqdm==4.65.0 scikit-learn==1.3.1 monai[gdown,nibabel,tqdm,itk]==1.3.0 numpy==1.24.4 +pillow==10.2.0 From 2bcddda472be2980c88ae36f1221c12ca702c1a1 Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Fri, 5 Jan 2024 09:12:59 +0100 Subject: [PATCH 10/18] Update examples/quickstart-monai/pyproject.toml Co-authored-by: Javier --- examples/quickstart-monai/pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/quickstart-monai/pyproject.toml b/examples/quickstart-monai/pyproject.toml index 15d7af5befc6..66a56ee2270b 100644 --- a/examples/quickstart-monai/pyproject.toml +++ b/examples/quickstart-monai/pyproject.toml @@ -15,4 +15,5 @@ torch = "1.13.1" tqdm = "4.65.0" scikit-learn = "1.3.1" monai = { version = "1.3.0", extras=["gdown", "nibabel", "tqdm", "itk"] } -numpy = "1.21.3" +numpy = "1.24.4" +pillow = "10.2.0" From 4434cf40ce9c7eb62319112acc1bac9bb0959301 Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Fri, 5 Jan 2024 09:13:05 +0100 Subject: [PATCH 11/18] Update examples/quickstart-monai/README.md Co-authored-by: Javier --- examples/quickstart-monai/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/quickstart-monai/README.md b/examples/quickstart-monai/README.md index 38fa2fd5f88b..b10f6dad4d54 100644 --- a/examples/quickstart-monai/README.md +++ b/examples/quickstart-monai/README.md @@ -68,7 +68,7 @@ Afterwards you are ready to start the Flower server as well as the clients. You python3 server.py ``` -Now you are ready to start the Flower clients which will participate in the learning. To do so simply open two more terminal windows and run the following commands. +Now you are ready to start the Flower clients which will participate in the learning. To do so simply open two more terminal windows and run the following commands. Clients will train a [DenseNet121](https://docs.monai.io/en/stable/networks.html#densenet121) from MONAI. If a GPU is present in your system, clients will use it. 
Start client 1 in the first terminal: From a235e3bb96cc5a93390b8bceb3d5c878bc44ff43 Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Fri, 5 Jan 2024 09:14:14 +0100 Subject: [PATCH 12/18] Use num_workers=0 --- examples/quickstart-monai/client.py | 1 - examples/quickstart-monai/data.py | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/examples/quickstart-monai/client.py b/examples/quickstart-monai/client.py index c6468dee83a9..2586457bc354 100644 --- a/examples/quickstart-monai/client.py +++ b/examples/quickstart-monai/client.py @@ -11,7 +11,6 @@ warnings.filterwarnings("ignore", category=UserWarning) DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") -# DEVICE = torch.device("mps") # Define Flower client diff --git a/examples/quickstart-monai/data.py b/examples/quickstart-monai/data.py index 0aa9443de9fd..21dcd47fdf91 100644 --- a/examples/quickstart-monai/data.py +++ b/examples/quickstart-monai/data.py @@ -45,15 +45,15 @@ def load_data(num_shards, index): train_ds = _partition( MedNISTDataset(trainX, trainY, train_transforms), num_shards, index ) - train_loader = DataLoader(train_ds, batch_size=300, shuffle=True, num_workers=2) + train_loader = DataLoader(train_ds, batch_size=300, shuffle=True, num_workers=0) val_ds = _partition(MedNISTDataset(valX, valY, val_transforms), num_shards, index) - val_loader = DataLoader(val_ds, batch_size=300, num_workers=2) + val_loader = DataLoader(val_ds, batch_size=300, num_workers=0) test_ds = _partition( MedNISTDataset(testX, testY, val_transforms), num_shards, index ) - test_loader = DataLoader(test_ds, batch_size=300, num_workers=2) + test_loader = DataLoader(test_ds, batch_size=300, num_workers=0) return train_loader, val_loader, test_loader, num_class From 932be8b9384bede723c747f83f839bdc72163e04 Mon Sep 17 00:00:00 2001 From: jafermarq Date: Fri, 5 Jan 2024 18:02:05 +0000 Subject: [PATCH 13/18] PIL wasn't really used, removing also from env setups --- examples/quickstart-monai/data.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/examples/quickstart-monai/data.py b/examples/quickstart-monai/data.py index 21dcd47fdf91..a58f739d6328 100644 --- a/examples/quickstart-monai/data.py +++ b/examples/quickstart-monai/data.py @@ -14,7 +14,6 @@ ScaleIntensity, ToTensor, ) -from PIL import Image from torch.utils.data import Subset @@ -45,15 +44,15 @@ def load_data(num_shards, index): train_ds = _partition( MedNISTDataset(trainX, trainY, train_transforms), num_shards, index ) - train_loader = DataLoader(train_ds, batch_size=300, shuffle=True, num_workers=0) + train_loader = DataLoader(train_ds, batch_size=300, shuffle=True) val_ds = _partition(MedNISTDataset(valX, valY, val_transforms), num_shards, index) - val_loader = DataLoader(val_ds, batch_size=300, num_workers=0) + val_loader = DataLoader(val_ds, batch_size=300) test_ds = _partition( MedNISTDataset(testX, testY, val_transforms), num_shards, index ) - test_loader = DataLoader(test_ds, batch_size=300, num_workers=0) + test_loader = DataLoader(test_ds, batch_size=300) return train_loader, val_loader, test_loader, num_class From 82823fddd8179a4c2745c277d7ece71db4e4e499 Mon Sep 17 00:00:00 2001 From: Charles Beauville Date: Fri, 5 Jan 2024 23:17:29 +0100 Subject: [PATCH 14/18] Update examples/quickstart-monai/data.py Co-authored-by: Javier --- examples/quickstart-monai/data.py | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/examples/quickstart-monai/data.py 
b/examples/quickstart-monai/data.py index a58f739d6328..ba88395f6fb1 100644 --- a/examples/quickstart-monai/data.py +++ b/examples/quickstart-monai/data.py @@ -14,11 +14,11 @@ ScaleIntensity, ToTensor, ) -from torch.utils.data import Subset -def _partition(dataset, num_shards, index): - total_size = len(dataset) +def _partition(files_list, labels_list, num_shards, index): + total_size = len(files_list) + assert total_size == len(labels_list), f"List of datapoints and labels must be of the same length" shard_size = total_size // num_shards # Calculate start and end indices for the shard @@ -30,28 +30,33 @@ def _partition(dataset, num_shards, index): end_idx = start_idx + shard_size # Create a subset for the shard - shard = Subset(dataset, range(start_idx, end_idx)) - return shard + files = files_list[start_idx: end_idx] + labels = labels_list[start_idx: end_idx] + return files, labels def load_data(num_shards, index): image_file_list, image_label_list, num_total, num_class = _download_data() + + # get partition given index + files_list, labels_list = _partition(image_file_list, image_label_list, num_shards, index) + + trainX, trainY, valX, valY, testX, testY = _split_data( - image_file_list, image_label_list, num_total + files_list, labels_list, len(files_list) ) train_transforms, val_transforms = _get_transforms() - train_ds = _partition( - MedNISTDataset(trainX, trainY, train_transforms), num_shards, index - ) + train_ds = MedNISTDataset(trainX, trainY, train_transforms) train_loader = DataLoader(train_ds, batch_size=300, shuffle=True) - val_ds = _partition(MedNISTDataset(valX, valY, val_transforms), num_shards, index) + val_ds = MedNISTDataset(valX, valY, val_transforms) val_loader = DataLoader(val_ds, batch_size=300) - test_ds = _partition( - MedNISTDataset(testX, testY, val_transforms), num_shards, index - ) + test_ds = MedNISTDataset(testX, testY, val_transforms) + test_loader = DataLoader(test_ds, batch_size=300) + + return train_loader, val_loader, test_loader, num_class test_loader = DataLoader(test_ds, batch_size=300) return train_loader, val_loader, test_loader, num_class From 026471a816441c2b87bacdd75e882fc2b12a4463 Mon Sep 17 00:00:00 2001 From: jafermarq Date: Wed, 17 Jan 2024 18:51:38 +0000 Subject: [PATCH 15/18] consistency with other examples --- examples/quickstart-monai/README.md | 4 ++-- examples/quickstart-monai/client.py | 2 +- examples/quickstart-monai/run.sh | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/quickstart-monai/README.md b/examples/quickstart-monai/README.md index b10f6dad4d54..d817962f0d33 100644 --- a/examples/quickstart-monai/README.md +++ b/examples/quickstart-monai/README.md @@ -73,13 +73,13 @@ Now you are ready to start the Flower clients which will participate in the lear Start client 1 in the first terminal: ```shell -python3 client.py --node_id 0 +python3 client.py --node-id 0 ``` Start client 2 in the second terminal: ```shell -python3 client.py --node_id 1 +python3 client.py --node-id 1 ``` You will see that the federated training is starting. Look at the [code](https://github.com/adap/flower/tree/main/examples/quickstart-monai) for a detailed explanation. 
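A note on this commit's rename of `--node_id` to `--node-id`: argparse normalizes hyphens in option names to underscores when it creates the parsed attribute, so the client can keep reading `args.node_id` unchanged after the flag is renamed in the hunks that follow. A minimal, standard-library-only sketch (the choices range mirrors the example's ten partitions):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--node-id", type=int, choices=range(10), required=True)
args = parser.parse_args(["--node-id", "3"])
print(args.node_id)  # 3; the dash becomes an underscore in the attribute name
```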
diff --git a/examples/quickstart-monai/client.py b/examples/quickstart-monai/client.py index 2586457bc354..a0d0e726f19b 100644 --- a/examples/quickstart-monai/client.py +++ b/examples/quickstart-monai/client.py @@ -44,7 +44,7 @@ def evaluate(self, parameters, config): total_partitions = 10 parser = argparse.ArgumentParser() parser.add_argument( - "--node_id", type=int, choices=range(total_partitions), required=True + "--node-id", type=int, choices=range(total_partitions), required=True ) args = parser.parse_args() diff --git a/examples/quickstart-monai/run.sh b/examples/quickstart-monai/run.sh index 6ba0c8f9af39..9be45ad5013e 100755 --- a/examples/quickstart-monai/run.sh +++ b/examples/quickstart-monai/run.sh @@ -10,7 +10,7 @@ sleep 3 # Sleep for 3s to give the server enough time to start for i in `seq 0 1`; do echo "Starting client $i" - python client.py --node_id $i & + python client.py --node-id $i & done # Enable CTRL+C to stop all background processes From cd04f6c4705a5028ccef792ce476571edf6605a8 Mon Sep 17 00:00:00 2001 From: jafermarq Date: Wed, 17 Jan 2024 20:13:36 +0000 Subject: [PATCH 16/18] formatting --- examples/quickstart-monai/data.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/examples/quickstart-monai/data.py b/examples/quickstart-monai/data.py index ba88395f6fb1..d184476522e8 100644 --- a/examples/quickstart-monai/data.py +++ b/examples/quickstart-monai/data.py @@ -18,7 +18,9 @@ def _partition(files_list, labels_list, num_shards, index): total_size = len(files_list) - assert total_size == len(labels_list), f"List of datapoints and labels must be of the same length" + assert total_size == len( + labels_list + ), f"List of datapoints and labels must be of the same length" shard_size = total_size // num_shards # Calculate start and end indices for the shard @@ -30,17 +32,18 @@ def _partition(files_list, labels_list, num_shards, index): end_idx = start_idx + shard_size # Create a subset for the shard - files = files_list[start_idx: end_idx] - labels = labels_list[start_idx: end_idx] + files = files_list[start_idx:end_idx] + labels = labels_list[start_idx:end_idx] return files, labels def load_data(num_shards, index): - image_file_list, image_label_list, num_total, num_class = _download_data() - - # get partition given index - files_list, labels_list = _partition(image_file_list, image_label_list, num_shards, index) + image_file_list, image_label_list, _, num_class = _download_data() + # Get partition given index + files_list, labels_list = _partition( + image_file_list, image_label_list, num_shards, index + ) trainX, trainY, valX, valY, testX, testY = _split_data( files_list, labels_list, len(files_list) @@ -57,9 +60,6 @@ def load_data(num_shards, index): test_loader = DataLoader(test_ds, batch_size=300) return train_loader, val_loader, test_loader, num_class - test_loader = DataLoader(test_ds, batch_size=300) - - return train_loader, val_loader, test_loader, num_class class MedNISTDataset(Dataset): From af97e032f83a1f759c4c4110fbc81800f6306e6a Mon Sep 17 00:00:00 2001 From: jafermarq Date: Wed, 17 Jan 2024 23:42:02 +0000 Subject: [PATCH 17/18] in top-level `README.md` --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 750b5cdb4b93..2858c9020973 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ design of Flower is based on a few guiding principles: - **Framework-agnostic**: Different machine learning frameworks have different strengths. 
Flower can be used with any machine learning framework, for example, [PyTorch](https://pytorch.org), - [TensorFlow](https://tensorflow.org), [Hugging Face Transformers](https://huggingface.co/), [PyTorch Lightning](https://pytorchlightning.ai/), [scikit-learn](https://scikit-learn.org/), [JAX](https://jax.readthedocs.io/), [TFLite](https://tensorflow.org/lite/), [fastai](https://www.fast.ai/), [Pandas](https://pandas.pydata.org/) for federated analytics, or even raw [NumPy](https://numpy.org/) + [TensorFlow](https://tensorflow.org), [Hugging Face Transformers](https://huggingface.co/), [PyTorch Lightning](https://pytorchlightning.ai/), [scikit-learn](https://scikit-learn.org/), [JAX](https://jax.readthedocs.io/), [TFLite](https://tensorflow.org/lite/), [fastai](https://www.fast.ai/), [MONAI](https://docs.monai.io/en/latest/index.html), [Pandas](https://pandas.pydata.org/) for federated analytics, or even raw [NumPy](https://numpy.org/) for users who enjoy computing gradients by hand. - **Understandable**: Flower is written with maintainability in mind. The @@ -124,6 +124,7 @@ Quickstart examples: - [Quickstart (fastai)](https://github.com/adap/flower/tree/main/examples/quickstart-fastai) - [Quickstart (Pandas)](https://github.com/adap/flower/tree/main/examples/quickstart-pandas) - [Quickstart (JAX)](https://github.com/adap/flower/tree/main/examples/quickstart-jax) +- [Quickstart (MONAI)](https://github.com/adap/flower/tree/main/examples/quickstart-monai) - [Quickstart (scikit-learn)](https://github.com/adap/flower/tree/main/examples/sklearn-logreg-mnist) - [Quickstart (Android [TFLite])](https://github.com/adap/flower/tree/main/examples/android) - [Quickstart (iOS [CoreML])](https://github.com/adap/flower/tree/main/examples/ios) From 68c80a35f93db71b4ae721c79d4b5b427b6f656c Mon Sep 17 00:00:00 2001 From: jafermarq Date: Wed, 28 Feb 2024 19:04:39 +0000 Subject: [PATCH 18/18] format --- examples/quickstart-monai/client.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/examples/quickstart-monai/client.py b/examples/quickstart-monai/client.py index 5582c72d0f02..0ed943da83cc 100644 --- a/examples/quickstart-monai/client.py +++ b/examples/quickstart-monai/client.py @@ -49,7 +49,9 @@ def evaluate(self, parameters, config): args = parser.parse_args() # Load model and data (simple CNN, CIFAR-10) - trainloader, _, testloader, num_class = load_data(total_partitions, args.partition_id) + trainloader, _, testloader, num_class = load_data( + total_partitions, args.partition_id + ) net = DenseNet121(spatial_dims=2, in_channels=1, out_channels=num_class).to(DEVICE) # Start Flower client
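For reference, the `weighted_average` aggregation added in `server.py` earlier in this series weights each client's reported accuracy by the number of evaluation examples it used. A small self-contained sketch with hypothetical client reports, using plain dicts in place of `flwr.common.Metrics` so it runs without Flower installed:

```python
from typing import Dict, List, Tuple

def weighted_average(metrics: List[Tuple[int, Dict[str, float]]]) -> Dict[str, float]:
    # Multiply each client's accuracy by the number of examples it evaluated on,
    # then normalize by the total number of examples across all clients.
    accuracies = [num_examples * m["accuracy"] for num_examples, m in metrics]
    examples = [num_examples for num_examples, _ in metrics]
    return {"accuracy": sum(accuracies) / sum(examples)}

# Two hypothetical clients: 120 examples at 90% accuracy, 80 examples at 80%
print(weighted_average([(120, {"accuracy": 0.90}), (80, {"accuracy": 0.80})]))
# -> {'accuracy': 0.86}
```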