diff --git a/.editorconfig b/.editorconfig index 321808ebaecf..103fe51237c8 100644 --- a/.editorconfig +++ b/.editorconfig @@ -16,6 +16,14 @@ profile = black indent_style = space indent_size = 2 +[*.md] +indent_style = space +indent_size = 2 + [*.yml] indent_style = space indent_size = 2 + +[*.toml] +indent_style = space +indent_size = 4 diff --git a/.github/workflows/_docker-build.yml b/.github/workflows/_docker-build.yml index 227b0d7482ae..b6a74d1b4677 100644 --- a/.github/workflows/_docker-build.yml +++ b/.github/workflows/_docker-build.yml @@ -101,6 +101,7 @@ jobs: attempt_delay: 9000 # 9 secs with: | pull: true + sbom: true platforms: ${{ matrix.platform.docker }} context: "{{defaultContext}}:${{ inputs.file-dir }}" outputs: type=image,name=${{ inputs.namespace-repository }},push-by-digest=true,name-canonical=true,push=true diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index aba3726017fd..355037668f7f 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -51,6 +51,63 @@ jobs: short_sha: ${{ steps.upload.outputs.SHORT_SHA }} dir: ${{ steps.upload.outputs.DIR }} + superexec: + runs-on: ubuntu-22.04 + timeout-minutes: 10 + needs: wheel + strategy: + fail-fast: false + matrix: + python-version: ["3.9", "3.10", "3.11"] + directory: [e2e-bare-auth] + connection: [secure, insecure] + engine: [deployment-engine, simulation-engine] + authentication: [no-auth, client-auth] + exclude: + - connection: insecure + authentication: client-auth + name: | + SuperExec / + Python ${{ matrix.python-version }} / + ${{ matrix.connection }} / + ${{ matrix.authentication }} / + ${{ matrix.engine }} + defaults: + run: + working-directory: e2e/${{ matrix.directory }} + steps: + - uses: actions/checkout@v4 + - name: Bootstrap + uses: ./.github/actions/bootstrap + with: + python-version: ${{ matrix.python-version }} + poetry-skip: 'true' + - name: Install Flower from repo + if: ${{ github.repository != 'adap/flower' || github.event.pull_request.head.repo.fork || github.actor == 'dependabot[bot]' }} + run: | + if [[ "${{ matrix.engine }}" == "simulation-engine" ]]; then + python -m pip install ".[simulation]" + else + python -m pip install . 
+ fi + - name: Download and install Flower wheel from artifact store + if: ${{ github.repository == 'adap/flower' && !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' }} + run: | + # Define base URL for wheel file + WHEEL_URL="https://${{ env.ARTIFACT_BUCKET }}/py/${{ needs.wheel.outputs.dir }}/${{ needs.wheel.outputs.short_sha }}/${{ needs.wheel.outputs.whl_path }}" + if [[ "${{ matrix.engine }}" == "simulation-engine" ]]; then + python -m pip install "flwr[simulation] @ ${WHEEL_URL}" + else + python -m pip install "${WHEEL_URL}" + fi + - name: > + Run SuperExec test / + ${{ matrix.connection }} / + ${{ matrix.authentication }} / + ${{ matrix.engine }} + working-directory: e2e/${{ matrix.directory }} + run: ./../test_superexec.sh "${{ matrix.connection }}" "${{ matrix.authentication}}" "${{ matrix.engine }}" + frameworks: runs-on: ubuntu-22.04 timeout-minutes: 10 diff --git a/.github/workflows/update_translations.yml b/.github/workflows/update_translations.yml new file mode 100644 index 000000000000..9419f4aaef25 --- /dev/null +++ b/.github/workflows/update_translations.yml @@ -0,0 +1,79 @@ +name: Translations + +on: + schedule: + - cron: '0 0 * * *' # Runs every day at midnight + workflow_dispatch: # Allows to manually trigger the workflow + +jobs: + update-and-pr: + runs-on: ubuntu-22.04 + permissions: + contents: write + pull-requests: write + env: + branch-name: auto-update-trans-text + name: Update text + steps: + - uses: actions/checkout@v4 + + - name: Bootstrap + uses: ./.github/actions/bootstrap + with: + python-version: '3.10' + + - name: Install dependencies + run: | + python -m poetry install + pip install sphinx==7.3.7 + + - name: Install pandoc + uses: nikeee/setup-pandoc@v1 + + - name: Update text and translations for all locales + run: | + cd doc + make update-text + for langDir in locales/*; do + if [ -d "$langDir" ]; then + lang=$(basename $langDir) + echo "Updating language $lang" + make update-lang lang=$lang + fi + done + + - name: Commit changes + run: | + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + git add doc/locales + git commit -m "Update text and language files" + continue-on-error: true + + - name: Calculate diff # Even without doc changes the update-lang command will generate 228 additions and 60 deletions, so we only want to open a PR when there is more + id: calculate_diff + run: | + additions=$(git diff --numstat HEAD^1 | awk '{s+=$1} END {print s}') + deletions=$(git diff --numstat HEAD^1 | awk '{s+=$2} END {print s}') + echo "Additions: $additions" + echo "Deletions: $deletions" + echo "additions=$additions" >> $GITHUB_OUTPUT + echo "deletions=$deletions" >> $GITHUB_OUTPUT + + - name: Push changes + if: steps.calculate_diff.outputs.additions > 228 && steps.calculate_diff.outputs.deletions > 60 + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branch: '${{ env.branch-name }}' + + - name: Create Pull Request + if: steps.calculate_diff.outputs.additions > 228 && steps.calculate_diff.outputs.deletions > 60 + uses: peter-evans/create-pull-request@v6 + with: + token: ${{ secrets.GITHUB_TOKEN }} + branch: '${{ env.branch-name }}' + delete-branch: true + title: 'docs(framework:skip) Update source texts for translations (automated)' + body: 'This PR is auto-generated to update text and language files.' 
+ draft: false diff --git a/README.md b/README.md index 9f2604ad37b0..1be37ed391f7 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,7 @@ [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](https://github.com/adap/flower/blob/main/CONTRIBUTING.md) ![Build](https://github.com/adap/flower/actions/workflows/framework.yml/badge.svg) [![Downloads](https://static.pepy.tech/badge/flwr)](https://pepy.tech/project/flwr) +[![Docker Hub](https://img.shields.io/badge/Docker%20Hub-flwr-blue)](https://hub.docker.com/u/flwr) [![Slack](https://img.shields.io/badge/Chat-Slack-red)](https://flower.ai/join-slack) Flower (`flwr`) is a framework for building federated learning systems. The diff --git a/benchmarks/flowertune-llm/README.md b/benchmarks/flowertune-llm/README.md index f45b7a6198b7..cab9b9156514 100644 --- a/benchmarks/flowertune-llm/README.md +++ b/benchmarks/flowertune-llm/README.md @@ -13,13 +13,13 @@ As the first step, please register for a Flower account on [flower.ai/login](htt Then, create a new Python environment and install Flower. > [!TIP] -> We recommend using `pyenv` and the `virtualenv` plugin to create your environment. Other manager such as Conda would likely work too. Check the [documentation](https://flower.ai/docs/framework/how-to-install-flower.html) for alternative ways of installing Flower. +> We recommend using `pyenv` with the `virtualenv` plugin to create your environment with Python >= 3.10.0. Other managers, such as Conda, will likely work as well. Check the [documentation](https://flower.ai/docs/framework/how-to-install-flower.html) for alternative ways to install Flower. ```shell pip install flwr ``` -On the new environment, create a new Flower project using the `FlowerTune` template. You will be prompted for a name to give to your project, your username, and for your choice of LLM challenge: +In the new environment, create a new Flower project using the `FlowerTune` template. You will be prompted for a name to give to your app/project, your username, and for your choice of LLM challenge: ```shell flwr new --framework=FlowerTune ``` @@ -64,5 +64,5 @@ following the `README.md` in [`evaluation`](https://github.com/adap/flower/tree/ > [!NOTE] -> If you have any questions about running FlowerTune LLM challenges or evaluation, please feel free to make posts at [Flower Discuss](https://discuss.flower.ai) forum, +> If you have any questions about running FlowerTune LLM challenges or evaluation, please feel free to make posts at our dedicated [FlowerTune Category](https://discuss.flower.ai/c/flowertune-llm-leaderboard/) on [Flower Discuss](https://discuss.flower.ai) forum, or join our [Slack channel](https://flower.ai/join-slack/) to ask questions in the `#flowertune-llm-leaderboard` channel. diff --git a/benchmarks/flowertune-llm/evaluation/README.md b/benchmarks/flowertune-llm/evaluation/README.md index e2a7477fca76..c99ad640203a 100644 --- a/benchmarks/flowertune-llm/evaluation/README.md +++ b/benchmarks/flowertune-llm/evaluation/README.md @@ -39,6 +39,9 @@ The default template generated by `flwr new` (see the [Project Creation Instruct |:----------:|:-----:|:---------:|:--------------:|:---------------:|:-----:| | Pass@1 (%) | 31.60 | 23.78 | 28.57 | 25.47 | 27.36 | +> [!NOTE] +> In the LLM Leaderboard, we rank the submissions based on the **average** value derived from different evaluation datasets for each challenge. 
+ ## Make submission on FlowerTune LLM Leaderboard diff --git a/datasets/doc/source/index.rst b/datasets/doc/source/index.rst index 84e25a920f2f..d6b51fc84ad6 100644 --- a/datasets/doc/source/index.rst +++ b/datasets/doc/source/index.rst @@ -3,6 +3,8 @@ Flower Datasets Flower Datasets (``flwr-datasets``) is a library that enables the quick and easy creation of datasets for federated learning/analytics/evaluation. It enables heterogeneity (non-iidness) simulation and division of datasets with the preexisting notion of IDs. The library was created by the ``Flower Labs`` team that also created `Flower `_ : A Friendly Federated Learning Framework. +Try out an interactive demo to generate code and visualize heterogeneous divisions at the :ref:`bottom of this page`. + Flower Datasets Framework ------------------------- @@ -133,7 +135,6 @@ What makes Flower Datasets stand out from other libraries? * New custom partitioning schemes (``Partitioner`` subclasses) integrated with the whole ecosystem. - Join the Flower Community ------------------------- @@ -144,3 +145,16 @@ The Flower Community is growing quickly - we're a friendly group of researchers, :shadow: Join us on Slack + +.. _demo: +Demo +---- + +.. raw:: html + + + + diff --git a/datasets/flwr_datasets/partitioner/__init__.py b/datasets/flwr_datasets/partitioner/__init__.py index 3fed4446db42..a14efa1cc905 100644 --- a/datasets/flwr_datasets/partitioner/__init__.py +++ b/datasets/flwr_datasets/partitioner/__init__.py @@ -27,6 +27,7 @@ from .partitioner import Partitioner from .pathological_partitioner import PathologicalPartitioner from .shard_partitioner import ShardPartitioner +from .size_partitioner import SizePartitioner from .square_partitioner import SquarePartitioner __all__ = [ @@ -42,5 +43,6 @@ "Partitioner", "PathologicalPartitioner", "ShardPartitioner", + "SizePartitioner", "SquarePartitioner", ] diff --git a/datasets/flwr_datasets/partitioner/size_partitioner.py b/datasets/flwr_datasets/partitioner/size_partitioner.py new file mode 100644 index 000000000000..a79b6b7249f2 --- /dev/null +++ b/datasets/flwr_datasets/partitioner/size_partitioner.py @@ -0,0 +1,128 @@ +# Copyright 2024 Flower Labs GmbH. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""SizePartitioner class.""" + + +import warnings +from collections.abc import Sequence + +import datasets +from flwr_datasets.partitioner.partitioner import Partitioner + + +class SizePartitioner(Partitioner): + """Partitioner that creates each partition with the size specified by a user. + + Parameters + ---------- + partition_sizes : Sequence[int] + The size of each partition. partition_id 0 will have partition_sizes[0] + samples, partition_id 1 will have partition_sizes[1] samples, etc. 
+
+    Examples
+    --------
+    >>> from flwr_datasets import FederatedDataset
+    >>> from flwr_datasets.partitioner import SizePartitioner
+    >>>
+    >>> partition_sizes = [15_000, 5_000, 30_000]
+    >>> partitioner = SizePartitioner(partition_sizes)
+    >>> fds = FederatedDataset(dataset="cifar10", partitioners={"train": partitioner})
+    """
+
+    def __init__(self, partition_sizes: Sequence[int]) -> None:
+        super().__init__()
+        self._pre_ds_validate_partition_sizes(partition_sizes)
+        self._partition_sizes = partition_sizes
+        self._partition_id_to_indices: dict[int, list[int]] = {}
+        self._partition_id_to_indices_determined = False
+
+    def load_partition(self, partition_id: int) -> datasets.Dataset:
+        """Load a single partition of the size of partition_sizes[partition_id].
+
+        For example, if given partition_sizes=[20_000, 10_000, 30_000],
+        then partition_id=0 will return a partition of size 20_000,
+        partition_id=1 will return a partition of size 10_000, etc.
+
+        Parameters
+        ----------
+        partition_id : int
+            The index that corresponds to the requested partition.
+
+        Returns
+        -------
+        dataset_partition : Dataset
+            Single dataset partition.
+        """
+        self._determine_partition_id_to_indices_if_needed()
+        return self.dataset.select(self._partition_id_to_indices[partition_id])
+
+    @property
+    def num_partitions(self) -> int:
+        """Total number of partitions."""
+        self._determine_partition_id_to_indices_if_needed()
+        return len(self._partition_sizes)
+
+    @property
+    def partition_id_to_indices(self) -> dict[int, list[int]]:
+        """Partition id to indices (the result of partitioning)."""
+        self._determine_partition_id_to_indices_if_needed()
+        return self._partition_id_to_indices
+
+    def _determine_partition_id_to_indices_if_needed(
+        self,
+    ) -> None:
+        """Create an assignment of indices to the partition indices."""
+        if self._partition_id_to_indices_determined:
+            return
+        self._post_ds_validate_partition_sizes()
+        start = 0
+        end = 0
+        for partition_id, partition_size in enumerate(self._partition_sizes):
+            end += partition_size
+            indices = list(range(start, end))
+            self._partition_id_to_indices[partition_id] = indices
+            start = end
+        self._partition_id_to_indices_determined = True
+
+    def _pre_ds_validate_partition_sizes(self, partition_sizes: Sequence[int]) -> None:
+        """Check if the partition sizes are valid (no information about the dataset)."""
+        if not isinstance(partition_sizes, Sequence):
+            raise ValueError("Partition sizes must be a sequence.")
+        if len(partition_sizes) == 0:
+            raise ValueError("Partition sizes must not be empty.")
+        if not all(
+            isinstance(partition_size, int) for partition_size in partition_sizes
+        ):
+            raise ValueError("All partition sizes must be integers.")
+        if not all(partition_size > 0 for partition_size in partition_sizes):
+            raise ValueError("All partition sizes must be greater than zero.")
+
+    def _post_ds_validate_partition_sizes(self) -> None:
+        """Validate the partition sizes against the dataset size."""
+        desired_partition_sizes = sum(self._partition_sizes)
+        dataset_size = len(self.dataset)
+        if desired_partition_sizes > dataset_size:
+            raise ValueError(
+                f"The sum of partition sizes sum({self._partition_sizes})"
+                f" = {desired_partition_sizes} is greater than the size of"
+                f" the dataset {dataset_size}."
+            )
+        if desired_partition_sizes < dataset_size:
+            warnings.warn(
+                f"The sum of partition sizes is {desired_partition_sizes}, which is "
+                f"smaller than the size of the dataset: {dataset_size}. 
" + f"Ignore this warning if it is the desired behavior.", + stacklevel=1, + ) diff --git a/datasets/flwr_datasets/partitioner/size_partitioner_test.py b/datasets/flwr_datasets/partitioner/size_partitioner_test.py new file mode 100644 index 000000000000..be8edf9d2764 --- /dev/null +++ b/datasets/flwr_datasets/partitioner/size_partitioner_test.py @@ -0,0 +1,392 @@ +# Copyright 2023 Flower Labs GmbH. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Test ShardPartitioner.""" + + +# pylint: disable=W0212, R0913 +import unittest +from typing import Optional + +from datasets import Dataset +from flwr_datasets.partitioner.shard_partitioner import ShardPartitioner + + +def _dummy_setup( + num_rows: int, + partition_by: str, + num_partitions: int, + num_shards_per_partition: Optional[int], + shard_size: Optional[int], + keep_incomplete_shard: bool = False, +) -> tuple[Dataset, ShardPartitioner]: + """Create a dummy dataset for testing.""" + data = { + partition_by: [i % 3 for i in range(num_rows)], + "features": list(range(num_rows)), + } + dataset = Dataset.from_dict(data) + partitioner = ShardPartitioner( + num_partitions=num_partitions, + num_shards_per_partition=num_shards_per_partition, + partition_by=partition_by, + shard_size=shard_size, + keep_incomplete_shard=keep_incomplete_shard, + ) + partitioner.dataset = dataset + return dataset, partitioner + + +class TestShardPartitionerSpec1(unittest.TestCase): + """Test first possible initialization of ShardPartitioner. + + Specify num_shards_per_partition and shard_size arguments. + """ + + def test_correct_num_partitions(self) -> None: + """Test the correct number of partitions is created.""" + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = 3 + shard_size = 10 + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + _ = partitioner.load_partition(0) + num_partitions_created = len(partitioner._partition_id_to_indices.keys()) + self.assertEqual(num_partitions_created, num_partitions) + + def test_correct_partition_sizes(self) -> None: + """Test if the partitions sizes are as theoretically calculated.""" + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = 3 + shard_size = 10 + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + sizes = [len(partitioner.load_partition(i)) for i in range(num_partitions)] + sizes = sorted(sizes) + self.assertEqual(sizes, [30, 30, 30]) + + def test_unique_samples(self) -> None: + """Test if each partition has unique samples. + + (No duplicates along partitions). 
+ """ + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = 3 + shard_size = 10 + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + partitions = [ + partitioner.load_partition(i)["features"] for i in range(num_partitions) + ] + combined_list = [item for sublist in partitions for item in sublist] + combined_set = set(combined_list) + self.assertEqual(len(combined_list), len(combined_set)) + + +class TestShardPartitionerSpec2(unittest.TestCase): + """Test second possible initialization of ShardPartitioner. + + Specify shard_size and keep_incomplete_shard=False. This setting creates partitions + that might have various sizes (each shard is same size). + """ + + def test_correct_num_partitions(self) -> None: + """Test the correct number of partitions is created.""" + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = None + shard_size = 10 + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + _ = partitioner.load_partition(0) + num_partitions_created = len(partitioner._partition_id_to_indices.keys()) + self.assertEqual(num_partitions_created, num_partitions) + + def test_correct_partition_sizes(self) -> None: + """Test if the partitions sizes are as theoretically calculated.""" + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = None + shard_size = 10 + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + sizes = [len(partitioner.load_partition(i)) for i in range(num_partitions)] + sizes = sorted(sizes) + self.assertEqual(sizes, [30, 40, 40]) + + def test_unique_samples(self) -> None: + """Test if each partition has unique samples. + + (No duplicates along partitions). + """ + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = None + shard_size = 10 + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + partitions = [ + partitioner.load_partition(i)["features"] for i in range(num_partitions) + ] + combined_list = [item for sublist in partitions for item in sublist] + combined_set = set(combined_list) + self.assertEqual(len(combined_list), len(combined_set)) + + +class TestShardPartitionerSpec3(unittest.TestCase): + """Test third possible initialization of ShardPartitioner. + + Specify shard_size and keep_incomplete_shard=True. This setting creates partitions + that might have various sizes (each shard is same size). 
+ """ + + def test_correct_num_partitions(self) -> None: + """Test the correct number of partitions is created.""" + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = None + shard_size = 10 + keep_incomplete_shard = True + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + _ = partitioner.load_partition(0) + num_partitions_created = len(partitioner._partition_id_to_indices.keys()) + self.assertEqual(num_partitions_created, num_partitions) + + def test_correct_partition_sizes(self) -> None: + """Test if the partitions sizes are as theoretically calculated.""" + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = None + shard_size = 10 + keep_incomplete_shard = True + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + sizes = [len(partitioner.load_partition(i)) for i in range(num_partitions)] + sizes = sorted(sizes) + self.assertEqual(sizes, [33, 40, 40]) + + def test_unique_samples(self) -> None: + """Test if each partition has unique samples. + + (No duplicates along partitions). + """ + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = None + shard_size = 10 + keep_incomplete_shard = True + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + partitions = [ + partitioner.load_partition(i)["features"] for i in range(num_partitions) + ] + combined_list = [item for sublist in partitions for item in sublist] + combined_set = set(combined_list) + self.assertEqual(len(combined_list), len(combined_set)) + + +class TestShardPartitionerSpec4(unittest.TestCase): + """Test fourth possible initialization of ShardPartitioner. + + Specify num_shards_per_partition but not shard_size arguments. + """ + + def test_correct_num_partitions(self) -> None: + """Test the correct number of partitions is created.""" + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = 3 + shard_size = None + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + _ = partitioner.load_partition(0) + num_partitions_created = len(partitioner._partition_id_to_indices.keys()) + self.assertEqual(num_partitions_created, num_partitions) + + def test_correct_partition_sizes(self) -> None: + """Test if the partitions sizes are as theoretically calculated.""" + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = 3 + shard_size = None + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + sizes = [len(partitioner.load_partition(i)) for i in range(num_partitions)] + sizes = sorted(sizes) + self.assertEqual(sizes, [36, 36, 36]) + + def test_unique_samples(self) -> None: + """Test if each partition has unique samples. + + (No duplicates along partitions). 
+ """ + partition_by = "label" + num_rows = 113 + num_partitions = 3 + num_shards_per_partition = 3 + shard_size = None + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + partitions = [ + partitioner.load_partition(i)["features"] for i in range(num_partitions) + ] + combined_list = [item for sublist in partitions for item in sublist] + combined_set = set(combined_list) + self.assertEqual(len(combined_list), len(combined_set)) + + +class TestShardPartitionerIncorrectSpec(unittest.TestCase): + """Test the incorrect specification cases. + + The lack of correctness can be caused by the num_partitions, shard_size and + num_shards_per_partition can create. + """ + + def test_incorrect_specification(self) -> None: + """Test if the given specification makes the partitioning possible.""" + partition_by = "label" + num_rows = 10 + num_partitions = 3 + num_shards_per_partition = 2 + shard_size = 10 + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + with self.assertRaises(ValueError): + _ = partitioner.load_partition(0) + + def test_too_big_shard_size(self) -> None: + """Test if it is impossible to create an empty partition.""" + partition_by = "label" + num_rows = 20 + num_partitions = 3 + num_shards_per_partition = None + shard_size = 10 + keep_incomplete_shard = False + _, partitioner = _dummy_setup( + num_rows, + partition_by, + num_partitions, + num_shards_per_partition, + shard_size, + keep_incomplete_shard, + ) + with self.assertRaises(ValueError): + _ = partitioner.load_partition(2).num_rows + + +if __name__ == "__main__": + unittest.main() diff --git a/dev/build-docker-image-matrix.py b/dev/build-docker-image-matrix.py index c19949e358b9..52c96e3cca7a 100644 --- a/dev/build-docker-image-matrix.py +++ b/dev/build-docker-image-matrix.py @@ -134,7 +134,7 @@ def tag_latest_ubuntu_with_flwr_version(image: BaseImage) -> List[str]: ubuntu_base_images = generate_base_images( flwr_version, SUPPORTED_PYTHON_VERSIONS, - [Distro(DistroName.UBUNTU, "22.04")], + [Distro(DistroName.UBUNTU, "24.04")], ) # alpine base images for the latest supported python version alpine_base_images = generate_base_images( diff --git a/dev/format.sh b/dev/format.sh index e1e2abc307f1..ada5a7f13abc 100755 --- a/dev/format.sh +++ b/dev/format.sh @@ -36,3 +36,6 @@ python -m nbstripout examples/*/*.ipynb --extra-keys "$KEYS" # Markdown python -m mdformat --number doc/source examples + +# RST +docstrfmt doc/source diff --git a/dev/test.sh b/dev/test.sh index 58ac0b3d24cd..170d9f4acd1e 100755 --- a/dev/test.sh +++ b/dev/test.sh @@ -56,6 +56,14 @@ echo "- mdformat: done" echo "- All Markdown checks passed" +echo "- Start rST checks" + +echo "- docstrfmt: start" +docstrfmt --check doc/source +echo "- docstrfmt: done" + +echo "- All rST checks passed" + echo "- Start license checks" echo "- copyright: start" diff --git a/doc/locales/fr/LC_MESSAGES/framework-docs.po b/doc/locales/fr/LC_MESSAGES/framework-docs.po index bee09019489f..e6cd61627bf6 100644 --- a/doc/locales/fr/LC_MESSAGES/framework-docs.po +++ b/doc/locales/fr/LC_MESSAGES/framework-docs.po @@ -3,7 +3,7 @@ msgid "" msgstr "" "Project-Id-Version: Flower Docs\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-09-15 09:09+0200\n" +"POT-Creation-Date: 2024-09-27 00:30+0000\n" "PO-Revision-Date: 
2023-09-05 17:54+0000\n" "Last-Translator: Charles Beauville \n" "Language: fr\n" @@ -13,7 +13,7 @@ msgstr "" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.15.0\n" +"Generated-By: Babel 2.16.0\n" #: ../../source/contributor-explanation-public-and-private-apis.rst:2 msgid "Public and private APIs" @@ -58,23 +58,23 @@ msgid "" "or not by reading the Flower source code." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:22 +#: ../../source/contributor-explanation-public-and-private-apis.rst:23 #, fuzzy msgid "Flower public API" msgstr "Flower ClientApp." -#: ../../source/contributor-explanation-public-and-private-apis.rst:24 +#: ../../source/contributor-explanation-public-and-private-apis.rst:25 msgid "Flower has a well-defined public API. Let's look at this in more detail." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:28 +#: ../../source/contributor-explanation-public-and-private-apis.rst:29 msgid "" "Every component that is reachable by recursively following " "``__init__.__all__`` starting from the root package (``flwr``) is part of" " the public API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:30 +#: ../../source/contributor-explanation-public-and-private-apis.rst:32 msgid "" "If you want to determine whether a component " "(class/function/generator/...) is part of the public API or not, you need" @@ -82,13 +82,13 @@ msgid "" "src/py/flwr`` to look at the Python sub-packages contained ``flwr``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:43 +#: ../../source/contributor-explanation-public-and-private-apis.rst:46 msgid "" "Contrast this with the definition of ``__all__`` in the root " "``src/py/flwr/__init__.py``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:55 +#: ../../source/contributor-explanation-public-and-private-apis.rst:59 msgid "" "You can see that ``flwr`` has six subpackages (``cli``, ``client``, " "``common``, ``proto``, ``server``, ``simulation``), but only four of them" @@ -96,7 +96,7 @@ msgid "" "``simulation``)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:57 +#: ../../source/contributor-explanation-public-and-private-apis.rst:63 msgid "" "What does this mean? It means that ``client``, ``common``, ``server`` and" " ``simulation`` are part of the public API, but ``cli`` and ``proto`` are" @@ -107,21 +107,21 @@ msgid "" "even be removed completely." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:62 +#: ../../source/contributor-explanation-public-and-private-apis.rst:70 msgid "Therefore, as a Flower user:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:64 +#: ../../source/contributor-explanation-public-and-private-apis.rst:72 msgid "``from flwr import client`` ✅ Ok, you're importing a public API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:65 +#: ../../source/contributor-explanation-public-and-private-apis.rst:73 msgid "" "``from flwr import proto`` ❌ Not recommended, you're importing a private " "API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:67 +#: ../../source/contributor-explanation-public-and-private-apis.rst:75 msgid "" "What about components that are nested deeper in the hierarchy? Let's look" " at Flower strategies to see another typical pattern. 
Flower strategies " @@ -130,7 +130,7 @@ msgid "" "``src/py/flwr/server/strategy/__init__.py``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:81 +#: ../../source/contributor-explanation-public-and-private-apis.rst:91 msgid "" "What's notable here is that all strategies are implemented in dedicated " "modules (e.g., ``fedavg.py``). In ``__init__.py``, we *import* the " @@ -142,33 +142,33 @@ msgid "" "the public API (as long as we update the import path in ``__init__.py``)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:86 +#: ../../source/contributor-explanation-public-and-private-apis.rst:99 msgid "Therefore:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:88 +#: ../../source/contributor-explanation-public-and-private-apis.rst:101 msgid "" "``from flwr.server.strategy import FedAvg`` ✅ Ok, you're importing a " "class that is part of the public API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:89 +#: ../../source/contributor-explanation-public-and-private-apis.rst:103 msgid "" "``from flwr.server.strategy import fedavg`` ❌ Not recommended, you're " "importing a private module." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:91 +#: ../../source/contributor-explanation-public-and-private-apis.rst:106 msgid "" "This approach is also implemented in the tooling that automatically " "builds API reference docs." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:94 +#: ../../source/contributor-explanation-public-and-private-apis.rst:110 msgid "Flower public API of private packages" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:96 +#: ../../source/contributor-explanation-public-and-private-apis.rst:112 msgid "" "We also use this to define the public API of private subpackages. Public," " in this context, means the API that other ``flwr`` subpackages should " @@ -176,14 +176,14 @@ msgid "" "not exported via ``src/py/flwr/server/__init__.py``'s ``__all__``)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:100 +#: ../../source/contributor-explanation-public-and-private-apis.rst:117 msgid "" "Still, the private sub-package ``flwr.server.driver`` defines a " "\"public\" API using ``__all__`` in " "``src/py/flwr/server/driver/__init__.py``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:114 +#: ../../source/contributor-explanation-public-and-private-apis.rst:132 msgid "" "The interesting part is that both ``GrpcDriver`` and ``InMemoryDriver`` " "are never used by Flower framework users, only by other parts of the " @@ -195,7 +195,7 @@ msgid "" "``InMemoryDriver`` class definition)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:117 +#: ../../source/contributor-explanation-public-and-private-apis.rst:140 msgid "" "This is because ``flwr.server.driver`` defines a public interface for " "other ``flwr`` subpackages. This allows codeowners of " @@ -224,23 +224,23 @@ msgid "" "development environment." msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:12 +#: ../../source/contributor-how-to-build-docker-images.rst:13 #, fuzzy msgid "Clone the ``flower`` repository." msgstr "**Fourche le dépôt de Flower**" -#: ../../source/contributor-how-to-build-docker-images.rst:18 +#: ../../source/contributor-how-to-build-docker-images.rst:19 msgid "Verify the Docker daemon is running." 
msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:20 +#: ../../source/contributor-how-to-build-docker-images.rst:21 msgid "" "The build instructions that assemble the images are located in the " "respective Dockerfiles. You can find them in the subdirectories of " "``src/docker``." msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:23 +#: ../../source/contributor-how-to-build-docker-images.rst:24 msgid "" "Flower Docker images are configured via build arguments. Through build " "arguments, we can make the creation of images more flexible. For example," @@ -251,149 +251,149 @@ msgid "" "below." msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:30 +#: ../../source/contributor-how-to-build-docker-images.rst:32 #, fuzzy msgid "Building the Base Image" msgstr "Chargement des données" -#: ../../source/contributor-how-to-build-docker-images.rst:36 -#: ../../source/contributor-how-to-build-docker-images.rst:98 +#: ../../source/contributor-how-to-build-docker-images.rst:38 +#: ../../source/contributor-how-to-build-docker-images.rst:104 #, fuzzy msgid "Build argument" msgstr "Amélioration de la documentation" -#: ../../source/contributor-how-to-build-docker-images.rst:37 -#: ../../source/contributor-how-to-build-docker-images.rst:99 +#: ../../source/contributor-how-to-build-docker-images.rst:39 +#: ../../source/contributor-how-to-build-docker-images.rst:105 #, fuzzy msgid "Description" msgstr "Dépréciations" -#: ../../source/contributor-how-to-build-docker-images.rst:38 -#: ../../source/contributor-how-to-build-docker-images.rst:100 +#: ../../source/contributor-how-to-build-docker-images.rst:40 +#: ../../source/contributor-how-to-build-docker-images.rst:106 #, fuzzy msgid "Required" msgstr "Changements nécessaires" -#: ../../source/contributor-how-to-build-docker-images.rst:39 -#: ../../source/contributor-how-to-build-docker-images.rst:101 -#: ../../source/docker/persist-superlink-state.rst:18 -#: ../../source/docker/pin-version.rst:11 +#: ../../source/contributor-how-to-build-docker-images.rst:41 +#: ../../source/contributor-how-to-build-docker-images.rst:107 +#: ../../source/docker/persist-superlink-state.rst:19 +#: ../../source/docker/pin-version.rst:12 #: ../../source/docker/set-environment-variables.rst:8 #, fuzzy msgid "Example" msgstr "Exemples de PyTorch" -#: ../../source/contributor-how-to-build-docker-images.rst:40 +#: ../../source/contributor-how-to-build-docker-images.rst:42 msgid "``DISTRO``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:41 +#: ../../source/contributor-how-to-build-docker-images.rst:43 #, fuzzy msgid "The Linux distribution to use as the base image." 
msgstr "Chargement des données" -#: ../../source/contributor-how-to-build-docker-images.rst:42 -#: ../../source/contributor-how-to-build-docker-images.rst:46 -#: ../../source/contributor-how-to-build-docker-images.rst:50 -#: ../../source/contributor-how-to-build-docker-images.rst:66 -#: ../../source/contributor-how-to-build-docker-images.rst:70 -#: ../../source/contributor-how-to-build-docker-images.rst:104 +#: ../../source/contributor-how-to-build-docker-images.rst:44 +#: ../../source/contributor-how-to-build-docker-images.rst:48 +#: ../../source/contributor-how-to-build-docker-images.rst:52 +#: ../../source/contributor-how-to-build-docker-images.rst:68 +#: ../../source/contributor-how-to-build-docker-images.rst:75 +#: ../../source/contributor-how-to-build-docker-images.rst:110 #, fuzzy msgid "No" msgstr "Aucun" -#: ../../source/contributor-how-to-build-docker-images.rst:43 +#: ../../source/contributor-how-to-build-docker-images.rst:45 msgid "``ubuntu``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:44 +#: ../../source/contributor-how-to-build-docker-images.rst:46 #, fuzzy msgid "``DISTRO_VERSION``" msgstr "Version Python" -#: ../../source/contributor-how-to-build-docker-images.rst:45 +#: ../../source/contributor-how-to-build-docker-images.rst:47 msgid "Version of the Linux distribution." msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:47 +#: ../../source/contributor-how-to-build-docker-images.rst:49 msgid ":substitution-code:`|ubuntu_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:48 +#: ../../source/contributor-how-to-build-docker-images.rst:50 #, fuzzy msgid "``PYTHON_VERSION``" msgstr "Version Python" -#: ../../source/contributor-how-to-build-docker-images.rst:49 +#: ../../source/contributor-how-to-build-docker-images.rst:51 msgid "Version of ``python`` to be installed." msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:51 +#: ../../source/contributor-how-to-build-docker-images.rst:53 msgid "``3.11`` or ``3.11.1``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:52 +#: ../../source/contributor-how-to-build-docker-images.rst:54 msgid "``PIP_VERSION``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:53 +#: ../../source/contributor-how-to-build-docker-images.rst:55 msgid "Version of ``pip`` to be installed." msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:54 -#: ../../source/contributor-how-to-build-docker-images.rst:58 -#: ../../source/contributor-how-to-build-docker-images.rst:62 -#: ../../source/contributor-how-to-build-docker-images.rst:108 +#: ../../source/contributor-how-to-build-docker-images.rst:56 +#: ../../source/contributor-how-to-build-docker-images.rst:60 +#: ../../source/contributor-how-to-build-docker-images.rst:64 +#: ../../source/contributor-how-to-build-docker-images.rst:114 #, fuzzy msgid "Yes" msgstr "Types" -#: ../../source/contributor-how-to-build-docker-images.rst:55 +#: ../../source/contributor-how-to-build-docker-images.rst:57 msgid ":substitution-code:`|pip_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:56 +#: ../../source/contributor-how-to-build-docker-images.rst:58 msgid "``SETUPTOOLS_VERSION``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:57 +#: ../../source/contributor-how-to-build-docker-images.rst:59 msgid "Version of ``setuptools`` to be installed." 
msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:59 +#: ../../source/contributor-how-to-build-docker-images.rst:61 msgid ":substitution-code:`|setuptools_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:60 +#: ../../source/contributor-how-to-build-docker-images.rst:62 msgid "``FLWR_VERSION``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:61 +#: ../../source/contributor-how-to-build-docker-images.rst:63 msgid "Version of Flower to be installed." msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:63 +#: ../../source/contributor-how-to-build-docker-images.rst:65 msgid ":substitution-code:`|stable_flwr_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:64 +#: ../../source/contributor-how-to-build-docker-images.rst:66 msgid "``FLWR_PACKAGE``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:65 +#: ../../source/contributor-how-to-build-docker-images.rst:67 msgid "The Flower package to be installed." msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:67 +#: ../../source/contributor-how-to-build-docker-images.rst:69 msgid "``flwr`` or ``flwr-nightly``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:68 +#: ../../source/contributor-how-to-build-docker-images.rst:70 #, fuzzy msgid "``FLWR_VERSION_REF``" msgstr "Version Python" -#: ../../source/contributor-how-to-build-docker-images.rst:69 +#: ../../source/contributor-how-to-build-docker-images.rst:71 msgid "" "A `direct reference " "`_." msgstr "" -#: ../../source/contributor-how-to-contribute-translations.rst:29 +#: ../../source/contributor-how-to-contribute-translations.rst:28 msgid "" "Once you are signed in to Weblate, you can navigate to the `Flower " "Framework project `_." msgstr "" -#: ../../source/contributor-how-to-contribute-translations.rst:67 +#: ../../source/contributor-how-to-contribute-translations.rst:64 msgid "Add new languages" msgstr "" -#: ../../source/contributor-how-to-contribute-translations.rst:69 +#: ../../source/contributor-how-to-contribute-translations.rst:66 msgid "" "If you want to add a new language, you will first have to contact us, " "either on `Slack `_, or by opening an issue" @@ -598,17 +598,17 @@ msgstr "" "formater le code ou exécuter des tests. À cette fin, nous utilisons " "l'extension VSCode Remote Containers. Qu'est-ce que c'est ?" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:7 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:8 +#, fuzzy msgid "" "The Visual Studio Code Remote - Containers extension lets you use a " "Docker container as a fully-featured development environment. It allows " "you to open any folder inside (or mounted into) a container and take " "advantage of Visual Studio Code's full feature set. A " -":code:`devcontainer.json` file in your project tells VS Code how to " -"access (or create) a development container with a well-defined tool and " -"runtime stack. This container can be used to run an application or to " -"separate tools, libraries, or runtimes needed for working with a " -"codebase." +"``devcontainer.json`` file in your project tells VS Code how to access " +"(or create) a development container with a well-defined tool and runtime " +"stack. This container can be used to run an application or to separate " +"tools, libraries, or runtimes needed for working with a codebase." 
msgstr "" "L'extension Visual Studio Code Remote - Containers te permet d'utiliser " "un conteneur Docker comme environnement de développement complet. Elle te" @@ -621,7 +621,7 @@ msgstr "" " les outils, les bibliothèques ou les exécutions nécessaires pour " "travailler avec une base de code." -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:9 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:16 msgid "" "Workspace files are mounted from the local file system or copied or " "cloned into the container. Extensions are installed and run inside the " @@ -637,7 +637,7 @@ msgstr "" "environnement de développement simplement en te connectant à un autre " "conteneur." -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:11 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:22 #, fuzzy msgid "" "Source: `Official VSCode documentation " @@ -646,19 +646,19 @@ msgstr "" "Source : `Documentation officielle de VSCode " "`_" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:15 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:26 msgid "Getting started" msgstr "Pour commencer" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:17 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:28 #, fuzzy msgid "" -"Configuring and setting up the :code:`Dockerfile` as well the " -"configuration for the devcontainer can be a bit more involved. The good " -"thing is you don't have to do it. Usually it should be enough to install " -"`Docker `_ on your system and " -"ensure its available on your command line. Additionally, install the " -"`VSCode Containers Extension `_ on your system and ensure its" +" available on your command line. Additionally, install the `VSCode " +"Containers Extension `_." msgstr "" "La configuration et le paramétrage du :code:`Dockerfile` ainsi que la " @@ -669,7 +669,7 @@ msgstr "" "`VSCode Containers Extension `_." -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:19 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:35 msgid "" "Now you should be good to go. When starting VSCode, it will ask you to " "run in the container environment and - if you confirm - automatically " @@ -686,7 +686,7 @@ msgstr "" "inférieur gauche de ta fenêtre VSCode et sélectionner l'option " "*(Re)Ouvrir le dossier dans le conteneur*." -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:21 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:41 msgid "" "In some cases your setup might be more involved. For those cases consult " "the following sources:" @@ -694,7 +694,7 @@ msgstr "" "Dans certains cas, ton installation peut être plus complexe. Pour ces " "cas-là, consulte les sources suivantes :" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:23 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:44 #, fuzzy msgid "" "`Developing inside a Container " @@ -705,7 +705,7 @@ msgstr "" "`_" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:24 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:46 #, fuzzy msgid "" "`Remote development in Containers " @@ -737,7 +737,7 @@ msgstr "" "supprimer ``poetry.lock`` (``rm poetry.lock``) avant d'exécuter ``poetry " "install``)." 
-#: ../../source/contributor-how-to-install-development-versions.rst:12 +#: ../../source/contributor-how-to-install-development-versions.rst:14 msgid "" "``flwr = { version = \"1.0.0a0\", allow-prereleases = true }`` (without " "extras)" @@ -745,7 +745,7 @@ msgstr "" "``flwr = { version = \"1.0.0a0\", allow-prereleases = true }`` (sans " "extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:13 +#: ../../source/contributor-how-to-install-development-versions.rst:15 msgid "" "``flwr = { version = \"1.0.0a0\", allow-prereleases = true, extras = " "[\"simulation\"] }`` (with extras)" @@ -753,7 +753,7 @@ msgstr "" "``flwr = { version = \"1.0.0a0\", allow-prereleases = true, extras = " "[\"simulation\"] }`` (avec extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:15 +#: ../../source/contributor-how-to-install-development-versions.rst:18 msgid "" "Install ``flwr`` from a local copy of the Flower source code via " "``pyproject.toml``:" @@ -761,11 +761,11 @@ msgstr "" "Installez ``flwr`` à partir d'une copie locale du code source de Flower " "via ``pyproject.toml`` :" -#: ../../source/contributor-how-to-install-development-versions.rst:17 +#: ../../source/contributor-how-to-install-development-versions.rst:20 msgid "``flwr = { path = \"../../\", develop = true }`` (without extras)" msgstr "``flwr = { path = \"../../\", develop = true }`` (sans extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:18 +#: ../../source/contributor-how-to-install-development-versions.rst:21 msgid "" "``flwr = { path = \"../../\", develop = true, extras = [\"simulation\"] " "}`` (with extras)" @@ -773,11 +773,11 @@ msgstr "" "``flwr = { path = \"../../\", develop = true, extras = [\"simulation\"] " "}`` (avec extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:20 +#: ../../source/contributor-how-to-install-development-versions.rst:23 msgid "Install ``flwr`` from a local wheel file via ``pyproject.toml``:" msgstr "Installez ``flwr`` à partir d'un fichier local via ``pyproject.toml`` :" -#: ../../source/contributor-how-to-install-development-versions.rst:22 +#: ../../source/contributor-how-to-install-development-versions.rst:25 #, fuzzy msgid "" "``flwr = { path = \"../../dist/flwr-1.8.0-py3-none-any.whl\" }`` (without" @@ -786,7 +786,7 @@ msgstr "" "``flwr = { path = \"../../dist/flwr-1.0.0-py3-none-any.whl\" }`` (sans " "extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:23 +#: ../../source/contributor-how-to-install-development-versions.rst:26 #, fuzzy msgid "" "``flwr = { path = \"../../dist/flwr-1.8.0-py3-none-any.whl\", extras = " @@ -795,7 +795,7 @@ msgstr "" "``flwr = { path = \"../../dist/flwr-1.0.0-py3-none-any.whl\", extras = " "[\"simulation\"] }`` (avec extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:25 +#: ../../source/contributor-how-to-install-development-versions.rst:29 msgid "" "Please refer to the Poetry documentation for further details: `Poetry " "Dependency Specification `_" -#: ../../source/contributor-how-to-install-development-versions.rst:28 +#: ../../source/contributor-how-to-install-development-versions.rst:33 msgid "Using pip (recommended on Colab)" msgstr "Utiliser pip (recommandé sur Colab)" -#: ../../source/contributor-how-to-install-development-versions.rst:30 +#: ../../source/contributor-how-to-install-development-versions.rst:35 msgid "Install a ``flwr`` pre-release from PyPI:" msgstr "Installe une pré-version de ``flwr`` depuis 
PyPI :" -#: ../../source/contributor-how-to-install-development-versions.rst:32 +#: ../../source/contributor-how-to-install-development-versions.rst:37 msgid "``pip install -U --pre flwr`` (without extras)" msgstr "``pip install -U --pre flwr`` (sans les extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:33 +#: ../../source/contributor-how-to-install-development-versions.rst:38 msgid "``pip install -U --pre 'flwr[simulation]'`` (with extras)" msgstr "``pip install -U --pre 'flwr[simulation]'`` (avec les extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:35 +#: ../../source/contributor-how-to-install-development-versions.rst:40 msgid "" "Python packages can be installed from git repositories. Use one of the " "following commands to install the Flower directly from GitHub." @@ -830,11 +830,11 @@ msgstr "" "Utilise l'une des commandes suivantes pour installer Flower directement à" " partir de GitHub." -#: ../../source/contributor-how-to-install-development-versions.rst:37 +#: ../../source/contributor-how-to-install-development-versions.rst:43 msgid "Install ``flwr`` from the default GitHub branch (``main``):" msgstr "Installez ``flwr`` à partir de la branche GitHub par défaut (``main``) :" -#: ../../source/contributor-how-to-install-development-versions.rst:39 +#: ../../source/contributor-how-to-install-development-versions.rst:45 msgid "" "``pip install flwr@git+https://github.com/adap/flower.git`` (without " "extras)" @@ -842,7 +842,7 @@ msgstr "" "``pip install flwr@git+https://github.com/adap/flower.git`` (sans les " "extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:40 +#: ../../source/contributor-how-to-install-development-versions.rst:46 msgid "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git'``" " (with extras)" @@ -850,13 +850,13 @@ msgstr "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git'``" " (avec les extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:42 +#: ../../source/contributor-how-to-install-development-versions.rst:49 msgid "Install ``flwr`` from a specific GitHub branch (``branch-name``):" msgstr "" "Installez ``flwr`` à partir d'une branche GitHub spécifique (``nom-" "branche``) :" -#: ../../source/contributor-how-to-install-development-versions.rst:44 +#: ../../source/contributor-how-to-install-development-versions.rst:51 msgid "" "``pip install flwr@git+https://github.com/adap/flower.git@branch-name`` " "(without extras)" @@ -864,7 +864,7 @@ msgstr "" "``pip install flwr@git+https://github.com/adap/flower.git@nom-branche`` " "(sans les extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:45 +#: ../../source/contributor-how-to-install-development-versions.rst:53 #, fuzzy msgid "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git" @@ -873,11 +873,11 @@ msgstr "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git" "``@nom-de-la-branche'`` (avec des extras)" -#: ../../source/contributor-how-to-install-development-versions.rst:49 +#: ../../source/contributor-how-to-install-development-versions.rst:57 msgid "Open Jupyter Notebooks on Google Colab" msgstr "Ouvre les carnets Jupyter sur Google Colab" -#: ../../source/contributor-how-to-install-development-versions.rst:51 +#: ../../source/contributor-how-to-install-development-versions.rst:59 #, fuzzy msgid "" "Open the notebook ``doc/source/tutorial-series-get-started-with-flower-" @@ -886,7 +886,7 @@ 
msgstr "" "Ouvrir le notebook ``doc/source/tutorial/Flower-1-Intro-to-FL-" "PyTorch.ipynb`` :" -#: ../../source/contributor-how-to-install-development-versions.rst:53 +#: ../../source/contributor-how-to-install-development-versions.rst:61 #, fuzzy msgid "" "https://colab.research.google.com/github/adap/flower/blob/main/doc/source" @@ -895,7 +895,7 @@ msgstr "" "https://colab.research.google.com/github/adap/flower/blob/main/doc/source" "/tutorial-get-started-with-flower-pytorch.ipynb" -#: ../../source/contributor-how-to-install-development-versions.rst:55 +#: ../../source/contributor-how-to-install-development-versions.rst:63 msgid "" "Open a development version of the same notebook from branch `branch-name`" " by changing ``main`` to ``branch-name`` (right after ``blob``):" @@ -904,7 +904,7 @@ msgstr "" "`nom-branche` en remplaçant `main` par `nom-branche` (juste après `blob`)" " :" -#: ../../source/contributor-how-to-install-development-versions.rst:57 +#: ../../source/contributor-how-to-install-development-versions.rst:66 #, fuzzy msgid "" "https://colab.research.google.com/github/adap/flower/blob/branch-" @@ -913,21 +913,21 @@ msgstr "" "https://colab.research.google.com/github/adap/flower/blob/branch-" "name/doc/source/tutorial-get-started-with-flower-pytorch.ipynb" -#: ../../source/contributor-how-to-install-development-versions.rst:59 +#: ../../source/contributor-how-to-install-development-versions.rst:68 msgid "Install a `whl` on Google Colab:" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:61 +#: ../../source/contributor-how-to-install-development-versions.rst:70 msgid "" "In the vertical icon grid on the left hand side, select ``Files`` > " "``Upload to session storage``" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:62 +#: ../../source/contributor-how-to-install-development-versions.rst:72 msgid "Upload the whl (e.g., ``flwr-1.8.0-py3-none-any.whl``)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:63 +#: ../../source/contributor-how-to-install-development-versions.rst:73 msgid "" "Change ``!pip install -q 'flwr[simulation]' torch torchvision " "matplotlib`` to ``!pip install -q 'flwr-1.8.0-py3-none-" @@ -946,11 +946,11 @@ msgstr "" "Ce document décrit le processus de diffusion actuel, qui peut ou non " "changer à l'avenir." -#: ../../source/contributor-how-to-release-flower.rst:7 +#: ../../source/contributor-how-to-release-flower.rst:8 msgid "During the release" msgstr "Lors de la sortie" -#: ../../source/contributor-how-to-release-flower.rst:9 +#: ../../source/contributor-how-to-release-flower.rst:10 msgid "" "The version number of a release is stated in ``pyproject.toml``. To " "release a new version of Flower, the following things need to happen (in " @@ -960,14 +960,14 @@ msgstr "" "Pour publier une nouvelle version de Flower, les choses suivantes doivent" " se produire (dans cet ordre) :" -#: ../../source/contributor-how-to-release-flower.rst:11 +#: ../../source/contributor-how-to-release-flower.rst:13 msgid "" "Run ``python3 src/py/flwr_tool/update_changelog.py `` in " "order to add every new change to the changelog (feel free to make manual " "changes to the changelog afterwards until it looks good)." 
msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:12 +#: ../../source/contributor-how-to-release-flower.rst:16 msgid "" "Once the changelog has been updated with all the changes, run ``./dev" "/prepare-release-changelog.sh v``, where ```` " @@ -977,7 +977,7 @@ msgid "" "the contributors. Open a pull request with those changes." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:13 +#: ../../source/contributor-how-to-release-flower.rst:22 msgid "" "Once the pull request is merged, tag the release commit with the version " "number as soon as the PR is merged: ``git tag v`` (notice " @@ -986,33 +986,33 @@ msgid "" "artifacts and the relevant part of the changelog." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:14 +#: ../../source/contributor-how-to-release-flower.rst:26 msgid "Check the draft release on GitHub, and if everything is good, publish it." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:17 +#: ../../source/contributor-how-to-release-flower.rst:29 msgid "After the release" msgstr "Après la publication" -#: ../../source/contributor-how-to-release-flower.rst:19 +#: ../../source/contributor-how-to-release-flower.rst:31 msgid "Create a pull request which contains the following changes:" msgstr "Crée une demande de pull qui contient les modifications suivantes :" -#: ../../source/contributor-how-to-release-flower.rst:21 +#: ../../source/contributor-how-to-release-flower.rst:33 msgid "Increase the minor version in ``pyproject.toml`` by one." msgstr "Augmente la version mineure de ``pyproject.toml`` d'une unité." -#: ../../source/contributor-how-to-release-flower.rst:22 +#: ../../source/contributor-how-to-release-flower.rst:34 msgid "Update all files which contain the current version number if necessary." msgstr "" "Mets à jour tous les fichiers qui contiennent le numéro de version actuel" " si nécessaire." -#: ../../source/contributor-how-to-release-flower.rst:23 +#: ../../source/contributor-how-to-release-flower.rst:35 msgid "Add a new ``Unreleased`` section in ``changelog.md``." msgstr "Ajoute une nouvelle section ``Unreleased`` dans ``changelog.md``." -#: ../../source/contributor-how-to-release-flower.rst:25 +#: ../../source/contributor-how-to-release-flower.rst:37 msgid "" "Merge the pull request on the same day (i.e., before a new nightly " "release gets published to PyPI)." @@ -1020,15 +1020,15 @@ msgstr "" "Fusionne la pull request le jour même (c'est-à-dire avant qu'une nouvelle" " version nightly ne soit publiée sur PyPI)." -#: ../../source/contributor-how-to-release-flower.rst:28 +#: ../../source/contributor-how-to-release-flower.rst:41 msgid "Publishing a pre-release" msgstr "Publier une pré-version" -#: ../../source/contributor-how-to-release-flower.rst:31 +#: ../../source/contributor-how-to-release-flower.rst:44 msgid "Pre-release naming" msgstr "Nom de la pré-version" -#: ../../source/contributor-how-to-release-flower.rst:33 +#: ../../source/contributor-how-to-release-flower.rst:46 msgid "" "PyPI supports pre-releases (alpha, beta, release candidate). 
Pre-releases" " MUST use one of the following naming patterns:" @@ -1037,39 +1037,39 @@ msgstr "" "Les préversions DOIVENT utiliser l'un des modèles de dénomination " "suivants :" -#: ../../source/contributor-how-to-release-flower.rst:35 +#: ../../source/contributor-how-to-release-flower.rst:49 msgid "Alpha: ``MAJOR.MINOR.PATCHaN``" msgstr "Alpha : ``MAJOR.MINOR.PATCHaN``" -#: ../../source/contributor-how-to-release-flower.rst:36 +#: ../../source/contributor-how-to-release-flower.rst:50 msgid "Beta: ``MAJOR.MINOR.PATCHbN``" msgstr "Bêta : ``MAJOR.MINOR.PATCHbN``" -#: ../../source/contributor-how-to-release-flower.rst:37 +#: ../../source/contributor-how-to-release-flower.rst:51 msgid "Release candidate (RC): ``MAJOR.MINOR.PATCHrcN``" msgstr "Candidat à la publication (RC) : ``MAJOR.MINOR.PATCHrcN``" -#: ../../source/contributor-how-to-release-flower.rst:39 +#: ../../source/contributor-how-to-release-flower.rst:53 msgid "Examples include:" msgstr "Voici quelques exemples :" -#: ../../source/contributor-how-to-release-flower.rst:41 +#: ../../source/contributor-how-to-release-flower.rst:55 msgid "``1.0.0a0``" msgstr "``1.0.0a0``" -#: ../../source/contributor-how-to-release-flower.rst:42 +#: ../../source/contributor-how-to-release-flower.rst:56 msgid "``1.0.0b0``" msgstr "``1.0.0b0``" -#: ../../source/contributor-how-to-release-flower.rst:43 +#: ../../source/contributor-how-to-release-flower.rst:57 msgid "``1.0.0rc0``" msgstr "``1.0.0rc0``" -#: ../../source/contributor-how-to-release-flower.rst:44 +#: ../../source/contributor-how-to-release-flower.rst:58 msgid "``1.0.0rc1``" msgstr "1.0.0rc1" -#: ../../source/contributor-how-to-release-flower.rst:46 +#: ../../source/contributor-how-to-release-flower.rst:60 msgid "" "This is in line with PEP-440 and the recommendations from the Python " "Packaging Authority (PyPA):" @@ -1077,11 +1077,11 @@ msgstr "" "Ceci est conforme au PEP-440 et aux recommandations de l'Autorité de " "l'emballage Python (PyPA) :" -#: ../../source/contributor-how-to-release-flower.rst:49 +#: ../../source/contributor-how-to-release-flower.rst:63 msgid "`PEP-440 `_" msgstr "`PEP-440 `_" -#: ../../source/contributor-how-to-release-flower.rst:50 +#: ../../source/contributor-how-to-release-flower.rst:64 msgid "" "`PyPA Choosing a versioning scheme " "`_" -#: ../../source/contributor-how-to-release-flower.rst:52 +#: ../../source/contributor-how-to-release-flower.rst:67 msgid "" "Note that the approach defined by PyPA is not compatible with SemVer " "2.0.0 spec, for details consult the `Semantic Versioning Specification " @@ -1103,17 +1103,17 @@ msgstr "" "Versioning Specification `_ (en particulier le point 11 sur la préséance)." -#: ../../source/contributor-how-to-release-flower.rst:55 +#: ../../source/contributor-how-to-release-flower.rst:73 msgid "Pre-release classification" msgstr "Classification avant publication" -#: ../../source/contributor-how-to-release-flower.rst:57 +#: ../../source/contributor-how-to-release-flower.rst:75 msgid "Should the next pre-release be called alpha, beta, or release candidate?" msgstr "" "La prochaine préversion doit-elle être appelée alpha, bêta ou release " "candidate ?" 
-#: ../../source/contributor-how-to-release-flower.rst:59 +#: ../../source/contributor-how-to-release-flower.rst:77 msgid "" "RC: feature complete, no known issues (apart from issues that are " "classified as \"won't fix\" for the next stable release) - if no issues " @@ -1124,11 +1124,11 @@ msgstr "" "version stable) - si aucun problème n'apparaît, cette version deviendra " "la prochaine version stable" -#: ../../source/contributor-how-to-release-flower.rst:60 +#: ../../source/contributor-how-to-release-flower.rst:80 msgid "Beta: feature complete, allowed to have known issues" msgstr "Bêta : fonctionnalité complète, autorisée à avoir des problèmes connus" -#: ../../source/contributor-how-to-release-flower.rst:61 +#: ../../source/contributor-how-to-release-flower.rst:81 msgid "Alpha: not feature complete, allowed to have known issues" msgstr "" "Alpha : les fonctionnalités ne sont pas complètes, les problèmes connus " @@ -1151,12 +1151,12 @@ msgstr "" "Anaconda. Tu peux suivre les instructions ou choisir la configuration que" " tu préfères." -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:9 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:10 msgid "Python Version" msgstr "Version Python" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:11 -#: ../../source/how-to-install-flower.rst:8 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:12 +#: ../../source/how-to-install-flower.rst:7 #, fuzzy msgid "" "Flower requires at least `Python 3.9 `_, " @@ -1166,7 +1166,7 @@ msgstr "" "Flower nécessite `Python 3.9 `_ ou plus, " "nous recommandons `Python 3.10 `_." -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:14 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:17 msgid "" "Due to a known incompatibility with `ray " "`_, we currently recommend utilizing at " @@ -1174,12 +1174,12 @@ msgid "" "simulations." msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:19 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:22 #, fuzzy msgid "Virtualenv with Pyenv/Virtualenv" msgstr "Virutualenv avec Pyenv/Virtualenv" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:21 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:24 msgid "" "One of the recommended virtual environment is `pyenv " "`_/`virtualenv `_ pour plus de " "détails." 
-#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:23 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:29 #, fuzzy msgid "" "Once Pyenv is set up, you can use it to install `Python Version 3.10 " @@ -1201,19 +1201,19 @@ msgstr "" "Une fois Pyenv mis en place, tu peux l'utiliser pour installer `Python " "Version 3.7 `_ ou supérieure :" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:29 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:36 msgid "Create the virtualenv with:" msgstr "Crée le virtualenv avec :" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:36 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:42 msgid "Activate the virtualenv by running the following command:" msgstr "Active la virtualenv en exécutant la commande suivante :" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:44 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:49 msgid "Virtualenv with Poetry" msgstr "Virtualenv et la poésie" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:46 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:51 msgid "" "The Flower examples are based on `Poetry `_ to manage dependencies. After installing Poetry you " @@ -1223,7 +1223,7 @@ msgstr "" "poetry.org/docs/>`_ pour gérer les dépendances. Après l'installation de " "Poetry, il te suffit de créer un environnement virtuel avec :" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:52 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:58 msgid "" "If you open a new terminal you can activate the previously created " "virtual environment with the following command:" @@ -1231,15 +1231,16 @@ msgstr "" "Si tu ouvres un nouveau terminal, tu peux activer l'environnement virtuel" " précédemment créé avec la commande suivante :" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:60 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:66 msgid "Virtualenv with Anaconda" msgstr "Virtualenv avec Anaconda" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:62 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:68 +#, fuzzy msgid "" "If you prefer to use Anaconda for your virtual environment then install " "and setup the `conda `_ package. After setting it up you can " +"/user-guide/install/index.html>`_ package. After setting it up you can " "create a virtual environment with:" msgstr "" "Si tu préfères utiliser Anaconda pour ton environnement virtuel, installe" @@ -1248,15 +1249,15 @@ msgstr "" "guide/install/index.html>`_. Après l'avoir configuré, tu peux créer un " "environnement virtuel avec :" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:68 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:76 msgid "and activate the virtual environment with:" msgstr "et active l'environnement virtuel avec :" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:76 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:83 msgid "And then?" msgstr "Et ensuite ?" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:78 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:85 msgid "" "As soon as you created your virtual environment you clone one of the " "`Flower examples `_." 
@@ -1268,11 +1269,11 @@ msgstr "" msgid "Write documentation" msgstr "Rédiger de la documentation" -#: ../../source/contributor-how-to-write-documentation.rst:6 +#: ../../source/contributor-how-to-write-documentation.rst:5 msgid "Project layout" msgstr "Schéma du projet" -#: ../../source/contributor-how-to-write-documentation.rst:8 +#: ../../source/contributor-how-to-write-documentation.rst:7 msgid "" "The Flower documentation lives in the ``doc`` directory. The Sphinx-based" " documentation system supports both reStructuredText (``.rst`` files) and" @@ -1283,7 +1284,7 @@ msgstr "" "reStructuredText (fichiers `.rst`) et Markdown (fichiers `.md`)." #: ../../source/contributor-how-to-write-documentation.rst:10 -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:169 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:193 #, fuzzy msgid "" "Note that, in order to build the documentation locally (with ``poetry run" @@ -1295,20 +1296,20 @@ msgstr "" "make html``, comme décrit plus bas), `Pandoc " "_` doit être installé sur le système." -#: ../../source/contributor-how-to-write-documentation.rst:14 +#: ../../source/contributor-how-to-write-documentation.rst:15 msgid "Edit an existing page" msgstr "Modifier une page existante" -#: ../../source/contributor-how-to-write-documentation.rst:16 +#: ../../source/contributor-how-to-write-documentation.rst:17 msgid "Edit an existing ``.rst`` (or ``.md``) file under ``doc/source/``" msgstr "Modifier un fichier ``.rst`` (ou ``.md``) existant sous ``doc/source/``" -#: ../../source/contributor-how-to-write-documentation.rst:17 +#: ../../source/contributor-how-to-write-documentation.rst:18 #: ../../source/contributor-how-to-write-documentation.rst:27 msgid "Compile the docs: ``cd doc``, then ``poetry run make html``" msgstr "Compilez les documents : ``cd doc``, puis ``poetry run make html``" -#: ../../source/contributor-how-to-write-documentation.rst:18 +#: ../../source/contributor-how-to-write-documentation.rst:19 #: ../../source/contributor-how-to-write-documentation.rst:28 msgid "Open ``doc/build/html/index.html`` in the browser to check the result" msgstr "" @@ -1347,11 +1348,11 @@ msgstr "" "quelques recommandations sur les points de départ pour augmenter tes " "chances de voir ton PR accepté dans la base de code de Flower." -#: ../../source/contributor-ref-good-first-contributions.rst:11 +#: ../../source/contributor-ref-good-first-contributions.rst:9 msgid "Where to start" msgstr "Par où commencer" -#: ../../source/contributor-ref-good-first-contributions.rst:13 +#: ../../source/contributor-ref-good-first-contributions.rst:11 msgid "" "Until the Flower core library matures it will be easier to get PR's " "accepted if they only touch non-core areas of the codebase. Good " @@ -1362,25 +1363,25 @@ msgstr "" " non essentielles de la base de code. Les bons candidats pour commencer " "sont :" -#: ../../source/contributor-ref-good-first-contributions.rst:17 +#: ../../source/contributor-ref-good-first-contributions.rst:14 msgid "Documentation: What's missing? What could be expressed more clearly?" msgstr "" "Documentation : Qu'est-ce qui manque ? Qu'est-ce qui pourrait être " "exprimé plus clairement ?" -#: ../../source/contributor-ref-good-first-contributions.rst:18 +#: ../../source/contributor-ref-good-first-contributions.rst:15 msgid "Baselines: See below." msgstr "Références : voir ci-dessous." 
-#: ../../source/contributor-ref-good-first-contributions.rst:19 +#: ../../source/contributor-ref-good-first-contributions.rst:16 msgid "Examples: See below." msgstr "Exemples : voir ci-dessous." -#: ../../source/contributor-ref-good-first-contributions.rst:23 +#: ../../source/contributor-ref-good-first-contributions.rst:19 msgid "Request for Flower Baselines" msgstr "Demande pour une nouvelle Flower Baseline" -#: ../../source/contributor-ref-good-first-contributions.rst:25 +#: ../../source/contributor-ref-good-first-contributions.rst:21 #, fuzzy msgid "" "If you are not familiar with Flower Baselines, you should probably check-" @@ -1391,7 +1392,7 @@ msgstr "" "probablement consulter notre `guide de contribution pour les baselines " "`_." -#: ../../source/contributor-ref-good-first-contributions.rst:27 +#: ../../source/contributor-ref-good-first-contributions.rst:25 #, fuzzy msgid "" "You should then check out the open `issues " @@ -1406,7 +1407,7 @@ msgstr "" " laquelle tu aimerais travailler et qui n'a pas d'assignés, n'hésite pas " "à te l'attribuer et à commencer à travailler dessus !" -#: ../../source/contributor-ref-good-first-contributions.rst:31 +#: ../../source/contributor-ref-good-first-contributions.rst:30 msgid "" "Otherwise, if you don't find a baseline you'd like to work on, be sure to" " open a new issue with the baseline request template!" @@ -1459,12 +1460,13 @@ msgstr "" "protocole SecAgg peut être considéré comme un cas particulier du " "protocole SecAgg+." -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:8 -msgid "The :code:`SecAgg+` abstraction" +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:9 +#, fuzzy +msgid "The ``SecAgg+`` abstraction" msgstr "L'abstraction :code:`SecAgg+`" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:10 -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:161 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:11 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:163 msgid "" "In this implementation, each client will be assigned with a unique index " "(int) for secure aggregation, and thus many python dictionaries used have" @@ -1475,8 +1477,8 @@ msgstr "" "dictionnaires python utilisés ont des clés de type int plutôt que de type" " ClientProxy." -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:65 -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:198 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:67 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:204 msgid "" "The Flower server will execute and process received results in the " "following order:" @@ -1484,11 +1486,12 @@ msgstr "" "Le serveur Flower exécutera et traitera les résultats reçus dans l'ordre " "suivant :" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:159 -msgid "The :code:`LightSecAgg` abstraction" +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:161 +#, fuzzy +msgid "The ``LightSecAgg`` abstraction" msgstr "L'abstraction :code:`LightSecAgg`" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:271 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:277 msgid "Types" msgstr "Types" @@ -1505,7 +1508,7 @@ msgstr "" "de Flower mais qui n'ont pas l'habitude de contribuer à des projets " "GitHub." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:6 +#: ../../source/contributor-tutorial-contribute-on-github.rst:7 #, fuzzy msgid "" "If you're familiar with how contributing on GitHub works, you can " @@ -1518,15 +1521,15 @@ msgstr "" "contributors.html>`_ et des exemples de `bonnes premières contributions " "`_." -#: ../../source/contributor-tutorial-contribute-on-github.rst:10 +#: ../../source/contributor-tutorial-contribute-on-github.rst:12 msgid "Setting up the repository" msgstr "Mise en place du référentiel" -#: ../../source/contributor-tutorial-contribute-on-github.rst:12 +#: ../../source/contributor-tutorial-contribute-on-github.rst:29 msgid "**Create a GitHub account and setup Git**" msgstr "**Créer un compte GitHub et configurer Git**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:13 +#: ../../source/contributor-tutorial-contribute-on-github.rst:15 #, fuzzy msgid "" "Git is a distributed version control tool. This allows for an entire " @@ -1541,7 +1544,7 @@ msgstr "" "locale, tu peux suivre ce `guide `_ pour le mettre en place." -#: ../../source/contributor-tutorial-contribute-on-github.rst:16 +#: ../../source/contributor-tutorial-contribute-on-github.rst:21 msgid "" "GitHub, itself, is a code hosting platform for version control and " "collaboration. It allows for everyone to collaborate and work from " @@ -1551,7 +1554,7 @@ msgstr "" "contrôle des versions et la collaboration. Il permet à chacun de " "collaborer et de travailler de n'importe où sur des dépôts à distance." -#: ../../source/contributor-tutorial-contribute-on-github.rst:18 +#: ../../source/contributor-tutorial-contribute-on-github.rst:25 msgid "" "If you haven't already, you will need to create an account on `GitHub " "`_." @@ -1559,7 +1562,7 @@ msgstr "" "Si ce n'est pas déjà fait, tu devras créer un compte sur `GitHub " "`_." -#: ../../source/contributor-tutorial-contribute-on-github.rst:20 +#: ../../source/contributor-tutorial-contribute-on-github.rst:28 msgid "" "The idea behind the generic Git and GitHub workflow boils down to this: " "you download code from a remote repository on GitHub, make changes " @@ -1571,15 +1574,15 @@ msgstr "" " des modifications localement et tu en gardes une trace à l'aide de Git, " "puis tu télécharges ton nouvel historique à nouveau sur GitHub." -#: ../../source/contributor-tutorial-contribute-on-github.rst:23 +#: ../../source/contributor-tutorial-contribute-on-github.rst:42 msgid "**Forking the Flower repository**" msgstr "**Fourche le dépôt de Flower**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:24 +#: ../../source/contributor-tutorial-contribute-on-github.rst:32 #, fuzzy msgid "" "A fork is a personal copy of a GitHub repository. To create one for " -"Flower, you must navigate to ``_ (while " +"Flower, you must navigate to https://github.com/adap/flower (while " "connected to your GitHub account) and click the ``Fork`` button situated " "on the top right of the page." msgstr "" @@ -1588,7 +1591,7 @@ msgstr "" "étant connecté à ton compte GitHub) et cliquer sur le bouton ``Fork`` " "situé en haut à droite de la page." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:29 +#: ../../source/contributor-tutorial-contribute-on-github.rst:38 msgid "" "You can change the name if you want, but this is not necessary as this " "version of Flower will be yours and will sit inside your own account " @@ -1601,11 +1604,11 @@ msgstr "" " devrais voir dans le coin supérieur gauche que tu es en train de " "regarder ta propre version de Flower." -#: ../../source/contributor-tutorial-contribute-on-github.rst:34 +#: ../../source/contributor-tutorial-contribute-on-github.rst:59 msgid "**Cloning your forked repository**" msgstr "**Clonage de ton dépôt forké**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:35 +#: ../../source/contributor-tutorial-contribute-on-github.rst:45 msgid "" "The next step is to download the forked repository on your machine to be " "able to make changes to it. On your forked repository page, you should " @@ -1617,7 +1620,7 @@ msgstr "" "forké, tu dois d'abord cliquer sur le bouton ``Code`` à droite, ce qui te" " permettra de copier le lien HTTPS du dépôt." -#: ../../source/contributor-tutorial-contribute-on-github.rst:41 +#: ../../source/contributor-tutorial-contribute-on-github.rst:52 msgid "" "Once you copied the \\, you can open a terminal on your machine, " "navigate to the place you want to download the repository to and type:" @@ -1626,7 +1629,7 @@ msgstr "" "machine, naviguer jusqu'à l'endroit où tu veux télécharger le référentiel" " et taper :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:47 +#: ../../source/contributor-tutorial-contribute-on-github.rst:59 #, fuzzy msgid "" "This will create a ``flower/`` (or the name of your fork if you renamed " @@ -1635,15 +1638,15 @@ msgstr "" "Cela créera un dossier `flower/` (ou le nom de ta fourche si tu l'as " "renommée) dans le répertoire de travail actuel." -#: ../../source/contributor-tutorial-contribute-on-github.rst:49 +#: ../../source/contributor-tutorial-contribute-on-github.rst:78 msgid "**Add origin**" msgstr "**Ajouter l'origine**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:50 +#: ../../source/contributor-tutorial-contribute-on-github.rst:62 msgid "You can then go into the repository folder:" msgstr "Tu peux ensuite aller dans le dossier du référentiel :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:56 +#: ../../source/contributor-tutorial-contribute-on-github.rst:68 msgid "" "And here we will need to add an origin to our repository. The origin is " "the \\ of the remote fork repository. To obtain it, we can do as " @@ -1655,7 +1658,7 @@ msgstr "" "indiqué précédemment en allant sur notre dépôt fork sur notre compte " "GitHub et en copiant le lien." -#: ../../source/contributor-tutorial-contribute-on-github.rst:61 +#: ../../source/contributor-tutorial-contribute-on-github.rst:75 msgid "" "Once the \\ is copied, we can type the following command in our " "terminal:" @@ -1663,11 +1666,11 @@ msgstr "" "Une fois que le \\ est copié, nous pouvons taper la commande " "suivante dans notre terminal :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:68 +#: ../../source/contributor-tutorial-contribute-on-github.rst:102 msgid "**Add upstream**" msgstr "**Ajouter en amont**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:69 +#: ../../source/contributor-tutorial-contribute-on-github.rst:81 #, fuzzy msgid "" "Now we will add an upstream address to our repository. 
Still in the same " @@ -1677,13 +1680,13 @@ msgstr "" "Toujours dans le même directroy, nous devons exécuter la commande " "suivante :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:76 +#: ../../source/contributor-tutorial-contribute-on-github.rst:88 msgid "The following diagram visually explains what we did in the previous steps:" msgstr "" "Le schéma suivant explique visuellement ce que nous avons fait dans les " "étapes précédentes :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:80 +#: ../../source/contributor-tutorial-contribute-on-github.rst:92 msgid "" "The upstream is the GitHub remote address of the parent repository (in " "this case Flower), i.e. the one we eventually want to contribute to and " @@ -1697,7 +1700,7 @@ msgstr "" "simplement l'adresse distante GitHub du dépôt forké que nous avons créé, " "c'est-à-dire la copie (fork) dans notre propre compte." -#: ../../source/contributor-tutorial-contribute-on-github.rst:84 +#: ../../source/contributor-tutorial-contribute-on-github.rst:97 msgid "" "To make sure our local version of the fork is up-to-date with the latest " "changes from the Flower repository, we can execute the following command:" @@ -1706,11 +1709,11 @@ msgstr "" "dernières modifications du dépôt Flower, nous pouvons exécuter la " "commande suivante :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:93 +#: ../../source/contributor-tutorial-contribute-on-github.rst:105 msgid "Setting up the coding environment" msgstr "Mise en place de l'environnement de codage" -#: ../../source/contributor-tutorial-contribute-on-github.rst:95 +#: ../../source/contributor-tutorial-contribute-on-github.rst:107 #, fuzzy msgid "" "This can be achieved by following this :doc:`getting started guide for " @@ -1723,11 +1726,11 @@ msgstr "" "fois que tu es capable d'écrire du code et de le tester, tu peux enfin " "commencer à faire des changements !" -#: ../../source/contributor-tutorial-contribute-on-github.rst:100 +#: ../../source/contributor-tutorial-contribute-on-github.rst:113 msgid "Making changes" msgstr "Apporter des changements" -#: ../../source/contributor-tutorial-contribute-on-github.rst:102 +#: ../../source/contributor-tutorial-contribute-on-github.rst:115 msgid "" "Before making any changes make sure you are up-to-date with your " "repository:" @@ -1735,15 +1738,15 @@ msgstr "" "Avant de faire des changements, assure-toi que tu es à jour avec ton " "référentiel :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:108 +#: ../../source/contributor-tutorial-contribute-on-github.rst:121 msgid "And with Flower's repository:" msgstr "Et avec le référentiel de Flower :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:114 +#: ../../source/contributor-tutorial-contribute-on-github.rst:134 msgid "**Create a new branch**" msgstr "**Créer une nouvelle branche**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:115 +#: ../../source/contributor-tutorial-contribute-on-github.rst:128 msgid "" "To make the history cleaner and easier to work with, it is good practice " "to create a new branch for each feature/project that needs to be " @@ -1753,7 +1756,7 @@ msgstr "" "une bonne pratique de créer une nouvelle branche pour chaque " "fonctionnalité/projet qui doit être mis en œuvre." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:118 +#: ../../source/contributor-tutorial-contribute-on-github.rst:131 msgid "" "To do so, just run the following command inside the repository's " "directory:" @@ -1761,21 +1764,21 @@ msgstr "" "Pour ce faire, il suffit d'exécuter la commande suivante dans le " "répertoire du référentiel :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:124 +#: ../../source/contributor-tutorial-contribute-on-github.rst:136 msgid "**Make changes**" msgstr "**Apporter des modifications**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:125 +#: ../../source/contributor-tutorial-contribute-on-github.rst:137 msgid "Write great code and create wonderful changes using your favorite editor!" msgstr "" "Écris du bon code et crée de merveilleuses modifications à l'aide de ton " "éditeur préféré !" -#: ../../source/contributor-tutorial-contribute-on-github.rst:127 +#: ../../source/contributor-tutorial-contribute-on-github.rst:149 msgid "**Test and format your code**" msgstr "**Teste et mets en forme ton code**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:128 +#: ../../source/contributor-tutorial-contribute-on-github.rst:139 msgid "" "Don't forget to test and format your code! Otherwise your code won't be " "able to be merged into the Flower repository. This is done so the " @@ -1785,15 +1788,15 @@ msgstr "" "pourra pas être fusionné dans le dépôt Flower, et ce, afin que la base de" " code reste cohérente et facile à comprendre." -#: ../../source/contributor-tutorial-contribute-on-github.rst:131 +#: ../../source/contributor-tutorial-contribute-on-github.rst:143 msgid "To do so, we have written a few scripts that you can execute:" msgstr "Pour ce faire, nous avons écrit quelques scripts que tu peux exécuter :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:140 +#: ../../source/contributor-tutorial-contribute-on-github.rst:162 msgid "**Stage changes**" msgstr "**Changements de scène**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:141 +#: ../../source/contributor-tutorial-contribute-on-github.rst:152 msgid "" "Before creating a commit that will update your history, you must specify " "to Git which files it needs to take into account." @@ -1801,48 +1804,51 @@ msgstr "" "Avant de créer un commit qui mettra à jour ton historique, tu dois " "spécifier à Git les fichiers qu'il doit prendre en compte." -#: ../../source/contributor-tutorial-contribute-on-github.rst:143 +#: ../../source/contributor-tutorial-contribute-on-github.rst:155 msgid "This can be done with:" msgstr "Cela peut se faire avec :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:149 +#: ../../source/contributor-tutorial-contribute-on-github.rst:161 +#, fuzzy msgid "" "To check which files have been modified compared to the last version " "(last commit) and to see which files are staged for commit, you can use " -"the :code:`git status` command." +"the ``git status`` command." msgstr "" "Pour vérifier quels fichiers ont été modifiés par rapport à la dernière " "version (last commit) et pour voir quels fichiers sont mis à disposition " "pour le commit, tu peux utiliser la commande :code:`git status`." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:152 +#: ../../source/contributor-tutorial-contribute-on-github.rst:173 msgid "**Commit changes**" msgstr "**Commit changes**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:153 +#: ../../source/contributor-tutorial-contribute-on-github.rst:165 +#, fuzzy msgid "" -"Once you have added all the files you wanted to commit using :code:`git " -"add`, you can finally create your commit using this command:" +"Once you have added all the files you wanted to commit using ``git add``," +" you can finally create your commit using this command:" msgstr "" "Une fois que tu as ajouté tous les fichiers que tu voulais livrer à " "l'aide de :code:`git add`, tu peux enfin créer ta livraison à l'aide de " "cette commande :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:159 +#: ../../source/contributor-tutorial-contribute-on-github.rst:172 +#, fuzzy msgid "" "The \\ is there to explain to others what the commit " "does. It should be written in an imperative style and be concise. An " -"example would be :code:`git commit -m \"Add images to README\"`." +"example would be ``git commit -m \"Add images to README\"``." msgstr "" "Le ``commit_message`` est là pour expliquer aux autres ce que fait le " "commit. Il doit être écrit dans un style impératif et être concis. Un " "exemple serait :code:`git commit -m \"Ajouter des images au README\"`." -#: ../../source/contributor-tutorial-contribute-on-github.rst:162 +#: ../../source/contributor-tutorial-contribute-on-github.rst:185 msgid "**Push the changes to the fork**" msgstr "**Pousser les changements vers la fourche**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:163 +#: ../../source/contributor-tutorial-contribute-on-github.rst:176 msgid "" "Once we have committed our changes, we have effectively updated our local" " history, but GitHub has no way of knowing this unless we push our " @@ -1853,7 +1859,7 @@ msgstr "" "moyen de le savoir à moins que nous ne poussions nos modifications vers " "l'adresse distante de notre origine :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:170 +#: ../../source/contributor-tutorial-contribute-on-github.rst:184 msgid "" "Once this is done, you will see on the GitHub that your forked repo was " "updated with the changes you have made." @@ -1861,15 +1867,15 @@ msgstr "" "Une fois que c'est fait, tu verras sur GitHub que ton repo forké a été " "mis à jour avec les modifications que tu as apportées." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:174 +#: ../../source/contributor-tutorial-contribute-on-github.rst:188 msgid "Creating and merging a pull request (PR)" msgstr "Créer et fusionner une pull request (PR)" -#: ../../source/contributor-tutorial-contribute-on-github.rst:176 +#: ../../source/contributor-tutorial-contribute-on-github.rst:226 msgid "**Create the PR**" msgstr "**Créer le PR**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:177 +#: ../../source/contributor-tutorial-contribute-on-github.rst:191 msgid "" "Once you have pushed changes, on the GitHub webpage of your repository " "you should see the following message:" @@ -1877,12 +1883,12 @@ msgstr "" "Une fois que tu as poussé les modifications, sur la page web GitHub de " "ton dépôt, tu devrais voir le message suivant :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:181 +#: ../../source/contributor-tutorial-contribute-on-github.rst:196 #, fuzzy msgid "Otherwise you can always find this option in the ``Branches`` page." msgstr "Sinon, tu peux toujours trouver cette option dans la page `Branches`." -#: ../../source/contributor-tutorial-contribute-on-github.rst:183 +#: ../../source/contributor-tutorial-contribute-on-github.rst:198 #, fuzzy msgid "" "Once you click the ``Compare & pull request`` button, you should see " @@ -1891,13 +1897,13 @@ msgstr "" "Une fois que tu as cliqué sur le bouton `Compare & pull request`, tu " "devrais voir quelque chose de similaire à ceci :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:187 +#: ../../source/contributor-tutorial-contribute-on-github.rst:203 msgid "At the top you have an explanation of which branch will be merged where:" msgstr "" "En haut, tu as une explication de quelle branche sera fusionnée à quel " "endroit :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:191 +#: ../../source/contributor-tutorial-contribute-on-github.rst:207 msgid "" "In this example you can see that the request is to merge the branch " "``doc-fixes`` from my forked repository to branch ``main`` from the " @@ -1907,14 +1913,14 @@ msgstr "" "branche ``doc-fixes`` de mon dépôt forké à la branche ``main`` du dépôt " "Flower." -#: ../../source/contributor-tutorial-contribute-on-github.rst:193 +#: ../../source/contributor-tutorial-contribute-on-github.rst:210 msgid "" "The title should be changed to adhere to the :ref:`pr_title_format` " "guidelines, otherwise it won't be possible to merge the PR. So in this " "case, a correct title might be ``docs(framework:skip) Fix typos``." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:196 +#: ../../source/contributor-tutorial-contribute-on-github.rst:214 msgid "" "The input box in the middle is there for you to describe what your PR " "does and to link it to existing issues. We have placed comments (that " @@ -1926,11 +1932,11 @@ msgstr "" "commentaires (qui ne seront pas rendus une fois le PR ouvert) pour te " "guider tout au long du processus." -#: ../../source/contributor-tutorial-contribute-on-github.rst:199 +#: ../../source/contributor-tutorial-contribute-on-github.rst:218 msgid "It is important to follow the instructions described in comments." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:201 +#: ../../source/contributor-tutorial-contribute-on-github.rst:220 msgid "" "At the bottom you will find the button to open the PR. 
This will notify " "reviewers that a new PR has been opened and that they should look over it" @@ -1940,7 +1946,7 @@ msgstr "" "qui informera les réviseurs qu'un nouveau PR a été ouvert et qu'ils " "doivent le consulter pour le fusionner ou demander des modifications." -#: ../../source/contributor-tutorial-contribute-on-github.rst:204 +#: ../../source/contributor-tutorial-contribute-on-github.rst:224 msgid "" "If your PR is not yet ready for review, and you don't want to notify " "anyone, you have the option to create a draft pull request:" @@ -1949,11 +1955,11 @@ msgstr "" " personne, tu as la possibilité de créer un brouillon de demande de " "traction :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:208 +#: ../../source/contributor-tutorial-contribute-on-github.rst:230 msgid "**Making new changes**" msgstr "**Faire de nouveaux changements**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:209 +#: ../../source/contributor-tutorial-contribute-on-github.rst:229 msgid "" "Once the PR has been opened (as draft or not), you can still push new " "commits to it the same way we did before, by making changes to the branch" @@ -1963,11 +1969,11 @@ msgstr "" "toujours y pousser de nouveaux commits de la même manière qu'auparavant, " "en apportant des modifications à la branche associée au PR." -#: ../../source/contributor-tutorial-contribute-on-github.rst:211 +#: ../../source/contributor-tutorial-contribute-on-github.rst:253 msgid "**Review the PR**" msgstr "**Review the PR**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:212 +#: ../../source/contributor-tutorial-contribute-on-github.rst:233 msgid "" "Once the PR has been opened or once the draft PR has been marked as " "ready, a review from code owners will be automatically requested:" @@ -1976,7 +1982,7 @@ msgstr "" " étant prêt, une révision des propriétaires de code sera automatiquement " "demandée :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:216 +#: ../../source/contributor-tutorial-contribute-on-github.rst:238 msgid "" "Code owners will then look into the code, ask questions, request changes " "or validate the PR." @@ -1984,11 +1990,11 @@ msgstr "" "Les propriétaires du code vont alors se pencher sur le code, poser des " "questions, demander des modifications ou valider le RP." -#: ../../source/contributor-tutorial-contribute-on-github.rst:218 +#: ../../source/contributor-tutorial-contribute-on-github.rst:241 msgid "Merging will be blocked if there are ongoing requested changes." msgstr "La fusion sera bloquée s'il y a des changements demandés en cours." -#: ../../source/contributor-tutorial-contribute-on-github.rst:222 +#: ../../source/contributor-tutorial-contribute-on-github.rst:245 msgid "" "To resolve them, just push the necessary changes to the branch associated" " with the PR:" @@ -1996,11 +2002,11 @@ msgstr "" "Pour les résoudre, il suffit de pousser les changements nécessaires vers " "la branche associée au PR :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:226 +#: ../../source/contributor-tutorial-contribute-on-github.rst:250 msgid "And resolve the conversation:" msgstr "Et résous la conversation :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:230 +#: ../../source/contributor-tutorial-contribute-on-github.rst:254 msgid "" "Once all the conversations have been resolved, you can re-request a " "review." @@ -2008,11 +2014,11 @@ msgstr "" "Une fois que toutes les conversations ont été résolues, tu peux " "redemander un examen." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:233 +#: ../../source/contributor-tutorial-contribute-on-github.rst:274 msgid "**Once the PR is merged**" msgstr "**Une fois que le PR est fusionné**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:234 +#: ../../source/contributor-tutorial-contribute-on-github.rst:256 msgid "" "If all the automatic tests have passed and reviewers have no more changes" " to request, they can approve the PR and merge it." @@ -2021,7 +2027,7 @@ msgstr "" " de modifications à demander, ils peuvent approuver le PR et le " "fusionner." -#: ../../source/contributor-tutorial-contribute-on-github.rst:238 +#: ../../source/contributor-tutorial-contribute-on-github.rst:261 msgid "" "Once it is merged, you can delete the branch on GitHub (a button should " "appear to do so) and also delete it locally by doing:" @@ -2030,19 +2036,19 @@ msgstr "" "(un bouton devrait apparaître pour le faire) et aussi la supprimer " "localement en faisant :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:245 +#: ../../source/contributor-tutorial-contribute-on-github.rst:269 msgid "Then you should update your forked repository by doing:" msgstr "Ensuite, tu dois mettre à jour ton dépôt forké en faisant :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:254 +#: ../../source/contributor-tutorial-contribute-on-github.rst:277 msgid "Example of first contribution" msgstr "Exemple de première contribution" -#: ../../source/contributor-tutorial-contribute-on-github.rst:257 +#: ../../source/contributor-tutorial-contribute-on-github.rst:280 msgid "Problem" msgstr "Problème" -#: ../../source/contributor-tutorial-contribute-on-github.rst:259 +#: ../../source/contributor-tutorial-contribute-on-github.rst:282 #, fuzzy msgid "" "For our documentation, we've started to use the `Diàtaxis framework " @@ -2051,7 +2057,7 @@ msgstr "" "Pour notre documentation, nous avons commencé à utiliser le cadre " "`Diàtaxis `_." -#: ../../source/contributor-tutorial-contribute-on-github.rst:261 +#: ../../source/contributor-tutorial-contribute-on-github.rst:285 #, fuzzy msgid "" "Our \"How to\" guides should have titles that continue the sentence \"How" @@ -2061,7 +2067,7 @@ msgstr "" "la phrase \"Comment faire pour...\", par exemple, \"Comment passer à " "Flower 1.0\"." -#: ../../source/contributor-tutorial-contribute-on-github.rst:263 +#: ../../source/contributor-tutorial-contribute-on-github.rst:288 msgid "" "Most of our guides do not follow this new format yet, and changing their " "title is (unfortunately) more involved than one might think." @@ -2070,7 +2076,7 @@ msgstr "" "changer leur titre est (malheureusement) plus compliqué qu'on ne le " "pense." -#: ../../source/contributor-tutorial-contribute-on-github.rst:265 +#: ../../source/contributor-tutorial-contribute-on-github.rst:291 #, fuzzy msgid "" "This issue is about changing the title of a doc from present continuous " @@ -2079,7 +2085,7 @@ msgstr "" "Cette question porte sur le changement du titre d'un document du présent " "continu au présent simple." -#: ../../source/contributor-tutorial-contribute-on-github.rst:267 +#: ../../source/contributor-tutorial-contribute-on-github.rst:294 #, fuzzy msgid "" "Let's take the example of \"Saving Progress\" which we changed to \"Save " @@ -2089,21 +2095,21 @@ msgstr "" "remplacé par \"Sauvegarder la progression\". Est-ce que cela passe notre " "contrôle ?" 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:269 +#: ../../source/contributor-tutorial-contribute-on-github.rst:297 #, fuzzy msgid "Before: \"How to saving progress\" ❌" msgstr "Avant : \"Comment sauvegarder les progrès\" ❌" -#: ../../source/contributor-tutorial-contribute-on-github.rst:271 +#: ../../source/contributor-tutorial-contribute-on-github.rst:299 #, fuzzy msgid "After: \"How to save progress\" ✅" msgstr "Après : \"Comment sauvegarder la progression\" ✅" -#: ../../source/contributor-tutorial-contribute-on-github.rst:274 +#: ../../source/contributor-tutorial-contribute-on-github.rst:302 msgid "Solution" msgstr "Solution" -#: ../../source/contributor-tutorial-contribute-on-github.rst:276 +#: ../../source/contributor-tutorial-contribute-on-github.rst:304 #, fuzzy msgid "" "This is a tiny change, but it'll allow us to test your end-to-end setup. " @@ -2113,12 +2119,12 @@ msgstr "" "configuration de bout en bout. Après avoir cloné et configuré le repo " "Flower, voici ce que tu dois faire :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:278 +#: ../../source/contributor-tutorial-contribute-on-github.rst:307 #, fuzzy msgid "Find the source file in ``doc/source``" msgstr "Trouve le fichier source dans `doc/source`" -#: ../../source/contributor-tutorial-contribute-on-github.rst:279 +#: ../../source/contributor-tutorial-contribute-on-github.rst:308 #, fuzzy msgid "" "Make the change in the ``.rst`` file (beware, the dashes under the title " @@ -2127,7 +2133,7 @@ msgstr "" "Effectue la modification dans le fichier `.rst` (attention, les tirets " "sous le titre doivent être de la même longueur que le titre lui-même)" -#: ../../source/contributor-tutorial-contribute-on-github.rst:280 +#: ../../source/contributor-tutorial-contribute-on-github.rst:310 #, fuzzy msgid "" "Build the docs and `check the result `_" -#: ../../source/contributor-tutorial-contribute-on-github.rst:283 +#: ../../source/contributor-tutorial-contribute-on-github.rst:314 msgid "Rename file" msgstr "Renommer le fichier" -#: ../../source/contributor-tutorial-contribute-on-github.rst:285 +#: ../../source/contributor-tutorial-contribute-on-github.rst:316 msgid "" "You might have noticed that the file name still reflects the old wording." " If we just change the file, then we break all existing links to it - it " @@ -2153,22 +2159,22 @@ msgstr "" "important** d'éviter cela, car briser des liens peut nuire à notre " "classement dans les moteurs de recherche." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:288 +#: ../../source/contributor-tutorial-contribute-on-github.rst:320 #, fuzzy msgid "Here's how to change the file name:" msgstr "Voici comment changer le nom du fichier :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:290 +#: ../../source/contributor-tutorial-contribute-on-github.rst:322 #, fuzzy msgid "Change the file name to ``save-progress.rst``" msgstr "Change le nom du fichier en `save-progress.rst`" -#: ../../source/contributor-tutorial-contribute-on-github.rst:291 +#: ../../source/contributor-tutorial-contribute-on-github.rst:323 #, fuzzy msgid "Add a redirect rule to ``doc/source/conf.py``" msgstr "Ajouter une règle de redirection à `doc/source/conf.py`" -#: ../../source/contributor-tutorial-contribute-on-github.rst:293 +#: ../../source/contributor-tutorial-contribute-on-github.rst:325 #, fuzzy msgid "" "This will cause a redirect from ``saving-progress.html`` to ``save-" @@ -2177,11 +2183,11 @@ msgstr "" "Cela entraînera une redirection de `saving-progress.html` vers `save-" "progress.html`, les anciens liens continueront à fonctionner." -#: ../../source/contributor-tutorial-contribute-on-github.rst:296 +#: ../../source/contributor-tutorial-contribute-on-github.rst:329 msgid "Apply changes in the index file" msgstr "Applique les changements dans le fichier d'index" -#: ../../source/contributor-tutorial-contribute-on-github.rst:298 +#: ../../source/contributor-tutorial-contribute-on-github.rst:331 #, fuzzy msgid "" "For the lateral navigation bar to work properly, it is very important to " @@ -2192,16 +2198,16 @@ msgstr "" "très important de mettre également à jour le fichier `index.rst`. C'est " "là que nous définissons toute l'arborescence de la barre de navigation." -#: ../../source/contributor-tutorial-contribute-on-github.rst:301 +#: ../../source/contributor-tutorial-contribute-on-github.rst:335 #, fuzzy msgid "Find and modify the file name in ``index.rst``" msgstr "Trouve et modifie le nom du fichier dans `index.rst`" -#: ../../source/contributor-tutorial-contribute-on-github.rst:304 +#: ../../source/contributor-tutorial-contribute-on-github.rst:338 msgid "Open PR" msgstr "Open PR" -#: ../../source/contributor-tutorial-contribute-on-github.rst:306 +#: ../../source/contributor-tutorial-contribute-on-github.rst:340 #, fuzzy msgid "" "Commit the changes (commit messages are always imperative: \"Do " @@ -2210,27 +2216,27 @@ msgstr "" "Valide les modifications (les messages de validation sont toujours " "impératifs : \"Fais quelque chose\", dans ce cas \"Modifie...\")" -#: ../../source/contributor-tutorial-contribute-on-github.rst:307 +#: ../../source/contributor-tutorial-contribute-on-github.rst:342 msgid "Push the changes to your fork" msgstr "Transmets les changements à ta fourchette" -#: ../../source/contributor-tutorial-contribute-on-github.rst:308 +#: ../../source/contributor-tutorial-contribute-on-github.rst:343 msgid "" "Open a PR (as shown above) with title ``docs(framework) Update how-to " "guide title``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:309 +#: ../../source/contributor-tutorial-contribute-on-github.rst:344 msgid "Wait for it to be approved!" msgstr "Attends qu'elle soit approuvée !" -#: ../../source/contributor-tutorial-contribute-on-github.rst:310 +#: ../../source/contributor-tutorial-contribute-on-github.rst:345 msgid "Congrats! 🥳 You're now officially a Flower contributor!" 
msgstr "" "Félicitations 🥳 Tu es désormais officiellement une contributrice de " "Flower !" -#: ../../source/contributor-tutorial-contribute-on-github.rst:314 +#: ../../source/contributor-tutorial-contribute-on-github.rst:348 #: ../../source/tutorial-series-build-a-strategy-from-scratch-pytorch.ipynb:573 #: ../../source/tutorial-series-customize-the-client-pytorch.ipynb:1012 #: ../../source/tutorial-series-get-started-with-flower-pytorch.ipynb:811 @@ -2239,7 +2245,7 @@ msgstr "" msgid "Next steps" msgstr "Prochaines étapes" -#: ../../source/contributor-tutorial-contribute-on-github.rst:316 +#: ../../source/contributor-tutorial-contribute-on-github.rst:350 msgid "" "Once you have made your first PR, and want to contribute more, be sure to" " check out the following :" @@ -2247,37 +2253,37 @@ msgstr "" "Une fois que tu auras fait ton premier RP, et que tu voudras contribuer " "davantage, ne manque pas de consulter les sites suivants :" -#: ../../source/contributor-tutorial-contribute-on-github.rst:318 +#: ../../source/contributor-tutorial-contribute-on-github.rst:353 #, fuzzy msgid "" ":doc:`Good first contributions `, where you should particularly look into the " -":code:`baselines` contributions." +"``baselines`` contributions." msgstr "" "`Bonnes premières contributions `_, où vous devriez " "particulièrement regarder les contributions :code:`baselines`." -#: ../../source/contributor-tutorial-contribute-on-github.rst:322 +#: ../../source/contributor-tutorial-contribute-on-github.rst:357 #: ../../source/fed/0000-20200102-fed-template.md:60 msgid "Appendix" msgstr "Annexe" -#: ../../source/contributor-tutorial-contribute-on-github.rst:327 +#: ../../source/contributor-tutorial-contribute-on-github.rst:362 msgid "PR title format" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:329 +#: ../../source/contributor-tutorial-contribute-on-github.rst:364 msgid "We enforce the following PR title format:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:335 +#: ../../source/contributor-tutorial-contribute-on-github.rst:370 msgid "" "(or ``(:skip) `` to ignore the PR in the " "changelog)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:337 +#: ../../source/contributor-tutorial-contribute-on-github.rst:372 msgid "" "Where ```` needs to be in ``{ci, fix, feat, docs, refactor, " "break}``, ```` should be in ``{framework, baselines, datasets, " @@ -2286,51 +2292,51 @@ msgid "" "verb in the imperative mood." 
msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:341 +#: ../../source/contributor-tutorial-contribute-on-github.rst:377 #, fuzzy msgid "Valid examples:" msgstr "Exemples de PyTorch" -#: ../../source/contributor-tutorial-contribute-on-github.rst:343 +#: ../../source/contributor-tutorial-contribute-on-github.rst:379 msgid "``feat(framework) Add flwr build CLI command``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:344 +#: ../../source/contributor-tutorial-contribute-on-github.rst:380 msgid "``refactor(examples:skip) Improve quickstart-pytorch logging``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:345 +#: ../../source/contributor-tutorial-contribute-on-github.rst:381 msgid "``ci(*:skip) Enforce PR title format``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:347 +#: ../../source/contributor-tutorial-contribute-on-github.rst:383 #, fuzzy msgid "Invalid examples:" msgstr "Exemples de PyTorch" -#: ../../source/contributor-tutorial-contribute-on-github.rst:349 +#: ../../source/contributor-tutorial-contribute-on-github.rst:385 msgid "``feat(framework): Add flwr build CLI command`` (extra ``:``)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:350 +#: ../../source/contributor-tutorial-contribute-on-github.rst:386 msgid "" "``feat(*) Add flwr build CLI command`` (missing ``skip`` flag along with " "``*``)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:351 +#: ../../source/contributor-tutorial-contribute-on-github.rst:387 msgid "``feat(skip) Add flwr build CLI command`` (missing ````)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:352 +#: ../../source/contributor-tutorial-contribute-on-github.rst:388 msgid "``feat(framework) add flwr build CLI command`` (non capitalised verb)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:353 +#: ../../source/contributor-tutorial-contribute-on-github.rst:389 msgid "``feat(framework) Add flwr build CLI command.`` (dot at the end)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:354 +#: ../../source/contributor-tutorial-contribute-on-github.rst:390 msgid "``Add flwr build CLI command.`` (missing ``()``)" msgstr "" @@ -2340,7 +2346,9 @@ msgstr "Devenez un·e contributeur·ice" #: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:5 #: ../../source/docker/run-as-subprocess.rst:11 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:12 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:16 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:18 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:13 #: ../../source/docker/tutorial-quickstart-docker.rst:11 msgid "Prerequisites" msgstr "Prérequis" @@ -2367,7 +2375,7 @@ msgstr "" #: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:12 #, fuzzy msgid "" -"Flower uses :code:`pyproject.toml` to manage dependencies and configure " +"Flower uses ``pyproject.toml`` to manage dependencies and configure " "development tools (the ones which support it). Poetry is a build tool " "which supports `PEP 517 `_." msgstr "" @@ -2376,11 +2384,11 @@ msgstr "" "le supportent). Poetry est un outil qui support `PEP 517 " "`_." 
-#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:18 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:17 msgid "Developer Machine Setup" msgstr "Setup de la machine" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:21 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:20 #, fuzzy msgid "Preliminaries" msgstr "Principes" @@ -2399,113 +2407,113 @@ msgid "" "installation actions to add `brew` to your PATH." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:28 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:29 msgid "" "Install `xz` (to install different Python versions) and `pandoc` to build" -" the docs::" +" the docs:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:34 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:36 msgid "For Ubuntu" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:35 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:38 msgid "" "Ensure you system (Ubuntu 22.04+) is up-to-date, and you have all " -"necessary packages::" +"necessary packages:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:44 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:47 #, fuzzy msgid "Create Flower Dev Environment" msgstr "Créer/Supprimer l'environment virtuel" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:46 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:49 #, fuzzy msgid "" -"1. Clone the `Flower repository `_ from " -"GitHub::" +"Clone the `Flower repository `_ from " +"GitHub:" msgstr "" "Pour commencer, cloner la `repo Flower `_" " depuis GitHub::" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:52 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:56 msgid "" "Let's create the Python environment for all-things Flower. If you wish to" -" use :code:`pyenv`, we provide two convenience scripts that you can use. " -"If you prefer using something else than :code:`pyenv`, create a new " -"environment, activate and skip to the last point where all packages are " -"installed." +" use ``pyenv``, we provide two convenience scripts that you can use. If " +"you prefer using something else than ``pyenv``, create a new environment," +" activate and skip to the last point where all packages are installed." 
msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:54 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:61 #, fuzzy msgid "" -"If you don't have :code:`pyenv` installed, the following script that will" -" install it, set it up, and create the virtual environment (with " -":code:`Python 3.9.20` by default)::" +"If you don't have ``pyenv`` installed, the following script that will " +"install it, set it up, and create the virtual environment (with ``Python " +"3.9.20`` by default):" msgstr "" "Si vous n'avez pas :code:`pyenv` installé, vous pouvez utiliser le script" " suivant qui l'installera, le configurera et créera l'environnement " "virtuel (avec :code:`Python 3.9.20` par défaut)::" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:58 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:68 #, fuzzy msgid "" -"If you already have :code:`pyenv` installed (along with the :code:`pyenv-" -"virtualenv` plugin), you can use the following convenience script (with " -":code:`Python 3.9.20` by default)::" +"If you already have ``pyenv`` installed (along with the ``pyenv-" +"virtualenv`` plugin), you can use the following convenience script (with " +"``Python 3.9.20`` by default):" msgstr "" "Si vous n'avez pas :code:`pyenv` installé, vous pouvez utiliser le script" " suivant qui l'installera, le configurera et créera l'environnement " "virtuel (avec :code:`Python 3.9.20` par défaut)::" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:62 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:75 #, fuzzy msgid "" -"3. Install the Flower package in development mode (think :code:`pip " -"install -e`) along with all necessary dependencies::" +"3. Install the Flower package in development mode (think ``pip install " +"-e``) along with all necessary dependencies:" msgstr "" "Troisièmement, installez le paquet Flower en mode de développement ( " ":code :`pip install -e`) avec toutes les dépendances nécessaires :" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:69 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:83 msgid "Convenience Scripts" msgstr "Scripts pratiques" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:71 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:85 +#, fuzzy msgid "" "The Flower repository contains a number of convenience scripts to make " -"recurring development tasks easier and less error-prone. See the " -":code:`/dev` subdirectory for a full list. The following scripts are " -"amongst the most important ones:" +"recurring development tasks easier and less error-prone. See the ``/dev``" +" subdirectory for a full list. The following scripts are amongst the most" +" important ones:" msgstr "" "La repo de Flower contient un certain nombre de scripts de commodité pour" " rendre les tâches de développement récurrentes plus faciles et moins " "problématiques. Voir le sous-répertoire :code :`/dev` pour une liste " "complète. 
Les scripts suivants sont parmis les plus importants :" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:77 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:90 msgid "Create/Delete Virtual Environment" msgstr "Créer/Supprimer l'environment virtuel" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:85 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:98 msgid "Compile ProtoBuf Definitions" msgstr "Compiler les définitions ProtoBuf" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:92 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:105 msgid "Auto-Format Code" msgstr "Formatter le code" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:99 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:112 msgid "Run Linters and Tests" msgstr "Vérifier le format et tester le code" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:106 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:119 msgid "Add a pre-commit hook" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:108 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:121 msgid "" "Developers may integrate a pre-commit hook into their workflow utilizing " "the `pre-commit `_ library. The pre-" @@ -2513,50 +2521,50 @@ msgid "" "``./dev/format.sh`` and ``./dev/test.sh`` scripts." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:110 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:125 msgid "There are multiple ways developers can use this:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:112 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:127 msgid "Install the pre-commit hook to your local git directory by simply running:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:118 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:133 msgid "" "Each ``git commit`` will trigger the execution of formatting and " "linting/test scripts." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:119 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:135 msgid "" "If in a hurry, bypass the hook using ``--no-verify`` with the ``git " -"commit`` command. ::" +"commit`` command." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:124 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:142 msgid "" "For developers who prefer not to install the hook permanently, it is " "possible to execute a one-time check prior to committing changes by using" " the following command:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:130 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:149 msgid "" "This executes the formatting and linting checks/tests on all the files " "without modifying the default behavior of ``git commit``." 
msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:133 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:153 msgid "Run Github Actions (CI) locally" msgstr "Exécuter les GitHub Actions (CI) localement" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:135 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:155 #, fuzzy msgid "" "Developers could run the full set of Github Actions workflows under their" " local environment by using `Act `_. " "Please refer to the installation instructions under the linked repository" -" and run the next command under Flower main cloned repository folder::" +" and run the next command under Flower main cloned repository folder:" msgstr "" "Il est possible d'exécuter l'ensemble des Github Actions sous leur " "environnement local en utilisant `Act _`." @@ -2564,7 +2572,7 @@ msgstr "" "fois installé, exécuter la commande suivante dans le dossier principale " "de Flower :" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:142 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:164 msgid "" "The Flower default workflow would run by setting up the required Docker " "machines underneath." @@ -2572,40 +2580,42 @@ msgstr "" "Le workflow par défaut de Flower sera exécuté en configurant les machines" " Docker requises en arrière plan." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:147 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:168 #, fuzzy msgid "Build Release" msgstr "Inédit" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:149 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:170 +#, fuzzy msgid "" "Flower uses Poetry to build releases. The necessary command is wrapped in" -" a simple script::" +" a simple script:" msgstr "" "Flower utilise Poetry pour construire les nouvelles versions. La commande" " nécessaire est comprise dans un script simple ::" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:154 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:177 +#, fuzzy msgid "" -"The resulting :code:`.whl` and :code:`.tar.gz` releases will be stored in" -" the :code:`/dist` subdirectory." +"The resulting ``.whl`` and ``.tar.gz`` releases will be stored in the " +"``/dist`` subdirectory." msgstr "" "Les versions résultantes :code:`.whl` et :code:`.tar.gz` seront stockées " "dans le sous-répertoire:code:`/dist`." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:159 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:181 #, fuzzy msgid "Build Documentation" msgstr "Amélioration de la documentation" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:161 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:183 msgid "" "Flower's documentation uses `Sphinx `_. " "There's no convenience script to re-build the documentation yet, but it's" -" pretty easy::" +" pretty easy:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:167 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:191 msgid "This will generate HTML documentation in ``doc/build/html``." 
msgstr "" @@ -2649,7 +2659,7 @@ msgid "" msgstr "" #: ../../source/docker/enable-tls.rst:23 -#: ../../source/docker/persist-superlink-state.rst:14 +#: ../../source/docker/persist-superlink-state.rst:15 msgid "" "If you later want to delete the directory, you can change the user ID " "back to the current user ID by running ``sudo chown -R $USER:$(id -gn) " @@ -2673,21 +2683,21 @@ msgstr "" msgid "Understanding the command" msgstr "Entraîne le modèle" -#: ../../source/docker/enable-tls.rst:44 ../../source/docker/enable-tls.rst:91 +#: ../../source/docker/enable-tls.rst:45 ../../source/docker/enable-tls.rst:92 #: ../../source/docker/enable-tls.rst:125 #: ../../source/docker/tutorial-quickstart-docker.rst:66 #: ../../source/docker/tutorial-quickstart-docker.rst:103 -#: ../../source/docker/tutorial-quickstart-docker.rst:213 -#: ../../source/docker/tutorial-quickstart-docker.rst:300 +#: ../../source/docker/tutorial-quickstart-docker.rst:217 +#: ../../source/docker/tutorial-quickstart-docker.rst:305 msgid "``docker run``: This tells Docker to run a container from an image." msgstr "" -#: ../../source/docker/enable-tls.rst:45 ../../source/docker/enable-tls.rst:92 +#: ../../source/docker/enable-tls.rst:46 ../../source/docker/enable-tls.rst:93 #: ../../source/docker/enable-tls.rst:126 #: ../../source/docker/tutorial-quickstart-docker.rst:67 #: ../../source/docker/tutorial-quickstart-docker.rst:104 -#: ../../source/docker/tutorial-quickstart-docker.rst:214 -#: ../../source/docker/tutorial-quickstart-docker.rst:301 +#: ../../source/docker/tutorial-quickstart-docker.rst:218 +#: ../../source/docker/tutorial-quickstart-docker.rst:306 msgid "``--rm``: Remove the container once it is stopped or the command exits." msgstr "" @@ -2792,19 +2802,19 @@ msgstr "" msgid "the network." msgstr "" -#: ../../source/docker/enable-tls.rst:71 +#: ../../source/docker/enable-tls.rst:72 #, fuzzy msgid "SuperNode" msgstr "flower-superlink" -#: ../../source/docker/enable-tls.rst:73 +#: ../../source/docker/enable-tls.rst:74 msgid "" "Assuming that the ``ca.crt`` certificate already exists locally, we can " "use the flag ``--volume`` to mount the local certificate into the " "container's ``/app/`` directory." msgstr "" -#: ../../source/docker/enable-tls.rst:78 +#: ../../source/docker/enable-tls.rst:79 msgid "" "If you're generating self-signed certificates and the ``ca.crt`` " "certificate doesn't exist on the SuperNode, you can copy it over after " @@ -2919,16 +2929,16 @@ msgstr "" msgid "Getting Started" msgstr "Pour commencer" -#: ../../source/docker/index.rst:20 +#: ../../source/docker/index.rst:19 msgid "Running in Production" msgstr "" -#: ../../source/docker/index.rst:29 +#: ../../source/docker/index.rst:28 #, fuzzy msgid "Advanced Options" msgstr "Options d'installation avancées" -#: ../../source/docker/index.rst:41 +#: ../../source/docker/index.rst:40 #, fuzzy msgid "Run Flower using Docker Compose" msgstr "Serveur de Flower" @@ -2951,7 +2961,7 @@ msgid "" " on your host system and a name for the database file." msgstr "" -#: ../../source/docker/persist-superlink-state.rst:10 +#: ../../source/docker/persist-superlink-state.rst:11 msgid "" "By default, the SuperLink container runs with a non-root user called " "``app`` with the user ID ``49999``. It is recommended to create a new " @@ -2959,7 +2969,7 @@ msgid "" "the mounted directory has the proper permissions." 
msgstr "" -#: ../../source/docker/persist-superlink-state.rst:20 +#: ../../source/docker/persist-superlink-state.rst:21 msgid "" "In the example below, we create a new directory called ``state``, change " "the user ID and tell Docker via the flag ``--volume`` to mount the local " @@ -2968,7 +2978,7 @@ msgid "" "database file." msgstr "" -#: ../../source/docker/persist-superlink-state.rst:35 +#: ../../source/docker/persist-superlink-state.rst:36 msgid "" "As soon as the SuperLink starts, the file ``state.db`` is created in the " "``state`` directory on your host system. If the file already exists, the " @@ -2993,17 +3003,17 @@ msgid "" "by-digest-immutable-identifier>`_ of the image instead of the tag." msgstr "" -#: ../../source/docker/pin-version.rst:13 +#: ../../source/docker/pin-version.rst:14 msgid "" "The following command returns the current image digest referenced by the " ":substitution-code:`superlink:|stable_flwr_version|` tag:" msgstr "" -#: ../../source/docker/pin-version.rst:22 +#: ../../source/docker/pin-version.rst:23 msgid "This will output" msgstr "" -#: ../../source/docker/pin-version.rst:29 +#: ../../source/docker/pin-version.rst:30 msgid "Next, we can pin the digest when running a new SuperLink container:" msgstr "" @@ -3050,7 +3060,7 @@ msgid "" "``USER root`` directive within your Dockerfile." msgstr "" -#: ../../source/docker/run-as-root-user.rst:29 +#: ../../source/docker/run-as-root-user.rst:30 #, fuzzy msgid "SuperNode Dockerfile" msgstr "Démarrer le serveur" @@ -3077,12 +3087,12 @@ msgid "" "done by extending the SuperNode image:" msgstr "" -#: ../../source/docker/run-as-subprocess.rst:16 +#: ../../source/docker/run-as-subprocess.rst:17 #, fuzzy msgid "Dockerfile.supernode" msgstr "Serveur de Flower" -#: ../../source/docker/run-as-subprocess.rst:30 +#: ../../source/docker/run-as-subprocess.rst:31 msgid "" "Next, build the SuperNode Docker image by running the following command " "in the directory where Dockerfile is located:" @@ -3098,6 +3108,242 @@ msgid "" " the SuperNode to execute the ClientApp as a subprocess:" msgstr "" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:2 +#, fuzzy +msgid "Run Flower Quickstart Examples with Docker Compose" +msgstr "Démarrage rapide XGBoost" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:4 +msgid "" +"Flower provides a set of `quickstart examples " +"`_ to help you get " +"started with the framework. These examples are designed to demonstrate " +"the capabilities of Flower and by default run using the Simulation " +"Engine. This guide demonstrates how to run them using Flower's Deployment" +" Engine via Docker Compose." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:12 +msgid "" +"Some quickstart examples may have limitations or requirements that " +"prevent them from running on every environment. For more information, " +"please see Limitations_." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:18 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:15 +#: ../../source/docker/tutorial-quickstart-docker.rst:13 +msgid "Before you start, make sure that:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:20 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:22 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:17 +#: ../../source/docker/tutorial-quickstart-docker.rst:15 +msgid "The ``flwr`` CLI is :doc:`installed <../how-to-install-flower>` locally." 
+msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:21 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:18 +#: ../../source/docker/tutorial-quickstart-docker.rst:16 +msgid "The Docker daemon is running." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:22 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:19 +msgid "Docker Compose is `installed `_." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:25 +#, fuzzy +msgid "Run the Quickstart Example" +msgstr "Demande pour un nouveau Flower Example" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:27 +msgid "" +"Clone the quickstart example you like to run. For example, ``quickstart-" +"pytorch``:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:35 +msgid "" +"Download the `compose.yml " +"`_" +" file into the example directory:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:44 +#, fuzzy +msgid "Build and start the services using the following command:" +msgstr "Active la virtualenv en exécutant la commande suivante :" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:50 +#, fuzzy +msgid "" +"Append the following lines to the end of the ``pyproject.toml`` file and " +"save it:" +msgstr "Augmente la version mineure de ``pyproject.toml`` d'une unité." + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:52 +#: ../../source/docker/tutorial-quickstart-docker.rst:324 +msgid "pyproject.toml" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:61 +msgid "" +"You can customize the string that follows ``tool.flwr.federations.`` to " +"fit your needs. However, please note that the string cannot contain a dot" +" (``.``)." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:64 +msgid "" +"In this example, ``local-deployment`` has been used. Just remember to " +"replace ``local-deployment`` with your chosen name in both the " +"``tool.flwr.federations.`` string and the corresponding ``flwr run .`` " +"command." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:68 +#, fuzzy +msgid "Run the example:" +msgstr "Fédérer l'exemple" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:74 +msgid "Follow the logs of the SuperExec service:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:80 +msgid "" +"That is all it takes! You can monitor the progress of the run through the" +" logs of the SuperExec." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:84 +msgid "Run a Different Quickstart Example" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:86 +msgid "" +"To run a different quickstart example, such as ``quickstart-tensorflow``," +" first, shut down the Docker Compose services of the current example:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:93 +msgid "After that, you can repeat the steps above." 
+msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:96 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:102 +#, fuzzy +msgid "Limitations" +msgstr "Simulation de moniteur" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:101 +#, fuzzy +msgid "Quickstart Example" +msgstr "Démarrage rapide de JAX" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:103 +#, fuzzy +msgid "quickstart-fastai" +msgstr "Démarrage rapide fastai" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:104 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:106 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:115 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:117 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:121 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:123 +#: ../../source/ref-changelog.md:33 ../../source/ref-changelog.md:399 +#: ../../source/ref-changelog.md:676 ../../source/ref-changelog.md:740 +#: ../../source/ref-changelog.md:798 ../../source/ref-changelog.md:867 +#: ../../source/ref-changelog.md:929 +msgid "None" +msgstr "Aucun" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:105 +#, fuzzy +msgid "quickstart-huggingface" +msgstr "Quickstart tutorials" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:107 +#, fuzzy +msgid "quickstart-jax" +msgstr "Démarrage rapide de JAX" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:108 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:110 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:125 +#, fuzzy +msgid "" +"The example has not yet been updated to work with the latest ``flwr`` " +"version." +msgstr "" +"Les exemples de code couvrant scikit-learn et PyTorch Lightning ont été " +"mis à jour pour fonctionner avec la dernière version de Flower." + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:109 +#, fuzzy +msgid "quickstart-mlcube" +msgstr "Démarrage rapide de JAX" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:111 +#, fuzzy +msgid "quickstart-mlx" +msgstr "Démarrage rapide de JAX" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:112 +msgid "" +"`Requires to run on macOS with Apple Silicon `_." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:114 +#, fuzzy +msgid "quickstart-monai" +msgstr "Démarrage rapide de JAX" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:116 +#, fuzzy +msgid "quickstart-pandas" +msgstr "Démarrage rapide des Pandas" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:118 +#, fuzzy +msgid "quickstart-pytorch-lightning" +msgstr "Démarrage rapide de PyTorch Lightning" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:119 +msgid "" +"Requires an older pip version that is not supported by the Flower Docker " +"images." 
+msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:120 +#, fuzzy +msgid "quickstart-pytorch" +msgstr "Démarrage rapide de PyTorch" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:122 +#, fuzzy +msgid "quickstart-sklearn-tabular" +msgstr "Démarrage rapide de scikit-learn" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:124 +#, fuzzy +msgid "quickstart-tabnet" +msgstr "Démarrage rapide de JAX" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:126 +#, fuzzy +msgid "quickstart-tensorflow" +msgstr "Démarrage rapide de TensorFlow" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:127 +msgid "Only runs on AMD64." +msgstr "" + #: ../../source/docker/set-environment-variables.rst:2 #, fuzzy msgid "Set Environment Variables" @@ -3110,45 +3356,228 @@ msgid "" "environment variables for a container." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:2 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:2 #, fuzzy -msgid "Quickstart with Docker" +msgid "Deploy Flower on Multiple Machines with Docker Compose" msgstr "Démarrage rapide XGBoost" -#: ../../source/docker/tutorial-quickstart-docker.rst:4 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:4 msgid "" -"This quickstart aims to guide you through the process of containerizing a" -" Flower project and running it end to end using Docker on your local " -"machine." +"This guide will help you set up a Flower project on multiple machines " +"using Docker Compose." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:7 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:7 msgid "" -"This tutorial does not use production-ready settings, so you can focus on" -" understanding the basic workflow that uses the minimum configurations." +"You will learn how to run the Flower client and server components on two " +"separate machines, with Flower configured to use TLS encryption and " +"persist SuperLink state across restarts. A server consists of a SuperLink" +" and ``SuperExec``. For more details about the Flower architecture, refer" +" to the :doc:`../explanation-flower-architecture` explainer page." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:14 -#: ../../source/docker/tutorial-quickstart-docker.rst:13 -msgid "Before you start, make sure that:" +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:13 +msgid "" +"This guide assumes you have completed the :doc:`tutorial-quickstart-" +"docker-compose` tutorial. It is highly recommended that you follow and " +"understand the contents of that tutorial before proceeding with this " +"guide." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:16 -#: ../../source/docker/tutorial-quickstart-docker.rst:15 -msgid "The ``flwr`` CLI is :doc:`installed <../how-to-install-flower>` locally." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:20 +msgid "Before you begin, make sure you have the following prerequisites:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:17 -#: ../../source/docker/tutorial-quickstart-docker.rst:16 -msgid "The Docker daemon is running." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:23 +msgid "The Docker daemon is running on your local machine and the remote machine." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:21 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:24 +msgid "" +"Docker Compose V2 is installed on both your local machine and the remote " +"machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:25 +msgid "You can connect to the remote machine from your local machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:26 +msgid "Ports ``9091`` and ``9093`` are accessible on the remote machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:30 +msgid "" +"The guide uses the |quickstart_sklearn_tabular|_ example as an example " +"project." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:32 +msgid "" +"If your project has a different name or location, please remember to " +"adjust the commands/paths accordingly." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:36 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:22 #: ../../source/docker/tutorial-quickstart-docker.rst:19 msgid "Step 1: Set Up" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:31 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:38 +msgid "Clone the Flower repository and change to the ``distributed`` directory:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:45 +msgid "Get the IP address from the remote machine and save it for later." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:46 +msgid "" +"Use the ``certs.yml`` Compose file to generate your own self-signed " +"certificates. If you have certificates, you can continue with Step 2." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:51 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:221 +msgid "These certificates should be used only for development purposes." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:53 +msgid "" +"For production environments, you may have to use dedicated services to " +"obtain your certificates." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:56 +msgid "" +"First, set the environment variables ``SUPERLINK_IP`` and " +"``SUPEREXEC_IP`` with the IP address from the remote machine. For " +"example, if the IP is ``192.168.2.33``, execute:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:65 +msgid "Next, generate the self-signed certificates:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:72 +msgid "Step 2: Copy the Server Compose Files" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:74 +msgid "" +"Use the method that works best for you to copy the ``server`` directory, " +"the certificates, and your Flower project to the remote machine." 
+msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:77 +msgid "For example, you can use ``scp`` to copy the directories:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:87 +#, fuzzy +msgid "Step 3: Start the Flower Server Components" +msgstr "Démarrer le serveur" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:89 +msgid "" +"Log into the remote machine using ``ssh`` and run the following command " +"to start the SuperLink and SuperExec services:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:102 +msgid "" +"The Path of the ``PROJECT_DIR`` should be relative to the location of the" +" ``server`` Docker Compose files." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:105 +msgid "Go back to your terminal on your local machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:108 +#, fuzzy +msgid "Step 4: Start the Flower Client Components" +msgstr "Démarrer le serveur" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:110 +msgid "" +"On your local machine, run the following command to start the client " +"components:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:120 +msgid "" +"The Path of the ``PROJECT_DIR`` should be relative to the location of the" +" ``client`` Docker Compose files." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:124 +#, fuzzy +msgid "Step 5: Run Your Flower Project" +msgstr "Serveur de Flower" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:126 +msgid "" +"Specify the remote SuperExec IP addresses and the path to the root " +"certificate in the ``[tool.flwr.federations.remote-superexec]`` table in " +"the ``pyproject.toml`` file. Here, we have named our remote federation " +"``remote-superexec``:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:130 +#, fuzzy +msgid "examples/quickstart-sklearn-tabular/pyproject.toml" +msgstr "Démarrage rapide de scikit-learn" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:139 +msgid "" +"The Path of the ``root-certificates`` should be relative to the location " +"of the ``pyproject.toml`` file." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:142 +msgid "To run the project, execute:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:148 +msgid "" +"That's it! With these steps, you've set up Flower on two separate " +"machines and are ready to start using it." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:152 +msgid "Step 6: Clean Up" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:154 +#, fuzzy +msgid "Shut down the Flower client components:" +msgstr "Client de Flower" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:161 +msgid "Shut down the Flower server components and delete the SuperLink state:" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:2 +#, fuzzy +msgid "Quickstart with Docker" +msgstr "Démarrage rapide XGBoost" + +#: ../../source/docker/tutorial-quickstart-docker.rst:4 +msgid "" +"This quickstart aims to guide you through the process of containerizing a" +" Flower project and running it end to end using Docker on your local " +"machine." 
+msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:7 +msgid "" +"This tutorial does not use production-ready settings, so you can focus on" +" understanding the basic workflow that uses the minimum configurations." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:32 #: ../../source/docker/tutorial-quickstart-docker.rst:21 msgid "Create a new Flower project (PyTorch):" msgstr "" @@ -3170,7 +3599,7 @@ msgstr "" msgid "Step 2: Start the SuperLink" msgstr "Démarrer le serveur" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:60 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:62 #: ../../source/docker/tutorial-quickstart-docker.rst:52 #, fuzzy msgid "Open your terminal and run:" @@ -3197,8 +3626,8 @@ msgstr "" #: ../../source/docker/tutorial-quickstart-docker.rst:71 #: ../../source/docker/tutorial-quickstart-docker.rst:108 -#: ../../source/docker/tutorial-quickstart-docker.rst:215 -#: ../../source/docker/tutorial-quickstart-docker.rst:304 +#: ../../source/docker/tutorial-quickstart-docker.rst:219 +#: ../../source/docker/tutorial-quickstart-docker.rst:309 msgid "" "``--network flwr-network``: Make the container join the network named " "``flwr-network``." @@ -3210,8 +3639,8 @@ msgstr "" #: ../../source/docker/tutorial-quickstart-docker.rst:73 #: ../../source/docker/tutorial-quickstart-docker.rst:110 -#: ../../source/docker/tutorial-quickstart-docker.rst:216 -#: ../../source/docker/tutorial-quickstart-docker.rst:306 +#: ../../source/docker/tutorial-quickstart-docker.rst:220 +#: ../../source/docker/tutorial-quickstart-docker.rst:311 msgid "" "``--detach``: Run the container in the background, freeing up the " "terminal." @@ -3329,13 +3758,13 @@ msgid "" "extends the ClientApp image and installs the required dependencies." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:148 +#: ../../source/docker/tutorial-quickstart-docker.rst:149 msgid "" "Create a ClientApp Dockerfile called ``Dockerfile.clientapp`` and paste " "the following code into it:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:150 +#: ../../source/docker/tutorial-quickstart-docker.rst:152 #, fuzzy msgid "Dockerfile.clientapp" msgstr "Flower ClientApp." @@ -3418,7 +3847,7 @@ msgstr "" msgid "the default command run when the container is started." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:184 +#: ../../source/docker/tutorial-quickstart-docker.rst:186 msgid "" "Note that `flwr `__ is already installed " "in the ``flwr/clientapp`` base image, so only other package dependencies " @@ -3427,20 +3856,20 @@ msgid "" "after it has been copied into the Docker image (see line 5)." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:189 +#: ../../source/docker/tutorial-quickstart-docker.rst:192 msgid "" "Next, build the ClientApp Docker image by running the following command " "in the directory where the Dockerfile is located:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:198 +#: ../../source/docker/tutorial-quickstart-docker.rst:201 msgid "" "The image name was set as ``flwr_clientapp`` with the tag ``0.0.1``. " "Remember that these values are merely examples, and you can customize " "them according to your requirements." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:201 +#: ../../source/docker/tutorial-quickstart-docker.rst:205 #, fuzzy msgid "Start the first ClientApp container:" msgstr "Utilisation du moteur du client virtuel" @@ -3461,34 +3890,34 @@ msgstr "" msgid "``supernode-1:9094``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:222 +#: ../../source/docker/tutorial-quickstart-docker.rst:226 msgid "Start the second ClientApp container:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:233 +#: ../../source/docker/tutorial-quickstart-docker.rst:237 #, fuzzy msgid "Step 5: Start the SuperExec" msgstr "Démarrer le serveur" -#: ../../source/docker/tutorial-quickstart-docker.rst:235 +#: ../../source/docker/tutorial-quickstart-docker.rst:239 msgid "" "The procedure for building and running a SuperExec image is almost " "identical to the ClientApp image." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:237 +#: ../../source/docker/tutorial-quickstart-docker.rst:242 msgid "" "Similar to the ClientApp image, you will need to create a Dockerfile that" " extends the SuperExec image and installs the required FAB dependencies." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:240 +#: ../../source/docker/tutorial-quickstart-docker.rst:245 msgid "" "Create a SuperExec Dockerfile called ``Dockerfile.superexec`` and paste " "the following code in:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:242 +#: ../../source/docker/tutorial-quickstart-docker.rst:248 msgid "Dockerfile.superexec" msgstr "" @@ -3518,13 +3947,13 @@ msgstr "" msgid "``flwr.superexec.deployment:executor`` executor to run the ServerApps." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:277 +#: ../../source/docker/tutorial-quickstart-docker.rst:283 msgid "" "Afterward, in the directory that holds the Dockerfile, execute this " "Docker command to build the SuperExec image:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:285 +#: ../../source/docker/tutorial-quickstart-docker.rst:290 #, fuzzy msgid "Start the SuperExec container:" msgstr "Démarrer le serveur" @@ -3539,7 +3968,7 @@ msgid "" "``http://localhost:9093``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:305 +#: ../../source/docker/tutorial-quickstart-docker.rst:310 msgid "``--name superexec``: Assign the name ``superexec`` to the container." msgstr "" @@ -3559,82 +3988,79 @@ msgstr "" msgid "connect to the SuperLink running on port ``9091``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:315 +#: ../../source/docker/tutorial-quickstart-docker.rst:320 msgid "Step 6: Run the Quickstart Project" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:317 +#: ../../source/docker/tutorial-quickstart-docker.rst:322 #, fuzzy msgid "Add the following lines to the ``pyproject.toml``:" msgstr "Augmente la version mineure de ``pyproject.toml`` d'une unité." 
-#: ../../source/docker/tutorial-quickstart-docker.rst:319 -msgid "pyproject.toml" -msgstr "" - -#: ../../source/docker/tutorial-quickstart-docker.rst:326 +#: ../../source/docker/tutorial-quickstart-docker.rst:331 msgid "Run the ``quickstart-docker`` project by executing the command:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:332 +#: ../../source/docker/tutorial-quickstart-docker.rst:337 msgid "Follow the SuperExec logs to track the execution of the run:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:339 +#: ../../source/docker/tutorial-quickstart-docker.rst:344 #, fuzzy msgid "Step 7: Update the Application" msgstr "Étape 3 : Sérialisation personnalisée" -#: ../../source/docker/tutorial-quickstart-docker.rst:341 +#: ../../source/docker/tutorial-quickstart-docker.rst:346 msgid "" -"Change the application code. For example, change the ``seed`` in " +"Change the application code. For example, change the ``seed`` in " "``quickstart_docker/task.py`` to ``43`` and save it:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:344 +#: ../../source/docker/tutorial-quickstart-docker.rst:349 #, fuzzy msgid "quickstart_docker/task.py" msgstr "Démarrage rapide des Pandas" -#: ../../source/docker/tutorial-quickstart-docker.rst:351 +#: ../../source/docker/tutorial-quickstart-docker.rst:356 msgid "Stop the current ClientApp containers:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:357 +#: ../../source/docker/tutorial-quickstart-docker.rst:362 #, fuzzy msgid "Rebuild the FAB and ClientApp image:" msgstr "Chargement des données" -#: ../../source/docker/tutorial-quickstart-docker.rst:363 +#: ../../source/docker/tutorial-quickstart-docker.rst:368 msgid "Launch two new ClientApp containers based on the newly built image:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:378 +#: ../../source/docker/tutorial-quickstart-docker.rst:383 msgid "Run the updated project:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:385 +#: ../../source/docker/tutorial-quickstart-docker.rst:390 msgid "Step 8: Clean Up" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:387 +#: ../../source/docker/tutorial-quickstart-docker.rst:392 msgid "Remove the containers and the bridge network:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:399 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:408 +#: ../../source/docker/tutorial-quickstart-docker.rst:404 #, fuzzy msgid "Where to Go Next" msgstr "Par où commencer" -#: ../../source/docker/tutorial-quickstart-docker.rst:401 +#: ../../source/docker/tutorial-quickstart-docker.rst:406 msgid ":doc:`enable-tls`" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:402 +#: ../../source/docker/tutorial-quickstart-docker.rst:407 msgid ":doc:`persist-superlink-state`" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:403 +#: ../../source/docker/tutorial-quickstart-docker.rst:408 msgid ":doc:`tutorial-quickstart-docker-compose`" msgstr "" @@ -3657,183 +4083,179 @@ msgid "" "configuration that best suits your project's needs." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:18 -msgid "Docker Compose is `installed `_." 
-msgstr "" - -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:23 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:24 msgid "Clone the Docker Compose ``complete`` directory:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:37 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:38 msgid "" "Export the path of the newly created project. The path should be relative" " to the location of the Docker Compose files:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:44 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:45 msgid "" "Setting the ``PROJECT_DIR`` helps Docker Compose locate the " "``pyproject.toml`` file, allowing it to install dependencies in the " "SuperExec and SuperNode images correctly." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:48 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:49 #, fuzzy msgid "Step 2: Run Flower in Insecure Mode" msgstr "Serveur de Flower" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:50 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:51 msgid "" "To begin, start Flower with the most basic configuration. In this setup, " "Flower will run without TLS and without persisting the state." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:55 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:56 msgid "" "Without TLS, the data sent between the services remains **unencrypted**. " "Use it only for development purposes." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:58 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:59 msgid "" "For production-oriented use cases, :ref:`enable TLS` for secure data" " transmission." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:68 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:179 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:70 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:184 msgid "``docker compose``: The Docker command to run the Docker Compose tool." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:69 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:180 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:71 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:185 msgid "" "``-f compose.yml``: Specify the YAML file that contains the basic Flower " "service definitions." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:70 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:185 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:72 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:190 msgid "" "``--build``: Rebuild the images for each service if they don't already " "exist." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:71 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:186 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:73 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:191 msgid "" "``-d``: Detach the containers from the terminal and run them in the " "background." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:74 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:76 msgid "Step 3: Run the Quickstart Project" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:76 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:78 msgid "" "Now that the Flower services have been started via Docker Compose, it is " "time to run the quickstart example." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:79 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:81 msgid "" "To ensure the ``flwr`` CLI connects to the SuperExec, you need to specify" " the SuperExec addresses in the ``pyproject.toml`` file." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:82 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:226 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:84 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:232 msgid "Add the following lines to the ``quickstart-compose/pyproject.toml``:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:84 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:228 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:86 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:234 msgid "quickstart-compose/pyproject.toml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:91 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:93 msgid "Execute the command to run the quickstart example:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:97 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:99 msgid "Monitor the SuperExec logs and wait for the summary to appear:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:104 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:106 #, fuzzy msgid "Step 4: Update the Application" msgstr "Étape 3 : Sérialisation personnalisée" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:106 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:108 msgid "In the next step, change the application code." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:108 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:110 msgid "" "For example, go to the ``task.py`` file in the ``quickstart-" "compose/quickstart_compose/`` directory and add a ``print`` call in the " "``get_weights`` function:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:111 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:114 msgid "quickstart-compose/quickstart_compose/task.py" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:120 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:125 #, fuzzy msgid "Rebuild and restart the services." msgstr "Nous pouvons déjà démarrer le *serveur* :" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:124 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:129 msgid "" "If you have modified the dependencies listed in your ``pyproject.toml`` " "file, it is essential to rebuild images." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:127 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:132 msgid "If you haven't made any changes, you can skip this step." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:129 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:134 msgid "Run the following command to rebuild and restart the services:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:135 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:140 msgid "Run the updated quickstart example:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:142 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:147 msgid "In the SuperExec logs, you should find the ``Get weights`` line:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:159 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:164 msgid "Step 5: Persisting the SuperLink State" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:161 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:166 msgid "" "In this step, Flower services are configured to persist the state of the " "SuperLink service, ensuring that it maintains its state even after a " "restart." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:166 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:171 msgid "" "When working with Docker Compose on Linux, you may need to create the " "``state`` directory first and change its ownership to ensure proper " "access and permissions." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:169 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:174 msgid "" "For more information, consult the following page: :doc:`persist-" "superlink-state`." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:171 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:220 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:176 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:226 msgid "Run the command:" msgstr "" @@ -3854,17 +4276,17 @@ msgid "" "rules>`_." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:188 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:241 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:362 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:193 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:247 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:375 msgid "Rerun the ``quickstart-compose`` project:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:194 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:199 msgid "Check the content of the ``state`` directory:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:201 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:206 msgid "" "You should see a ``state.db`` file in the ``state`` directory. If you " "restart the service, the state file will be used to restore the state " @@ -3872,123 +4294,124 @@ msgid "" "if the containers are stopped and started again." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:208 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:214 msgid "Step 6: Run Flower with TLS" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:210 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:216 msgid "" "To demonstrate how to enable TLS, generate self-signed certificates using" " the ``certs.yml`` Compose file." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:215 -msgid "These certificates should be used only for development purposes." -msgstr "" - -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:217 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:223 msgid "" "For production environments, use a service like `Let's Encrypt " "`_ to obtain your certificates." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:235 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:241 msgid "Restart the services with TLS enabled:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:249 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:255 msgid "Step 7: Add another SuperNode" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:251 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:257 msgid "" "You can add more SuperNodes and ClientApps by duplicating their " "definitions in the ``compose.yml`` file." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:254 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:260 msgid "" "Just give each new SuperNode and ClientApp service a unique service name " "like ``supernode-3``, ``clientapp-3``, etc." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:257 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:263 msgid "In ``compose.yml``, add the following:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:259 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:265 msgid "compose.yml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:303 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:316 msgid "" "If you also want to enable TLS for the new SuperNodes, duplicate the " "SuperNode definition for each new SuperNode service in the ``with-" "tls.yml`` file." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:306 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:319 msgid "" "Make sure that the names of the services match with the one in the " "``compose.yml`` file." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:308 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:321 msgid "In ``with-tls.yml``, add the following:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:310 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:323 msgid "with-tls.yml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:332 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:345 msgid "Step 8: Persisting the SuperLink State and Enabling TLS" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:334 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:347 msgid "" "To run Flower with persisted SuperLink state and enabled TLS, a slight " "change in the ``with-state.yml`` file is required:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:337 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:350 msgid "Comment out the lines 2-4 and uncomment the lines 5-9:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:339 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:352 msgid "with-state.yml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:356 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:369 #, fuzzy msgid "Restart the services:" msgstr "Démarrer le serveur" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:370 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:383 msgid "Step 9: Merge Multiple Compose Files" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:372 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:385 msgid "" "You can merge multiple Compose files into a single file. For instance, if" " you wish to combine the basic configuration with the TLS configuration, " "execute the following command:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:380 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:394 msgid "" "This will merge the contents of ``compose.yml`` and ``with-tls.yml`` into" " a new file called ``my_compose.yml``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:384 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:398 msgid "Step 10: Clean Up" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:386 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:400 msgid "Remove all services and volumes:" msgstr "" +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:410 +#, fuzzy +msgid ":doc:`run-quickstart-examples-docker-compose`" +msgstr "Démarrage rapide XGBoost" + #: ../../source/docker/use-a-different-version.rst:2 msgid "Use a Different Flower Version" msgstr "" @@ -4000,7 +4423,7 @@ msgid "" " `Docker Hub `__." msgstr "" -#: ../../source/docker/use-a-different-version.rst:9 +#: ../../source/docker/use-a-different-version.rst:10 msgid "" "When using Flower nightly, the SuperLink nightly image must be paired " "with the corresponding SuperNode and ServerApp nightly images released on" @@ -4035,18 +4458,18 @@ msgstr "" "fédération `_." 
-#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:9 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:10 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:12 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:12 msgid "Centralized Training" msgstr "Formation centralisée" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:10 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:14 #, fuzzy msgid "" "All files are revised based on :doc:`Example: PyTorch - From Centralized " "To Federated `. The only " -"thing to do is modifying the file called :code:`cifar.py`, revised part " -"is shown below:" +"thing to do is modifying the file called ``cifar.py``, revised part is " +"shown below:" msgstr "" "Tous les fichiers sont révisés sur la base de `Exemple : PyTorch - From " "Centralized To Federated `, the following parts are" -" easy to follow, only :code:`get_parameters` and :code:`set_parameters` " -"function in :code:`client.py` needed to revise. If not, please read the " -":doc:`Example: PyTorch - From Centralized To Federated `. first." +" easy to follow, only ``get_parameters`` and ``set_parameters`` function " +"in ``client.py`` needed to revise. If not, please read the :doc:`Example:" +" PyTorch - From Centralized To Federated `. first." msgstr "" "Si vous avez lu `Exemple : PyTorch - From Centralized To Federated " "`. d'abord." -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:56 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:66 +#, fuzzy msgid "" "Our example consists of one *server* and two *clients*. In FedBN, " -":code:`server.py` keeps unchanged, we can start the server directly." +"``server.py`` keeps unchanged, we can start the server directly." msgstr "" "Notre exemple consiste en un *serveur* et deux *clients*. Dans FedBN, " ":code:`server.py` reste inchangé, nous pouvons démarrer le serveur " "directement." -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:62 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:73 +#, fuzzy msgid "" -"Finally, we will revise our *client* logic by changing " -":code:`get_parameters` and :code:`set_parameters` in :code:`client.py`, " -"we will exclude batch normalization parameters from model parameter list " -"when sending to or receiving from the server." +"Finally, we will revise our *client* logic by changing ``get_parameters``" +" and ``set_parameters`` in ``client.py``, we will exclude batch " +"normalization parameters from model parameter list when sending to or " +"receiving from the server." msgstr "" "Enfin, nous allons réviser notre logique *client* en modifiant " ":code:`get_parameters` et :code:`set_parameters` dans :code:`client.py`, " @@ -4128,11 +4553,11 @@ msgstr "" "des paramètres du modèle lors de l'envoi ou de la réception depuis le " "serveur." 
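The FedBN revision described in the strings above amounts to filtering BatchNorm entries out of the parameter exchange in ``client.py``. The sketch below is a minimal illustration under the assumption of a small PyTorch model whose BatchNorm layers carry ``bn`` in their ``state_dict`` keys; the ``Net`` class here is a stand-in for the tutorial's ``cifar.py`` model, not the actual one.

```python
# Minimal FedBN-style sketch: exclude BatchNorm parameters when sending
# weights to or receiving them from the server. Assumes BatchNorm layers
# have "bn" in their state_dict keys.
from collections import OrderedDict
from typing import List

import numpy as np
import torch
import torch.nn as nn


class Net(nn.Module):  # placeholder model standing in for cifar.Net
    def __init__(self) -> None:
        super().__init__()
        self.conv1 = nn.Conv2d(3, 6, 5)
        self.bn1 = nn.BatchNorm2d(6)
        self.fc1 = nn.Linear(6 * 28 * 28, 10)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = torch.relu(self.bn1(self.conv1(x)))
        return self.fc1(x.flatten(start_dim=1))


def get_parameters(model: nn.Module) -> List[np.ndarray]:
    # Return all parameters except BatchNorm ones as NumPy ndarrays
    return [
        val.cpu().numpy()
        for name, val in model.state_dict().items()
        if "bn" not in name
    ]


def set_parameters(model: nn.Module, parameters: List[np.ndarray]) -> None:
    # Load the received parameters into the non-BatchNorm state_dict entries
    keys = [k for k in model.state_dict().keys() if "bn" not in k]
    state_dict = OrderedDict(
        {k: torch.tensor(v) for k, v in zip(keys, parameters)}
    )
    model.load_state_dict(state_dict, strict=False)
```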
-#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:85 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:102 msgid "Now, you can now open two additional terminal windows and run" msgstr "Tu peux maintenant ouvrir deux autres fenêtres de terminal et lancer" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:91 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:108 msgid "" "in each window (make sure that the server is still running before you do " "so) and see your (previously centralized) PyTorch project run federated " @@ -4143,13 +4568,13 @@ msgstr "" "(auparavant centralisé) exécuter l'apprentissage fédéré avec la stratégie" " FedBN sur deux clients. Félicitations !" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:94 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:310 -#: ../../source/tutorial-quickstart-jax.rst:283 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:113 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:349 +#: ../../source/tutorial-quickstart-jax.rst:319 msgid "Next Steps" msgstr "Prochaines étapes" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:96 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:115 #, fuzzy msgid "" "The full source code for this example can be found `here " @@ -4195,7 +4620,7 @@ msgstr "" "Ensuite, nous nous appuyons sur le code d'entraînement centralisé pour " "exécuter l'entraînement de manière fédérée." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:12 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:14 msgid "" "We begin with a brief description of the centralized CNN training code. " "If you want a more in-depth explanation of what's going on then have a " @@ -4207,14 +4632,15 @@ msgstr "" "passe, jette un coup d'œil au tutoriel officiel `PyTorch " "`_." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:15 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:18 +#, fuzzy msgid "" -"Let's create a new file called :code:`cifar.py` with all the components " +"Let's create a new file called ``cifar.py`` with all the components " "required for a traditional (centralized) training on CIFAR-10. First, all" -" required packages (such as :code:`torch` and :code:`torchvision`) need " -"to be imported. You can see that we do not import any package for " -"federated learning. You can keep all these imports as they are even when " -"we add the federated learning components at a later point." +" required packages (such as ``torch`` and ``torchvision``) need to be " +"imported. You can see that we do not import any package for federated " +"learning. You can keep all these imports as they are even when we add the" +" federated learning components at a later point." msgstr "" "Créons un nouveau fichier appelé :code:`cifar.py` avec tous les " "composants requis pour une formation traditionnelle (centralisée) sur le " @@ -4224,30 +4650,33 @@ msgstr "" "toutes ces importations telles quelles même lorsque nous ajouterons les " "composants d'apprentissage fédéré à un moment ultérieur." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:32 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:36 +#, fuzzy msgid "" "As already mentioned we will use the CIFAR-10 dataset for this machine " "learning workload. 
The model architecture (a very simple Convolutional " -"Neural Network) is defined in :code:`class Net()`." +"Neural Network) is defined in ``class Net()``." msgstr "" "Comme nous l'avons déjà mentionné, nous utiliserons l'ensemble de données" " CIFAR-10 pour cette charge de travail d'apprentissage automatique. " "L'architecture du modèle (un réseau neuronal convolutif très simple) est " "définie dans :code:`class Net()`." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:56 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:62 +#, fuzzy msgid "" -"The :code:`load_data()` function loads the CIFAR-10 training and test " -"sets. The :code:`transform` normalized the data after loading." +"The ``load_data()`` function loads the CIFAR-10 training and test sets. " +"The ``transform`` normalized the data after loading." msgstr "" "La fonction :code:`load_data()` charge les ensembles d'entraînement et de" " test CIFAR-10. La fonction :code:`transform` normalise les données après" " leur chargement." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:74 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:84 +#, fuzzy msgid "" -"We now need to define the training (function :code:`train()`) which loops" -" over the training set, measures the loss, backpropagates it, and then " +"We now need to define the training (function ``train()``) which loops " +"over the training set, measures the loss, backpropagates it, and then " "takes one optimizer step for each batch of training examples." msgstr "" "Nous devons maintenant définir la formation (fonction :code:`train()`) " @@ -4255,17 +4684,18 @@ msgstr "" "rétropropage, puis effectue une étape d'optimisation pour chaque lot " "d'exemples de formation." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:76 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:88 +#, fuzzy msgid "" -"The evaluation of the model is defined in the function :code:`test()`. " -"The function loops over all test samples and measures the loss of the " -"model based on the test dataset." +"The evaluation of the model is defined in the function ``test()``. The " +"function loops over all test samples and measures the loss of the model " +"based on the test dataset." msgstr "" "L'évaluation du modèle est définie dans la fonction :code:`test()`. La " "fonction boucle sur tous les échantillons de test et mesure la perte du " "modèle en fonction de l'ensemble des données de test." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:136 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:149 msgid "" "Having defined the data loading, model architecture, training, and " "evaluation we can put everything together and train our CNN on CIFAR-10." @@ -4274,7 +4704,7 @@ msgstr "" "la formation et l'évaluation, nous pouvons tout mettre ensemble et former" " notre CNN sur CIFAR-10." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:163 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:177 msgid "" "So far, this should all look fairly familiar if you've used PyTorch " "before. Let's take the next step and use what we've built to create a " @@ -4286,7 +4716,7 @@ msgstr "" " avons construit pour créer un simple système d'apprentissage fédéré " "composé d'un serveur et de deux clients." 
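The strings that follow describe a ``server.py`` that starts a Flower server and performs three rounds of federated learning. A minimal version, assuming the legacy ``fl.server.start_server`` API these tutorial texts are written against, could look like this:

```python
# Hedged sketch of the server described below: start a Flower server and
# run three rounds of federated learning with the default strategy.
import flwr as fl

if __name__ == "__main__":
    fl.server.start_server(
        server_address="0.0.0.0:8080",
        config=fl.server.ServerConfig(num_rounds=3),
    )
```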
-#: ../../source/example-pytorch-from-centralized-to-federated.rst:169 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:184 msgid "" "The simple machine learning project discussed in the previous section " "trains the model on a single dataset (CIFAR-10), we call this centralized" @@ -4306,7 +4736,7 @@ msgstr "" "changer la plupart de ton code et tout mettre en place à partir de zéro, " "ce qui peut représenter un effort considérable." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:173 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:191 msgid "" "However, with Flower you can evolve your pre-existing code into a " "federated learning setup without the need for a major rewrite." @@ -4315,11 +4745,12 @@ msgstr "" "une configuration d'apprentissage fédéré sans avoir besoin d'une " "réécriture majeure." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:175 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:194 +#, fuzzy msgid "" "The concept is easy to understand. We have to start a *server* and then " -"use the code in :code:`cifar.py` for the *clients* that are connected to " -"the *server*. The *server* sends model parameters to the clients. The " +"use the code in ``cifar.py`` for the *clients* that are connected to the " +"*server*. The *server* sends model parameters to the clients. The " "*clients* run the training and update the parameters. The updated " "parameters are sent back to the *server* which averages all received " "parameter updates. This describes one round of the federated learning " @@ -4334,13 +4765,14 @@ msgstr "" "un tour du processus d'apprentissage fédéré et nous répétons cette " "opération pour plusieurs tours." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:181 -#: ../../source/tutorial-quickstart-jax.rst:129 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:201 +#: ../../source/tutorial-quickstart-jax.rst:147 +#, fuzzy msgid "" "Our example consists of one *server* and two *clients*. Let's set up " -":code:`server.py` first. The *server* needs to import the Flower package " -":code:`flwr`. Next, we use the :code:`start_server` function to start a " -"server and tell it to perform three rounds of federated learning." +"``server.py`` first. The *server* needs to import the Flower package " +"``flwr``. Next, we use the ``start_server`` function to start a server " +"and tell it to perform three rounds of federated learning." msgstr "" "Notre exemple consiste en un *serveur* et deux *clients*. Commençons par " "configurer :code:`server.py`. Le *serveur* doit importer le paquet Flower" @@ -4348,35 +4780,36 @@ msgstr "" "pour démarrer un serveur et lui demander d'effectuer trois cycles " "d'apprentissage fédéré." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:191 -#: ../../source/tutorial-quickstart-jax.rst:139 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:215 +#: ../../source/tutorial-quickstart-jax.rst:161 msgid "We can already start the *server*:" msgstr "Nous pouvons déjà démarrer le *serveur* :" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:197 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:221 +#, fuzzy msgid "" -"Finally, we will define our *client* logic in :code:`client.py` and build" -" upon the previously defined centralized training in :code:`cifar.py`. 
" -"Our *client* needs to import :code:`flwr`, but also :code:`torch` to " -"update the parameters on our PyTorch model:" +"Finally, we will define our *client* logic in ``client.py`` and build " +"upon the previously defined centralized training in ``cifar.py``. Our " +"*client* needs to import ``flwr``, but also ``torch`` to update the " +"parameters on our PyTorch model:" msgstr "" "Enfin, nous allons définir notre logique *client* dans :code:`client.py` " "et nous appuyer sur la formation centralisée définie précédemment dans " ":code:`cifar.py`. Notre *client* doit importer :code:`flwr`, mais aussi " ":code:`torch` pour mettre à jour les paramètres de notre modèle PyTorch :" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:213 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:238 +#, fuzzy msgid "" "Implementing a Flower *client* basically means implementing a subclass of" -" either :code:`flwr.client.Client` or :code:`flwr.client.NumPyClient`. " -"Our implementation will be based on :code:`flwr.client.NumPyClient` and " -"we'll call it :code:`CifarClient`. :code:`NumPyClient` is slightly easier" -" to implement than :code:`Client` if you use a framework with good NumPy " -"interoperability (like PyTorch or TensorFlow/Keras) because it avoids " -"some of the boilerplate that would otherwise be necessary. " -":code:`CifarClient` needs to implement four methods, two methods for " -"getting/setting model parameters, one method for training the model, and " -"one method for testing the model:" +" either ``flwr.client.Client`` or ``flwr.client.NumPyClient``. Our " +"implementation will be based on ``flwr.client.NumPyClient`` and we'll " +"call it ``CifarClient``. ``NumPyClient`` is slightly easier to implement " +"than ``Client`` if you use a framework with good NumPy interoperability " +"(like PyTorch or TensorFlow/Keras) because it avoids some of the " +"boilerplate that would otherwise be necessary. ``CifarClient`` needs to " +"implement four methods, two methods for getting/setting model parameters," +" one method for training the model, and one method for testing the model:" msgstr "" "Implementing a Flower *client* basically means implementing a subclass of" " either :code:`flwr.client.Client` or :code:`flwr.client.NumPyClient`. 
" @@ -4389,52 +4822,56 @@ msgstr "" "getting/setting model parameters, one method for training the model, and " "one method for testing the model:" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:218 -msgid ":code:`set_parameters`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:249 +#, fuzzy +msgid "``set_parameters``" msgstr ":code:`set_parameters`" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:219 -#: ../../source/tutorial-quickstart-jax.rst:166 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:248 +#: ../../source/tutorial-quickstart-jax.rst:192 msgid "" "set the model parameters on the local model that are received from the " "server" msgstr "règle les paramètres du modèle local reçus du serveur" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:220 -#: ../../source/tutorial-quickstart-jax.rst:168 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:249 +#: ../../source/tutorial-quickstart-jax.rst:194 +#, fuzzy msgid "" -"loop over the list of model parameters received as NumPy " -":code:`ndarray`'s (think list of neural network layers)" +"loop over the list of model parameters received as NumPy ``ndarray``'s " +"(think list of neural network layers)" msgstr "" "boucle sur la liste des paramètres du modèle reçus sous forme de NumPy " ":code:`ndarray`'s (pensez à la liste des couches du réseau neuronal)" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:221 -#: ../../source/tutorial-quickstart-jax.rst:169 -#: ../../source/tutorial-quickstart-scikitlearn.rst:118 -msgid ":code:`get_parameters`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:252 +#: ../../source/tutorial-quickstart-jax.rst:197 +#: ../../source/tutorial-quickstart-scikitlearn.rst:129 +#, fuzzy +msgid "``get_parameters``" msgstr ":code:`get_parameters`" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:222 -#: ../../source/tutorial-quickstart-jax.rst:170 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:252 +#: ../../source/tutorial-quickstart-jax.rst:197 +#, fuzzy msgid "" -"get the model parameters and return them as a list of NumPy " -":code:`ndarray`'s (which is what :code:`flwr.client.NumPyClient` expects)" +"get the model parameters and return them as a list of NumPy ``ndarray``'s" +" (which is what ``flwr.client.NumPyClient`` expects)" msgstr "" "récupère les paramètres du modèle et les renvoie sous forme de liste de " ":code:`ndarray` NumPy (ce qui correspond à ce que " ":code:`flwr.client.NumPyClient` attend)" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:223 -#: ../../source/tutorial-quickstart-jax.rst:171 -#: ../../source/tutorial-quickstart-scikitlearn.rst:123 -msgid ":code:`fit`" -msgstr ":code:`fit`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:257 +#: ../../source/tutorial-quickstart-jax.rst:202 +#: ../../source/tutorial-quickstart-scikitlearn.rst:136 +msgid "``fit``" +msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:224 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:228 -#: ../../source/tutorial-quickstart-jax.rst:172 -#: ../../source/tutorial-quickstart-jax.rst:176 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:255 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:260 +#: ../../source/tutorial-quickstart-jax.rst:200 +#: ../../source/tutorial-quickstart-jax.rst:205 msgid "" "update the parameters of the 
local model with the parameters received " "from the server" @@ -4442,39 +4879,40 @@ msgstr "" "mettre à jour les paramètres du modèle local avec les paramètres reçus du" " serveur" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:225 -#: ../../source/tutorial-quickstart-jax.rst:173 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:257 +#: ../../source/tutorial-quickstart-jax.rst:202 msgid "train the model on the local training set" msgstr "entraîne le modèle sur l'ensemble d'apprentissage local" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:226 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:258 msgid "get the updated local model weights and return them to the server" msgstr "récupère les poids du modèle local mis à jour et les renvoie au serveur" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:227 -#: ../../source/tutorial-quickstart-jax.rst:175 -#: ../../source/tutorial-quickstart-scikitlearn.rst:127 -msgid ":code:`evaluate`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:263 +#: ../../source/tutorial-quickstart-jax.rst:208 +#: ../../source/tutorial-quickstart-scikitlearn.rst:139 +#, fuzzy +msgid "``evaluate``" msgstr ":code:`évaluer`" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:229 -#: ../../source/tutorial-quickstart-jax.rst:177 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:262 +#: ../../source/tutorial-quickstart-jax.rst:207 msgid "evaluate the updated model on the local test set" msgstr "évaluer le modèle mis à jour sur l'ensemble de test local" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:230 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:263 msgid "return the local loss and accuracy to the server" msgstr "renvoie la perte locale et la précision au serveur" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:232 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:265 +#, fuzzy msgid "" -"The two :code:`NumPyClient` methods :code:`fit` and :code:`evaluate` make" -" use of the functions :code:`train()` and :code:`test()` previously " -"defined in :code:`cifar.py`. So what we really do here is we tell Flower " -"through our :code:`NumPyClient` subclass which of our already defined " -"functions to call for training and evaluation. We included type " -"annotations to give you a better understanding of the data types that get" -" passed around." +"The two ``NumPyClient`` methods ``fit`` and ``evaluate`` make use of the " +"functions ``train()`` and ``test()`` previously defined in ``cifar.py``. " +"So what we really do here is we tell Flower through our ``NumPyClient`` " +"subclass which of our already defined functions to call for training and " +"evaluation. We included type annotations to give you a better " +"understanding of the data types that get passed around." msgstr "" "Les deux méthodes :code:`NumPyClient` :code:`fit` et :code:`evaluate` " "utilisent les fonctions :code:`train()` et :code:`test()` définies " @@ -4485,14 +4923,14 @@ msgstr "" "annotations de type pour te donner une meilleure compréhension des types " "de données qui sont transmis." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:280 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:315 #, fuzzy msgid "" "All that's left to do it to define a function that loads both model and " -"data, creates a :code:`CifarClient`, and starts this client. 
You load " -"your data and model by using :code:`cifar.py`. Start :code:`CifarClient` " -"with the function :code:`fl.client.start_client()` by pointing it at the " -"same IP address we used in :code:`server.py`:" +"data, creates a ``CifarClient``, and starts this client. You load your " +"data and model by using ``cifar.py``. Start ``CifarClient`` with the " +"function ``fl.client.start_client()`` by pointing it at the same IP " +"address we used in ``server.py``:" msgstr "" "Il ne reste plus qu'à définir une fonction qui charge le modèle et les " "données, crée un :code:`CifarClient` et démarre ce client. Tu charges tes" @@ -4501,14 +4939,14 @@ msgstr "" "la faisant pointer sur la même adresse IP que celle que nous avons " "utilisée dans :code:`server.py` :" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:301 -#: ../../source/tutorial-quickstart-jax.rst:274 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:338 +#: ../../source/tutorial-quickstart-jax.rst:309 msgid "And that's it. You can now open two additional terminal windows and run" msgstr "" "Tu peux maintenant ouvrir deux autres fenêtres de terminal et exécuter " "les commandes suivantes" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:307 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:344 msgid "" "in each window (make sure that the server is running before you do so) " "and see your (previously centralized) PyTorch project run federated " @@ -4518,7 +4956,7 @@ msgstr "" "faire) et tu verras ton projet PyTorch (auparavant centralisé) exécuter " "l'apprentissage fédéré sur deux clients. Félicitations !" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:312 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:351 #, fuzzy msgid "" "The full source code for this example: `PyTorch: From Centralized To " @@ -4539,13 +4977,13 @@ msgstr "" "d'autres clients ?" #: ../../source/explanation-differential-privacy.rst:2 -#: ../../source/explanation-differential-privacy.rst:11 +#: ../../source/explanation-differential-privacy.rst:14 #: ../../source/tutorial-series-what-is-federated-learning.ipynb:303 #, fuzzy msgid "Differential Privacy" msgstr "Confidentialité différentielle" -#: ../../source/explanation-differential-privacy.rst:3 +#: ../../source/explanation-differential-privacy.rst:4 msgid "" "The information in datasets like healthcare, financial transactions, user" " preferences, etc., is valuable and has the potential for scientific " @@ -4554,7 +4992,7 @@ msgid "" "privacy." msgstr "" -#: ../../source/explanation-differential-privacy.rst:6 +#: ../../source/explanation-differential-privacy.rst:9 msgid "" "Traditional methods like anonymization alone would not work because of " "attacks like Re-identification and Data Linkage. That's where " @@ -4562,7 +5000,7 @@ msgid "" "data while ensuring the privacy of individuals." msgstr "" -#: ../../source/explanation-differential-privacy.rst:12 +#: ../../source/explanation-differential-privacy.rst:16 msgid "" "Imagine two datasets that are identical except for a single record (for " "instance, Alice's data). Differential Privacy (DP) guarantees that any " @@ -4572,11 +5010,11 @@ msgid "" " individual's information remains hidden in the crowd." 
msgstr "" -#: ../../source/explanation-differential-privacy.rst:16 +#: ../../source/explanation-differential-privacy.rst:-1 msgid "DP Intro" msgstr "" -#: ../../source/explanation-differential-privacy.rst:22 +#: ../../source/explanation-differential-privacy.rst:27 msgid "" "One of the most commonly used mechanisms to achieve DP is adding enough " "noise to the output of the analysis to mask the contribution of each " @@ -4584,12 +5022,12 @@ msgid "" "analysis." msgstr "" -#: ../../source/explanation-differential-privacy.rst:25 +#: ../../source/explanation-differential-privacy.rst:32 #, fuzzy msgid "Formal Definition" msgstr "Compiler les définitions ProtoBuf" -#: ../../source/explanation-differential-privacy.rst:26 +#: ../../source/explanation-differential-privacy.rst:34 msgid "" "Differential Privacy (DP) provides statistical guarantees against the " "information an adversary can infer through the output of a randomized " @@ -4601,13 +5039,13 @@ msgid "" "record, and for all possible outputs S ⊆ Range(A):" msgstr "" -#: ../../source/explanation-differential-privacy.rst:32 +#: ../../source/explanation-differential-privacy.rst:42 msgid "" "\\small\n" "P[M(D_{1} \\in A)] \\leq e^{\\epsilon} P[M(D_{2} \\in A)] + \\delta" msgstr "" -#: ../../source/explanation-differential-privacy.rst:38 +#: ../../source/explanation-differential-privacy.rst:47 msgid "" "The :math:`\\epsilon` parameter, also known as the privacy budget, is a " "metric of privacy loss. It also controls the privacy-utility trade-off; " @@ -4619,12 +5057,12 @@ msgid "" " change in the output due to the inclusion or removal of a single record." msgstr "" -#: ../../source/explanation-differential-privacy.rst:45 +#: ../../source/explanation-differential-privacy.rst:56 #, fuzzy msgid "Differential Privacy in Machine Learning" msgstr "Confidentialité différentielle" -#: ../../source/explanation-differential-privacy.rst:46 +#: ../../source/explanation-differential-privacy.rst:58 msgid "" "DP can be utilized in machine learning to preserve the privacy of the " "training data. Differentially private machine learning algorithms are " @@ -4639,12 +5077,12 @@ msgid "" "model's output." msgstr "" -#: ../../source/explanation-differential-privacy.rst:53 +#: ../../source/explanation-differential-privacy.rst:69 #, fuzzy msgid "Differential Privacy in Federated Learning" msgstr "Mise à l'échelle de l'apprentissage fédéré" -#: ../../source/explanation-differential-privacy.rst:54 +#: ../../source/explanation-differential-privacy.rst:71 msgid "" "Federated learning is a data minimization approach that allows multiple " "parties to collaboratively train a model without sharing their raw data. " @@ -4655,13 +5093,13 @@ msgid "" "attacks." msgstr "" -#: ../../source/explanation-differential-privacy.rst:58 +#: ../../source/explanation-differential-privacy.rst:78 msgid "" "DP can play a crucial role in federated learning to provide privacy for " "the clients' data." msgstr "" -#: ../../source/explanation-differential-privacy.rst:60 +#: ../../source/explanation-differential-privacy.rst:81 msgid "" "Depending on the granularity of privacy provision or the location of " "noise addition, different forms of DP exist in federated learning. In " @@ -4670,14 +5108,14 @@ msgid "" " the center) or at the client (also known as the local)." 
msgstr "" -#: ../../source/explanation-differential-privacy.rst:63 +#: ../../source/explanation-differential-privacy.rst:86 msgid "" "**Central Differential Privacy**: DP is applied by the server and the " "goal is to prevent the aggregated model from leaking information about " "each client's data." msgstr "" -#: ../../source/explanation-differential-privacy.rst:65 +#: ../../source/explanation-differential-privacy.rst:88 msgid "" "**Local Differential Privacy**: DP is applied on the client side before " "sending any information to the server and the goal is to prevent the " @@ -4685,21 +5123,21 @@ msgid "" "the client's data." msgstr "" -#: ../../source/explanation-differential-privacy.rst:68 -#: ../../source/explanation-differential-privacy.rst:71 -#: ../../source/how-to-use-differential-privacy.rst:11 +#: ../../source/explanation-differential-privacy.rst:-1 +#: ../../source/explanation-differential-privacy.rst:93 +#: ../../source/how-to-use-differential-privacy.rst:15 #, fuzzy msgid "Central Differential Privacy" msgstr "Confidentialité différentielle" -#: ../../source/explanation-differential-privacy.rst:69 +#: ../../source/explanation-differential-privacy.rst:95 msgid "" "In this approach, which is also known as user-level DP, the central " "server is responsible for adding noise to the globally aggregated " "parameters. It should be noted that trust in the server is required." msgstr "" -#: ../../source/explanation-differential-privacy.rst:76 +#: ../../source/explanation-differential-privacy.rst:104 msgid "" "While there are various ways to implement central DP in federated " "learning, we concentrate on the algorithms proposed by [2] and [3]. The " @@ -4714,11 +5152,11 @@ msgid "" "that larger updates are scaled down to fit within the norm `S`." msgstr "" -#: ../../source/explanation-differential-privacy.rst:84 +#: ../../source/explanation-differential-privacy.rst:-1 msgid "clipping" msgstr "" -#: ../../source/explanation-differential-privacy.rst:89 +#: ../../source/explanation-differential-privacy.rst:120 msgid "" "Afterwards, the Gaussian mechanism is used to add noise in order to " "distort the sum of all clients' updates. The amount of noise is scaled to" @@ -4727,24 +5165,24 @@ msgid "" "noise_scale * S ) / (number of sampled clients)`." msgstr "" -#: ../../source/explanation-differential-privacy.rst:94 +#: ../../source/explanation-differential-privacy.rst:126 msgid "Clipping" msgstr "" -#: ../../source/explanation-differential-privacy.rst:96 +#: ../../source/explanation-differential-privacy.rst:128 msgid "" "There are two forms of clipping commonly used in Central DP: Fixed " "Clipping and Adaptive Clipping." msgstr "" -#: ../../source/explanation-differential-privacy.rst:98 +#: ../../source/explanation-differential-privacy.rst:131 msgid "" "**Fixed Clipping** : A predefined fix threshold is set for the magnitude " "of clients' updates. Any update exceeding this threshold is clipped back " "to the threshold value." msgstr "" -#: ../../source/explanation-differential-privacy.rst:100 +#: ../../source/explanation-differential-privacy.rst:133 msgid "" "**Adaptive Clipping** : The clipping threshold dynamically adjusts based " "on the observed update distribution [4]. It means that the clipping value" @@ -4752,21 +5190,21 @@ msgid "" "norm distribution." 
msgstr "" -#: ../../source/explanation-differential-privacy.rst:102 +#: ../../source/explanation-differential-privacy.rst:137 msgid "" "The choice between fixed and adaptive clipping depends on various factors" " such as privacy requirements, data distribution, model complexity, and " "others." msgstr "" -#: ../../source/explanation-differential-privacy.rst:105 -#: ../../source/explanation-differential-privacy.rst:110 -#: ../../source/how-to-use-differential-privacy.rst:96 +#: ../../source/explanation-differential-privacy.rst:-1 +#: ../../source/explanation-differential-privacy.rst:141 +#: ../../source/how-to-use-differential-privacy.rst:113 #, fuzzy msgid "Local Differential Privacy" msgstr "Confidentialité différentielle" -#: ../../source/explanation-differential-privacy.rst:107 +#: ../../source/explanation-differential-privacy.rst:143 msgid "" "In this approach, each client is responsible for performing DP. Local DP " "avoids the need for a fully trusted aggregator, but it should be noted " @@ -4774,11 +5212,11 @@ msgid "" "comparison to central DP." msgstr "" -#: ../../source/explanation-differential-privacy.rst:116 +#: ../../source/explanation-differential-privacy.rst:152 msgid "In this explainer, we focus on two forms of achieving Local DP:" msgstr "" -#: ../../source/explanation-differential-privacy.rst:118 +#: ../../source/explanation-differential-privacy.rst:154 msgid "" "Each client adds noise to the local updates before sending them to the " "server. To achieve (:math:`\\epsilon`, :math:`\\delta`)-DP, considering " @@ -4786,37 +5224,36 @@ msgid "" "with a noise scale of σ where:" msgstr "" -#: ../../source/explanation-differential-privacy.rst:120 +#: ../../source/explanation-differential-privacy.rst:158 msgid "" "\\small\n" "\\frac{∆ \\times \\sqrt{2 \\times " -"\\log\\left(\\frac{1.25}{\\delta}\\right)}}{\\epsilon}\n" -"\n" +"\\log\\left(\\frac{1.25}{\\delta}\\right)}}{\\epsilon}" msgstr "" -#: ../../source/explanation-differential-privacy.rst:125 +#: ../../source/explanation-differential-privacy.rst:163 msgid "" "Each client adds noise to the gradients of the model during the local " "training (DP-SGD). More specifically, in this approach, gradients are " "clipped and an amount of calibrated noise is injected into the gradients." msgstr "" -#: ../../source/explanation-differential-privacy.rst:128 +#: ../../source/explanation-differential-privacy.rst:167 msgid "" "Please note that these two approaches are providing privacy at different " "levels." msgstr "" -#: ../../source/explanation-differential-privacy.rst:131 +#: ../../source/explanation-differential-privacy.rst:169 #, fuzzy msgid "**References:**" msgstr "Référence" -#: ../../source/explanation-differential-privacy.rst:133 +#: ../../source/explanation-differential-privacy.rst:171 msgid "[1] Dwork et al. The Algorithmic Foundations of Differential Privacy." msgstr "" -#: ../../source/explanation-differential-privacy.rst:135 +#: ../../source/explanation-differential-privacy.rst:173 #, fuzzy msgid "" "[2] McMahan et al. Learning Differentially Private Recurrent Language " @@ -4825,13 +5262,13 @@ msgstr "" "McMahan, H. Brendan, et al. \"Learning differentially private recurrent " "language models\", arXiv preprint arXiv:1710.06963 (2017)." -#: ../../source/explanation-differential-privacy.rst:137 +#: ../../source/explanation-differential-privacy.rst:175 msgid "" "[3] Geyer et al. Differentially Private Federated Learning: A Client " "Level Perspective." 
msgstr "" -#: ../../source/explanation-differential-privacy.rst:139 +#: ../../source/explanation-differential-privacy.rst:177 #, fuzzy msgid "[4] Galen et al. Differentially Private Learning with Adaptive Clipping." msgstr "" @@ -4875,17 +5312,17 @@ msgstr "" "prendre les paramètres du modèle global actuel comme entrée et renvoyer " "les résultats de l'évaluation :" -#: ../../source/explanation-federated-evaluation.rst:58 +#: ../../source/explanation-federated-evaluation.rst:61 msgid "Custom Strategies" msgstr "Stratégies personnalisées" -#: ../../source/explanation-federated-evaluation.rst:60 +#: ../../source/explanation-federated-evaluation.rst:63 +#, fuzzy msgid "" -"The :code:`Strategy` abstraction provides a method called " -":code:`evaluate` that can directly be used to evaluate the current global" -" model parameters. The current server implementation calls " -":code:`evaluate` after parameter aggregation and before federated " -"evaluation (see next paragraph)." +"The ``Strategy`` abstraction provides a method called ``evaluate`` that " +"can directly be used to evaluate the current global model parameters. The" +" current server implementation calls ``evaluate`` after parameter " +"aggregation and before federated evaluation (see next paragraph)." msgstr "" "L'abstraction :code:`Strategy` fournit une méthode appelée " ":code:`evaluate` qui peut être directement utilisée pour évaluer les " @@ -4893,27 +5330,28 @@ msgstr "" "appelle :code:`evaluate` après l'agrégation des paramètres et avant " "l'évaluation fédérée (voir le paragraphe suivant)." -#: ../../source/explanation-federated-evaluation.rst:65 +#: ../../source/explanation-federated-evaluation.rst:69 msgid "Federated Evaluation" msgstr "Évaluation fédérée" -#: ../../source/explanation-federated-evaluation.rst:68 +#: ../../source/explanation-federated-evaluation.rst:72 msgid "Implementing Federated Evaluation" msgstr "Mise en œuvre de l'évaluation fédérée" -#: ../../source/explanation-federated-evaluation.rst:70 +#: ../../source/explanation-federated-evaluation.rst:74 +#, fuzzy msgid "" -"Client-side evaluation happens in the :code:`Client.evaluate` method and " -"can be configured from the server side." +"Client-side evaluation happens in the ``Client.evaluate`` method and can " +"be configured from the server side." msgstr "" "L'évaluation côté client se fait dans la méthode :code:`Client.evaluate` " "et peut être configurée côté serveur." -#: ../../source/explanation-federated-evaluation.rst:101 +#: ../../source/explanation-federated-evaluation.rst:108 msgid "Configuring Federated Evaluation" msgstr "Configuration de l'évaluation fédérée" -#: ../../source/explanation-federated-evaluation.rst:103 +#: ../../source/explanation-federated-evaluation.rst:110 msgid "" "Federated evaluation can be configured from the server side. Built-in " "strategies support the following arguments:" @@ -4921,14 +5359,14 @@ msgstr "" "L'évaluation fédérée peut être configurée du côté du serveur. Les " "stratégies intégrées prennent en charge les arguments suivants :" -#: ../../source/explanation-federated-evaluation.rst:105 +#: ../../source/explanation-federated-evaluation.rst:113 +#, fuzzy msgid "" -":code:`fraction_evaluate`: a :code:`float` defining the fraction of " -"clients that will be selected for evaluation. If " -":code:`fraction_evaluate` is set to :code:`0.1` and :code:`100` clients " -"are connected to the server, then :code:`10` will be randomly selected " -"for evaluation. 
If :code:`fraction_evaluate` is set to :code:`0.0`, " -"federated evaluation will be disabled." +"``fraction_evaluate``: a ``float`` defining the fraction of clients that " +"will be selected for evaluation. If ``fraction_evaluate`` is set to " +"``0.1`` and ``100`` clients are connected to the server, then ``10`` will" +" be randomly selected for evaluation. If ``fraction_evaluate`` is set to " +"``0.0``, federated evaluation will be disabled." msgstr "" ":code:`fraction_evaluate` : un :code:`float` définissant la fraction de " "clients qui sera sélectionnée pour l'évaluation. Si " @@ -4937,27 +5375,27 @@ msgstr "" "aléatoirement pour l'évaluation. Si :code:`fraction_evaluate` est défini " "à :code:`0.0`, l'évaluation fédérée sera désactivée." -#: ../../source/explanation-federated-evaluation.rst:106 +#: ../../source/explanation-federated-evaluation.rst:118 +#, fuzzy msgid "" -":code:`min_evaluate_clients`: an :code:`int`: the minimum number of " -"clients to be selected for evaluation. If :code:`fraction_evaluate` is " -"set to :code:`0.1`, :code:`min_evaluate_clients` is set to 20, and " -":code:`100` clients are connected to the server, then :code:`20` clients " -"will be selected for evaluation." +"``min_evaluate_clients``: an ``int``: the minimum number of clients to be" +" selected for evaluation. If ``fraction_evaluate`` is set to ``0.1``, " +"``min_evaluate_clients`` is set to 20, and ``100`` clients are connected " +"to the server, then ``20`` clients will be selected for evaluation." msgstr "" "si :code:`fraction_evaluate` est réglé sur :code:`0.1`, " ":code:`min_evaluate_clients` est réglé sur 20, et que :code:`100` clients" " sont connectés au serveur, alors :code:`20` clients seront sélectionnés " "pour l'évaluation." -#: ../../source/explanation-federated-evaluation.rst:107 +#: ../../source/explanation-federated-evaluation.rst:122 +#, fuzzy msgid "" -":code:`min_available_clients`: an :code:`int` that defines the minimum " -"number of clients which need to be connected to the server before a round" -" of federated evaluation can start. If fewer than " -":code:`min_available_clients` are connected to the server, the server " -"will wait until more clients are connected before it continues to sample " -"clients for evaluation." +"``min_available_clients``: an ``int`` that defines the minimum number of " +"clients which need to be connected to the server before a round of " +"federated evaluation can start. If fewer than ``min_available_clients`` " +"are connected to the server, the server will wait until more clients are " +"connected before it continues to sample clients for evaluation." msgstr "" ":code:`min_available_clients` : un :code:`int` qui définit le nombre " "minimum de clients qui doivent être connectés au serveur avant qu'un " @@ -4966,9 +5404,10 @@ msgstr "" "attendra que d'autres clients soient connectés avant de continuer à " "échantillonner des clients pour l'évaluation." -#: ../../source/explanation-federated-evaluation.rst:108 +#: ../../source/explanation-federated-evaluation.rst:127 +#, fuzzy msgid "" -":code:`on_evaluate_config_fn`: a function that returns a configuration " +"``on_evaluate_config_fn``: a function that returns a configuration " "dictionary which will be sent to the selected clients. 
The function will " "be called during each round and provides a convenient way to customize " "client-side evaluation from the server side, for example, to configure " @@ -4980,25 +5419,25 @@ msgstr "" "l'évaluation côté client depuis le côté serveur, par exemple pour " "configurer le nombre d'étapes de validation effectuées." -#: ../../source/explanation-federated-evaluation.rst:135 +#: ../../source/explanation-federated-evaluation.rst:157 msgid "Evaluating Local Model Updates During Training" msgstr "Évaluer les mises à jour du modèle local pendant la formation" -#: ../../source/explanation-federated-evaluation.rst:137 +#: ../../source/explanation-federated-evaluation.rst:159 +#, fuzzy msgid "" -"Model parameters can also be evaluated during training. " -":code:`Client.fit` can return arbitrary evaluation results as a " -"dictionary:" +"Model parameters can also be evaluated during training. ``Client.fit`` " +"can return arbitrary evaluation results as a dictionary:" msgstr "" "Les paramètres du modèle peuvent également être évalués pendant la " "formation. :code:`Client.fit` peut renvoyer des résultats d'évaluation " "arbitraires sous forme de dictionnaire :" -#: ../../source/explanation-federated-evaluation.rst:177 +#: ../../source/explanation-federated-evaluation.rst:201 msgid "Full Code Example" msgstr "Exemple de code complet" -#: ../../source/explanation-federated-evaluation.rst:179 +#: ../../source/explanation-federated-evaluation.rst:203 #, fuzzy msgid "" "For a full code example that uses both centralized and federated " @@ -5019,46 +5458,46 @@ msgid "" "learning while preserving data privacy." msgstr "" -#: ../../source/explanation-flower-architecture.rst:3 +#: ../../source/explanation-flower-architecture.rst:2 msgid "Flower Architecture" msgstr "Architecture florale" -#: ../../source/explanation-flower-architecture.rst:5 +#: ../../source/explanation-flower-architecture.rst:4 msgid "" "This page explains the architecture of deployed Flower federated learning" " system." msgstr "" -#: ../../source/explanation-flower-architecture.rst:8 +#: ../../source/explanation-flower-architecture.rst:6 msgid "" "In federated learning (FL), there is typically one server and a number of" " clients that are connected to the server. This is often called a " "federation." msgstr "" -#: ../../source/explanation-flower-architecture.rst:12 +#: ../../source/explanation-flower-architecture.rst:9 msgid "" "The role of the server is to coordinate the training process. The role of" " each client is to receive tasks from the server, execute those tasks and" " return the results back to the server." msgstr "" -#: ../../source/explanation-flower-architecture.rst:16 +#: ../../source/explanation-flower-architecture.rst:13 msgid "This is sometimes called a hub-and-spoke topology:" msgstr "" -#: ../../source/explanation-flower-architecture.rst:18 +#: ../../source/explanation-flower-architecture.rst:21 #, fuzzy msgid "Hub-and-spoke topology in federated learning" msgstr "Qu'est-ce que l'apprentissage fédéré ?" -#: ../../source/explanation-flower-architecture.rst:24 +#: ../../source/explanation-flower-architecture.rst:21 msgid "" "Hub-and-spoke topology in federated learning (one server, multiple " "clients)." msgstr "" -#: ../../source/explanation-flower-architecture.rst:26 +#: ../../source/explanation-flower-architecture.rst:23 msgid "" "In a real-world deployment, we typically want to run different projects " "on such a federation. 
Each project could use different hyperparameters, " @@ -5066,7 +5505,7 @@ msgid "" "different machine learning frameworks like PyTorch and TensorFlow." msgstr "" -#: ../../source/explanation-flower-architecture.rst:31 +#: ../../source/explanation-flower-architecture.rst:28 msgid "" "This is why, in Flower, both the server side and the client side are " "split into two parts. One part is long-lived and responsible for " @@ -5074,17 +5513,17 @@ msgid "" "executes task-specific code." msgstr "" -#: ../../source/explanation-flower-architecture.rst:36 +#: ../../source/explanation-flower-architecture.rst:32 msgid "A Flower `server` consists of **SuperLink** and ``ServerApp``:" msgstr "" -#: ../../source/explanation-flower-architecture.rst:38 +#: ../../source/explanation-flower-architecture.rst:34 msgid "" "**SuperLink**: a long-running process that forwards task instructions to " "clients (SuperNodes) and receives task results back." msgstr "" -#: ../../source/explanation-flower-architecture.rst:41 +#: ../../source/explanation-flower-architecture.rst:36 msgid "" "``ServerApp``: a short-lived process with project-spcific code that " "customizes all server-side aspects of federated learning systems (client " @@ -5092,18 +5531,18 @@ msgid "" "researchers and AI engineers write when they build Flower apps." msgstr "" -#: ../../source/explanation-flower-architecture.rst:47 +#: ../../source/explanation-flower-architecture.rst:41 msgid "A Flower `client` consists of **SuperNode** and ``ClientApp``:" msgstr "" -#: ../../source/explanation-flower-architecture.rst:49 +#: ../../source/explanation-flower-architecture.rst:43 msgid "" "**SuperNode**: a long-running process that connects to the SuperLink, " "asks for tasks, executes tasks (for example, \"train this model on your " "local data\") and returns task results back to the SuperLink." msgstr "" -#: ../../source/explanation-flower-architecture.rst:53 +#: ../../source/explanation-flower-architecture.rst:46 msgid "" "``ClientApp``: a short-lived process with project-specific code that " "customizes all client-side aspects of federated learning systems (local " @@ -5111,7 +5550,7 @@ msgid "" " researchers and AI engineers write when they build Flower apps." msgstr "" -#: ../../source/explanation-flower-architecture.rst:59 +#: ../../source/explanation-flower-architecture.rst:51 msgid "" "Why SuperNode and SuperLink? Well, in federated learning, the clients are" " the actual stars of the show. They hold the training data and they run " @@ -5120,30 +5559,30 @@ msgid "" "`missing link` between all those SuperNodes." msgstr "" -#: ../../source/explanation-flower-architecture.rst:65 +#: ../../source/explanation-flower-architecture.rst:62 #, fuzzy msgid "Basic Flower architecture" msgstr "Architecture florale" -#: ../../source/explanation-flower-architecture.rst:71 +#: ../../source/explanation-flower-architecture.rst:62 #, fuzzy msgid "The basic Flower architecture for federated learning." msgstr "Qu'est-ce que l'apprentissage fédéré ?" -#: ../../source/explanation-flower-architecture.rst:73 +#: ../../source/explanation-flower-architecture.rst:64 msgid "" "In a Flower app project, users will typically develop the ``ServerApp`` " "and the ``ClientApp``. All the network communication between `server` and" " `clients` is taken care of by the SuperLink and SuperNodes." 
msgstr "" -#: ../../source/explanation-flower-architecture.rst:79 +#: ../../source/explanation-flower-architecture.rst:70 msgid "" "For more details, please refer to the |serverapp_link|_ and " "|clientapp_link|_ documentation." msgstr "" -#: ../../source/explanation-flower-architecture.rst:82 +#: ../../source/explanation-flower-architecture.rst:73 msgid "" "With *multi-run*, multiple ``ServerApp``\\s and ``ClientApp``\\s are now " "capable of running on the same federation consisting of a single long-" @@ -5151,24 +5590,24 @@ msgid "" " referred to as `multi-tenancy` or `multi-job`." msgstr "" -#: ../../source/explanation-flower-architecture.rst:87 +#: ../../source/explanation-flower-architecture.rst:78 msgid "" "As shown in the figure below, two projects, each consisting of a " "``ServerApp`` and a ``ClientApp``, could share the same SuperLink and " "SuperNodes." msgstr "" -#: ../../source/explanation-flower-architecture.rst:91 +#: ../../source/explanation-flower-architecture.rst:87 #, fuzzy msgid "Multi-tenancy federated learning architecture" msgstr "Stratégie de moyenne fédérée." -#: ../../source/explanation-flower-architecture.rst:97 +#: ../../source/explanation-flower-architecture.rst:87 #, fuzzy msgid "Multi-tenancy federated learning architecture with Flower" msgstr "Étape 2 : Apprentissage fédéré avec Flower" -#: ../../source/explanation-flower-architecture.rst:99 +#: ../../source/explanation-flower-architecture.rst:89 msgid "" "To illustrate how multi-run works, consider one federated learning " "training run where a ``ServerApp`` and a ``ClientApp`` are participating " @@ -5176,48 +5615,48 @@ msgid "" " is selected to participate in the training run." msgstr "" -#: ../../source/explanation-flower-architecture.rst:104 +#: ../../source/explanation-flower-architecture.rst:94 msgid "" "In ``[run 1]`` below, all the SuperNodes are selected and therefore run " "their corresponding ``ClientApp``\\s:" msgstr "" -#: ../../source/explanation-flower-architecture.rst:107 +#: ../../source/explanation-flower-architecture.rst:103 #, fuzzy msgid "Multi-tenancy federated learning architecture - Run 1" msgstr "Stratégie de moyenne fédérée." -#: ../../source/explanation-flower-architecture.rst:113 +#: ../../source/explanation-flower-architecture.rst:103 msgid "" "Run 1 in a multi-run federated learning architecture with Flower. All " "SuperNodes participate in the training round." msgstr "" -#: ../../source/explanation-flower-architecture.rst:116 +#: ../../source/explanation-flower-architecture.rst:106 msgid "" "However, in ``[run 2]``, only the first and third SuperNodes are selected" " to participate in the training:" msgstr "" -#: ../../source/explanation-flower-architecture.rst:119 +#: ../../source/explanation-flower-architecture.rst:115 #, fuzzy msgid "Multi-tenancy federated learning architecture - Run 2" msgstr "Stratégie de moyenne fédérée." -#: ../../source/explanation-flower-architecture.rst:125 +#: ../../source/explanation-flower-architecture.rst:115 msgid "" "Run 2 in a multi-run federated learning architecture with Flower. Only " "the first and third SuperNodes are selected to participate in the " "training round." msgstr "" -#: ../../source/explanation-flower-architecture.rst:129 +#: ../../source/explanation-flower-architecture.rst:118 msgid "" "Therefore, with Flower multi-run, different projects (each consisting of " "a ``ServerApp`` and ``ClientApp``) can run on different sets of clients." 
msgstr "" -#: ../../source/explanation-flower-architecture.rst:132 +#: ../../source/explanation-flower-architecture.rst:121 msgid "" "To help you start and manage all of the concurrently executing training " "runs, Flower offers one additional long-running server-side service " @@ -5229,28 +5668,28 @@ msgid "" "``ClientApp``." msgstr "" -#: ../../source/explanation-flower-architecture.rst:141 +#: ../../source/explanation-flower-architecture.rst:128 msgid "" "This architecture allows many users to (concurrently) run their projects " "on the same federation, simply by typing ``flwr run`` on their local " "developer machine." msgstr "" -#: ../../source/explanation-flower-architecture.rst:145 +#: ../../source/explanation-flower-architecture.rst:137 msgid "Flower Deployment Engine with SuperExec" msgstr "" -#: ../../source/explanation-flower-architecture.rst:151 +#: ../../source/explanation-flower-architecture.rst:137 msgid "The SuperExec service for managing concurrent training runs in Flower." msgstr "" -#: ../../source/explanation-flower-architecture.rst:156 +#: ../../source/explanation-flower-architecture.rst:141 msgid "" "This explanation covers the Flower Deployment Engine. An explanation " "covering the Flower Simulation Engine will follow." msgstr "" -#: ../../source/explanation-flower-architecture.rst:161 +#: ../../source/explanation-flower-architecture.rst:146 msgid "" "As we continue to enhance Flower at a rapid pace, we'll periodically " "update this explainer document. Feel free to share any feedback with us." @@ -5852,9 +6291,10 @@ msgid "Aggregate Custom Evaluation Results" msgstr "Agréger les résultats de l'évaluation personnalisée" #: ../../source/how-to-aggregate-evaluation-results.rst:10 +#, fuzzy msgid "" -"The same :code:`Strategy`-customization approach can be used to aggregate" -" custom evaluation results coming from individual clients. Clients can " +"The same ``Strategy``-customization approach can be used to aggregate " +"custom evaluation results coming from individual clients. Clients can " "return custom metrics to the server by returning a dictionary:" msgstr "" "La même approche de personnalisation :code:`Stratégie` peut être utilisée" @@ -5862,7 +6302,7 @@ msgstr "" "clients individuels. 
Les clients peuvent renvoyer des mesures " "personnalisées au serveur en renvoyant un dictionnaire :" -#: ../../source/how-to-aggregate-evaluation-results.rst:36 +#: ../../source/how-to-aggregate-evaluation-results.rst:39 msgid "" "The server can then use a customized strategy to aggregate the metrics " "provided in these dictionaries:" @@ -5882,27 +6322,27 @@ msgid "" " works:" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:7 +#: ../../source/how-to-authenticate-supernodes.rst:8 msgid "SuperLink (server) stores a list of known (client) node public keys" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:8 +#: ../../source/how-to-authenticate-supernodes.rst:9 msgid "" "Using ECDH, both SuperNode and SuperLink independently derive a shared " "secret" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:9 +#: ../../source/how-to-authenticate-supernodes.rst:10 msgid "" "Shared secret is used to compute the HMAC value of the message sent from " "SuperNode to SuperLink as a token" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:10 +#: ../../source/how-to-authenticate-supernodes.rst:12 msgid "SuperLink verifies the token" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:12 +#: ../../source/how-to-authenticate-supernodes.rst:14 #, fuzzy msgid "" "We recommend you to check out the complete `code example " @@ -5914,47 +6354,46 @@ msgstr "" "`_ " "pour en savoir plus." -#: ../../source/how-to-authenticate-supernodes.rst:15 +#: ../../source/how-to-authenticate-supernodes.rst:20 msgid "" "This guide covers a preview feature that might change in future versions " "of Flower." msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:18 +#: ../../source/how-to-authenticate-supernodes.rst:24 msgid "" "For increased security, node authentication can only be used when " "encrypted connections (SSL/TLS) are enabled." msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:21 -msgid "Enable node authentication in :code:`SuperLink`" +#: ../../source/how-to-authenticate-supernodes.rst:28 +msgid "Enable node authentication in ``SuperLink``" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:23 +#: ../../source/how-to-authenticate-supernodes.rst:30 msgid "" "To enable node authentication, first you need to configure SSL/TLS " "connections to secure the SuperLink<>SuperNode communication. You can " "find the complete guide `here `_. After configuring secure connections, you" -" can enable client authentication in a long-running Flower " -":code:`SuperLink`. Use the following terminal command to start a Flower " -":code:`SuperNode` that has both secure connections and node " -"authentication enabled:" +" can enable client authentication in a long-running Flower ``SuperLink``." +" Use the following terminal command to start a Flower ``SuperNode`` that " +"has both secure connections and node authentication enabled:" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:38 +#: ../../source/how-to-authenticate-supernodes.rst:47 msgid "Let's break down the authentication flags:" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:40 +#: ../../source/how-to-authenticate-supernodes.rst:49 msgid "" -"The first flag :code:`--auth-list-public-keys` expects a path to a CSV " -"file storing all known node public keys. You need to store all known node" -" public keys that are allowed to participate in a federation in one CSV " -"file (:code:`.csv`)." 
+"The first flag ``--auth-list-public-keys`` expects a path to a CSV file " +"storing all known node public keys. You need to store all known node " +"public keys that are allowed to participate in a federation in one CSV " +"file (``.csv``)." msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:42 +#: ../../source/how-to-authenticate-supernodes.rst:53 msgid "" "A valid CSV file storing known node public keys should list the keys in " "OpenSSH format, separated by commas and without any comments. For an " @@ -5962,15 +6401,15 @@ msgid "" "known node public keys." msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:44 +#: ../../source/how-to-authenticate-supernodes.rst:57 msgid "" -"The second and third flags :code:`--auth-superlink-private-key` and :code" -":`--auth-superlink-public-key` expect paths to the server's private and " -"public keys. For development purposes, you can generate a private and " -"public key pair using :code:`ssh-keygen -t ecdsa -b 384`." +"The second and third flags ``--auth-superlink-private-key`` and ``--auth-" +"superlink-public-key`` expect paths to the server's private and public " +"keys. For development purposes, you can generate a private and public key" +" pair using ``ssh-keygen -t ecdsa -b 384``." msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:47 +#: ../../source/how-to-authenticate-supernodes.rst:64 msgid "" "In Flower 1.9, there is no support for dynamically removing, editing, or " "adding known node public keys to the SuperLink. To change the set of " @@ -5979,32 +6418,32 @@ msgid "" " nodes is on the roadmap to be released in Flower 1.10 (ETA: June)." msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:53 -msgid "Enable node authentication in :code:`SuperNode`" +#: ../../source/how-to-authenticate-supernodes.rst:71 +msgid "Enable node authentication in ``SuperNode``" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:55 +#: ../../source/how-to-authenticate-supernodes.rst:73 msgid "" -"Similar to the long-running Flower server (:code:`SuperLink`), you can " -"easily enable node authentication in the long-running Flower client " -"(:code:`SuperNode`). Use the following terminal command to start an " -"authenticated :code:`SuperNode`:" +"Similar to the long-running Flower server (``SuperLink``), you can easily" +" enable node authentication in the long-running Flower client " +"(``SuperNode``). Use the following terminal command to start an " +"authenticated ``SuperNode``:" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:66 +#: ../../source/how-to-authenticate-supernodes.rst:85 msgid "" -"The :code:`--auth-supernode-private-key` flag expects a path to the " -"node's private key file and the :code:`--auth-supernode-public-key` flag " -"expects a path to the node's public key file. For development purposes, " -"you can generate a private and public key pair using :code:`ssh-keygen -t" -" ecdsa -b 384`." +"The ``--auth-supernode-private-key`` flag expects a path to the node's " +"private key file and the ``--auth-supernode-public-key`` flag expects a " +"path to the node's public key file. For development purposes, you can " +"generate a private and public key pair using ``ssh-keygen -t ecdsa -b " +"384``." 
msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:70 +#: ../../source/how-to-authenticate-supernodes.rst:91 msgid "Security notice" msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:72 +#: ../../source/how-to-authenticate-supernodes.rst:93 msgid "" "The system's security relies on the credentials of the SuperLink and each" " SuperNode. Therefore, it is imperative to safeguard and safely store the" @@ -6015,19 +6454,19 @@ msgid "" "methods." msgstr "" -#: ../../source/how-to-authenticate-supernodes.rst:77 -#: ../../source/how-to-enable-ssl-connections.rst:68 -#: ../../source/how-to-use-built-in-mods.rst:85 +#: ../../source/how-to-authenticate-supernodes.rst:100 +#: ../../source/how-to-enable-ssl-connections.rst:71 +#: ../../source/how-to-use-built-in-mods.rst:95 #: ../../source/tutorial-series-what-is-federated-learning.ipynb:287 msgid "Conclusion" msgstr "Conclusion" -#: ../../source/how-to-authenticate-supernodes.rst:79 +#: ../../source/how-to-authenticate-supernodes.rst:102 msgid "" "You should now have learned how to start a long-running Flower server " -"(:code:`SuperLink`) and client (:code:`SuperNode`) with node " -"authentication enabled. You should also know the significance of the " -"private key and store it safely to minimize security risks." +"(``SuperLink``) and client (``SuperNode``) with node authentication " +"enabled. You should also know the significance of the private key and " +"store it safely to minimize security risks." msgstr "" #: ../../source/how-to-configure-clients.rst:2 @@ -6048,11 +6487,11 @@ msgstr "" "populaire de contrôler les hyperparamètres côté client à partir du " "serveur." -#: ../../source/how-to-configure-clients.rst:7 +#: ../../source/how-to-configure-clients.rst:9 msgid "Configuration values" msgstr "Valeurs de configuration" -#: ../../source/how-to-configure-clients.rst:9 +#: ../../source/how-to-configure-clients.rst:11 msgid "" "Configuration values are represented as a dictionary with ``str`` keys " "and values of type ``bool``, ``bytes``, ``double`` (64-bit precision " @@ -6065,7 +6504,7 @@ msgstr "" "dans d'autres langages). Voici un exemple de dictionnaire de " "configuration en Python :" -#: ../../source/how-to-configure-clients.rst:20 +#: ../../source/how-to-configure-clients.rst:25 msgid "" "Flower serializes these configuration dictionaries (or *config dict* for " "short) to their ProtoBuf representation, transports them to the client " @@ -6075,7 +6514,7 @@ msgstr "" "abrégé) dans leur représentation ProtoBuf, les transporte vers le client " "à l'aide de gRPC, puis les désérialise à nouveau en dictionnaires Python." -#: ../../source/how-to-configure-clients.rst:24 +#: ../../source/how-to-configure-clients.rst:31 msgid "" "Currently, there is no support for directly sending collection types " "(e.g., ``Set``, ``List``, ``Map``) as values in configuration " @@ -6090,7 +6529,7 @@ msgstr "" "l'un des types de valeurs pris en charge (et en les reconvertissant du " "côté client)." -#: ../../source/how-to-configure-clients.rst:26 +#: ../../source/how-to-configure-clients.rst:36 msgid "" "One can, for example, convert a list of floating-point numbers to a JSON " "string, then send the JSON string using the configuration dictionary, and" @@ -6102,18 +6541,19 @@ msgstr "" "de configuration, et enfin reconvertir la chaîne JSON en une liste de " "nombres à virgule flottante sur le client." 
-#: ../../source/how-to-configure-clients.rst:30 +#: ../../source/how-to-configure-clients.rst:41 msgid "Configuration through built-in strategies" msgstr "Configuration par le biais de stratégies intégrées" -#: ../../source/how-to-configure-clients.rst:32 +#: ../../source/how-to-configure-clients.rst:43 +#, fuzzy msgid "" "The easiest way to send configuration values to clients is to use a " -"built-in strategy like :code:`FedAvg`. Built-in strategies support so-" -"called configuration functions. A configuration function is a function " -"that the built-in strategy calls to get the configuration dictionary for " -"the current round. It then forwards the configuration dictionary to all " -"the clients selected during that round." +"built-in strategy like ``FedAvg``. Built-in strategies support so-called " +"configuration functions. A configuration function is a function that the " +"built-in strategy calls to get the configuration dictionary for the " +"current round. It then forwards the configuration dictionary to all the " +"clients selected during that round." msgstr "" "La façon la plus simple d'envoyer des valeurs de configuration aux " "clients est d'utiliser une stratégie intégrée comme :code:`FedAvg`. Les " @@ -6123,7 +6563,7 @@ msgstr "" "pour le tour en cours. Elle transmet ensuite le dictionnaire de " "configuration à tous les clients sélectionnés au cours de ce tour." -#: ../../source/how-to-configure-clients.rst:34 +#: ../../source/how-to-configure-clients.rst:49 msgid "" "Let's start with a simple example. Imagine we want to send (a) the batch " "size that the client should use, (b) the current global round of " @@ -6135,21 +6575,22 @@ msgstr "" " de l'apprentissage fédéré et (c) le nombre d'époques à former du côté " "client. Notre fonction de configuration pourrait ressembler à ceci :" -#: ../../source/how-to-configure-clients.rst:47 +#: ../../source/how-to-configure-clients.rst:65 +#, fuzzy msgid "" "To make the built-in strategies use this function, we can pass it to " "``FedAvg`` during initialization using the parameter " -":code:`on_fit_config_fn`:" +"``on_fit_config_fn``:" msgstr "" "Pour que les stratégies intégrées utilisent cette fonction, nous pouvons " "la passer à ``FedAvg`` lors de l'initialisation en utilisant le paramètre" " :code:`on_fit_config_fn` :" -#: ../../source/how-to-configure-clients.rst:56 +#: ../../source/how-to-configure-clients.rst:75 msgid "One the client side, we receive the configuration dictionary in ``fit``:" msgstr "Côté client, nous recevons le dictionnaire de configuration dans ``fit`` :" -#: ../../source/how-to-configure-clients.rst:67 +#: ../../source/how-to-configure-clients.rst:86 msgid "" "There is also an `on_evaluate_config_fn` to configure evaluation, which " "works the same way. They are separate functions because one might want to" @@ -6161,7 +6602,7 @@ msgstr "" "séparées car on peut vouloir envoyer différentes valeurs de configuration" " à `evaluate` (par exemple, pour utiliser une taille de lot différente)." -#: ../../source/how-to-configure-clients.rst:69 +#: ../../source/how-to-configure-clients.rst:90 msgid "" "The built-in strategies call this function every round (that is, every " "time `Strategy.configure_fit` or `Strategy.configure_evaluate` runs). 
" @@ -6178,15 +6619,16 @@ msgstr "" "d'hyperparamètres, par exemple, pour augmenter le nombre d'époques " "locales au cours des derniers tours, nous pourrions faire ce qui suit :" -#: ../../source/how-to-configure-clients.rst:82 -msgid "The :code:`FedAvg` strategy will call this function *every round*." +#: ../../source/how-to-configure-clients.rst:107 +#, fuzzy +msgid "The ``FedAvg`` strategy will call this function *every round*." msgstr "La stratégie :code:`FedAvg` appellera cette fonction *à chaque tour*." -#: ../../source/how-to-configure-clients.rst:85 +#: ../../source/how-to-configure-clients.rst:110 msgid "Configuring individual clients" msgstr "Configuration des clients individuels" -#: ../../source/how-to-configure-clients.rst:87 +#: ../../source/how-to-configure-clients.rst:112 msgid "" "In some cases, it is necessary to send different configuration values to " "different clients." @@ -6194,16 +6636,16 @@ msgstr "" "Dans certains cas, il est nécessaire d'envoyer des valeurs de " "configuration différentes à des clients différents." -#: ../../source/how-to-configure-clients.rst:89 +#: ../../source/how-to-configure-clients.rst:115 #, fuzzy msgid "" "This can be achieved by customizing an existing strategy or by " ":doc:`implementing a custom strategy from scratch `. Here's a nonsensical example that customizes :code:`FedAvg`" -" by adding a custom ``\"hello\": \"world\"`` configuration key/value pair" -" to the config dict of a *single client* (only the first client in the " -"list, the other clients in this round to not receive this \"special\" " -"config value):" +"strategies>`. Here's a nonsensical example that customizes ``FedAvg`` by " +"adding a custom ``\"hello\": \"world\"`` configuration key/value pair to " +"the config dict of a *single client* (only the first client in the list, " +"the other clients in this round to not receive this \"special\" config " +"value):" msgstr "" "Ceci peut être réalisé en personnalisant une stratégie existante ou en " "`mettant en œuvre une stratégie personnalisée à partir de zéro " @@ -6229,34 +6671,35 @@ msgstr "" #: ../../source/how-to-configure-logging.rst:13 msgid "" "containing relevant information including: log message level (e.g. " -":code:`INFO`, :code:`DEBUG`), a timestamp, the line where the logging " -"took place from, as well as the log message itself. In this way, the " -"logger would typically display information on your terminal as follows:" +"``INFO``, ``DEBUG``), a timestamp, the line where the logging took place " +"from, as well as the log message itself. In this way, the logger would " +"typically display information on your terminal as follows:" msgstr "" -#: ../../source/how-to-configure-logging.rst:34 +#: ../../source/how-to-configure-logging.rst:35 msgid "Saving log to file" msgstr "" -#: ../../source/how-to-configure-logging.rst:36 +#: ../../source/how-to-configure-logging.rst:37 msgid "" "By default, the Flower log is outputted to the terminal where you launch " "your Federated Learning workload from. This applies for both gRPC-based " -"federation (i.e. when you do :code:`fl.server.start_server`) and when " -"using the :code:`VirtualClientEngine` (i.e. when you do " -":code:`fl.simulation.start_simulation`). In some situations you might " -"want to save this log to disk. You can do so by calling the " +"federation (i.e. when you do ``fl.server.start_server``) and when using " +"the ``VirtualClientEngine`` (i.e. when you do " +"``fl.simulation.start_simulation``). 
In some situations you might want to" +" save this log to disk. You can do so by calling the " "`fl.common.logger.configure() " "`_" " function. For example:" msgstr "" -#: ../../source/how-to-configure-logging.rst:53 +#: ../../source/how-to-configure-logging.rst:59 +#, fuzzy msgid "" "With the above, Flower will record the log you see on your terminal to " -":code:`log.txt`. This file will be created in the same directory as were " -"you are running the code from. If we inspect we see the log above is also" -" recorded but prefixing with :code:`identifier` each line:" +"``log.txt``. This file will be created in the same directory as were you " +"are running the code from. If we inspect we see the log above is also " +"recorded but prefixing with ``identifier`` each line:" msgstr "" "Avec ce qui précède, Flower enregistrera le log que vous voyez sur votre " "terminal dans :code:`log.txt`. Ce fichier sera créé dans le répertoire " @@ -6264,38 +6707,37 @@ msgstr "" "log ci-dessous est également enregistré mais préfixé avec " ":code:`identifier` sur chaque ligne :" -#: ../../source/how-to-configure-logging.rst:74 +#: ../../source/how-to-configure-logging.rst:81 msgid "Log your own messages" msgstr "Loggez vos propres messages" -#: ../../source/how-to-configure-logging.rst:76 +#: ../../source/how-to-configure-logging.rst:83 msgid "" "You might expand the information shown by default with the Flower logger " "by adding more messages relevant to your application. You can achieve " "this easily as follows." msgstr "" -#: ../../source/how-to-configure-logging.rst:102 +#: ../../source/how-to-configure-logging.rst:114 msgid "" "In this way your logger will show, in addition to the default messages, " "the ones introduced by the clients as specified above." msgstr "" -#: ../../source/how-to-configure-logging.rst:128 +#: ../../source/how-to-configure-logging.rst:140 msgid "Log to a remote service" msgstr "" -#: ../../source/how-to-configure-logging.rst:130 +#: ../../source/how-to-configure-logging.rst:142 msgid "" -"The :code:`fl.common.logger.configure` function, also allows specifying a" -" host to which logs can be pushed (via :code:`POST`) through a native " -"Python :code:`logging.handler.HTTPHandler`. This is a particularly useful" -" feature in :code:`gRPC`-based Federated Learning workloads where " -"otherwise gathering logs from all entities (i.e. the server and the " -"clients) might be cumbersome. Note that in Flower simulation, the server " -"automatically displays all logs. You can still specify a " -":code:`HTTPHandler` should you wish to backup or analyze the logs " -"somewhere else." +"The ``fl.common.logger.configure`` function, also allows specifying a " +"host to which logs can be pushed (via ``POST``) through a native Python " +"``logging.handler.HTTPHandler``. This is a particularly useful feature in" +" ``gRPC``-based Federated Learning workloads where otherwise gathering " +"logs from all entities (i.e. the server and the clients) might be " +"cumbersome. Note that in Flower simulation, the server automatically " +"displays all logs. You can still specify a ``HTTPHandler`` should you " +"wish to backup or analyze the logs somewhere else." msgstr "" #: ../../source/how-to-enable-ssl-connections.rst:2 @@ -6307,13 +6749,13 @@ msgstr "Collecte centralisée des données" #, fuzzy msgid "" "This guide describes how to a SSL-enabled secure Flower server " -"(:code:`SuperLink`) can be started and how a Flower client " -"(:code:`SuperNode`) can establish a secure connections to it." 
+"(``SuperLink``) can be started and how a Flower client (``SuperNode``) " +"can establish a secure connections to it." msgstr "" "Ce guide décrit comment démarrer un serveur Flower sécurisé par SSL et " "comment un client Flower peut établir une connexion sécurisée avec lui." -#: ../../source/how-to-enable-ssl-connections.rst:7 +#: ../../source/how-to-enable-ssl-connections.rst:8 #, fuzzy msgid "" "A complete code example demonstrating a secure connection can be found " @@ -6324,11 +6766,11 @@ msgstr "" "trouvé ici `_." -#: ../../source/how-to-enable-ssl-connections.rst:10 +#: ../../source/how-to-enable-ssl-connections.rst:11 #, fuzzy msgid "" -"The code example comes with a :code:`README.md` file which explains how " -"to start it. Although it is already SSL-enabled, it might be less " +"The code example comes with a ``README.md`` file which explains how to " +"start it. Although it is already SSL-enabled, it might be less " "descriptive on how it does so. Stick to this guide for a deeper " "introduction to the topic." msgstr "" @@ -6347,8 +6789,9 @@ msgid "" "Using SSL-enabled connections requires certificates to be passed to the " "server and client. For the purpose of this guide we are going to generate" " self-signed certificates. As this can become quite complex we are going " -"to ask you to run the script in :code:`examples/advanced-" -"tensorflow/certificates/generate.sh` with the following command sequence:" +"to ask you to run the script in ``examples/advanced-" +"tensorflow/certificates/generate.sh`` with the following command " +"sequence:" msgstr "" "L'utilisation de connexions compatibles avec le protocole SSL nécessite " "que des certificats soient transmis au serveur et au client. Pour les " @@ -6360,13 +6803,13 @@ msgstr "" #: ../../source/how-to-enable-ssl-connections.rst:29 #, fuzzy msgid "" -"This will generate the certificates in :code:`examples/advanced-" -"tensorflow/.cache/certificates`." +"This will generate the certificates in ``examples/advanced-" +"tensorflow/.cache/certificates``." msgstr "" "Cela générera les certificats dans :code:`examples/advanced-" "tensorflow/.cache/certificates`." -#: ../../source/how-to-enable-ssl-connections.rst:31 +#: ../../source/how-to-enable-ssl-connections.rst:32 #, fuzzy msgid "" "The approach for generating SSL certificates in the context of this " @@ -6381,12 +6824,12 @@ msgstr "" "servir d'inspiration et de point de départ, mais ne doit pas être " "considérée comme complète pour les environnements de production." -#: ../../source/how-to-enable-ssl-connections.rst:39 +#: ../../source/how-to-enable-ssl-connections.rst:40 #, fuzzy msgid "Server (SuperLink)" msgstr "flower-superlink" -#: ../../source/how-to-enable-ssl-connections.rst:41 +#: ../../source/how-to-enable-ssl-connections.rst:42 #, fuzzy msgid "" "Use the following terminal command to start a sever (SuperLink) that uses" @@ -6395,19 +6838,19 @@ msgstr "" "Nous allons maintenant montrer comment écrire un client qui utilise les " "scripts générés précédemment :" -#: ../../source/how-to-enable-ssl-connections.rst:50 +#: ../../source/how-to-enable-ssl-connections.rst:52 msgid "" "When providing certificates, the server expects a tuple of three " "certificates paths: CA certificate, server certificate and server private" " key." msgstr "" -#: ../../source/how-to-enable-ssl-connections.rst:54 +#: ../../source/how-to-enable-ssl-connections.rst:56 #, fuzzy msgid "Client (SuperNode)" msgstr "Codes d'état du client." 
-#: ../../source/how-to-enable-ssl-connections.rst:56 +#: ../../source/how-to-enable-ssl-connections.rst:58 #, fuzzy msgid "" "Use the following terminal command to start a client (SuperNode) that " @@ -6416,18 +6859,18 @@ msgstr "" "Nous allons maintenant montrer comment écrire un client qui utilise les " "scripts générés précédemment :" -#: ../../source/how-to-enable-ssl-connections.rst:64 +#: ../../source/how-to-enable-ssl-connections.rst:67 #, fuzzy msgid "" -"When setting :code:`root_certificates`, the client expects a file path to" -" PEM-encoded root certificates." +"When setting ``root_certificates``, the client expects a file path to " +"PEM-encoded root certificates." msgstr "" "En définissant :code:`root_certificates`, le client s'attend à recevoir " "les certificats racine codés en PEM sous forme de chaîne d'octets. Nous " "utilisons à nouveau :code:`Path` pour simplifier la lecture de ces " "certificats sous forme de chaînes d'octets." -#: ../../source/how-to-enable-ssl-connections.rst:70 +#: ../../source/how-to-enable-ssl-connections.rst:73 #, fuzzy msgid "" "You should now have learned how to generate self-signed certificates " @@ -6438,12 +6881,12 @@ msgstr "" "à l'aide du script donné, à démarrer un serveur compatible SSL et à " "demander à un client d'établir une connexion sécurisée avec lui." -#: ../../source/how-to-enable-ssl-connections.rst:75 +#: ../../source/how-to-enable-ssl-connections.rst:78 #, fuzzy msgid "Additional resources" msgstr "Ressources supplémentaires" -#: ../../source/how-to-enable-ssl-connections.rst:77 +#: ../../source/how-to-enable-ssl-connections.rst:80 msgid "" "These additional sources might be relevant if you would like to dive " "deeper into the topic of certificates:" @@ -6451,11 +6894,11 @@ msgstr "" "Ces sources supplémentaires peuvent être pertinentes si tu souhaites " "approfondir le sujet des certificats :" -#: ../../source/how-to-enable-ssl-connections.rst:79 +#: ../../source/how-to-enable-ssl-connections.rst:83 msgid "`Let's Encrypt `_" msgstr "`Let's Encrypt `_" -#: ../../source/how-to-enable-ssl-connections.rst:80 +#: ../../source/how-to-enable-ssl-connections.rst:84 msgid "`certbot `_" msgstr "`certbot `_" @@ -6482,13 +6925,15 @@ msgstr "" "intégrées qui sont basées sur la même API que celle décrite ci-dessous." #: ../../source/how-to-implement-strategies.rst:11 -msgid "The :code:`Strategy` abstraction" +#, fuzzy +msgid "The ``Strategy`` abstraction" msgstr "L'abstraction :code:`Stratégie`" #: ../../source/how-to-implement-strategies.rst:13 +#, fuzzy msgid "" "All strategy implementation are derived from the abstract base class " -":code:`flwr.server.strategy.Strategy`, both built-in implementations and " +"``flwr.server.strategy.Strategy``, both built-in implementations and " "third party implementations. This means that custom strategy " "implementations have the exact same capabilities at their disposal as " "built-in ones." 
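[Editor's note] A hedged sketch of what deriving from the abstract base class ``flwr.server.strategy.Strategy`` looks like, as we read the abstract method signatures at the time of writing; the class name is illustrative and every method body is left to be filled in::

    from typing import Dict, List, Optional, Tuple, Union

    from flwr.common import (
        EvaluateIns, EvaluateRes, FitIns, FitRes, Parameters, Scalar,
    )
    from flwr.server.client_manager import ClientManager
    from flwr.server.client_proxy import ClientProxy
    from flwr.server.strategy import Strategy


    class SketchStrategy(Strategy):
        """Skeleton only: implement each abstract method."""

        def initialize_parameters(
            self, client_manager: ClientManager
        ) -> Optional[Parameters]:
            ...

        def configure_fit(
            self, server_round: int, parameters: Parameters,
            client_manager: ClientManager,
        ) -> List[Tuple[ClientProxy, FitIns]]:
            ...

        def aggregate_fit(
            self, server_round: int,
            results: List[Tuple[ClientProxy, FitRes]],
            failures: List[Union[Tuple[ClientProxy, FitRes], BaseException]],
        ) -> Tuple[Optional[Parameters], Dict[str, Scalar]]:
            ...

        def configure_evaluate(
            self, server_round: int, parameters: Parameters,
            client_manager: ClientManager,
        ) -> List[Tuple[ClientProxy, EvaluateIns]]:
            ...

        def aggregate_evaluate(
            self, server_round: int,
            results: List[Tuple[ClientProxy, EvaluateRes]],
            failures: List[Union[Tuple[ClientProxy, EvaluateRes], BaseException]],
        ) -> Tuple[Optional[float], Dict[str, Scalar]]:
            ...

        def evaluate(
            self, server_round: int, parameters: Parameters
        ) -> Optional[Tuple[float, Dict[str, Scalar]]]:
            ...
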
@@ -6507,60 +6952,65 @@ msgstr "" "L'abstraction de la stratégie définit quelques méthodes abstraites qui " "doivent être mises en œuvre :" -#: ../../source/how-to-implement-strategies.rst:75 +#: ../../source/how-to-implement-strategies.rst:67 +#, fuzzy msgid "" -"Creating a new strategy means implementing a new :code:`class` (derived " -"from the abstract base class :code:`Strategy`) that implements for the " -"previously shown abstract methods:" +"Creating a new strategy means implementing a new ``class`` (derived from " +"the abstract base class ``Strategy``) that implements for the previously " +"shown abstract methods:" msgstr "" "La création d'une nouvelle stratégie implique la mise en œuvre d'une " "nouvelle :code:`classe` (dérivée de la classe de base abstraite " ":code:`Stratégie`) qui met en œuvre les méthodes abstraites présentées " "précédemment :" -#: ../../source/how-to-implement-strategies.rst:100 +#: ../../source/how-to-implement-strategies.rst:97 msgid "The Flower server calls these methods in the following order:" msgstr "Le serveur Flower appelle ces méthodes dans l'ordre suivant :" -#: ../../source/how-to-implement-strategies.rst:177 +#: ../../source/how-to-implement-strategies.rst:174 msgid "The following sections describe each of those methods in more detail." msgstr "Les sections suivantes décrivent chacune de ces méthodes plus en détail." -#: ../../source/how-to-implement-strategies.rst:180 -msgid "The :code:`initialize_parameters` method" +#: ../../source/how-to-implement-strategies.rst:177 +#, fuzzy +msgid "The ``initialize_parameters`` method" msgstr "La méthode :code:`initialize_parameters` (initialisation des paramètres)" -#: ../../source/how-to-implement-strategies.rst:182 +#: ../../source/how-to-implement-strategies.rst:179 +#, fuzzy msgid "" -":code:`initialize_parameters` is called only once, at the very beginning " -"of an execution. It is responsible for providing the initial global model" -" parameters in a serialized form (i.e., as a :code:`Parameters` object)." +"``initialize_parameters`` is called only once, at the very beginning of " +"an execution. It is responsible for providing the initial global model " +"parameters in a serialized form (i.e., as a ``Parameters`` object)." msgstr "" ":code:`initialize_parameters` n'est appelé qu'une seule fois, au tout " "début d'une exécution. Il est chargé de fournir les paramètres initiaux " "du modèle global sous une forme sérialisée (c'est-à-dire sous la forme " "d'un objet :code:`Parameters`)." -#: ../../source/how-to-implement-strategies.rst:184 +#: ../../source/how-to-implement-strategies.rst:183 +#, fuzzy msgid "" "Built-in strategies return user-provided initial parameters. The " "following example shows how initial parameters can be passed to " -":code:`FedAvg`:" +"``FedAvg``:" msgstr "" "Les stratégies intégrées renvoient les paramètres initiaux fournis par " "l'utilisateur. L'exemple suivant montre comment les paramètres initiaux " "peuvent être transmis à :code:`FedAvg` :" #: ../../source/how-to-implement-strategies.rst:209 +#, fuzzy msgid "" -"The Flower server will call :code:`initialize_parameters`, which either " -"returns the parameters that were passed to :code:`initial_parameters`, or" -" :code:`None`. If no parameters are returned from " -":code:`initialize_parameters` (i.e., :code:`None`), the server will " -"randomly select one client and ask it to provide its parameters. This is " -"a convenience feature and not recommended in practice, but it can be " -"useful for prototyping. 
In practice, it is recommended to always use " -"server-side parameter initialization." +"The Flower server will call ``initialize_parameters``, which either " +"returns the parameters that were passed to ``initial_parameters``, or " +"``None``. If no parameters are returned from ``initialize_parameters`` " +"(i.e., ``None``), the server will randomly select one client and ask it " +"to provide its parameters. This is a convenience feature and not " +"recommended in practice, but it can be useful for prototyping. In " +"practice, it is recommended to always use server-side parameter " +"initialization." msgstr "" "Le serveur Flower appelle :code:`initialize_parameters`, qui renvoie les " "paramètres passés à :code:`initial_parameters`, ou :code:`None`. Si aucun" @@ -6571,7 +7021,7 @@ msgstr "" "prototypage. Dans la pratique, il est recommandé de toujours utiliser " "l'initialisation des paramètres du côté du serveur." -#: ../../source/how-to-implement-strategies.rst:213 +#: ../../source/how-to-implement-strategies.rst:218 msgid "" "Server-side parameter initialization is a powerful mechanism. It can be " "used, for example, to resume training from a previously saved checkpoint." @@ -6586,58 +7036,63 @@ msgstr "" "hybrides, par exemple, pour affiner un modèle pré-entraîné à l'aide de " "l'apprentissage fédéré." -#: ../../source/how-to-implement-strategies.rst:216 -msgid "The :code:`configure_fit` method" +#: ../../source/how-to-implement-strategies.rst:224 +#, fuzzy +msgid "The ``configure_fit`` method" msgstr "La méthode :code:`configure_fit`" -#: ../../source/how-to-implement-strategies.rst:218 +#: ../../source/how-to-implement-strategies.rst:226 +#, fuzzy msgid "" -":code:`configure_fit` is responsible for configuring the upcoming round " -"of training. What does *configure* mean in this context? Configuring a " -"round means selecting clients and deciding what instructions to send to " -"these clients. The signature of :code:`configure_fit` makes this clear:" +"``configure_fit`` is responsible for configuring the upcoming round of " +"training. What does *configure* mean in this context? Configuring a round" +" means selecting clients and deciding what instructions to send to these " +"clients. The signature of ``configure_fit`` makes this clear:" msgstr "" ":code:`configure_fit` est chargé de configurer le prochain tour de " "formation. Que signifie *configurer* dans ce contexte ? Configurer un " "tour signifie sélectionner des clients et décider des instructions à leur" " envoyer. La signature de :code:`configure_fit` l'indique clairement :" -#: ../../source/how-to-implement-strategies.rst:231 +#: ../../source/how-to-implement-strategies.rst:239 +#, fuzzy msgid "" "The return value is a list of tuples, each representing the instructions " "that will be sent to a particular client. Strategy implementations " -"usually perform the following steps in :code:`configure_fit`:" +"usually perform the following steps in ``configure_fit``:" msgstr "" "La valeur de retour est une liste de tuples, chacun représentant les " "instructions qui seront envoyées à un client particulier. 
Les " "implémentations de stratégies effectuent généralement les étapes " "suivantes dans :code:`configure_fit` :" -#: ../../source/how-to-implement-strategies.rst:233 -#: ../../source/how-to-implement-strategies.rst:280 +#: ../../source/how-to-implement-strategies.rst:243 +#: ../../source/how-to-implement-strategies.rst:307 +#, fuzzy msgid "" -"Use the :code:`client_manager` to randomly sample all (or a subset of) " -"available clients (each represented as a :code:`ClientProxy` object)" +"Use the ``client_manager`` to randomly sample all (or a subset of) " +"available clients (each represented as a ``ClientProxy`` object)" msgstr "" "Utilise le :code:`client_manager` pour échantillonner au hasard tous les " "clients disponibles (ou un sous-ensemble d'entre eux) (chacun représenté " "par un objet :code:`ClientProxy`)" -#: ../../source/how-to-implement-strategies.rst:234 +#: ../../source/how-to-implement-strategies.rst:245 +#, fuzzy msgid "" -"Pair each :code:`ClientProxy` with the same :code:`FitIns` holding the " -"current global model :code:`parameters` and :code:`config` dict" +"Pair each ``ClientProxy`` with the same ``FitIns`` holding the current " +"global model ``parameters`` and ``config`` dict" msgstr "" "Associe chaque :code:`ClientProxy` au même :code:`FitIns` contenant le " "modèle global actuel :code:`parameters` et :code:`config` dict" -#: ../../source/how-to-implement-strategies.rst:236 +#: ../../source/how-to-implement-strategies.rst:248 #, fuzzy msgid "" -"More sophisticated implementations can use :code:`configure_fit` to " -"implement custom client selection logic. A client will only participate " -"in a round if the corresponding :code:`ClientProxy` is included in the " -"list returned from :code:`configure_fit`." +"More sophisticated implementations can use ``configure_fit`` to implement" +" custom client selection logic. A client will only participate in a round" +" if the corresponding ``ClientProxy`` is included in the list returned " +"from ``configure_fit``." msgstr "" "Les implémentations plus sophistiquées peuvent utiliser " ":code:`configure_fit` pour mettre en œuvre une logique de sélection des " @@ -6645,14 +7100,14 @@ msgstr "" ":code:`ClientProxy` correspondant est inclus dans la liste renvoyée par " ":code:`configure_fit`." -#: ../../source/how-to-implement-strategies.rst:240 +#: ../../source/how-to-implement-strategies.rst:254 +#, fuzzy msgid "" "The structure of this return value provides a lot of flexibility to the " "user. Since instructions are defined on a per-client basis, different " "instructions can be sent to each client. This enables custom strategies " "to train, for example, different models on different clients, or use " -"different hyperparameters on different clients (via the :code:`config` " -"dict)." +"different hyperparameters on different clients (via the ``config`` dict)." msgstr "" "La structure de cette valeur de retour offre beaucoup de souplesse à " "l'utilisateur. Comme les instructions sont définies par client, des " @@ -6661,26 +7116,28 @@ msgstr "" " différents modèles sur différents clients, ou utiliser différents " "hyperparamètres sur différents clients (via le dict :code:`config`)." 
-#: ../../source/how-to-implement-strategies.rst:243 -msgid "The :code:`aggregate_fit` method" +#: ../../source/how-to-implement-strategies.rst:261 +#, fuzzy +msgid "The ``aggregate_fit`` method" msgstr "La méthode :code:`aggregate_fit` (agrégation)" -#: ../../source/how-to-implement-strategies.rst:245 +#: ../../source/how-to-implement-strategies.rst:263 +#, fuzzy msgid "" -":code:`aggregate_fit` is responsible for aggregating the results returned" -" by the clients that were selected and asked to train in " -":code:`configure_fit`." +"``aggregate_fit`` is responsible for aggregating the results returned by " +"the clients that were selected and asked to train in ``configure_fit``." msgstr "" ":code:`aggregate_fit` est chargé d'agréger les résultats renvoyés par les" " clients qui ont été sélectionnés et à qui on a demandé de s'entraîner " "dans :code:`configure_fit`." -#: ../../source/how-to-implement-strategies.rst:258 +#: ../../source/how-to-implement-strategies.rst:277 +#, fuzzy msgid "" "Of course, failures can happen, so there is no guarantee that the server " "will get results from all the clients it sent instructions to (via " -":code:`configure_fit`). :code:`aggregate_fit` therefore receives a list " -"of :code:`results`, but also a list of :code:`failures`." +"``configure_fit``). ``aggregate_fit`` therefore receives a list of " +"``results``, but also a list of ``failures``." msgstr "" "Bien sûr, des échecs peuvent se produire, il n'y a donc aucune garantie " "que le serveur obtienne des résultats de tous les clients auxquels il a " @@ -6688,12 +7145,13 @@ msgstr "" ":code:`aggregate_fit` reçoit donc une liste de :code:`résultats`, mais " "aussi une liste de :code:`échecs`." -#: ../../source/how-to-implement-strategies.rst:260 +#: ../../source/how-to-implement-strategies.rst:282 +#, fuzzy msgid "" -":code:`aggregate_fit` returns an optional :code:`Parameters` object and a" -" dictionary of aggregated metrics. The :code:`Parameters` return value is" -" optional because :code:`aggregate_fit` might decide that the results " -"provided are not sufficient for aggregation (e.g., too many failures)." +"``aggregate_fit`` returns an optional ``Parameters`` object and a " +"dictionary of aggregated metrics. The ``Parameters`` return value is " +"optional because ``aggregate_fit`` might decide that the results provided" +" are not sufficient for aggregation (e.g., too many failures)." msgstr "" ":code:`aggregate_fit` renvoie un objet :code:`Parameters` facultatif et " "un dictionnaire de métriques agrégées. La valeur de retour " @@ -6701,17 +7159,18 @@ msgstr "" " que les résultats fournis ne sont pas suffisants pour l'agrégation (par " "exemple, trop d'échecs)." -#: ../../source/how-to-implement-strategies.rst:263 -msgid "The :code:`configure_evaluate` method" +#: ../../source/how-to-implement-strategies.rst:288 +#, fuzzy +msgid "The ``configure_evaluate`` method" msgstr "La méthode :code:`configure_evaluate` (en anglais)" -#: ../../source/how-to-implement-strategies.rst:265 +#: ../../source/how-to-implement-strategies.rst:290 +#, fuzzy msgid "" -":code:`configure_evaluate` is responsible for configuring the upcoming " -"round of evaluation. What does *configure* mean in this context? " -"Configuring a round means selecting clients and deciding what " -"instructions to send to these clients. The signature of " -":code:`configure_evaluate` makes this clear:" +"``configure_evaluate`` is responsible for configuring the upcoming round " +"of evaluation. 
What does *configure* mean in this context? Configuring a " +"round means selecting clients and deciding what instructions to send to " +"these clients. The signature of ``configure_evaluate`` makes this clear:" msgstr "" ":code:`configure_evaluate` est chargé de configurer le prochain tour " "d'évaluation. Que signifie *configurer* dans ce contexte ? Configurer un " @@ -6719,32 +7178,34 @@ msgstr "" " envoyer. La signature de :code:`configure_evaluate` l'indique clairement" " :" -#: ../../source/how-to-implement-strategies.rst:278 +#: ../../source/how-to-implement-strategies.rst:303 +#, fuzzy msgid "" "The return value is a list of tuples, each representing the instructions " "that will be sent to a particular client. Strategy implementations " -"usually perform the following steps in :code:`configure_evaluate`:" +"usually perform the following steps in ``configure_evaluate``:" msgstr "" "La valeur de retour est une liste de tuples, chacun représentant les " "instructions qui seront envoyées à un client particulier. Les " "implémentations de stratégies effectuent généralement les étapes " "suivantes dans :code:`configure_evaluate` :" -#: ../../source/how-to-implement-strategies.rst:281 +#: ../../source/how-to-implement-strategies.rst:309 +#, fuzzy msgid "" -"Pair each :code:`ClientProxy` with the same :code:`EvaluateIns` holding " -"the current global model :code:`parameters` and :code:`config` dict" +"Pair each ``ClientProxy`` with the same ``EvaluateIns`` holding the " +"current global model ``parameters`` and ``config`` dict" msgstr "" "Associe chaque :code:`ClientProxy` au même :code:`EvaluateIns` contenant " "le modèle global actuel :code:`parameters` et :code:`config` dict" -#: ../../source/how-to-implement-strategies.rst:283 +#: ../../source/how-to-implement-strategies.rst:312 #, fuzzy msgid "" -"More sophisticated implementations can use :code:`configure_evaluate` to " +"More sophisticated implementations can use ``configure_evaluate`` to " "implement custom client selection logic. A client will only participate " -"in a round if the corresponding :code:`ClientProxy` is included in the " -"list returned from :code:`configure_evaluate`." +"in a round if the corresponding ``ClientProxy`` is included in the list " +"returned from ``configure_evaluate``." msgstr "" "Les implémentations plus sophistiquées peuvent utiliser " ":code:`configure_evaluate` pour mettre en œuvre une logique de sélection " @@ -6752,14 +7213,14 @@ msgstr "" ":code:`ClientProxy` correspondant est inclus dans la liste renvoyée par " ":code:`configure_evaluate`." -#: ../../source/how-to-implement-strategies.rst:287 +#: ../../source/how-to-implement-strategies.rst:318 +#, fuzzy msgid "" "The structure of this return value provides a lot of flexibility to the " "user. Since instructions are defined on a per-client basis, different " "instructions can be sent to each client. This enables custom strategies " "to evaluate, for example, different models on different clients, or use " -"different hyperparameters on different clients (via the :code:`config` " -"dict)." +"different hyperparameters on different clients (via the ``config`` dict)." msgstr "" "La structure de cette valeur de retour offre beaucoup de souplesse à " "l'utilisateur. Comme les instructions sont définies par client, des " @@ -6768,26 +7229,29 @@ msgstr "" "modèles sur différents clients, ou d'utiliser différents hyperparamètres " "sur différents clients (via le dict :code:`config`)." 
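[Editor's note] A hedged sketch of an ``aggregate_fit`` override that works with the ``results``/``failures`` pair described above, delegating parameter aggregation to ``FedAvg`` and adding one aggregated metric; the class name and the metric key are illustrative::

    from typing import Dict, List, Optional, Tuple, Union

    from flwr.common import FitRes, Parameters, Scalar
    from flwr.server.client_proxy import ClientProxy
    from flwr.server.strategy import FedAvg


    class MetricsFedAvg(FedAvg):
        def aggregate_fit(
            self, server_round: int,
            results: List[Tuple[ClientProxy, FitRes]],
            failures: List[Union[Tuple[ClientProxy, FitRes], BaseException]],
        ) -> Tuple[Optional[Parameters], Dict[str, Scalar]]:
            if not results:
                # No usable results: skip aggregation for this round
                return None, {}
            # Reuse FedAvg's weighted parameter aggregation
            parameters, metrics = super().aggregate_fit(
                server_round, results, failures
            )
            # Example aggregated metric: total number of training examples
            metrics["num_examples_total"] = sum(
                fit_res.num_examples for _, fit_res in results
            )
            return parameters, metrics
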
-#: ../../source/how-to-implement-strategies.rst:291 -msgid "The :code:`aggregate_evaluate` method" +#: ../../source/how-to-implement-strategies.rst:325 +#, fuzzy +msgid "The ``aggregate_evaluate`` method" msgstr "La méthode :code:`aggregate_evaluate` (agréger_évaluer)" -#: ../../source/how-to-implement-strategies.rst:293 +#: ../../source/how-to-implement-strategies.rst:327 +#, fuzzy msgid "" -":code:`aggregate_evaluate` is responsible for aggregating the results " +"``aggregate_evaluate`` is responsible for aggregating the results " "returned by the clients that were selected and asked to evaluate in " -":code:`configure_evaluate`." +"``configure_evaluate``." msgstr "" ":code:`aggregate_evaluate` est chargé d'agréger les résultats renvoyés " "par les clients qui ont été sélectionnés et à qui l'on a demandé " "d'évaluer dans :code:`configure_evaluate`." -#: ../../source/how-to-implement-strategies.rst:306 +#: ../../source/how-to-implement-strategies.rst:341 +#, fuzzy msgid "" "Of course, failures can happen, so there is no guarantee that the server " "will get results from all the clients it sent instructions to (via " -":code:`configure_evaluate`). :code:`aggregate_evaluate` therefore " -"receives a list of :code:`results`, but also a list of :code:`failures`." +"``configure_evaluate``). ``aggregate_evaluate`` therefore receives a list" +" of ``results``, but also a list of ``failures``." msgstr "" "Bien sûr, des échecs peuvent se produire, il n'y a donc aucune garantie " "que le serveur obtienne des résultats de tous les clients auxquels il a " @@ -6795,12 +7259,13 @@ msgstr "" ":code:`aggregate_evaluate` reçoit donc une liste de :code:`résultats`, " "mais aussi une liste d' :code:`échecs`." -#: ../../source/how-to-implement-strategies.rst:308 +#: ../../source/how-to-implement-strategies.rst:346 +#, fuzzy msgid "" -":code:`aggregate_evaluate` returns an optional :code:`float` (loss) and a" -" dictionary of aggregated metrics. The :code:`float` return value is " -"optional because :code:`aggregate_evaluate` might decide that the results" -" provided are not sufficient for aggregation (e.g., too many failures)." +"``aggregate_evaluate`` returns an optional ``float`` (loss) and a " +"dictionary of aggregated metrics. The ``float`` return value is optional " +"because ``aggregate_evaluate`` might decide that the results provided are" +" not sufficient for aggregation (e.g., too many failures)." msgstr "" ":code:`aggregate_evaluate` renvoie un :code:`float` facultatif (perte) et" " un dictionnaire de mesures agrégées. La valeur de retour :code:`float` " @@ -6808,28 +7273,31 @@ msgstr "" "résultats fournis ne sont pas suffisants pour l'agrégation (par exemple, " "trop d'échecs)." -#: ../../source/how-to-implement-strategies.rst:311 -msgid "The :code:`evaluate` method" +#: ../../source/how-to-implement-strategies.rst:352 +#, fuzzy +msgid "The ``evaluate`` method" msgstr "La méthode :code:`évaluer`" -#: ../../source/how-to-implement-strategies.rst:313 +#: ../../source/how-to-implement-strategies.rst:354 +#, fuzzy msgid "" -":code:`evaluate` is responsible for evaluating model parameters on the " -"server-side. Having :code:`evaluate` in addition to " -":code:`configure_evaluate`/:code:`aggregate_evaluate` enables strategies " -"to perform both servers-side and client-side (federated) evaluation." +"``evaluate`` is responsible for evaluating model parameters on the " +"server-side. 
Having ``evaluate`` in addition to " +"``configure_evaluate``/``aggregate_evaluate`` enables strategies to " +"perform both servers-side and client-side (federated) evaluation." msgstr "" "le fait d'avoir :code:`evaluate` en plus de " ":code:`configure_evaluate`/:code:`aggregate_evaluate` permet aux " "stratégies d'effectuer des évaluations à la fois côté serveur et côté " "client (fédéré)." -#: ../../source/how-to-implement-strategies.rst:323 +#: ../../source/how-to-implement-strategies.rst:364 +#, fuzzy msgid "" "The return value is again optional because the strategy might not need to" " implement server-side evaluation or because the user-defined " -":code:`evaluate` method might not complete successfully (e.g., it might " -"fail to load the server-side evaluation data)." +"``evaluate`` method might not complete successfully (e.g., it might fail " +"to load the server-side evaluation data)." msgstr "" "La valeur de retour est à nouveau facultative parce que la stratégie peut" " ne pas avoir besoin de mettre en œuvre l'évaluation côté serveur ou " @@ -6842,65 +7310,65 @@ msgstr "" msgid "Install Flower" msgstr "Installer Flower" -#: ../../source/how-to-install-flower.rst:6 +#: ../../source/how-to-install-flower.rst:5 #, fuzzy msgid "Python version" msgstr "Version Python" -#: ../../source/how-to-install-flower.rst:12 +#: ../../source/how-to-install-flower.rst:11 msgid "Install stable release" msgstr "Installe la version stable" -#: ../../source/how-to-install-flower.rst:15 -#: ../../source/how-to-upgrade-to-flower-next.rst:46 +#: ../../source/how-to-install-flower.rst:14 +#: ../../source/how-to-upgrade-to-flower-next.rst:66 msgid "Using pip" msgstr "" -#: ../../source/how-to-install-flower.rst:17 -msgid "" -"Stable releases are available on `PyPI " -"`_::" +#: ../../source/how-to-install-flower.rst:16 +#, fuzzy +msgid "Stable releases are available on `PyPI `_:" msgstr "" "Les versions stables sont disponibles sur `PyPI " "`_: :" -#: ../../source/how-to-install-flower.rst:21 +#: ../../source/how-to-install-flower.rst:22 +#, fuzzy msgid "" "For simulations that use the Virtual Client Engine, ``flwr`` should be " -"installed with the ``simulation`` extra::" +"installed with the ``simulation`` extra:" msgstr "" "Pour les simulations qui utilisent le moteur de client virtuel, ``flwr`` " "doit être installé avec l'option ``simulation``: :" -#: ../../source/how-to-install-flower.rst:27 +#: ../../source/how-to-install-flower.rst:30 msgid "Using conda (or mamba)" msgstr "" -#: ../../source/how-to-install-flower.rst:29 +#: ../../source/how-to-install-flower.rst:32 msgid "Flower can also be installed from the ``conda-forge`` channel." 
msgstr "" -#: ../../source/how-to-install-flower.rst:31 +#: ../../source/how-to-install-flower.rst:34 msgid "" "If you have not added ``conda-forge`` to your channels, you will first " -"need to run the following::" +"need to run the following:" msgstr "" -#: ../../source/how-to-install-flower.rst:36 +#: ../../source/how-to-install-flower.rst:42 msgid "" "Once the ``conda-forge`` channel has been enabled, ``flwr`` can be " -"installed with ``conda``::" +"installed with ``conda``:" msgstr "" -#: ../../source/how-to-install-flower.rst:40 -msgid "or with ``mamba``::" +#: ../../source/how-to-install-flower.rst:49 +msgid "or with ``mamba``:" msgstr "" -#: ../../source/how-to-install-flower.rst:46 +#: ../../source/how-to-install-flower.rst:56 msgid "Verify installation" msgstr "Vérifie l'installation" -#: ../../source/how-to-install-flower.rst:48 +#: ../../source/how-to-install-flower.rst:58 #, fuzzy msgid "" "The following command can be used to verify if Flower was successfully " @@ -6911,58 +7379,62 @@ msgstr "" "installé avec succès. Si tout a fonctionné, la version de Flower devrait " "être imprimée sur la ligne de commande: :" -#: ../../source/how-to-install-flower.rst:58 +#: ../../source/how-to-install-flower.rst:68 msgid "Advanced installation options" msgstr "Options d'installation avancées" -#: ../../source/how-to-install-flower.rst:61 +#: ../../source/how-to-install-flower.rst:71 #, fuzzy msgid "Install via Docker" msgstr "Installer Flower" -#: ../../source/how-to-install-flower.rst:63 +#: ../../source/how-to-install-flower.rst:73 msgid ":doc:`Run Flower using Docker `" msgstr "" -#: ../../source/how-to-install-flower.rst:66 +#: ../../source/how-to-install-flower.rst:76 msgid "Install pre-release" msgstr "Installer la version pre-release" -#: ../../source/how-to-install-flower.rst:68 +#: ../../source/how-to-install-flower.rst:78 +#, fuzzy msgid "" "New (possibly unstable) versions of Flower are sometimes available as " "pre-release versions (alpha, beta, release candidate) before the stable " -"release happens::" +"release happens:" msgstr "" "Les nouvelles versions (éventuellement instables) de Flower sont parfois " "disponibles en tant que versions préliminaires (alpha, bêta, release " "candidate) avant que la version stable n'arrive : :" -#: ../../source/how-to-install-flower.rst:72 +#: ../../source/how-to-install-flower.rst:85 +#, fuzzy msgid "" "For simulations that use the Virtual Client Engine, ``flwr`` pre-releases" -" should be installed with the ``simulation`` extra::" +" should be installed with the ``simulation`` extra:" msgstr "" "Pour les simulations qui utilisent le moteur de client virtuel, les " "versions de ``flwr`` doivent être installées avec l'option " "``simulation``: :" -#: ../../source/how-to-install-flower.rst:77 +#: ../../source/how-to-install-flower.rst:93 msgid "Install nightly release" msgstr "Installer la version nightly" -#: ../../source/how-to-install-flower.rst:79 +#: ../../source/how-to-install-flower.rst:95 +#, fuzzy msgid "" "The latest (potentially unstable) changes in Flower are available as " -"nightly releases::" +"nightly releases:" msgstr "" "Les dernières modifications (potentiellement instables) de Flower sont " "disponibles sous forme de versions nocturnes: :" -#: ../../source/how-to-install-flower.rst:83 +#: ../../source/how-to-install-flower.rst:101 +#, fuzzy msgid "" "For simulations that use the Virtual Client Engine, ``flwr-nightly`` " -"should be installed with the ``simulation`` extra::" +"should be installed with the ``simulation`` 
extra:" msgstr "" "Pour les simulations qui utilisent le moteur de client virtuel, ``flwr-" "nightly`` doit être installé avec l'option ``simulation``: :" @@ -6987,7 +7459,7 @@ msgstr "" "sur la consommation des ressources peuvent t'aider à prendre des " "décisions plus intelligentes et à accélérer le temps d'exécution." -#: ../../source/how-to-monitor-simulation.rst:6 +#: ../../source/how-to-monitor-simulation.rst:9 msgid "" "The specific instructions assume you are using macOS and have the " "`Homebrew `_ package manager installed." @@ -6995,11 +7467,11 @@ msgstr "" "Les instructions spécifiques supposent que tu utilises macOS et que le " "gestionnaire de paquets `Homebrew `_ est installé." -#: ../../source/how-to-monitor-simulation.rst:10 +#: ../../source/how-to-monitor-simulation.rst:13 msgid "Downloads" msgstr "Téléchargements" -#: ../../source/how-to-monitor-simulation.rst:16 +#: ../../source/how-to-monitor-simulation.rst:19 msgid "" "`Prometheus `_ is used for data collection, while" " `Grafana `_ will enable you to visualize the " @@ -7011,7 +7483,7 @@ msgstr "" "visualiser les données collectées. Ils sont tous deux bien intégrés à " "`Ray `_ que Flower utilise sous le capot." -#: ../../source/how-to-monitor-simulation.rst:18 +#: ../../source/how-to-monitor-simulation.rst:23 msgid "" "Overwrite the configuration files (depending on your device, it might be " "installed on a different path)." @@ -7019,17 +7491,17 @@ msgstr "" "Écrase les fichiers de configuration (selon ton appareil, il se peut " "qu'il soit installé sur un chemin différent)." -#: ../../source/how-to-monitor-simulation.rst:20 +#: ../../source/how-to-monitor-simulation.rst:26 msgid "If you are on an M1 Mac, it should be:" msgstr "Si tu es sur un Mac M1, il devrait l'être :" -#: ../../source/how-to-monitor-simulation.rst:27 +#: ../../source/how-to-monitor-simulation.rst:33 msgid "On the previous generation Intel Mac devices, it should be:" msgstr "" "Sur les appareils Mac Intel de la génération précédente, ce devrait être " "le cas :" -#: ../../source/how-to-monitor-simulation.rst:34 +#: ../../source/how-to-monitor-simulation.rst:40 msgid "" "Open the respective configuration files and change them. Depending on " "your device, use one of the two following commands:" @@ -7037,7 +7509,7 @@ msgstr "" "Ouvre les fichiers de configuration respectifs et modifie-les. Selon ton " "appareil, utilise l'une des deux commandes suivantes :" -#: ../../source/how-to-monitor-simulation.rst:44 +#: ../../source/how-to-monitor-simulation.rst:51 msgid "" "and then delete all the text in the file and paste a new Prometheus " "config you see below. You may adjust the time intervals to your " @@ -7047,7 +7519,7 @@ msgstr "" "configuration Prometheus que tu vois ci-dessous. Tu peux adapter les " "intervalles de temps à tes besoins :" -#: ../../source/how-to-monitor-simulation.rst:59 +#: ../../source/how-to-monitor-simulation.rst:67 msgid "" "Now after you have edited the Prometheus configuration, do the same with " "the Grafana configuration files. Open those using one of the following " @@ -7057,7 +7529,7 @@ msgstr "" "même avec les fichiers de configuration de Grafana. Ouvre ces derniers à " "l'aide de l'une des commandes suivantes, comme précédemment :" -#: ../../source/how-to-monitor-simulation.rst:69 +#: ../../source/how-to-monitor-simulation.rst:78 msgid "" "Your terminal editor should open and allow you to apply the following " "configuration as before." 
@@ -7065,7 +7537,7 @@ msgstr "" "Ton éditeur de terminal devrait s'ouvrir et te permettre d'appliquer la " "configuration suivante comme précédemment." -#: ../../source/how-to-monitor-simulation.rst:84 +#: ../../source/how-to-monitor-simulation.rst:94 msgid "" "Congratulations, you just downloaded all the necessary software needed " "for metrics tracking. Now, let’s start it." @@ -7073,11 +7545,11 @@ msgstr "" "Félicitations, tu viens de télécharger tous les logiciels nécessaires au " "suivi des métriques, maintenant, démarrons-le." -#: ../../source/how-to-monitor-simulation.rst:88 +#: ../../source/how-to-monitor-simulation.rst:98 msgid "Tracking metrics" msgstr "Suivi des mesures" -#: ../../source/how-to-monitor-simulation.rst:90 +#: ../../source/how-to-monitor-simulation.rst:100 msgid "" "Before running your Flower simulation, you have to start the monitoring " "tools you have just installed and configured." @@ -7085,7 +7557,7 @@ msgstr "" "Avant de lancer ta simulation Flower, tu dois démarrer les outils de " "surveillance que tu viens d'installer et de configurer." -#: ../../source/how-to-monitor-simulation.rst:97 +#: ../../source/how-to-monitor-simulation.rst:108 msgid "" "Please include the following argument in your Python code when starting a" " simulation." @@ -7093,11 +7565,11 @@ msgstr "" "Tu dois inclure l'argument suivant dans ton code Python lorsque tu " "démarres une simulation." -#: ../../source/how-to-monitor-simulation.rst:108 +#: ../../source/how-to-monitor-simulation.rst:119 msgid "Now, you are ready to start your workload." msgstr "Maintenant, tu es prêt à commencer ta charge de travail." -#: ../../source/how-to-monitor-simulation.rst:110 +#: ../../source/how-to-monitor-simulation.rst:121 msgid "" "Shortly after the simulation starts, you should see the following logs in" " your terminal:" @@ -7105,11 +7577,12 @@ msgstr "" "Peu de temps après le début de la simulation, tu devrais voir les " "journaux suivants dans ton terminal :" -#: ../../source/how-to-monitor-simulation.rst:117 -msgid "You can look at everything at ``_ ." +#: ../../source/how-to-monitor-simulation.rst:127 +#, fuzzy +msgid "You can look at everything at http://127.0.0.1:8265 ." msgstr "Tu peux tout regarder sur ``_ ." -#: ../../source/how-to-monitor-simulation.rst:119 +#: ../../source/how-to-monitor-simulation.rst:129 msgid "" "It's a Ray Dashboard. You can navigate to Metrics (on the left panel, the" " lowest option)." @@ -7117,7 +7590,7 @@ msgstr "" "Il s'agit d'un tableau de bord Ray. Tu peux naviguer vers Metrics (sur le" " panneau de gauche, l'option la plus basse)." -#: ../../source/how-to-monitor-simulation.rst:121 +#: ../../source/how-to-monitor-simulation.rst:132 msgid "" "Or alternatively, you can just see them in Grafana by clicking on the " "right-up corner, “View in Grafana”. Please note that the Ray dashboard is" @@ -7131,21 +7604,22 @@ msgstr "" "terminée, tu ne peux utiliser Grafana que pour explorer les métriques. Tu" " peux démarrer Grafana en te rendant sur `http://localhost:3000/``." -#: ../../source/how-to-monitor-simulation.rst:123 +#: ../../source/how-to-monitor-simulation.rst:137 +#, fuzzy msgid "" "After you finish the visualization, stop Prometheus and Grafana. This is " -"important as they will otherwise block, for example port :code:`3000` on " -"your machine as long as they are running." +"important as they will otherwise block, for example port ``3000`` on your" +" machine as long as they are running." 
msgstr "" "Après avoir terminé la visualisation, arrête Prometheus et Grafana. C'est" " important car sinon ils bloqueront, par exemple, le port :code:`3000` " "sur ta machine tant qu'ils seront en cours d'exécution." -#: ../../source/how-to-monitor-simulation.rst:132 +#: ../../source/how-to-monitor-simulation.rst:147 msgid "Resource allocation" msgstr "Allocation des ressources" -#: ../../source/how-to-monitor-simulation.rst:134 +#: ../../source/how-to-monitor-simulation.rst:149 msgid "" "You must understand how the Ray library works to efficiently allocate " "system resources to simulation clients on your own." @@ -7154,7 +7628,7 @@ msgstr "" "efficacement les ressources du système aux clients de simulation de ton " "côté." -#: ../../source/how-to-monitor-simulation.rst:136 +#: ../../source/how-to-monitor-simulation.rst:152 msgid "" "Initially, the simulation (which Ray handles under the hood) starts by " "default with all the available resources on the system, which it shares " @@ -7171,11 +7645,11 @@ msgstr "" "ce blog. Tu peux vérifier les ressources du système en exécutant ce qui " "suit :" -#: ../../source/how-to-monitor-simulation.rst:143 +#: ../../source/how-to-monitor-simulation.rst:164 msgid "In Google Colab, the result you see might be similar to this:" msgstr "Dans Google Colab, le résultat que tu obtiens peut ressembler à ceci :" -#: ../../source/how-to-monitor-simulation.rst:155 +#: ../../source/how-to-monitor-simulation.rst:175 msgid "" "However, you can overwrite the defaults. When starting a simulation, do " "the following (you don't need to overwrite all of them):" @@ -7184,11 +7658,11 @@ msgstr "" "une simulation, fais ce qui suit (tu n'as pas besoin de les écraser " "toutes) :" -#: ../../source/how-to-monitor-simulation.rst:175 +#: ../../source/how-to-monitor-simulation.rst:195 msgid "Let’s also specify the resource for a single client." msgstr "Spécifions également la ressource pour un seul client." -#: ../../source/how-to-monitor-simulation.rst:205 +#: ../../source/how-to-monitor-simulation.rst:225 msgid "" "Now comes the crucial part. Ray will start a new client only when it has " "all the required resources (such that they run in parallel) when the " @@ -7198,14 +7672,15 @@ msgstr "" "ressources nécessaires (de manière à ce qu'ils fonctionnent en parallèle)" " lorsque les ressources le permettront." -#: ../../source/how-to-monitor-simulation.rst:207 +#: ../../source/how-to-monitor-simulation.rst:228 +#, fuzzy msgid "" "In the example above, only one client will be run, so your clients won't " -"run concurrently. Setting :code:`client_num_gpus = 0.5` would allow " -"running two clients and therefore enable them to run concurrently. Be " -"careful not to require more resources than available. If you specified " -":code:`client_num_gpus = 2`, the simulation wouldn't start (even if you " -"had 2 GPUs but decided to set 1 in :code:`ray_init_args`)." +"run concurrently. Setting ``client_num_gpus = 0.5`` would allow running " +"two clients and therefore enable them to run concurrently. Be careful not" +" to require more resources than available. If you specified " +"``client_num_gpus = 2``, the simulation wouldn't start (even if you had 2" +" GPUs but decided to set 1 in ``ray_init_args``)." msgstr "" "Dans l'exemple ci-dessus, un seul client sera exécuté, donc tes clients " "ne fonctionneront pas simultanément. En définissant " @@ -7216,15 +7691,15 @@ msgstr "" "as 2 GPU mais que tu as décidé d'en définir 1 dans " ":code:`ray_init_args`)." 
-#: ../../source/how-to-monitor-simulation.rst:212 ../../source/ref-faq.rst:2 +#: ../../source/how-to-monitor-simulation.rst:235 ../../source/ref-faq.rst:2 msgid "FAQ" msgstr "FAQ" -#: ../../source/how-to-monitor-simulation.rst:214 +#: ../../source/how-to-monitor-simulation.rst:237 msgid "Q: I don't see any metrics logged." msgstr "Q : Je ne vois aucune mesure enregistrée." -#: ../../source/how-to-monitor-simulation.rst:216 +#: ../../source/how-to-monitor-simulation.rst:239 msgid "" "A: The timeframe might not be properly set. The setting is in the top " "right corner (\"Last 30 minutes\" by default). Please change the " @@ -7235,7 +7710,7 @@ msgstr "" "défaut). Modifie le délai pour qu'il corresponde à la période pendant " "laquelle la simulation s'est déroulée." -#: ../../source/how-to-monitor-simulation.rst:218 +#: ../../source/how-to-monitor-simulation.rst:243 msgid "" "Q: I see “Grafana server not detected. Please make sure the Grafana " "server is running and refresh this page” after going to the Metrics tab " @@ -7245,7 +7720,7 @@ msgstr "" "serveur Grafana fonctionne et actualise cette page\" après avoir accédé à" " l'onglet Métriques dans Ray Dashboard." -#: ../../source/how-to-monitor-simulation.rst:220 +#: ../../source/how-to-monitor-simulation.rst:246 msgid "" "A: You probably don't have Grafana running. Please check the running " "services" @@ -7253,15 +7728,16 @@ msgstr "" "R : Grafana n'est probablement pas en cours d'exécution. Vérifie les " "services en cours d'exécution" -#: ../../source/how-to-monitor-simulation.rst:226 +#: ../../source/how-to-monitor-simulation.rst:252 +#, fuzzy msgid "" "Q: I see \"This site can't be reached\" when going to " -"``_." +"http://127.0.0.1:8265." msgstr "" "Q : Je vois \"This site can't be reached\" quand je vais sur " "``_." -#: ../../source/how-to-monitor-simulation.rst:228 +#: ../../source/how-to-monitor-simulation.rst:254 msgid "" "A: Either the simulation has already finished, or you still need to start" " Prometheus." @@ -7269,22 +7745,22 @@ msgstr "" "R : Soit la simulation est déjà terminée, soit tu dois encore démarrer " "Prometheus." -#: ../../source/how-to-monitor-simulation.rst:232 +#: ../../source/how-to-monitor-simulation.rst:257 msgid "Resources" msgstr "Ressources" -#: ../../source/how-to-monitor-simulation.rst:234 +#: ../../source/how-to-monitor-simulation.rst:259 #, fuzzy msgid "" -"Ray Dashboard: ``_" +"Ray Dashboard: https://docs.ray.io/en/latest/ray-observability/getting-" +"started.html" msgstr "" "Tableau de bord Ray : ``_" -#: ../../source/how-to-monitor-simulation.rst:236 +#: ../../source/how-to-monitor-simulation.rst:261 #, fuzzy -msgid "Ray Metrics: ``_" +msgid "Ray Metrics: https://docs.ray.io/en/latest/cluster/metrics.html" msgstr "" "Ray Metrics : ``_" @@ -7310,19 +7786,19 @@ msgid "" "VCE." msgstr "" -#: ../../source/how-to-run-simulations.rst:10 +#: ../../source/how-to-run-simulations.rst:19 msgid "" -"The :code:`VirtualClientEngine` schedules, launches and manages `virtual`" -" clients. These clients are identical to `non-virtual` clients (i.e. the " +"The ``VirtualClientEngine`` schedules, launches and manages `virtual` " +"clients. These clients are identical to `non-virtual` clients (i.e. the " "ones you launch via the command `flwr.client.start_client `_) in the sense that they can be configure by " "creating a class inheriting, for example, from `flwr.client.NumPyClient " "`_ and therefore behave in an " "identical way. 
In addition to that, clients managed by the " -":code:`VirtualClientEngine` are:" +"``VirtualClientEngine`` are:" msgstr "" -#: ../../source/how-to-run-simulations.rst:12 +#: ../../source/how-to-run-simulations.rst:26 msgid "" "resource-aware: this means that each client gets assigned a portion of " "the compute and memory on your system. You as a user can control this at " @@ -7331,14 +7807,14 @@ msgid "" "client, the more clients can run concurrently on the same hardware." msgstr "" -#: ../../source/how-to-run-simulations.rst:13 +#: ../../source/how-to-run-simulations.rst:31 msgid "" "self-managed: this means that you as a user do not need to launch clients" -" manually, instead this gets delegated to :code:`VirtualClientEngine`'s " +" manually, instead this gets delegated to ``VirtualClientEngine``'s " "internals." msgstr "" -#: ../../source/how-to-run-simulations.rst:14 +#: ../../source/how-to-run-simulations.rst:33 msgid "" "ephemeral: this means that a client is only materialized when it is " "required in the FL process (e.g. to do `fit() `_, an open-source framework for scalable Python " -"workloads. In particular, Flower's :code:`VirtualClientEngine` makes use " -"of `Actors `_ to " -"spawn `virtual` clients and run their workload." +"workloads. In particular, Flower's ``VirtualClientEngine`` makes use of " +"`Actors `_ to spawn " +"`virtual` clients and run their workload." msgstr "" -#: ../../source/how-to-run-simulations.rst:20 +#: ../../source/how-to-run-simulations.rst:45 msgid "Launch your Flower simulation" msgstr "" -#: ../../source/how-to-run-simulations.rst:22 +#: ../../source/how-to-run-simulations.rst:47 msgid "" "Running Flower simulations still require you to define your client class," " a strategy, and utility functions to download and load (and potentially " @@ -7370,37 +7846,37 @@ msgid "" " as follows:" msgstr "" -#: ../../source/how-to-run-simulations.rst:44 +#: ../../source/how-to-run-simulations.rst:73 #, fuzzy msgid "VirtualClientEngine resources" msgstr "Moteur de client virtuel" -#: ../../source/how-to-run-simulations.rst:45 +#: ../../source/how-to-run-simulations.rst:75 msgid "" "By default the VCE has access to all system resources (i.e. all CPUs, all" " GPUs, etc) since that is also the default behavior when starting Ray. " "However, in some settings you might want to limit how many of your system" " resources are used for simulation. You can do this via the " -":code:`ray_init_args` input argument to :code:`start_simulation` which " -"the VCE internally passes to Ray's :code:`ray.init` command. For a " -"complete list of settings you can configure check the `ray.init " +"``ray_init_args`` input argument to ``start_simulation`` which the VCE " +"internally passes to Ray's ``ray.init`` command. For a complete list of " +"settings you can configure check the `ray.init " "`_" -" documentation. Do not set :code:`ray_init_args` if you want the VCE to " -"use all your system's CPUs and GPUs." +" documentation. Do not set ``ray_init_args`` if you want the VCE to use " +"all your system's CPUs and GPUs." msgstr "" -#: ../../source/how-to-run-simulations.rst:62 +#: ../../source/how-to-run-simulations.rst:97 msgid "Assigning client resources" msgstr "" -#: ../../source/how-to-run-simulations.rst:63 +#: ../../source/how-to-run-simulations.rst:99 msgid "" -"By default the :code:`VirtualClientEngine` assigns a single CPU core (and" -" nothing else) to each virtual client. 
This means that if your system has" -" 10 cores, that many virtual clients can be concurrently running." +"By default the ``VirtualClientEngine`` assigns a single CPU core (and " +"nothing else) to each virtual client. This means that if your system has " +"10 cores, that many virtual clients can be concurrently running." msgstr "" -#: ../../source/how-to-run-simulations.rst:65 +#: ../../source/how-to-run-simulations.rst:103 msgid "" "More often than not, you would probably like to adjust the resources your" " clients get assigned based on the complexity (i.e. compute and memory " @@ -7411,34 +7887,32 @@ msgid "" "our case Flower clients):" msgstr "" -#: ../../source/how-to-run-simulations.rst:67 -msgid ":code:`num_cpus` indicates the number of CPU cores a client would get." +#: ../../source/how-to-run-simulations.rst:110 +msgid "``num_cpus`` indicates the number of CPU cores a client would get." msgstr "" -#: ../../source/how-to-run-simulations.rst:68 -msgid "" -":code:`num_gpus` indicates the **ratio** of GPU memory a client gets " -"assigned." +#: ../../source/how-to-run-simulations.rst:111 +msgid "``num_gpus`` indicates the **ratio** of GPU memory a client gets assigned." msgstr "" -#: ../../source/how-to-run-simulations.rst:70 +#: ../../source/how-to-run-simulations.rst:113 msgid "Let's see a few examples:" msgstr "" -#: ../../source/how-to-run-simulations.rst:89 +#: ../../source/how-to-run-simulations.rst:132 msgid "" -"While the :code:`client_resources` can be used to control the degree of " +"While the ``client_resources`` can be used to control the degree of " "concurrency in your FL simulation, this does not stop you from running " "dozens, hundreds or even thousands of clients in the same round and " "having orders of magnitude more `dormant` (i.e. not participating in a " "round) clients. Let's say you want to have 100 clients per round but your" " system can only accommodate 8 clients concurrently. The " -":code:`VirtualClientEngine` will schedule 100 jobs to run (each " -"simulating a client sampled by the strategy) and then will execute them " -"in a resource-aware manner in batches of 8." +"``VirtualClientEngine`` will schedule 100 jobs to run (each simulating a " +"client sampled by the strategy) and then will execute them in a resource-" +"aware manner in batches of 8." msgstr "" -#: ../../source/how-to-run-simulations.rst:91 +#: ../../source/how-to-run-simulations.rst:140 msgid "" "To understand all the intricate details on how resources are used to " "schedule FL clients and how to define custom resources, please take a " @@ -7446,19 +7920,19 @@ msgid "" "core/scheduling/resources.html>`_." msgstr "" -#: ../../source/how-to-run-simulations.rst:94 +#: ../../source/how-to-run-simulations.rst:145 #, fuzzy msgid "Simulation examples" msgstr "Exemples de PyTorch" -#: ../../source/how-to-run-simulations.rst:96 +#: ../../source/how-to-run-simulations.rst:147 msgid "" "A few ready-to-run complete examples for Flower simulation in " "Tensorflow/Keras and PyTorch are provided in the `Flower repository " "`_. You can run them on Google Colab too:" msgstr "" -#: ../../source/how-to-run-simulations.rst:98 +#: ../../source/how-to-run-simulations.rst:151 #, fuzzy msgid "" "`Tensorflow/Keras Simulation " @@ -7469,114 +7943,112 @@ msgstr "" "`_" -#: ../../source/how-to-run-simulations.rst:99 +#: ../../source/how-to-run-simulations.rst:154 msgid "" "`PyTorch Simulation `_: 100 clients collaboratively train a CNN model on " "MNIST." 
msgstr "" -#: ../../source/how-to-run-simulations.rst:104 +#: ../../source/how-to-run-simulations.rst:159 #, fuzzy msgid "Multi-node Flower simulations" msgstr "Simulation de moniteur" -#: ../../source/how-to-run-simulations.rst:106 +#: ../../source/how-to-run-simulations.rst:161 msgid "" -"Flower's :code:`VirtualClientEngine` allows you to run FL simulations " -"across multiple compute nodes. Before starting your multi-node simulation" -" ensure that you:" +"Flower's ``VirtualClientEngine`` allows you to run FL simulations across " +"multiple compute nodes. Before starting your multi-node simulation ensure" +" that you:" msgstr "" -#: ../../source/how-to-run-simulations.rst:108 +#: ../../source/how-to-run-simulations.rst:164 msgid "Have the same Python environment in all nodes." msgstr "" -#: ../../source/how-to-run-simulations.rst:109 +#: ../../source/how-to-run-simulations.rst:165 msgid "Have a copy of your code (e.g. your entire repo) in all nodes." msgstr "" -#: ../../source/how-to-run-simulations.rst:110 +#: ../../source/how-to-run-simulations.rst:166 msgid "" "Have a copy of your dataset in all nodes (more about this in " ":ref:`simulation considerations `)" msgstr "" -#: ../../source/how-to-run-simulations.rst:111 +#: ../../source/how-to-run-simulations.rst:168 msgid "" -"Pass :code:`ray_init_args={\"address\"=\"auto\"}` to `start_simulation " -"`_ so the " -":code:`VirtualClientEngine` attaches to a running Ray instance." +"Pass ``ray_init_args={\"address\"=\"auto\"}`` to `start_simulation `_ so the " +"``VirtualClientEngine`` attaches to a running Ray instance." msgstr "" -#: ../../source/how-to-run-simulations.rst:112 +#: ../../source/how-to-run-simulations.rst:171 msgid "" -"Start Ray on you head node: on the terminal type :code:`ray start " -"--head`. This command will print a few lines, one of which indicates how " -"to attach other nodes to the head node." +"Start Ray on you head node: on the terminal type ``ray start --head``. " +"This command will print a few lines, one of which indicates how to attach" +" other nodes to the head node." msgstr "" -#: ../../source/how-to-run-simulations.rst:113 +#: ../../source/how-to-run-simulations.rst:174 msgid "" "Attach other nodes to the head node: copy the command shown after " "starting the head and execute it on terminal of a new node: for example " -":code:`ray start --address='192.168.1.132:6379'`" +"``ray start --address='192.168.1.132:6379'``" msgstr "" -#: ../../source/how-to-run-simulations.rst:115 +#: ../../source/how-to-run-simulations.rst:178 msgid "" "With all the above done, you can run your code from the head node as you " "would if the simulation was running on a single node." msgstr "" -#: ../../source/how-to-run-simulations.rst:117 +#: ../../source/how-to-run-simulations.rst:181 msgid "" "Once your simulation is finished, if you'd like to dismantle your cluster" -" you simply need to run the command :code:`ray stop` in each node's " -"terminal (including the head node)." +" you simply need to run the command ``ray stop`` in each node's terminal " +"(including the head node)." 
msgstr "" -#: ../../source/how-to-run-simulations.rst:120 +#: ../../source/how-to-run-simulations.rst:185 msgid "Multi-node simulation good-to-know" msgstr "" -#: ../../source/how-to-run-simulations.rst:122 +#: ../../source/how-to-run-simulations.rst:187 msgid "" "Here we list a few interesting functionality when running multi-node FL " "simulations:" msgstr "" -#: ../../source/how-to-run-simulations.rst:124 +#: ../../source/how-to-run-simulations.rst:189 msgid "" -"User :code:`ray status` to check all nodes connected to your head node as" -" well as the total resources available to the " -":code:`VirtualClientEngine`." +"User ``ray status`` to check all nodes connected to your head node as " +"well as the total resources available to the ``VirtualClientEngine``." msgstr "" -#: ../../source/how-to-run-simulations.rst:126 +#: ../../source/how-to-run-simulations.rst:192 msgid "" "When attaching a new node to the head, all its resources (i.e. all CPUs, " "all GPUs) will be visible by the head node. This means that the " -":code:`VirtualClientEngine` can schedule as many `virtual` clients as " -"that node can possible run. In some settings you might want to exclude " -"certain resources from the simulation. You can do this by appending " -"`--num-cpus=` and/or `--num-" -"gpus=` in any :code:`ray start` command (including " -"when starting the head)" +"``VirtualClientEngine`` can schedule as many `virtual` clients as that " +"node can possible run. In some settings you might want to exclude certain" +" resources from the simulation. You can do this by appending `--num-" +"cpus=` and/or `--num-gpus=` in " +"any ``ray start`` command (including when starting the head)" msgstr "" -#: ../../source/how-to-run-simulations.rst:132 +#: ../../source/how-to-run-simulations.rst:202 #, fuzzy msgid "Considerations for simulations" msgstr "Simulation de moniteur" -#: ../../source/how-to-run-simulations.rst:135 +#: ../../source/how-to-run-simulations.rst:206 msgid "" "We are actively working on these fronts so to make it trivial to run any " "FL workload with Flower simulation." msgstr "" -#: ../../source/how-to-run-simulations.rst:138 +#: ../../source/how-to-run-simulations.rst:209 msgid "" "The current VCE allows you to run Federated Learning workloads in " "simulation mode whether you are prototyping simple scenarios on your " @@ -7587,61 +8059,60 @@ msgid "" "couple of current limitations in our implementation." msgstr "" -#: ../../source/how-to-run-simulations.rst:141 +#: ../../source/how-to-run-simulations.rst:217 #, fuzzy msgid "GPU resources" msgstr "Ressources" -#: ../../source/how-to-run-simulations.rst:143 +#: ../../source/how-to-run-simulations.rst:219 msgid "" "The VCE assigns a share of GPU memory to a client that specifies the key " -":code:`num_gpus` in :code:`client_resources`. This being said, Ray (used " +"``num_gpus`` in ``client_resources``. This being said, Ray (used " "internally by the VCE) is by default:" msgstr "" -#: ../../source/how-to-run-simulations.rst:146 +#: ../../source/how-to-run-simulations.rst:222 msgid "" "not aware of the total VRAM available on the GPUs. This means that if you" -" set :code:`num_gpus=0.5` and you have two GPUs in your system with " -"different (e.g. 32GB and 8GB) VRAM amounts, they both would run 2 clients" -" concurrently." +" set ``num_gpus=0.5`` and you have two GPUs in your system with different" +" (e.g. 32GB and 8GB) VRAM amounts, they both would run 2 clients " +"concurrently." 
msgstr "" -#: ../../source/how-to-run-simulations.rst:147 +#: ../../source/how-to-run-simulations.rst:225 msgid "" "not aware of other unrelated (i.e. not created by the VCE) workloads are " "running on the GPU. Two takeaways from this are:" msgstr "" -#: ../../source/how-to-run-simulations.rst:149 +#: ../../source/how-to-run-simulations.rst:228 msgid "" "Your Flower server might need a GPU to evaluate the `global model` after " "aggregation (by instance when making use of the `evaluate method `_)" msgstr "" -#: ../../source/how-to-run-simulations.rst:150 +#: ../../source/how-to-run-simulations.rst:231 msgid "" "If you want to run several independent Flower simulations on the same " "machine you need to mask-out your GPUs with " -":code:`CUDA_VISIBLE_DEVICES=\"\"` when launching your " -"experiment." +"``CUDA_VISIBLE_DEVICES=\"\"`` when launching your experiment." msgstr "" -#: ../../source/how-to-run-simulations.rst:153 +#: ../../source/how-to-run-simulations.rst:235 msgid "" -"In addition, the GPU resource limits passed to :code:`client_resources` " -"are not `enforced` (i.e. they can be exceeded) which can result in the " +"In addition, the GPU resource limits passed to ``client_resources`` are " +"not `enforced` (i.e. they can be exceeded) which can result in the " "situation of client using more VRAM than the ratio specified when " "starting the simulation." msgstr "" -#: ../../source/how-to-run-simulations.rst:156 +#: ../../source/how-to-run-simulations.rst:240 #, fuzzy msgid "TensorFlow with GPUs" msgstr "Exemples de TensorFlow" -#: ../../source/how-to-run-simulations.rst:158 +#: ../../source/how-to-run-simulations.rst:242 msgid "" "When `using a GPU with TensorFlow " "`_ nearly your entire GPU memory of" @@ -7653,17 +8124,17 @@ msgid "" "`_." msgstr "" -#: ../../source/how-to-run-simulations.rst:160 +#: ../../source/how-to-run-simulations.rst:249 msgid "" "This would need to be done in the main process (which is where the server" " would run) and in each Actor created by the VCE. By means of " -":code:`actor_kwargs` we can pass the reserved key `\"on_actor_init_fn\"` " -"in order to specify a function to be executed upon actor initialization. " -"In this case, to enable GPU growth for TF workloads. It would look as " +"``actor_kwargs`` we can pass the reserved key `\"on_actor_init_fn\"` in " +"order to specify a function to be executed upon actor initialization. In " +"this case, to enable GPU growth for TF workloads. It would look as " "follows:" msgstr "" -#: ../../source/how-to-run-simulations.rst:179 +#: ../../source/how-to-run-simulations.rst:272 #, fuzzy msgid "" "This is precisely the mechanism used in `Tensorflow/Keras Simulation " @@ -7674,11 +8145,11 @@ msgstr "" "`_" -#: ../../source/how-to-run-simulations.rst:183 +#: ../../source/how-to-run-simulations.rst:276 msgid "Multi-node setups" msgstr "" -#: ../../source/how-to-run-simulations.rst:185 +#: ../../source/how-to-run-simulations.rst:278 msgid "" "The VCE does not currently offer a way to control on which node a " "particular `virtual` client is executed. In other words, if more than a " @@ -7691,7 +8162,7 @@ msgid "" "circumvent data duplication." msgstr "" -#: ../../source/how-to-run-simulations.rst:187 +#: ../../source/how-to-run-simulations.rst:286 msgid "" "By definition virtual clients are `stateless` due to their ephemeral " "nature. 
A client state can be implemented as part of the Flower client " @@ -7720,17 +8191,17 @@ msgid "Model checkpointing" msgstr "Point de contrôle du modèle" #: ../../source/how-to-save-and-load-model-checkpoints.rst:10 +#, fuzzy msgid "" "Model updates can be persisted on the server-side by customizing " -":code:`Strategy` methods. Implementing custom strategies is always an " -"option, but for many cases it may be more convenient to simply customize " -"an existing strategy. The following code example defines a new " -":code:`SaveModelStrategy` which customized the existing built-in " -":code:`FedAvg` strategy. In particular, it customizes " -":code:`aggregate_fit` by calling :code:`aggregate_fit` in the base class " -"(:code:`FedAvg`). It then continues to save returned (aggregated) weights" -" before it returns those aggregated weights to the caller (i.e., the " -"server):" +"``Strategy`` methods. Implementing custom strategies is always an option," +" but for many cases it may be more convenient to simply customize an " +"existing strategy. The following code example defines a new " +"``SaveModelStrategy`` which customized the existing built-in ``FedAvg`` " +"strategy. In particular, it customizes ``aggregate_fit`` by calling " +"``aggregate_fit`` in the base class (``FedAvg``). It then continues to " +"save returned (aggregated) weights before it returns those aggregated " +"weights to the caller (i.e., the server):" msgstr "" "Les mises à jour du modèle peuvent être conservées côté serveur en " "personnalisant les méthodes :code:`Strategy`. L'implémentation de " @@ -7744,12 +8215,12 @@ msgstr "" "retournés (agrégés) avant de renvoyer ces poids agrégés à l'appelant " "(c'est-à-dire le serveur) :" -#: ../../source/how-to-save-and-load-model-checkpoints.rst:47 +#: ../../source/how-to-save-and-load-model-checkpoints.rst:53 #, fuzzy msgid "Save and load PyTorch checkpoints" msgstr "Sauvegarde et chargement des points de contrôle PyTorch" -#: ../../source/how-to-save-and-load-model-checkpoints.rst:49 +#: ../../source/how-to-save-and-load-model-checkpoints.rst:55 #, fuzzy msgid "" "Similar to the previous example but with a few extra steps, we'll show " @@ -7767,7 +8238,7 @@ msgstr "" "transformés en ``state_dict`` PyTorch en suivant la structure de la " "classe ``OrderedDict``." -#: ../../source/how-to-save-and-load-model-checkpoints.rst:85 +#: ../../source/how-to-save-and-load-model-checkpoints.rst:98 msgid "" "To load your progress, you simply append the following lines to your " "code. Note that this will iterate over all saved checkpoints and load the" @@ -7777,7 +8248,7 @@ msgstr "" "à ton code. Note que cela va itérer sur tous les points de contrôle " "sauvegardés et charger le plus récent :" -#: ../../source/how-to-save-and-load-model-checkpoints.rst:97 +#: ../../source/how-to-save-and-load-model-checkpoints.rst:111 msgid "" "Return/use this object of type ``Parameters`` wherever necessary, such as" " in the ``initial_parameters`` when defining a ``Strategy``." @@ -7800,12 +8271,12 @@ msgstr "" "changements qui nécessitent de modifier le code des projets de la série " "0.x existants." 
-#: ../../source/how-to-upgrade-to-flower-1.0.rst:8 -#: ../../source/how-to-upgrade-to-flower-next.rst:43 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:10 +#: ../../source/how-to-upgrade-to-flower-next.rst:63 msgid "Install update" msgstr "Installer la mise à jour" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:10 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:12 msgid "" "Here's how to update an existing installation to Flower 1.0 using either " "pip or Poetry:" @@ -7813,11 +8284,11 @@ msgstr "" "Voici comment mettre à jour une installation existante vers Flower 1.0 en" " utilisant soit pip soit Poetry :" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:12 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:14 msgid "pip: add ``-U`` when installing." msgstr "pip : ajoute ``-U`` lors de l'installation." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:14 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:16 msgid "" "``python -m pip install -U flwr`` (when using ``start_server`` and " "``start_client``)" @@ -7825,7 +8296,7 @@ msgstr "" "``python -m pip install -U flwr`` (lors de l'utilisation de " "``start_server`` et ``start_client``)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:15 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:17 msgid "" "``python -m pip install -U 'flwr[simulation]'`` (when using " "``start_simulation``)" @@ -7833,7 +8304,7 @@ msgstr "" "``python -m pip install -U 'flwr[simulation]'`` (lors de l'utilisation de" " ``start_simulation``)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:17 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:19 msgid "" "Poetry: update the ``flwr`` dependency in ``pyproject.toml`` and then " "reinstall (don't forget to delete ``poetry.lock`` via ``rm poetry.lock`` " @@ -7843,13 +8314,13 @@ msgstr "" "puis réinstallez (n'oubliez pas de supprimer ``poetry.lock`` via ``rm " "poetry.lock`` avant d'exécuter ``poetry install``)." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:19 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:23 msgid "``flwr = \"^1.0.0\"`` (when using ``start_server`` and ``start_client``)" msgstr "" "``flwr = \"^1.0.0\"`` (lors de l'utilisation de ``start_server`` et " "``start_client``)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:20 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:24 msgid "" "``flwr = { version = \"^1.0.0\", extras = [\"simulation\"] }`` (when " "using ``start_simulation``)" @@ -7857,22 +8328,22 @@ msgstr "" "``flwr = { version = \"^1.0.0\", extras = [\"simulation\"] }`` (lors de " "l'utilisation de ``start_simulation``)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:24 -#: ../../source/how-to-upgrade-to-flower-next.rst:100 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:28 +#: ../../source/how-to-upgrade-to-flower-next.rst:120 msgid "Required changes" msgstr "Changements nécessaires" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:26 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:30 msgid "The following breaking changes require manual updates." msgstr "" "Les changements de rupture suivants nécessitent des mises à jour " "manuelles." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:29 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:33 msgid "General" msgstr "Généralités" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:31 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:35 msgid "" "Pass all arguments as keyword arguments (not as positional arguments). 
" "Here's an example:" @@ -7880,7 +8351,7 @@ msgstr "" "Passe tous les arguments comme des arguments de mots-clés (et non comme " "des arguments de position). Voici un exemple :" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:33 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:38 msgid "" "Flower 0.19 (positional arguments): ``start_client(\"127.0.0.1:8080\", " "FlowerClient())``" @@ -7888,7 +8359,7 @@ msgstr "" "Flower 0.19 (arguments positionnels) : ``start_client(\"127.0.0.1:8080\"," " FlowerClient())``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:34 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:39 msgid "" "Flower 1.0 (keyword arguments): " "``start_client(server_address=\"127.0.0.1:8080\", " @@ -7898,12 +8369,12 @@ msgstr "" "``start_client(server_address=\"127.0.0.1:8080\", " "client=FlowerClient())``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:37 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:43 #: ../../source/ref-api/flwr.client.Client.rst:2 msgid "Client" msgstr "Client" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:39 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:45 msgid "" "Subclasses of ``NumPyClient``: change ``def get_parameters(self):``` to " "``def get_parameters(self, config):``" @@ -7911,7 +8382,7 @@ msgstr "" "Sous-classes de ``NumPyClient`` : changez ``def get_parameters(self):`` " "en ``def get_parameters(self, config):``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:40 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:47 msgid "" "Subclasses of ``Client``: change ``def get_parameters(self):``` to ``def " "get_parameters(self, ins: GetParametersIns):``" @@ -7919,11 +8390,11 @@ msgstr "" "Sous-classes de ``Client`` : changez ``def get_parameters(self):`` en " "``def get_parameters(self, ins : GetParametersIns):``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:43 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:51 msgid "Strategies / ``start_server`` / ``start_simulation``" msgstr "Stratégies / ``démarrer_serveur`` / ``démarrer_simulation``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:45 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:53 msgid "" "Pass ``ServerConfig`` (instead of a dictionary) to ``start_server`` and " "``start_simulation``. Here's an example:" @@ -7931,7 +8402,7 @@ msgstr "" "Passez ``ServerConfig`` (au lieu d'un dictionnaire) à ``start_server`` et" " ``start_simulation``. 
Voici un exemple :" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:47 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:56 msgid "" "Flower 0.19: ``start_server(..., config={\"num_rounds\": 3, " "\"round_timeout\": 600.0}, ...)``" @@ -7939,7 +8410,7 @@ msgstr "" "Flower 0.19 : ``start_server(..., config={\"num_rounds\" : 3, " "\"round_timeout\" : 600.0}, ...)``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:48 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:58 msgid "" "Flower 1.0: ``start_server(..., " "config=flwr.server.ServerConfig(num_rounds=3, round_timeout=600.0), " @@ -7949,7 +8420,7 @@ msgstr "" "config=flwr.server.ServerConfig(num_rounds=3, round_timeout=600.0), " "...)``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:50 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:61 msgid "" "Replace ``num_rounds=1`` in ``start_simulation`` with the new " "``config=ServerConfig(...)`` (see previous item)" @@ -7957,7 +8428,7 @@ msgstr "" "Remplacer ``num_rounds=1`` dans ``start_simulation`` par le nouveau " "``config=ServerConfig(...)`` (voir point précédent)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:51 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:63 msgid "" "Remove ``force_final_distributed_eval`` parameter from calls to " "``start_server``. Distributed evaluation on all clients can be enabled by" @@ -7969,19 +8440,19 @@ msgstr "" "activée en configurant la stratégie pour échantillonner tous les clients " "pour l'évaluation après le dernier tour de formation." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:52 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:66 msgid "Rename parameter/ndarray conversion functions:" msgstr "Renomme les fonctions de conversion des paramètres et des tableaux :" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:54 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:68 msgid "``parameters_to_weights`` --> ``parameters_to_ndarrays``" msgstr "``parameters_to_weights`` --> ``parameters_to_ndarrays``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:55 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:69 msgid "``weights_to_parameters`` --> ``ndarrays_to_parameters``" msgstr "``Poids_à_paramètres`` --> ``Réseaux_à_paramètres``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:57 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:71 msgid "" "Strategy initialization: if the strategy relies on the default values for" " ``fraction_fit`` and ``fraction_evaluate``, set ``fraction_fit`` and " @@ -7998,23 +8469,23 @@ msgstr "" "stratégie) doivent maintenant initialiser manuellement FedAvg avec " "``fraction_fit`` et ``fraction_evaluate`` fixés à ``0.1``." 
-#: ../../source/how-to-upgrade-to-flower-1.0.rst:58 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:77 msgid "Rename built-in strategy parameters (e.g., ``FedAvg``):" msgstr "Renommer les paramètres de stratégie intégrés (par exemple, ``FedAvg``) :" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:60 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:79 msgid "``fraction_eval`` --> ``fraction_evaluate``" msgstr "``fraction_eval`` --> ``fraction_evaluate``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:61 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:80 msgid "``min_eval_clients`` --> ``min_evaluate_clients``" msgstr "``min_eval_clients` --> ``min_evaluate_clients``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:62 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:81 msgid "``eval_fn`` --> ``evaluate_fn``" msgstr "``eval_fn`` --> ``evaluate_fn``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:64 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:83 msgid "" "Rename ``rnd`` to ``server_round``. This impacts multiple methods and " "functions, for example, ``configure_fit``, ``aggregate_fit``, " @@ -8024,11 +8495,11 @@ msgstr "" "méthodes et fonctions, par exemple, ``configure_fit``, ``aggregate_fit``," " ``configure_evaluate``, ``aggregate_evaluate``, et ``evaluate_fn``." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:65 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:86 msgid "Add ``server_round`` and ``config`` to ``evaluate_fn``:" msgstr "Ajoute ``server_round`` et ``config`` à `evaluate_fn`` :" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:67 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:88 msgid "" "Flower 0.19: ``def evaluate(parameters: NDArrays) -> " "Optional[Tuple[float, Dict[str, Scalar]]]:``" @@ -8036,7 +8507,7 @@ msgstr "" "Flower 0.19 : ``def evaluate(parameters : NDArrays) -> " "Optional[Tuple[float, Dict[str, Scalar]]]]:``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:68 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:90 msgid "" "Flower 1.0: ``def evaluate(server_round: int, parameters: NDArrays, " "config: Dict[str, Scalar]) -> Optional[Tuple[float, Dict[str, " @@ -8046,11 +8517,11 @@ msgstr "" "config : Dict[str, Scalar]) -> Optional[Tuple[float, Dict[str, " "Scalar]]]:``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:71 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:94 msgid "Custom strategies" msgstr "Stratégies personnalisées" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:73 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:96 msgid "" "The type of parameter ``failures`` has changed from " "``List[BaseException]`` to ``List[Union[Tuple[ClientProxy, FitRes], " @@ -8063,7 +8534,7 @@ msgstr "" "``aggregate_fit``) et ``List[Union[Tuple[ClientProxy, EvaluateRes], " "BaseException]]`` (dans ``aggregate_evaluate``)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:74 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:100 msgid "" "The ``Strategy`` method ``evaluate`` now receives the current round of " "federated learning/evaluation as the first parameter:" @@ -8071,7 +8542,7 @@ msgstr "" "La méthode ``Stratégie`` `évaluer`` reçoit maintenant le cycle actuel " "d'apprentissage/évaluation fédéré comme premier paramètre :" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:76 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:103 msgid "" "Flower 0.19: ``def evaluate(self, parameters: Parameters) -> " "Optional[Tuple[float, Dict[str, Scalar]]]:``" @@ -8079,7 +8550,7 @@ msgstr "" "Flower 0.19 : ``def evaluate(self, parameters : 
Parameters) -> " "Optional[Tuple[float, Dict[str, Scalar]]]]:``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:77 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:105 msgid "" "Flower 1.0: ``def evaluate(self, server_round: int, parameters: " "Parameters) -> Optional[Tuple[float, Dict[str, Scalar]]]:``" @@ -8087,11 +8558,11 @@ msgstr "" "Flower 1.0 : ``def evaluate(self, server_round : int, parameters : " "Parameters) -> Optional[Tuple[float, Dict[str, Scalar]]]]:``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:80 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:109 msgid "Optional improvements" msgstr "Améliorations facultatives" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:82 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:111 msgid "" "Along with the necessary changes above, there are a number of potential " "improvements that just became possible:" @@ -8100,7 +8571,7 @@ msgstr "" "certain nombre d'améliorations potentielles qui viennent d'être rendues " "possibles :" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:84 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:114 msgid "" "Remove \"placeholder\" methods from subclasses of ``Client`` or " "``NumPyClient``. If you, for example, use server-side evaluation, then " @@ -8112,7 +8583,7 @@ msgstr "" "serveur, alors les implémentations \"placeholder\" de ``evaluate`` ne " "sont plus nécessaires." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:85 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:117 msgid "" "Configure the round timeout via ``start_simulation``: " "``start_simulation(..., config=flwr.server.ServerConfig(num_rounds=3, " @@ -8122,12 +8593,12 @@ msgstr "" "``start_simulation(..., config=flwr.server.ServerConfig(num_rounds=3, " "round_timeout=600.0), ...)``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:89 -#: ../../source/how-to-upgrade-to-flower-next.rst:317 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:121 +#: ../../source/how-to-upgrade-to-flower-next.rst:348 msgid "Further help" msgstr "Aide supplémentaire" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:91 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:123 msgid "" "Most official `Flower code examples " "`_ are already updated" @@ -8156,7 +8627,7 @@ msgid "" "1.8." msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:9 +#: ../../source/how-to-upgrade-to-flower-next.rst:11 msgid "" "This guide shows how to reuse pre-``1.8`` Flower code with minimum code " "changes by using the *compatibility layer* in Flower Next. In another " @@ -8164,11 +8635,11 @@ msgid "" "Next APIs." msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:13 +#: ../../source/how-to-upgrade-to-flower-next.rst:15 msgid "Let's dive in!" 
msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:48 +#: ../../source/how-to-upgrade-to-flower-next.rst:68 #, fuzzy msgid "" "Here's how to update an existing installation of Flower to Flower Next " @@ -8177,26 +8648,26 @@ msgstr "" "Voici comment mettre à jour une installation existante vers Flower 1.0 en" " utilisant soit pip soit Poetry :" -#: ../../source/how-to-upgrade-to-flower-next.rst:54 +#: ../../source/how-to-upgrade-to-flower-next.rst:74 msgid "or if you need Flower Next with simulation:" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:61 +#: ../../source/how-to-upgrade-to-flower-next.rst:80 msgid "" "Ensure you set the following version constraint in your " "``requirements.txt``" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:71 +#: ../../source/how-to-upgrade-to-flower-next.rst:90 msgid "or ``pyproject.toml``:" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:82 +#: ../../source/how-to-upgrade-to-flower-next.rst:101 #, fuzzy msgid "Using Poetry" msgstr "Utiliser la poésie (recommandé)" -#: ../../source/how-to-upgrade-to-flower-next.rst:84 +#: ../../source/how-to-upgrade-to-flower-next.rst:103 #, fuzzy msgid "" "Update the ``flwr`` dependency in ``pyproject.toml`` and then reinstall " @@ -8207,14 +8678,14 @@ msgstr "" "puis réinstallez (n'oubliez pas de supprimer ``poetry.lock`` via ``rm " "poetry.lock`` avant d'exécuter ``poetry install``)." -#: ../../source/how-to-upgrade-to-flower-next.rst:86 +#: ../../source/how-to-upgrade-to-flower-next.rst:106 #, fuzzy msgid "" "Ensure you set the following version constraint in your " "``pyproject.toml``:" msgstr "Augmente la version mineure de ``pyproject.toml`` d'une unité." -#: ../../source/how-to-upgrade-to-flower-next.rst:102 +#: ../../source/how-to-upgrade-to-flower-next.rst:122 msgid "" "In Flower Next, the *infrastructure* and *application layers* have been " "decoupled. Instead of starting a client in code via ``start_client()``, " @@ -8227,33 +8698,33 @@ msgid "" "way:" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:109 +#: ../../source/how-to-upgrade-to-flower-next.rst:131 #, fuzzy msgid "|clientapp_link|_" msgstr "client" -#: ../../source/how-to-upgrade-to-flower-next.rst:110 +#: ../../source/how-to-upgrade-to-flower-next.rst:133 msgid "" "Wrap your existing client with |clientapp_link|_ instead of launching it " "via |startclient_link|_. Here's an example:" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:132 +#: ../../source/how-to-upgrade-to-flower-next.rst:156 #, fuzzy msgid "|serverapp_link|_" msgstr "serveur" -#: ../../source/how-to-upgrade-to-flower-next.rst:133 +#: ../../source/how-to-upgrade-to-flower-next.rst:158 msgid "" "Wrap your existing strategy with |serverapp_link|_ instead of starting " "the server via |startserver_link|_. Here's an example:" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:154 +#: ../../source/how-to-upgrade-to-flower-next.rst:179 msgid "Deployment" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:155 +#: ../../source/how-to-upgrade-to-flower-next.rst:181 msgid "" "Run the ``SuperLink`` using |flowernext_superlink_link|_ before running, " "in sequence, |flowernext_clientapp_link|_ (2x) and " @@ -8261,13 +8732,13 @@ msgid "" " `server.py` as Python scripts." 
msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:158 +#: ../../source/how-to-upgrade-to-flower-next.rst:184 msgid "" "Here's an example to start the server without HTTPS (only for " "prototyping):" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:174 +#: ../../source/how-to-upgrade-to-flower-next.rst:200 msgid "" "Here's another example to start with HTTPS. Use the ``--ssl-ca-" "certfile``, ``--ssl-certfile``, and ``--ssl-keyfile`` command line " @@ -8275,19 +8746,19 @@ msgid "" "private key)." msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:201 +#: ../../source/how-to-upgrade-to-flower-next.rst:228 #, fuzzy msgid "Simulation in CLI" msgstr "Simulation de moniteur" -#: ../../source/how-to-upgrade-to-flower-next.rst:202 +#: ../../source/how-to-upgrade-to-flower-next.rst:230 msgid "" "Wrap your existing client and strategy with |clientapp_link|_ and " "|serverapp_link|_, respectively. There is no need to use |startsim_link|_" " anymore. Here's an example:" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:232 +#: ../../source/how-to-upgrade-to-flower-next.rst:263 msgid "" "Run |flower_simulation_link|_ in CLI and point to the ``server_app`` / " "``client_app`` object in the code instead of executing the Python script." @@ -8295,24 +8766,24 @@ msgid "" "objects are in a ``sim.py`` module):" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:249 +#: ../../source/how-to-upgrade-to-flower-next.rst:280 msgid "" "Set default resources for each |clientapp_link|_ using the ``--backend-" "config`` command line argument instead of setting the " "``client_resources`` argument in |startsim_link|_. Here's an example:" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:275 +#: ../../source/how-to-upgrade-to-flower-next.rst:304 msgid "Simulation in a Notebook" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:276 +#: ../../source/how-to-upgrade-to-flower-next.rst:306 msgid "" "Run |runsim_link|_ in your notebook instead of |startsim_link|_. Here's " "an example:" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:319 +#: ../../source/how-to-upgrade-to-flower-next.rst:350 #, fuzzy msgid "" "Some official `Flower code examples `_ " @@ -8330,19 +8801,19 @@ msgstr "" "Flower `_ et utilise le canal " "``#questions``." -#: ../../source/how-to-upgrade-to-flower-next.rst:325 +#: ../../source/how-to-upgrade-to-flower-next.rst:357 #, fuzzy msgid "Important" msgstr "Changements importants :" -#: ../../source/how-to-upgrade-to-flower-next.rst:328 +#: ../../source/how-to-upgrade-to-flower-next.rst:359 msgid "" "As we continuously enhance Flower Next at a rapid pace, we'll be " "periodically updating this guide. Please feel free to share any feedback " "with us!" msgstr "" -#: ../../source/how-to-upgrade-to-flower-next.rst:334 +#: ../../source/how-to-upgrade-to-flower-next.rst:365 msgid "Happy migrating! 🚀" msgstr "" @@ -8356,7 +8827,7 @@ msgid "" " interfaces may change in future versions.**" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:6 +#: ../../source/how-to-use-built-in-mods.rst:7 msgid "" "In this tutorial, we will learn how to utilize built-in mods to augment " "the behavior of a ``ClientApp``. Mods (sometimes also called Modifiers) " @@ -8364,105 +8835,105 @@ msgid "" "the ``ClientApp``." msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:9 +#: ../../source/how-to-use-built-in-mods.rst:12 msgid "What are Mods?" 
msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:11 +#: ../../source/how-to-use-built-in-mods.rst:14 msgid "" "A Mod is a callable that wraps around a ``ClientApp``. It can manipulate " "or inspect the incoming ``Message`` and the resulting outgoing " "``Message``. The signature for a ``Mod`` is as follows:" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:18 +#: ../../source/how-to-use-built-in-mods.rst:23 msgid "A typical mod function might look something like this:" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:31 +#: ../../source/how-to-use-built-in-mods.rst:36 msgid "Using Mods" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:33 +#: ../../source/how-to-use-built-in-mods.rst:38 msgid "To use mods in your ``ClientApp``, you can follow these steps:" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:36 +#: ../../source/how-to-use-built-in-mods.rst:41 msgid "1. Import the required mods" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:38 +#: ../../source/how-to-use-built-in-mods.rst:43 msgid "First, import the built-in mod you intend to use:" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:46 +#: ../../source/how-to-use-built-in-mods.rst:51 msgid "2. Define your client function" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:48 +#: ../../source/how-to-use-built-in-mods.rst:53 msgid "" "Define your client function (``client_fn``) that will be wrapped by the " "mod(s):" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:57 +#: ../../source/how-to-use-built-in-mods.rst:62 msgid "3. Create the ``ClientApp`` with mods" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:59 +#: ../../source/how-to-use-built-in-mods.rst:64 msgid "" "Create your ``ClientApp`` and pass the mods as a list to the ``mods`` " "argument. The order in which you provide the mods matters:" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:72 +#: ../../source/how-to-use-built-in-mods.rst:78 #, fuzzy msgid "Order of execution" msgstr "Dépréciations" -#: ../../source/how-to-use-built-in-mods.rst:74 +#: ../../source/how-to-use-built-in-mods.rst:80 msgid "" "When the ``ClientApp`` runs, the mods are executed in the order they are " "provided in the list:" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:76 +#: ../../source/how-to-use-built-in-mods.rst:83 msgid "``example_mod_1`` (outermost mod)" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:77 +#: ../../source/how-to-use-built-in-mods.rst:84 msgid "``example_mod_2`` (next mod)" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:78 +#: ../../source/how-to-use-built-in-mods.rst:85 msgid "" "Message handler (core function that handles the incoming ``Message`` and " "returns the outgoing ``Message``)" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:79 +#: ../../source/how-to-use-built-in-mods.rst:87 msgid "``example_mod_2`` (on the way back)" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:80 +#: ../../source/how-to-use-built-in-mods.rst:88 msgid "``example_mod_1`` (outermost mod on the way back)" msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:82 +#: ../../source/how-to-use-built-in-mods.rst:90 msgid "" "Each mod has a chance to inspect and modify the incoming ``Message`` " "before passing it to the next mod, and likewise with the outgoing " "``Message`` before returning it up the stack." 
msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:87 +#: ../../source/how-to-use-built-in-mods.rst:97 msgid "" "By following this guide, you have learned how to effectively use mods to " "enhance your ``ClientApp``'s functionality. Remember that the order of " "mods is crucial and affects how the input and output are processed." msgstr "" -#: ../../source/how-to-use-built-in-mods.rst:89 +#: ../../source/how-to-use-built-in-mods.rst:101 msgid "Enjoy building a more robust and flexible ``ClientApp`` with mods!" msgstr "" @@ -8471,14 +8942,14 @@ msgstr "" msgid "Use Differential Privacy" msgstr "Confidentialité différentielle" -#: ../../source/how-to-use-differential-privacy.rst:3 +#: ../../source/how-to-use-differential-privacy.rst:4 msgid "" "This guide explains how you can utilize differential privacy in the " "Flower framework. If you are not yet familiar with differential privacy, " "you can refer to :doc:`explanation-differential-privacy`." msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:7 +#: ../../source/how-to-use-differential-privacy.rst:10 msgid "" "Differential Privacy in Flower is in a preview phase. If you plan to use " "these features in a production environment with sensitive data, feel free" @@ -8486,7 +8957,7 @@ msgid "" "to best use these features." msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:12 +#: ../../source/how-to-use-differential-privacy.rst:17 msgid "" "This approach consists of two separate phases: clipping of the updates " "and adding noise to the aggregated model. For the clipping phase, Flower " @@ -8494,7 +8965,7 @@ msgid "" "the server side or the client side." msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:15 +#: ../../source/how-to-use-differential-privacy.rst:21 msgid "" "**Server-side Clipping**: This approach has the advantage of the server " "enforcing uniform clipping across all clients' updates and reducing the " @@ -8503,7 +8974,7 @@ msgid "" "the need to perform the clipping operation for all clients." msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:16 +#: ../../source/how-to-use-differential-privacy.rst:26 msgid "" "**Client-side Clipping**: This approach has the advantage of reducing the" " computational overhead on the server. However, it also has the " @@ -8511,73 +8982,72 @@ msgid "" "control over the clipping process." msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:21 +#: ../../source/how-to-use-differential-privacy.rst:31 #, fuzzy msgid "Server-side Clipping" msgstr "Logique côté serveur" -#: ../../source/how-to-use-differential-privacy.rst:22 +#: ../../source/how-to-use-differential-privacy.rst:33 msgid "" -"For central DP with server-side clipping, there are two :code:`Strategy` " -"classes that act as wrappers around the actual :code:`Strategy` instance " -"(for example, :code:`FedAvg`). The two wrapper classes are " -":code:`DifferentialPrivacyServerSideFixedClipping` and " -":code:`DifferentialPrivacyServerSideAdaptiveClipping` for fixed and " -"adaptive clipping." +"For central DP with server-side clipping, there are two ``Strategy`` " +"classes that act as wrappers around the actual ``Strategy`` instance (for" +" example, ``FedAvg``). The two wrapper classes are " +"``DifferentialPrivacyServerSideFixedClipping`` and " +"``DifferentialPrivacyServerSideAdaptiveClipping`` for fixed and adaptive " +"clipping." 
msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:25 +#: ../../source/how-to-use-differential-privacy.rst:-1 #, fuzzy msgid "server side clipping" msgstr "Logique côté serveur" -#: ../../source/how-to-use-differential-privacy.rst:31 +#: ../../source/how-to-use-differential-privacy.rst:43 msgid "" -"The code sample below enables the :code:`FedAvg` strategy to use server-" -"side fixed clipping using the " -":code:`DifferentialPrivacyServerSideFixedClipping` wrapper class. The " -"same approach can be used with " -":code:`DifferentialPrivacyServerSideAdaptiveClipping` by adjusting the " +"The code sample below enables the ``FedAvg`` strategy to use server-side " +"fixed clipping using the ``DifferentialPrivacyServerSideFixedClipping`` " +"wrapper class. The same approach can be used with " +"``DifferentialPrivacyServerSideAdaptiveClipping`` by adjusting the " "corresponding input parameters." msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:52 +#: ../../source/how-to-use-differential-privacy.rst:64 #, fuzzy msgid "Client-side Clipping" msgstr "Logique côté client" -#: ../../source/how-to-use-differential-privacy.rst:53 +#: ../../source/how-to-use-differential-privacy.rst:66 msgid "" "For central DP with client-side clipping, the server sends the clipping " "value to selected clients on each round. Clients can use existing Flower " -":code:`Mods` to perform the clipping. Two mods are available for fixed " -"and adaptive client-side clipping: :code:`fixedclipping_mod` and " -":code:`adaptiveclipping_mod` with corresponding server-side wrappers " -":code:`DifferentialPrivacyClientSideFixedClipping` and " -":code:`DifferentialPrivacyClientSideAdaptiveClipping`." +"``Mods`` to perform the clipping. Two mods are available for fixed and " +"adaptive client-side clipping: ``fixedclipping_mod`` and " +"``adaptiveclipping_mod`` with corresponding server-side wrappers " +"``DifferentialPrivacyClientSideFixedClipping`` and " +"``DifferentialPrivacyClientSideAdaptiveClipping``." 
msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:57 +#: ../../source/how-to-use-differential-privacy.rst:-1 #, fuzzy msgid "client side clipping" msgstr "Logique côté client" -#: ../../source/how-to-use-differential-privacy.rst:63 +#: ../../source/how-to-use-differential-privacy.rst:78 msgid "" -"The code sample below enables the :code:`FedAvg` strategy to use " -"differential privacy with client-side fixed clipping using both the " -":code:`DifferentialPrivacyClientSideFixedClipping` wrapper class and, on " -"the client, :code:`fixedclipping_mod`:" +"The code sample below enables the ``FedAvg`` strategy to use differential" +" privacy with client-side fixed clipping using both the " +"``DifferentialPrivacyClientSideFixedClipping`` wrapper class and, on the " +"client, ``fixedclipping_mod``:" msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:80 +#: ../../source/how-to-use-differential-privacy.rst:97 msgid "" -"In addition to the server-side strategy wrapper, the :code:`ClientApp` " -"needs to configure the matching :code:`fixedclipping_mod` to perform the " -"client-side clipping:" +"In addition to the server-side strategy wrapper, the ``ClientApp`` needs " +"to configure the matching ``fixedclipping_mod`` to perform the client-" +"side clipping:" msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:97 +#: ../../source/how-to-use-differential-privacy.rst:115 msgid "" "To utilize local differential privacy (DP) and add noise to the client " "model parameters before transmitting them to the server in Flower, you " @@ -8585,15 +9055,15 @@ msgid "" "clipping norm value, sensitivity, epsilon, and delta." msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:99 +#: ../../source/how-to-use-differential-privacy.rst:-1 msgid "local DP mod" msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:104 -msgid "Below is a code example that shows how to use :code:`LocalDpMod`:" +#: ../../source/how-to-use-differential-privacy.rst:125 +msgid "Below is a code example that shows how to use ``LocalDpMod``:" msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:122 +#: ../../source/how-to-use-differential-privacy.rst:140 msgid "" "Please note that the order of mods, especially those that modify " "parameters, is important when using multiple modifiers. Typically, " @@ -8601,11 +9071,11 @@ msgid "" "parameters." msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:125 +#: ../../source/how-to-use-differential-privacy.rst:145 msgid "Local Training using Privacy Engines" msgstr "" -#: ../../source/how-to-use-differential-privacy.rst:126 +#: ../../source/how-to-use-differential-privacy.rst:147 msgid "" "For ensuring data instance-level privacy during local model training on " "the client side, consider leveraging privacy engines such as Opacus and " @@ -8622,16 +9092,17 @@ msgid "Use strategies" msgstr "Stratégies personnalisées" #: ../../source/how-to-use-strategies.rst:4 +#, fuzzy msgid "" "Flower allows full customization of the learning process through the " -":code:`Strategy` abstraction. A number of built-in strategies are " -"provided in the core framework." +"``Strategy`` abstraction. A number of built-in strategies are provided in" +" the core framework." msgstr "" "Flower permet une personnalisation complète du processus d'apprentissage " "grâce à l'abstraction :code:`Stratégie`. Un certain nombre de stratégies " "intégrées sont fournies dans le cadre principal." 
-#: ../../source/how-to-use-strategies.rst:6 +#: ../../source/how-to-use-strategies.rst:7 msgid "" "There are three ways to customize the way Flower orchestrates the " "learning process on the server side:" @@ -8639,25 +9110,26 @@ msgstr "" "Il y a trois façons de personnaliser la manière dont Flower orchestre le " "processus d'apprentissage du côté du serveur :" -#: ../../source/how-to-use-strategies.rst:8 -msgid "Use an existing strategy, for example, :code:`FedAvg`" +#: ../../source/how-to-use-strategies.rst:10 +#, fuzzy +msgid "Use an existing strategy, for example, ``FedAvg``" msgstr "Utilise une stratégie existante, par exemple :code:`FedAvg`" -#: ../../source/how-to-use-strategies.rst:9 -#: ../../source/how-to-use-strategies.rst:40 +#: ../../source/how-to-use-strategies.rst:11 +#: ../../source/how-to-use-strategies.rst:43 msgid "Customize an existing strategy with callback functions" msgstr "Personnalise une stratégie existante avec des fonctions de rappel" -#: ../../source/how-to-use-strategies.rst:10 -#: ../../source/how-to-use-strategies.rst:87 +#: ../../source/how-to-use-strategies.rst:12 +#: ../../source/how-to-use-strategies.rst:99 msgid "Implement a novel strategy" msgstr "Mets en place une nouvelle stratégie" -#: ../../source/how-to-use-strategies.rst:14 +#: ../../source/how-to-use-strategies.rst:15 msgid "Use an existing strategy" msgstr "Utilise une stratégie existante" -#: ../../source/how-to-use-strategies.rst:16 +#: ../../source/how-to-use-strategies.rst:17 msgid "" "Flower comes with a number of popular federated learning strategies " "built-in. A built-in strategy can be instantiated as follows:" @@ -8665,18 +9137,19 @@ msgstr "" "Flower intègre un certain nombre de stratégies d'apprentissage fédéré " "populaires. Une stratégie intégrée peut être instanciée comme suit :" -#: ../../source/how-to-use-strategies.rst:25 +#: ../../source/how-to-use-strategies.rst:27 +#, fuzzy msgid "" "This creates a strategy with all parameters left at their default values " -"and passes it to the :code:`start_server` function. It is usually " -"recommended to adjust a few parameters during instantiation:" +"and passes it to the ``start_server`` function. It is usually recommended" +" to adjust a few parameters during instantiation:" msgstr "" "Cela crée une stratégie dont tous les paramètres sont laissés à leur " "valeur par défaut et la transmet à la fonction :code:`start_server`. Il " "est généralement recommandé d'ajuster quelques paramètres lors de " "l'instanciation :" -#: ../../source/how-to-use-strategies.rst:42 +#: ../../source/how-to-use-strategies.rst:45 msgid "" "Existing strategies provide several ways to customize their behaviour. " "Callback functions allow strategies to call user-provided code during " @@ -8686,19 +9159,19 @@ msgstr "" "comportement. Les fonctions de rappel permettent aux stratégies d'appeler" " le code fourni par l'utilisateur pendant l'exécution." -#: ../../source/how-to-use-strategies.rst:45 +#: ../../source/how-to-use-strategies.rst:49 msgid "Configuring client fit and client evaluate" msgstr "Configurer l'adaptation et l'évaluation du client" -#: ../../source/how-to-use-strategies.rst:47 +#: ../../source/how-to-use-strategies.rst:51 +#, fuzzy msgid "" "The server can pass new configuration values to the client each round by " -"providing a function to :code:`on_fit_config_fn`. The provided function " -"will be called by the strategy and must return a dictionary of " -"configuration key values pairs that will be sent to the client. 
It must "
-"return a dictionary of arbitrary configuration values :code:`client.fit`"
-" and :code:`client.evaluate` functions during each round of federated "
-"learning."
+"providing a function to ``on_fit_config_fn``. The provided function will "
+"be called by the strategy and must return a dictionary of configuration "
+"key-value pairs that will be sent to the client. It must return a "
+"dictionary of arbitrary configuration values for the ``client.fit`` and "
+"``client.evaluate`` functions during each round of federated learning."
 msgstr ""
 "Le serveur peut transmettre de nouvelles valeurs de configuration au "
 "client à chaque tour en fournissant une fonction à "
@@ -8709,14 +9182,14 @@ msgstr ""
 "et :code:`client.evaluate` au cours de chaque tour d'apprentissage "
 "fédéré."
 
-#: ../../source/how-to-use-strategies.rst:75
+#: ../../source/how-to-use-strategies.rst:84
 #, fuzzy
 msgid ""
-"The :code:`on_fit_config_fn` can be used to pass arbitrary configuration "
+"The ``on_fit_config_fn`` can be used to pass arbitrary configuration "
 "values from server to client, and potentially change these values each "
 "round, for example, to adjust the learning rate. The client will receive "
-"the dictionary returned by the :code:`on_fit_config_fn` in its own "
-":code:`client.fit()` function."
+"the dictionary returned by the ``on_fit_config_fn`` in its own "
+"``client.fit()`` function."
 msgstr ""
 "Le :code:`on_fit_config_fn` peut être utilisé pour passer des valeurs de "
 "configuration arbitraires du serveur au client, et changer potentiellement "
 "ces valeurs à chaque tour, par exemple, pour ajuster le taux "
 "d'apprentissage. Le client recevra le dictionnaire renvoyé par le "
 ":code:`on_fit_config_fn` dans sa propre fonction :code:`client.fit()`."
 
-#: ../../source/how-to-use-strategies.rst:78
+#: ../../source/how-to-use-strategies.rst:89
+#, fuzzy
 msgid ""
-"Similar to :code:`on_fit_config_fn`, there is also "
-":code:`on_evaluate_config_fn` to customize the configuration sent to "
-":code:`client.evaluate()`"
+"Similar to ``on_fit_config_fn``, there is also ``on_evaluate_config_fn`` "
+"to customize the configuration sent to ``client.evaluate()``"
 msgstr ""
 "Comme pour :code:`on_fit_config_fn`, il existe aussi "
 ":code:`on_evaluate_config_fn` pour personnaliser la configuration envoyée"
 " à :code:`client.evaluate()`"
 
-#: ../../source/how-to-use-strategies.rst:81
+#: ../../source/how-to-use-strategies.rst:93
 msgid "Configuring server-side evaluation"
 msgstr "Configuration de l'évaluation côté serveur"
 
-#: ../../source/how-to-use-strategies.rst:83
+#: ../../source/how-to-use-strategies.rst:95
+#, fuzzy
 msgid ""
 "Server-side evaluation can be enabled by passing an evaluation function "
-"to :code:`evaluate_fn`."
+"to ``evaluate_fn``."
 msgstr ""
 "L'évaluation côté serveur peut être activée en passant une fonction "
 "d'évaluation à :code:`evaluate_fn`."
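To make the callbacks above concrete, here is a small illustrative sketch of a ``FedAvg`` strategy configured with ``on_fit_config_fn``, ``on_evaluate_config_fn``, and a server-side ``evaluate_fn``; ``evaluate_global_model`` is a hypothetical project-specific helper, and the hyperparameter values are placeholders:

```python
from typing import Optional, Tuple

from flwr.server.strategy import FedAvg


def fit_config(server_round: int) -> dict:
    """Config dict the strategy sends to each client for this round."""
    return {
        "local_epochs": 2,
        "learning_rate": 0.01 * (0.9 ** server_round),  # e.g. decay LR per round
    }


def evaluate_fn(server_round: int, parameters, config) -> Optional[Tuple[float, dict]]:
    """Centralized (server-side) evaluation of the aggregated parameters."""
    # `evaluate_global_model` is a hypothetical helper: load `parameters` into a
    # model and evaluate it on a held-out dataset, returning (loss, accuracy).
    loss, accuracy = evaluate_global_model(parameters)
    return loss, {"accuracy": accuracy}


strategy = FedAvg(
    fraction_fit=0.5,
    on_fit_config_fn=fit_config,       # configures client.fit()
    on_evaluate_config_fn=fit_config,  # configures client.evaluate() (reused here)
    evaluate_fn=evaluate_fn,           # enables server-side evaluation
)
```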
-#: ../../source/how-to-use-strategies.rst:89 +#: ../../source/how-to-use-strategies.rst:101 #, fuzzy msgid "" "Writing a fully custom strategy is a bit more involved, but it provides " @@ -8766,15 +9240,15 @@ msgstr "Tutoriel" msgid "Quickstart tutorials" msgstr "Quickstart tutorials" -#: ../../source/index.rst:75 ../../source/index.rst:79 +#: ../../source/index.rst:81 ../../source/index.rst:85 msgid "How-to guides" msgstr "Guides" -#: ../../source/index.rst:100 +#: ../../source/index.rst:106 msgid "Legacy example guides" msgstr "" -#: ../../source/index.rst:108 ../../source/index.rst:112 +#: ../../source/index.rst:114 ../../source/index.rst:119 msgid "Explanations" msgstr "Explications" @@ -8782,26 +9256,26 @@ msgstr "Explications" msgid "API reference" msgstr "Référence pour l'API" -#: ../../source/index.rst:138 +#: ../../source/index.rst:145 msgid "Reference docs" msgstr "Référence pour la documentation" -#: ../../source/index.rst:154 +#: ../../source/index.rst:160 #, fuzzy msgid "Contributor tutorials" msgstr "Configuration du contributeur" -#: ../../source/index.rst:161 +#: ../../source/index.rst:167 #, fuzzy msgid "Contributor how-to guides" msgstr "Guide pour les contributeurs" -#: ../../source/index.rst:173 +#: ../../source/index.rst:179 #, fuzzy msgid "Contributor explanations" msgstr "Explications" -#: ../../source/index.rst:179 +#: ../../source/index.rst:185 #, fuzzy msgid "Contributor references" msgstr "Configuration du contributeur" @@ -8839,7 +9313,7 @@ msgstr "" " chercheurs, ingénieurs, étudiants, professionnels, académiques, et " "autres hobbyistes." -#: ../../source/index.rst:15 +#: ../../source/index.rst:16 msgid "Join us on Slack" msgstr "Join us on Slack" @@ -8893,16 +9367,16 @@ msgstr "" "` | :ref:`Android ` | :ref:`iOS " "`" -#: ../../source/index.rst:64 +#: ../../source/index.rst:70 msgid "We also made video tutorials for PyTorch:" msgstr "" -#: ../../source/index.rst:69 +#: ../../source/index.rst:75 #, fuzzy msgid "And TensorFlow:" msgstr "Exemples de TensorFlow" -#: ../../source/index.rst:77 +#: ../../source/index.rst:83 msgid "" "Problem-oriented how-to guides show step-by-step how to achieve a " "specific goal." @@ -8910,7 +9384,7 @@ msgstr "" "Guides orientés sur la résolutions étapes par étapes de problèmes ou " "objectifs specifiques." -#: ../../source/index.rst:110 +#: ../../source/index.rst:116 msgid "" "Understanding-oriented concept guides explain and discuss key topics and " "underlying ideas behind Flower and collaborative AI." @@ -8918,29 +9392,29 @@ msgstr "" "Guides orientés sur la compréhension et l'explication des sujets et idées" " de fonds sur lesquels sont construits Flower et l'IA collaborative." -#: ../../source/index.rst:121 +#: ../../source/index.rst:128 #, fuzzy msgid "References" msgstr "Référence" -#: ../../source/index.rst:123 +#: ../../source/index.rst:130 msgid "Information-oriented API reference and other reference material." msgstr "Référence de l'API orientée sur l'information pure." -#: ../../source/index.rst:132::1 +#: ../../source/index.rst:139::1 msgid ":py:obj:`flwr `\\" msgstr "" -#: ../../source/index.rst:132::1 flwr:1 of +#: ../../source/index.rst:139::1 flwr:1 of msgid "Flower main package." msgstr "" -#: ../../source/index.rst:149 +#: ../../source/index.rst:155 #, fuzzy msgid "Contributor docs" msgstr "Configuration du contributeur" -#: ../../source/index.rst:151 +#: ../../source/index.rst:157 #, fuzzy msgid "" "The Flower community welcomes contributions. 
The following docs are " @@ -9047,11 +9521,50 @@ msgstr "" msgid "Arguments" msgstr "Amélioration de la documentation" -#: ../../flwr install:1 new:1 run:1 +#: ../../flwr install:1 log:1 new:1 run:1 #, fuzzy msgid "Optional argument" msgstr "Améliorations facultatives" +#: ../../flwr install:1 +msgid "The source FAB file to install." +msgstr "" + +#: ../../flwr log:1 +msgid "Get logs from a Flower project run." +msgstr "" + +#: ../../flwr log:1 +msgid "Flag to stream or print logs from the Flower run" +msgstr "" + +#: ../../flwr log run +#, fuzzy +msgid "default" +msgstr "Flux de travail" + +#: ../../flwr log:1 +msgid "``True``" +msgstr "" + +#: ../../flwr log:1 +#, fuzzy +msgid "Required argument" +msgstr "Amélioration de la documentation" + +#: ../../flwr log:1 +#, fuzzy +msgid "The Flower run ID to query" +msgstr "Rejoignez la communauté de Flower" + +#: ../../flwr log:1 +msgid "Path of the Flower project to run" +msgstr "" + +#: ../../flwr log:1 +msgid "Name of the federation to run the app on" +msgstr "" + #: ../../flwr new:1 #, fuzzy msgid "Create new Flower App." @@ -9076,6 +9589,11 @@ msgstr "" msgid "The Flower username of the author" msgstr "" +#: ../../flwr new:1 +#, fuzzy +msgid "The name of the Flower App" +msgstr "Chargement des données" + #: ../../flwr run:1 #, fuzzy msgid "Run Flower App." @@ -9097,6 +9615,26 @@ msgid "" " the `pyproject.toml` in order to be properly overriden." msgstr "" +#: ../../flwr run:1 +msgid "" +"Use `--stream` with `flwr run` to display logs; logs are not streamed by " +"default." +msgstr "" + +#: ../../flwr run:1 +#, fuzzy +msgid "``False``" +msgstr ":code:`évaluer`" + +#: ../../flwr run:1 +#, fuzzy +msgid "Path of the Flower App to run." +msgstr "Chargement des données" + +#: ../../flwr run:1 +msgid "Name of the federation to run the app on." +msgstr "" + #: ../../source/ref-api-cli.rst:16 #, fuzzy msgid "flower-simulation" @@ -9116,17 +9654,16 @@ msgstr "Serveur de Flower" msgid "flower-server-app" msgstr "flower-driver-api" -#: ../../source/ref-api-cli.rst:49 +#: ../../source/ref-api-cli.rst:50 msgid "" -"Note that since version :code:`1.11.0`, :code:`flower-server-app` no " -"longer supports passing a reference to a `ServerApp` attribute. Instead, " -"you need to pass the path to Flower app via the argument :code:`--app`. " -"This is the path to a directory containing a `pyproject.toml`. You can " -"create a valid Flower app by executing :code:`flwr new` and following the" -" prompt." +"Note that since version ``1.11.0``, ``flower-server-app`` no longer " +"supports passing a reference to a `ServerApp` attribute. Instead, you " +"need to pass the path to Flower app via the argument ``--app``. This is " +"the path to a directory containing a `pyproject.toml`. You can create a " +"valid Flower app by executing ``flwr new`` and following the prompt." 
msgstr "" -#: ../../source/ref-api-cli.rst:62 +#: ../../source/ref-api-cli.rst:64 #, fuzzy msgid "flower-superexec" msgstr "flower-superlink" @@ -9143,7 +9680,7 @@ msgstr "" #: ../../source/ref-api/flwr.rst:35::1 #, fuzzy -msgid ":py:obj:`client `\\" +msgid ":py:obj:`flwr.client `\\" msgstr "serveur.stratégie.Stratégie" #: ../../source/ref-api/flwr.rst:35::1 flwr.client:1 of @@ -9153,7 +9690,7 @@ msgstr "Client de Flower" #: ../../source/ref-api/flwr.rst:35::1 #, fuzzy -msgid ":py:obj:`common `\\" +msgid ":py:obj:`flwr.common `\\" msgstr "serveur.stratégie.Stratégie" #: ../../source/ref-api/flwr.rst:35::1 flwr.common:1 of @@ -9162,7 +9699,7 @@ msgstr "Composants communs partagés entre le serveur et le client." #: ../../source/ref-api/flwr.rst:35::1 #, fuzzy -msgid ":py:obj:`server `\\" +msgid ":py:obj:`flwr.server `\\" msgstr "serveur.stratégie.Stratégie" #: ../../source/ref-api/flwr.rst:35::1 @@ -9174,7 +9711,7 @@ msgstr "Serveur de Flower" #: ../../source/ref-api/flwr.rst:35::1 #, fuzzy -msgid ":py:obj:`simulation `\\" +msgid ":py:obj:`flwr.simulation `\\" msgstr "serveur.stratégie.Stratégie" #: ../../source/ref-api/flwr.rst:35::1 flwr.simulation:1 of @@ -9258,7 +9795,7 @@ msgstr "" #: ../../source/ref-api/flwr.client.rst:50::1 #, fuzzy -msgid ":py:obj:`mod `\\" +msgid ":py:obj:`flwr.client.mod `\\" msgstr "serveur.stratégie.Stratégie" #: ../../source/ref-api/flwr.client.rst:50::1 flwr.client.mod:1 of @@ -9459,48 +9996,57 @@ msgstr "" msgid "Getter for `Context` client attribute." msgstr "" -#: ../../source/ref-api/flwr.client.Client.rst -#: ../../source/ref-api/flwr.client.NumPyClient.rst -#: ../../source/ref-api/flwr.client.mod.LocalDpMod.rst -#: ../../source/ref-api/flwr.common.Array.rst -#: ../../source/ref-api/flwr.common.ConfigsRecord.rst -#: ../../source/ref-api/flwr.common.Context.rst -#: ../../source/ref-api/flwr.common.Error.rst -#: ../../source/ref-api/flwr.common.Message.rst -#: ../../source/ref-api/flwr.common.Metadata.rst -#: ../../source/ref-api/flwr.common.MetricsRecord.rst #: ../../source/ref-api/flwr.common.Parameters.rst:2 -#: ../../source/ref-api/flwr.common.ParametersRecord.rst -#: ../../source/ref-api/flwr.common.RecordSet.rst -#: ../../source/ref-api/flwr.server.ClientManager.rst -#: ../../source/ref-api/flwr.server.Driver.rst -#: ../../source/ref-api/flwr.server.ServerAppComponents.rst -#: ../../source/ref-api/flwr.server.SimpleClientManager.rst -#: ../../source/ref-api/flwr.server.strategy.Bulyan.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgAdaptive.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgFixed.rst -#: ../../source/ref-api/flwr.server.strategy.DifferentialPrivacyClientSideAdaptiveClipping.rst -#: ../../source/ref-api/flwr.server.strategy.DifferentialPrivacyClientSideFixedClipping.rst -#: ../../source/ref-api/flwr.server.strategy.DifferentialPrivacyServerSideAdaptiveClipping.rst -#: ../../source/ref-api/flwr.server.strategy.DifferentialPrivacyServerSideFixedClipping.rst -#: ../../source/ref-api/flwr.server.strategy.FedAdagrad.rst -#: ../../source/ref-api/flwr.server.strategy.FedAdam.rst -#: ../../source/ref-api/flwr.server.strategy.FedAvg.rst -#: ../../source/ref-api/flwr.server.strategy.FedAvgAndroid.rst -#: ../../source/ref-api/flwr.server.strategy.FedAvgM.rst -#: ../../source/ref-api/flwr.server.strategy.FedOpt.rst -#: ../../source/ref-api/flwr.server.strategy.FedProx.rst -#: ../../source/ref-api/flwr.server.strategy.FedTrimmedAvg.rst -#: ../../source/ref-api/flwr.server.strategy.FedYogi.rst -#: 
../../source/ref-api/flwr.server.strategy.Krum.rst -#: ../../source/ref-api/flwr.server.strategy.Strategy.rst -#: ../../source/ref-api/flwr.server.workflow.SecAggPlusWorkflow.rst -#: ../../source/ref-api/flwr.server.workflow.SecAggWorkflow.rst -#: ../../source/ref-api/flwr.simulation.run_simulation.rst -#: ../../source/ref-api/flwr.simulation.start_simulation.rst #: flwr.client.app.start_client flwr.client.app.start_numpy_client -#: flwr.server.app.start_server -#: flwr.server.driver.driver.Driver.send_and_receive of +#: flwr.client.client.Client.evaluate flwr.client.client.Client.fit +#: flwr.client.client.Client.get_parameters +#: flwr.client.client.Client.get_properties +#: flwr.client.mod.localdp_mod.LocalDpMod +#: flwr.client.numpy_client.NumPyClient.evaluate +#: flwr.client.numpy_client.NumPyClient.fit +#: flwr.client.numpy_client.NumPyClient.get_parameters +#: flwr.client.numpy_client.NumPyClient.get_properties +#: flwr.common.context.Context flwr.common.message.Error +#: flwr.common.message.Message flwr.common.message.Message.create_error_reply +#: flwr.common.message.Message.create_reply flwr.common.message.Metadata +#: flwr.common.record.configsrecord.ConfigsRecord +#: flwr.common.record.metricsrecord.MetricsRecord +#: flwr.common.record.parametersrecord.Array +#: flwr.common.record.parametersrecord.ParametersRecord +#: flwr.common.record.recordset.RecordSet flwr.server.app.start_server +#: flwr.server.client_manager.ClientManager.register +#: flwr.server.client_manager.ClientManager.unregister +#: flwr.server.client_manager.SimpleClientManager.register +#: flwr.server.client_manager.SimpleClientManager.unregister +#: flwr.server.client_manager.SimpleClientManager.wait_for +#: flwr.server.driver.driver.Driver.create_message +#: flwr.server.driver.driver.Driver.pull_messages +#: flwr.server.driver.driver.Driver.push_messages +#: flwr.server.driver.driver.Driver.send_and_receive +#: flwr.server.serverapp_components.ServerAppComponents +#: flwr.server.strategy.bulyan.Bulyan +#: flwr.server.strategy.dp_adaptive_clipping.DifferentialPrivacyClientSideAdaptiveClipping +#: flwr.server.strategy.dp_adaptive_clipping.DifferentialPrivacyServerSideAdaptiveClipping +#: flwr.server.strategy.dp_fixed_clipping.DifferentialPrivacyClientSideFixedClipping +#: flwr.server.strategy.dp_fixed_clipping.DifferentialPrivacyServerSideFixedClipping +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_evaluate +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_fit +#: flwr.server.strategy.fedadagrad.FedAdagrad +#: flwr.server.strategy.fedadam.FedAdam flwr.server.strategy.fedavg.FedAvg +#: flwr.server.strategy.fedavg_android.FedAvgAndroid +#: flwr.server.strategy.fedavgm.FedAvgM flwr.server.strategy.fedopt.FedOpt +#: flwr.server.strategy.fedprox.FedProx +#: flwr.server.strategy.fedtrimmedavg.FedTrimmedAvg +#: flwr.server.strategy.fedyogi.FedYogi flwr.server.strategy.krum.Krum +#: flwr.server.strategy.strategy.Strategy.aggregate_evaluate +#: flwr.server.strategy.strategy.Strategy.aggregate_fit +#: flwr.server.strategy.strategy.Strategy.configure_evaluate +#: flwr.server.strategy.strategy.Strategy.configure_fit +#: flwr.server.strategy.strategy.Strategy.evaluate +#: flwr.server.strategy.strategy.Strategy.initialize_parameters +#: flwr.server.workflow.secure_aggregation.secagg_workflow.SecAggWorkflow +#: flwr.server.workflow.secure_aggregation.secaggplus_workflow.SecAggPlusWorkflow +#: flwr.simulation.run_simulation.run_simulation of #, fuzzy msgid "Parameters" msgstr "Paramètres du 
modèle." @@ -9512,21 +10058,31 @@ msgid "" "customize the local evaluation process." msgstr "" -#: ../../source/ref-api/flwr.client.Client.rst -#: ../../source/ref-api/flwr.client.NumPyClient.rst -#: ../../source/ref-api/flwr.common.ConfigsRecord.rst -#: ../../source/ref-api/flwr.common.Message.rst -#: ../../source/ref-api/flwr.common.MetricsRecord.rst -#: ../../source/ref-api/flwr.common.ParametersRecord.rst -#: ../../source/ref-api/flwr.server.ClientManager.rst -#: ../../source/ref-api/flwr.server.Driver.rst -#: ../../source/ref-api/flwr.server.SimpleClientManager.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgAdaptive.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgFixed.rst -#: ../../source/ref-api/flwr.server.strategy.Strategy.rst -#: ../../source/ref-api/flwr.simulation.start_simulation.rst -#: flwr.server.app.start_server -#: flwr.server.driver.driver.Driver.send_and_receive of +#: flwr.client.client.Client.evaluate flwr.client.client.Client.fit +#: flwr.client.client.Client.get_parameters +#: flwr.client.client.Client.get_properties +#: flwr.client.numpy_client.NumPyClient.evaluate +#: flwr.client.numpy_client.NumPyClient.fit +#: flwr.client.numpy_client.NumPyClient.get_parameters +#: flwr.client.numpy_client.NumPyClient.get_properties +#: flwr.common.message.Message.create_reply flwr.server.app.start_server +#: flwr.server.client_manager.ClientManager.num_available +#: flwr.server.client_manager.ClientManager.register +#: flwr.server.client_manager.SimpleClientManager.num_available +#: flwr.server.client_manager.SimpleClientManager.register +#: flwr.server.client_manager.SimpleClientManager.wait_for +#: flwr.server.driver.driver.Driver.create_message +#: flwr.server.driver.driver.Driver.pull_messages +#: flwr.server.driver.driver.Driver.push_messages +#: flwr.server.driver.driver.Driver.send_and_receive +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_evaluate +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_fit +#: flwr.server.strategy.strategy.Strategy.aggregate_evaluate +#: flwr.server.strategy.strategy.Strategy.aggregate_fit +#: flwr.server.strategy.strategy.Strategy.configure_evaluate +#: flwr.server.strategy.strategy.Strategy.configure_fit +#: flwr.server.strategy.strategy.Strategy.evaluate +#: flwr.server.strategy.strategy.Strategy.initialize_parameters of #, fuzzy msgid "Returns" msgstr "Ressources" @@ -9537,18 +10093,29 @@ msgid "" "details such as the number of local data examples used for evaluation." 
msgstr "" -#: ../../source/ref-api/flwr.client.Client.rst -#: ../../source/ref-api/flwr.client.NumPyClient.rst -#: ../../source/ref-api/flwr.common.Message.rst -#: ../../source/ref-api/flwr.server.ClientManager.rst -#: ../../source/ref-api/flwr.server.Driver.rst -#: ../../source/ref-api/flwr.server.SimpleClientManager.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgAdaptive.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgFixed.rst -#: ../../source/ref-api/flwr.server.strategy.Strategy.rst -#: ../../source/ref-api/flwr.simulation.start_simulation.rst -#: flwr.server.app.start_server -#: flwr.server.driver.driver.Driver.send_and_receive of +#: flwr.client.client.Client.evaluate flwr.client.client.Client.fit +#: flwr.client.client.Client.get_parameters +#: flwr.client.client.Client.get_properties +#: flwr.client.numpy_client.NumPyClient.get_parameters +#: flwr.client.numpy_client.NumPyClient.get_properties +#: flwr.common.message.Message.create_reply flwr.server.app.start_server +#: flwr.server.client_manager.ClientManager.num_available +#: flwr.server.client_manager.ClientManager.register +#: flwr.server.client_manager.SimpleClientManager.num_available +#: flwr.server.client_manager.SimpleClientManager.register +#: flwr.server.client_manager.SimpleClientManager.wait_for +#: flwr.server.driver.driver.Driver.create_message +#: flwr.server.driver.driver.Driver.pull_messages +#: flwr.server.driver.driver.Driver.push_messages +#: flwr.server.driver.driver.Driver.send_and_receive +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_evaluate +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_fit +#: flwr.server.strategy.strategy.Strategy.aggregate_evaluate +#: flwr.server.strategy.strategy.Strategy.aggregate_fit +#: flwr.server.strategy.strategy.Strategy.configure_evaluate +#: flwr.server.strategy.strategy.Strategy.configure_fit +#: flwr.server.strategy.strategy.Strategy.evaluate +#: flwr.server.strategy.strategy.Strategy.initialize_parameters of msgid "Return type" msgstr "" @@ -9879,6 +10446,11 @@ msgstr "Logique côté client" msgid ":py:obj:`make_ffn `\\ \\(ffn\\, mods\\)" msgstr "serveur.stratégie.Stratégie" +#: ../../source/ref-api/flwr.client.mod.rst:28::1 +#: flwr.client.mod.utils.make_ffn:1 of +msgid "." +msgstr "" + #: ../../source/ref-api/flwr.client.mod.rst:28::1 msgid "" ":py:obj:`message_size_mod `\\ \\(msg\\," @@ -10046,10 +10618,6 @@ msgstr "" msgid "make\\_ffn" msgstr "" -#: flwr.client.mod.utils.make_ffn:1 of -msgid "." -msgstr "" - #: ../../source/ref-api/flwr.client.mod.message_size_mod.rst:2 msgid "message\\_size\\_mod" msgstr "" @@ -10078,15 +10646,6 @@ msgstr "" msgid "secaggplus\\_mod" msgstr "Flux de travail" -#: ../../source/ref-api/flwr.client.run_client_app.rst:2 -msgid "run\\_client\\_app" -msgstr "" - -#: ../../source/ref-api/flwr.client.run_supernode.rst:2 -#, fuzzy -msgid "run\\_supernode" -msgstr "flower-superlink" - #: ../../source/ref-api/flwr.client.start_client.rst:2 #, fuzzy msgid "start\\_client" @@ -10822,17 +11381,12 @@ msgstr "" #: collections.abc.MutableMapping.clear:1::1 of #, fuzzy -msgid ":py:obj:`get `\\ \\(key\\[\\, default\\]\\)" +msgid ":py:obj:`get `\\ \\(k\\[\\,d\\]\\)" msgstr "" "Flower 1.0 : ``start_server(..., " "config=flwr.server.ServerConfig(num_rounds=3, round_timeout=600.0), " "...)``" -#: collections.abc.Mapping.get:1 -#: collections.abc.MutableMapping.clear:1::1 of -msgid "Retrieve the corresponding layout by the string key." 
-msgstr "" - #: collections.abc.MutableMapping.clear:1::1 of msgid ":py:obj:`items `\\ \\(\\)" msgstr "" @@ -10889,22 +11443,6 @@ msgstr "" msgid "This function counts booleans as occupying 1 Byte." msgstr "" -#: collections.abc.Mapping.get:3 of -msgid "" -"When there isn't an exact match, all the existing keys in the layout map " -"will be treated as a regex and map against the input key again. The first" -" match will be returned, based on the key insertion order. Return None if" -" there isn't any match found." -msgstr "" - -#: collections.abc.Mapping.get:8 of -msgid "the string key as the query for the layout." -msgstr "" - -#: collections.abc.Mapping.get:10 of -msgid "Corresponding layout based on the query." -msgstr "" - #: ../../source/ref-api/flwr.common.Context.rst:2 msgid "Context" msgstr "" @@ -11663,7 +12201,7 @@ msgstr "" msgid "The encoding in which to encode the string." msgstr "" -#: flwr.common.EventType.encode:5 of +#: flwr.common.EventType.encode:9 of msgid "errors" msgstr "" @@ -11839,7 +12377,7 @@ msgid "" "string." msgstr "" -#: flwr.common.EventType.replace:3 of +#: flwr.common.EventType.replace:5 of msgid "count" msgstr "" @@ -11875,7 +12413,7 @@ msgid "" "strings and the original string." msgstr "" -#: flwr.common.EventType.rsplit:3 flwr.common.EventType.split:3 of +#: flwr.common.EventType.rsplit:7 flwr.common.EventType.split:7 of msgid "sep" msgstr "" @@ -11890,7 +12428,7 @@ msgid "" " empty strings from the result." msgstr "" -#: flwr.common.EventType.rsplit:9 flwr.common.EventType.split:9 of +#: flwr.common.EventType.rsplit:11 flwr.common.EventType.split:11 of msgid "maxsplit" msgstr "" @@ -11931,7 +12469,7 @@ msgid "" "remaining cased characters have lower case." msgstr "" -#: flwr.common.EventType.translate:3 of +#: flwr.common.EventType.translate:5 of #, fuzzy msgid "table" msgstr "Database" @@ -12354,7 +12892,7 @@ msgstr "" #: collections.abc.MutableMapping.clear:1::1 of #, fuzzy -msgid ":py:obj:`get `\\ \\(key\\[\\, default\\]\\)" +msgid ":py:obj:`get `\\ \\(k\\[\\,d\\]\\)" msgstr "serveur.stratégie.Stratégie" #: collections.abc.MutableMapping.clear:1::1 of @@ -12490,9 +13028,7 @@ msgstr "" #: collections.abc.MutableMapping.clear:1::1 of #, fuzzy -msgid "" -":py:obj:`get `\\ \\(key\\[\\, " -"default\\]\\)" +msgid ":py:obj:`get `\\ \\(k\\[\\,d\\]\\)" msgstr "serveur.stratégie.Stratégie" #: collections.abc.MutableMapping.clear:1::1 of @@ -12839,7 +13375,7 @@ msgstr "" #: ../../source/ref-api/flwr.server.rst:56::1 #, fuzzy -msgid ":py:obj:`strategy `\\" +msgid ":py:obj:`flwr.server.strategy `\\" msgstr "serveur.stratégie.Stratégie" #: ../../source/ref-api/flwr.server.rst:56::1 @@ -12849,7 +13385,7 @@ msgstr "" #: ../../source/ref-api/flwr.server.rst:56::1 #, fuzzy -msgid ":py:obj:`workflow `\\" +msgid ":py:obj:`flwr.server.workflow `\\" msgstr "serveur.stratégie.Stratégie" #: ../../source/ref-api/flwr.server.rst:56::1 @@ -13353,8 +13889,7 @@ msgid "" msgstr "" #: flwr.server.app.start_server:9 -#: flwr.server.serverapp_components.ServerAppComponents:6 -#: flwr.simulation.app.start_simulation:29 of +#: flwr.server.serverapp_components.ServerAppComponents:6 of msgid "" "Currently supported values are `num_rounds` (int, default: 1) and " "`round_timeout` in seconds (float, default: None)." 
@@ -13478,15 +14013,6 @@ msgstr "" msgid "**success**" msgstr "" -#: ../../source/ref-api/flwr.server.run_server_app.rst:2 -msgid "run\\_server\\_app" -msgstr "" - -#: ../../source/ref-api/flwr.server.run_superlink.rst:2 -#, fuzzy -msgid "run\\_superlink" -msgstr "flower-superlink" - #: ../../source/ref-api/flwr.server.start_server.rst:2 #, fuzzy msgid "start\\_server" @@ -16567,16 +17093,16 @@ msgid "Run a Flower App using the Simulation Engine." msgstr "" #: ../../source/ref-api/flwr.simulation.rst:18::1 +#, fuzzy msgid "" -":py:obj:`start_simulation `\\ \\(\\*\\," -" client\\_fn\\, num\\_clients\\)" -msgstr "" +":py:obj:`start_simulation `\\ " +"\\(\\*args\\, \\*\\*kwargs\\)" +msgstr "serveur.stratégie.Stratégie" #: ../../source/ref-api/flwr.simulation.rst:18::1 -#: flwr.simulation.app.start_simulation:1 of -#, fuzzy -msgid "Start a Ray-based Flower simulation server." -msgstr "Simulation de moniteur" +#: flwr.simulation.start_simulation:1 of +msgid "Log error stating that module `ray` could not be imported." +msgstr "" #: ../../source/ref-api/flwr.simulation.run_simulation.rst:2 #, fuzzy @@ -16638,120 +17164,6 @@ msgstr "" msgid "start\\_simulation" msgstr "démarrer_simulation" -#: flwr.simulation.app.start_simulation:3 of -msgid "" -"A function creating `Client` instances. The function must have the " -"signature `client_fn(context: Context). It should return a single client " -"instance of type `Client`. Note that the created client instances are " -"ephemeral and will often be destroyed after a single method invocation. " -"Since client instances are not long-lived, they should not attempt to " -"carry state over method invocations. Any state required by the instance " -"(model, dataset, hyperparameters, ...) should be (re-)created in either " -"the call to `client_fn` or the call to any of the client methods (e.g., " -"load evaluation data in the `evaluate` method itself)." -msgstr "" - -#: flwr.simulation.app.start_simulation:13 of -msgid "The total number of clients in this simulation." -msgstr "" - -#: flwr.simulation.app.start_simulation:15 of -msgid "" -"UNSUPPORTED, WILL BE REMOVED. USE `num_clients` INSTEAD. List " -"`client_id`s for each client. This is only required if `num_clients` is " -"not set. Setting both `num_clients` and `clients_ids` with " -"`len(clients_ids)` not equal to `num_clients` generates an error. Using " -"this argument will raise an error." -msgstr "" - -#: flwr.simulation.app.start_simulation:21 of -msgid "" -"CPU and GPU resources for a single client. Supported keys are `num_cpus` " -"and `num_gpus`. To understand the GPU utilization caused by `num_gpus`, " -"as well as using custom resources, please consult the Ray documentation." -msgstr "" - -#: flwr.simulation.app.start_simulation:26 of -msgid "" -"An implementation of the abstract base class `flwr.server.Server`. If no " -"instance is provided, then `start_server` will create one." -msgstr "" - -#: flwr.simulation.app.start_simulation:32 of -msgid "" -"An implementation of the abstract base class `flwr.server.Strategy`. If " -"no strategy is provided, then `start_server` will use " -"`flwr.server.strategy.FedAvg`." -msgstr "" - -#: flwr.simulation.app.start_simulation:36 of -msgid "" -"An implementation of the abstract base class `flwr.server.ClientManager`." -" If no implementation is provided, then `start_simulation` will use " -"`flwr.server.client_manager.SimpleClientManager`." 
-msgstr "" - -#: flwr.simulation.app.start_simulation:40 of -msgid "" -"Optional dictionary containing arguments for the call to `ray.init`. If " -"ray_init_args is None (the default), Ray will be initialized with the " -"following default args: { \"ignore_reinit_error\": True, " -"\"include_dashboard\": False } An empty dictionary can be used " -"(ray_init_args={}) to prevent any arguments from being passed to " -"ray.init." -msgstr "" - -#: flwr.simulation.app.start_simulation:40 of -msgid "" -"Optional dictionary containing arguments for the call to `ray.init`. If " -"ray_init_args is None (the default), Ray will be initialized with the " -"following default args:" -msgstr "" - -#: flwr.simulation.app.start_simulation:44 of -msgid "{ \"ignore_reinit_error\": True, \"include_dashboard\": False }" -msgstr "" - -#: flwr.simulation.app.start_simulation:46 of -msgid "" -"An empty dictionary can be used (ray_init_args={}) to prevent any " -"arguments from being passed to ray.init." -msgstr "" - -#: flwr.simulation.app.start_simulation:49 of -msgid "" -"Set to True to prevent `ray.shutdown()` in case " -"`ray.is_initialized()=True`." -msgstr "" - -#: flwr.simulation.app.start_simulation:51 of -msgid "" -"Optionally specify the type of actor to use. The actor object, which " -"persists throughout the simulation, will be the process in charge of " -"executing a ClientApp wrapping input argument `client_fn`." -msgstr "" - -#: flwr.simulation.app.start_simulation:55 of -msgid "" -"If you want to create your own Actor classes, you might need to pass some" -" input argument. You can use this dictionary for such purpose." -msgstr "" - -#: flwr.simulation.app.start_simulation:58 of -msgid "" -"(default: \"DEFAULT\") Optional string (\"DEFAULT\" or \"SPREAD\") for " -"the VCE to choose in which node the actor is placed. If you are an " -"advanced user needed more control you can use lower-level scheduling " -"strategies to pin actors to specific compute nodes (e.g. via " -"NodeAffinitySchedulingStrategy). Please note this is an advanced feature." -" For all details, please refer to the Ray documentation: " -"https://docs.ray.io/en/latest/ray-core/scheduling/index.html" -msgstr "" - -#: flwr.simulation.app.start_simulation:67 of -msgid "**hist** -- Object containing metrics from training." -msgstr "" - #: ../../source/ref-changelog.md:1 msgid "Changelog" msgstr "Changelog" @@ -16889,13 +17301,6 @@ msgstr "" msgid "Incompatible changes" msgstr "Changements incompatibles" -#: ../../source/ref-changelog.md:33 ../../source/ref-changelog.md:399 -#: ../../source/ref-changelog.md:676 ../../source/ref-changelog.md:740 -#: ../../source/ref-changelog.md:798 ../../source/ref-changelog.md:867 -#: ../../source/ref-changelog.md:929 -msgid "None" -msgstr "Aucun" - #: ../../source/ref-changelog.md:35 #, fuzzy msgid "v1.11.0 (2024-08-30)" @@ -23548,13 +23953,16 @@ msgstr "" "tels que `PyTorch `_ ou `TensorFlow " "`_." -#: ../../source/ref-example-projects.rst:10 +#: ../../source/ref-example-projects.rst:9 #, fuzzy -msgid "" -"The following examples are available as standalone projects. Quickstart " -"TensorFlow/Keras ---------------------------" +msgid "The following examples are available as standalone projects." msgstr "Les exemples suivants sont disponibles sous forme de projets autonomes." 
+#: ../../source/ref-example-projects.rst:12 +#, fuzzy +msgid "Quickstart TensorFlow/Keras" +msgstr "Démarrage rapide de TensorFlow" + #: ../../source/ref-example-projects.rst:14 msgid "" "The TensorFlow/Keras quickstart example shows CIFAR-10 image " @@ -23574,14 +23982,14 @@ msgstr "" "`_" -#: ../../source/ref-example-projects.rst:18 +#: ../../source/ref-example-projects.rst:19 #, fuzzy msgid ":doc:`Quickstart TensorFlow (Tutorial) `" msgstr "" "`Quickstart TensorFlow (Tutorial) `_" -#: ../../source/ref-example-projects.rst:19 +#: ../../source/ref-example-projects.rst:20 msgid "" "`Quickstart TensorFlow (Blog Post) `_" @@ -23589,12 +23997,12 @@ msgstr "" "`Quickstart TensorFlow (Blog Post) `_" -#: ../../source/ref-example-projects.rst:23 -#: ../../source/tutorial-quickstart-pytorch.rst:5 +#: ../../source/ref-example-projects.rst:24 +#: ../../source/tutorial-quickstart-pytorch.rst:4 msgid "Quickstart PyTorch" msgstr "Démarrage rapide de PyTorch" -#: ../../source/ref-example-projects.rst:25 +#: ../../source/ref-example-projects.rst:26 msgid "" "The PyTorch quickstart example shows CIFAR-10 image classification with a" " simple Convolutional Neural Network:" @@ -23602,7 +24010,7 @@ msgstr "" "L'exemple de démarrage rapide PyTorch montre la classification d'images " "CIFAR-10 avec un simple réseau neuronal convolutif :" -#: ../../source/ref-example-projects.rst:28 +#: ../../source/ref-example-projects.rst:29 #, fuzzy msgid "" "`Quickstart PyTorch (Code) " @@ -23611,18 +24019,18 @@ msgstr "" "`Quickstart PyTorch (Code) " "`_" -#: ../../source/ref-example-projects.rst:29 +#: ../../source/ref-example-projects.rst:31 #, fuzzy msgid ":doc:`Quickstart PyTorch (Tutorial) `" msgstr "" "`Quickstart PyTorch (Tutorial) `_" -#: ../../source/ref-example-projects.rst:33 +#: ../../source/ref-example-projects.rst:34 msgid "PyTorch: From Centralized To Federated" msgstr "PyTorch : De la centralisation à la fédération" -#: ../../source/ref-example-projects.rst:35 +#: ../../source/ref-example-projects.rst:36 msgid "" "This example shows how a regular PyTorch project can be federated using " "Flower:" @@ -23630,7 +24038,7 @@ msgstr "" "Cet exemple montre comment un projet PyTorch ordinaire peut être fédéré à" " l'aide de Flower :" -#: ../../source/ref-example-projects.rst:37 +#: ../../source/ref-example-projects.rst:38 #, fuzzy msgid "" "`PyTorch: From Centralized To Federated (Code) " @@ -23641,7 +24049,7 @@ msgstr "" "`_" -#: ../../source/ref-example-projects.rst:38 +#: ../../source/ref-example-projects.rst:40 #, fuzzy msgid "" ":doc:`PyTorch: From Centralized To Federated (Tutorial) `_" -#: ../../source/ref-example-projects.rst:42 +#: ../../source/ref-example-projects.rst:44 msgid "Federated Learning on Raspberry Pi and Nvidia Jetson" msgstr "Apprentissage fédéré sur Raspberry Pi et Nvidia Jetson" -#: ../../source/ref-example-projects.rst:44 +#: ../../source/ref-example-projects.rst:46 msgid "" "This example shows how Flower can be used to build a federated learning " "system that run across Raspberry Pi and Nvidia Jetson:" @@ -23664,7 +24072,7 @@ msgstr "" "système d'apprentissage fédéré qui fonctionne sur Raspberry Pi et Nvidia " "Jetson :" -#: ../../source/ref-example-projects.rst:46 +#: ../../source/ref-example-projects.rst:49 #, fuzzy msgid "" "`Federated Learning on Raspberry Pi and Nvidia Jetson (Code) " @@ -23673,7 +24081,7 @@ msgstr "" "`L'apprentissage fédéré sur Raspberry Pi et Nvidia Jetson (Code) " "`_" -#: ../../source/ref-example-projects.rst:47 +#: ../../source/ref-example-projects.rst:51 
msgid "" "`Federated Learning on Raspberry Pi and Nvidia Jetson (Blog Post) " "`_" @@ -23696,7 +24104,7 @@ msgstr "" ":fa:`eye,mr-1` Flower peut-il fonctionner sur les ordinateurs portables " "Juptyter / Google Colab ?" -#: ../../source/ref-faq.rst:8 +#: ../../source/ref-faq.rst:9 msgid "" "Yes, it can! Flower even comes with a few under-the-hood optimizations to" " make it work even better on Colab. Here's a quickstart example:" @@ -23705,7 +24113,7 @@ msgstr "" "pour qu'il fonctionne encore mieux sur Colab. Voici un exemple de " "démarrage rapide :" -#: ../../source/ref-faq.rst:10 +#: ../../source/ref-faq.rst:11 #, fuzzy msgid "" "`Flower simulation PyTorch " @@ -23715,7 +24123,7 @@ msgstr "" "`Flower Quickstart (TensorFlow/Keras) " "`_" -#: ../../source/ref-faq.rst:11 +#: ../../source/ref-faq.rst:12 #, fuzzy msgid "" "`Flower simulation TensorFlow/Keras " @@ -23731,7 +24139,7 @@ msgstr "" ":fa:`eye,mr-1` Comment puis-je faire fonctionner l'apprentissage fédéré " "sur un Raspberry Pi ?" -#: ../../source/ref-faq.rst:15 +#: ../../source/ref-faq.rst:16 #, fuzzy msgid "" "Find the `blog post about federated learning on embedded device here " @@ -23750,7 +24158,7 @@ msgstr "" ":fa:`eye,mr-1` Est-ce que Flower prend en charge l'apprentissage fédéré " "sur les appareils Android ?" -#: ../../source/ref-faq.rst:19 +#: ../../source/ref-faq.rst:20 #, fuzzy msgid "" "Yes, it does. Please take a look at our `blog post " @@ -23762,13 +24170,13 @@ msgstr "" "with-flower>`_ ou consultez l'`exemple de code Android sur GitHub " "`_." -#: ../../source/ref-faq.rst:21 +#: ../../source/ref-faq.rst:22 msgid "" "`Android Kotlin example `_" msgstr "" -#: ../../source/ref-faq.rst:22 +#: ../../source/ref-faq.rst:23 msgid "`Android Java example `_" msgstr "" @@ -23778,7 +24186,7 @@ msgstr "" ":fa:`eye,mr-1` Puis-je combiner l'apprentissage fédéré avec la blockchain" " ?" -#: ../../source/ref-faq.rst:26 +#: ../../source/ref-faq.rst:27 msgid "" "Yes, of course. A list of available examples using Flower within a " "blockchain environment is available here:" @@ -23786,7 +24194,20 @@ msgstr "" "Oui, bien sûr, une liste d'exemples disponibles utilisant Flower dans un " "environnement blockchain est disponible ici :" -#: ../../source/ref-faq.rst:28 +#: ../../source/ref-faq.rst:30 +msgid "`FLock: A Decentralised AI Training Platform `_." +msgstr "" + +#: ../../source/ref-faq.rst:30 +msgid "Contribute to on-chain training the model and earn rewards." +msgstr "" + +#: ../../source/ref-faq.rst:31 +#, fuzzy +msgid "Local blockchain with federated learning simulation." +msgstr "Mise à l'échelle de l'apprentissage fédéré" + +#: ../../source/ref-faq.rst:32 msgid "" "`Flower meets Nevermined GitHub Repository `_." @@ -23794,7 +24215,7 @@ msgstr "" "`Flower meets Nevermined GitHub Repository `_." -#: ../../source/ref-faq.rst:29 +#: ../../source/ref-faq.rst:33 msgid "" "`Flower meets Nevermined YouTube video " "`_." @@ -23802,7 +24223,7 @@ msgstr "" "`Flower rencontre Nevermined vidéo YouTube " "`_." -#: ../../source/ref-faq.rst:30 +#: ../../source/ref-faq.rst:34 #, fuzzy msgid "" "`Flower meets KOSMoS `_." -#: ../../source/ref-faq.rst:31 +#: ../../source/ref-faq.rst:35 msgid "" "`Flower meets Talan blog post `_ ." -#: ../../source/ref-faq.rst:32 +#: ../../source/ref-faq.rst:36 msgid "" "`Flower meets Talan GitHub Repository " "`_ ." @@ -24122,12 +24543,12 @@ msgid "" "app using Flower." 
msgstr "" -#: ../../source/tutorial-quickstart-android.rst:5 +#: ../../source/tutorial-quickstart-android.rst:4 #, fuzzy msgid "Quickstart Android" msgstr "Démarrage rapide des Pandas" -#: ../../source/tutorial-quickstart-android.rst:10 +#: ../../source/tutorial-quickstart-android.rst:9 #, fuzzy msgid "" "Let's build a federated learning system using TFLite and Flower on " @@ -24136,7 +24557,7 @@ msgstr "" "Construisons un système d'apprentissage fédéré en utilisant fastai et " "Flower !" -#: ../../source/tutorial-quickstart-android.rst:12 +#: ../../source/tutorial-quickstart-android.rst:11 #, fuzzy msgid "" "Please refer to the `full code example " @@ -24147,239 +24568,325 @@ msgstr "" "`_ " "pour en savoir plus." -#: ../../source/tutorial-quickstart-fastai.rst:-1 -msgid "" -"Check out this Federated Learning quickstart tutorial for using Flower " -"with FastAI to train a vision model on CIFAR-10." -msgstr "" - -#: ../../source/tutorial-quickstart-fastai.rst:5 +#: ../../source/tutorial-quickstart-fastai.rst:4 msgid "Quickstart fastai" msgstr "Démarrage rapide fastai" +#: ../../source/tutorial-quickstart-fastai.rst:6 +#, fuzzy +msgid "" +"In this federated learning tutorial we will learn how to train a " +"SqueezeNet model on MNIST using Flower and fastai. It is recommended to " +"create a virtual environment and run everything within a :doc:`virtualenv" +" `." +msgstr "" +"Tout d'abord, il est recommandé de créer un environnement virtuel et de " +"tout exécuter au sein d'un `virtualenv `_." + #: ../../source/tutorial-quickstart-fastai.rst:10 -msgid "Let's build a federated learning system using fastai and Flower!" +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:11 +msgid "Then, clone the code example directly from GitHub:" msgstr "" -"Construisons un système d'apprentissage fédéré en utilisant fastai et " -"Flower !" -#: ../../source/tutorial-quickstart-fastai.rst:12 +#: ../../source/tutorial-quickstart-fastai.rst:18 +msgid "" +"This will create a new directory called `quickstart-fastai` containing " +"the following files:" +msgstr "" + +#: ../../source/tutorial-quickstart-fastai.rst:31 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:32 #, fuzzy +msgid "Next, activate your environment, then run:" +msgstr "et active l'environnement virtuel avec :" + +#: ../../source/tutorial-quickstart-fastai.rst:41 msgid "" -"Please refer to the `full code example " -"`_ " -"to learn more." +"This example by default runs the Flower Simulation Engine, creating a " +"federation of 10 nodes using `FedAvg `_ " +"as the aggregation strategy. The dataset will be partitioned using Flower" +" Dataset's `IidPartitioner `_." +" Let's run the project:" msgstr "" -"Réfère-toi à l'exemple de code complet " -"`_ " -"pour en savoir plus." 
+ +#: ../../source/tutorial-quickstart-fastai.rst:54 +#: ../../source/tutorial-quickstart-huggingface.rst:61 +#: ../../source/tutorial-quickstart-mlx.rst:60 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:55 +#: ../../source/tutorial-quickstart-pytorch.rst:62 +#: ../../source/tutorial-quickstart-tensorflow.rst:62 +msgid "With default arguments you will see an output like this one:" +msgstr "" + +#: ../../source/tutorial-quickstart-fastai.rst:98 +#: ../../source/tutorial-quickstart-huggingface.rst:112 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:105 +#: ../../source/tutorial-quickstart-pytorch.rst:103 +#: ../../source/tutorial-quickstart-tensorflow.rst:103 +msgid "" +"You can also override the parameters defined in the " +"``[tool.flwr.app.config]`` section in ``pyproject.toml`` like this:" +msgstr "" + +#: ../../source/tutorial-quickstart-fastai.rst:108 +#, fuzzy +msgid "" +"Check the `source code `_ of this tutorial in ``examples/quickstart-fasai`` " +"in the Flower GitHub repository." +msgstr "" +"Félicitations ! Tu as réussi à construire et à faire fonctionner ton " +"premier système d'apprentissage fédéré. Le code source complet " +"`_ de cet exemple se trouve dans :code:`examples" +"/quickstart-mxnet`." #: ../../source/tutorial-quickstart-huggingface.rst:-1 msgid "" "Check out this Federating Learning quickstart tutorial for using Flower " -"with HuggingFace Transformers in order to fine-tune an LLM." +"with 🤗 HuggingFace Transformers in order to fine-tune an LLM." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:5 +#: ../../source/tutorial-quickstart-huggingface.rst:4 msgid "Quickstart 🤗 Transformers" msgstr "Démarrage rapide 🤗 Transformateurs" -#: ../../source/tutorial-quickstart-huggingface.rst:10 +#: ../../source/tutorial-quickstart-huggingface.rst:6 +#, fuzzy msgid "" -"Let's build a federated learning system using Hugging Face Transformers " -"and Flower!" +"In this federated learning tutorial we will learn how to train a large " +"language model (LLM) on the `IMDB " +"`_ dataset using Flower" +" and the 🤗 Hugging Face Transformers library. It is recommended to create" +" a virtual environment and run everything within a :doc:`virtualenv " +"`." msgstr "" -"Construisons un système d'apprentissage fédéré à l'aide des " -"transformateurs Hugging Face et de Flower !" +"Tout d'abord, il est recommandé de créer un environnement virtuel et de " +"tout exécuter au sein d'un `virtualenv `_." #: ../../source/tutorial-quickstart-huggingface.rst:12 msgid "" -"We will leverage Hugging Face to federate the training of language models" -" over multiple clients using Flower. More specifically, we will fine-tune" -" a pre-trained Transformer model (distilBERT) for sequence classification" -" over a dataset of IMDB ratings. The end goal is to detect if a movie " -"rating is positive or negative." +"Let's use ``flwr new`` to create a complete Flower+🤗 Hugging Face " +"project. It will generate all the files needed to run, by default with " +"the Flower Simulation Engine, a federation of 10 nodes using |fedavg|_ " +"The dataset will be partitioned using |flowerdatasets|_'s " +"|iidpartitioner|_." msgstr "" -"Nous nous appuierons sur Hugging Face pour fédérer l'entraînement de " -"modèles de langage sur plusieurs clients à l'aide de Flower. Plus " -"précisément, nous mettrons au point un modèle Transformer pré-entraîné " -"(distilBERT) pour la classification de séquences sur un ensemble de " -"données d'évaluations IMDB. 
L'objectif final est de détecter si " -"l'évaluation d'un film est positive ou négative." - -#: ../../source/tutorial-quickstart-huggingface.rst:18 -msgid "Dependencies" -msgstr "Dépendances" -#: ../../source/tutorial-quickstart-huggingface.rst:20 +#: ../../source/tutorial-quickstart-huggingface.rst:17 +#: ../../source/tutorial-quickstart-mlx.rst:17 +#: ../../source/tutorial-quickstart-pytorch.rst:18 +#: ../../source/tutorial-quickstart-tensorflow.rst:18 +#, fuzzy msgid "" -"To follow along this tutorial you will need to install the following " -"packages: :code:`datasets`, :code:`evaluate`, :code:`flwr`, " -":code:`torch`, and :code:`transformers`. This can be done using " -":code:`pip`:" +"Now that we have a rough idea of what this example is about, let's get " +"started. First, install Flower in your new environment:" msgstr "" -"Pour suivre ce tutoriel, tu devras installer les paquets suivants : " -":code:`datasets`, :code:`evaluate`, :code:`flwr`, :code:`torch`, et " -":code:`transformers`. Cela peut être fait en utilisant :code:`pip` :" +"Maintenant que nous avons une idée approximative de ce qui se passe, " +"commençons. Nous devons d'abord installer Flower. Tu peux le faire en " +"lançant :" -#: ../../source/tutorial-quickstart-huggingface.rst:30 -msgid "Standard Hugging Face workflow" -msgstr "Flux de travail standard pour le visage" +#: ../../source/tutorial-quickstart-huggingface.rst:25 +msgid "" +"Then, run the command below. You will be prompted to select one of the " +"available templates (choose ``HuggingFace``), give a name to your " +"project, and type in your developer name:" +msgstr "" #: ../../source/tutorial-quickstart-huggingface.rst:33 -msgid "Handling the data" -msgstr "Traitement des données" +#: ../../source/tutorial-quickstart-mlx.rst:32 +#: ../../source/tutorial-quickstart-pytorch.rst:34 +#: ../../source/tutorial-quickstart-tensorflow.rst:34 +msgid "" +"After running it you'll notice a new directory with your project name has" +" been created. It should have the following structure:" +msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:35 +#: ../../source/tutorial-quickstart-huggingface.rst:47 +#: ../../source/tutorial-quickstart-mlx.rst:46 +#: ../../source/tutorial-quickstart-pytorch.rst:48 +#: ../../source/tutorial-quickstart-tensorflow.rst:48 msgid "" -"To fetch the IMDB dataset, we will use Hugging Face's :code:`datasets` " -"library. We then need to tokenize the data and create :code:`PyTorch` " -"dataloaders, this is all done in the :code:`load_data` function:" +"If you haven't yet installed the project and its dependencies, you can do" +" so by:" +msgstr "" + +#: ../../source/tutorial-quickstart-huggingface.rst:54 +#: ../../source/tutorial-quickstart-pytorch.rst:55 +#: ../../source/tutorial-quickstart-tensorflow.rst:55 +msgid "To run the project, do:" msgstr "" -"Pour récupérer le jeu de données IMDB, nous utiliserons la bibliothèque " -":code:`datasets` de Hugging Face. 
Nous devons ensuite tokeniser les " -"données et créer des :code:`PyTorch` dataloaders, ce qui est fait dans la" -" fonction :code:`load_data` :" -#: ../../source/tutorial-quickstart-huggingface.rst:81 -msgid "Training and testing the model" -msgstr "Former et tester le modèle" +#: ../../source/tutorial-quickstart-huggingface.rst:102 +msgid "You can also run the project with GPU as follows:" +msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:83 +#: ../../source/tutorial-quickstart-huggingface.rst:109 msgid "" -"Once we have a way of creating our trainloader and testloader, we can " -"take care of the training and testing. This is very similar to any " -":code:`PyTorch` training or testing loop:" +"This will use the default arguments where each ``ClientApp`` will use 2 " +"CPUs and at most 4 ``ClientApp``\\s will run in a given GPU." msgstr "" -"Une fois que nous avons trouvé un moyen de créer notre trainloader et " -"notre testloader, nous pouvons nous occuper de l'entraînement et du test." -" C'est très similaire à n'importe quelle boucle d'entraînement ou de test" -" :code:`PyTorch` :" -#: ../../source/tutorial-quickstart-huggingface.rst:121 -msgid "Creating the model itself" -msgstr "Créer le modèle lui-même" +#: ../../source/tutorial-quickstart-huggingface.rst:120 +#: ../../source/tutorial-quickstart-mlx.rst:110 +#: ../../source/tutorial-quickstart-pytorch.rst:111 +msgid "" +"What follows is an explanation of each component in the project you just " +"created: dataset partition, the model, defining the ``ClientApp`` and " +"defining the ``ServerApp``." +msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:123 +#: ../../source/tutorial-quickstart-huggingface.rst:124 +#: ../../source/tutorial-quickstart-mlx.rst:114 +#: ../../source/tutorial-quickstart-pytorch.rst:115 +#: ../../source/tutorial-quickstart-tensorflow.rst:112 +#, fuzzy +msgid "The Data" +msgstr "Chargement des données" + +#: ../../source/tutorial-quickstart-huggingface.rst:126 msgid "" -"To create the model itself, we will just load the pre-trained distillBERT" -" model using Hugging Face’s :code:`AutoModelForSequenceClassification` :" +"This tutorial uses |flowerdatasets|_ to easily download and partition the" +" `IMDB `_ dataset. In " +"this example you'll make use of the |iidpartitioner|_ to generate " +"``num_partitions`` partitions. You can choose |otherpartitioners|_ " +"available in Flower Datasets. To tokenize the text, we will also load the" +" tokenizer from the pre-trained Transformer model that we'll use during " +"training - more on that in the next section. Each ``ClientApp`` will call" +" this function to create dataloaders with the data that correspond to " +"their data partition." 
msgstr "" -"Pour créer le modèle lui-même, nous allons simplement charger le modèle " -"distillBERT pré-entraîné en utilisant le " -":code:`AutoModelForSequenceClassification` de Hugging Face :" -#: ../../source/tutorial-quickstart-huggingface.rst:136 -msgid "Federating the example" -msgstr "Fédérer l'exemple" +#: ../../source/tutorial-quickstart-huggingface.rst:171 +#: ../../source/tutorial-quickstart-mlx.rst:155 +#: ../../source/tutorial-quickstart-pytorch.rst:150 +#: ../../source/tutorial-quickstart-tensorflow.rst:139 +#, fuzzy +msgid "The Model" +msgstr "Entraîne le modèle" -#: ../../source/tutorial-quickstart-huggingface.rst:139 -msgid "Creating the IMDBClient" -msgstr "Création du client IMDBC" +#: ../../source/tutorial-quickstart-huggingface.rst:173 +#, fuzzy +msgid "" +"We will leverage 🤗 Hugging Face to federate the training of language " +"models over multiple clients using Flower. More specifically, we will " +"fine-tune a pre-trained Transformer model (|berttiny|_) for sequence " +"classification over the dataset of IMDB ratings. The end goal is to " +"detect if a movie rating is positive or negative. If you have access to " +"larger GPUs, feel free to use larger models!" +msgstr "" +"Nous nous appuierons sur Hugging Face pour fédérer l'entraînement de " +"modèles de langage sur plusieurs clients à l'aide de Flower. Plus " +"précisément, nous mettrons au point un modèle Transformer pré-entraîné " +"(distilBERT) pour la classification de séquences sur un ensemble de " +"données d'évaluations IMDB. L'objectif final est de détecter si " +"l'évaluation d'un film est positive ou négative." -#: ../../source/tutorial-quickstart-huggingface.rst:141 +#: ../../source/tutorial-quickstart-huggingface.rst:185 msgid "" -"To federate our example to multiple clients, we first need to write our " -"Flower client class (inheriting from :code:`flwr.client.NumPyClient`). " -"This is very easy, as our model is a standard :code:`PyTorch` model:" +"Note that here, ``model_name`` is a string that will be loaded from the " +"``Context`` in the ClientApp and ServerApp." msgstr "" -"Pour fédérer notre exemple à plusieurs clients, nous devons d'abord " -"écrire notre classe de client Flower (héritant de " -":code:`flwr.client.NumPyClient`). C'est très facile, car notre modèle est" -" un modèle :code:`PyTorch` standard :" -#: ../../source/tutorial-quickstart-huggingface.rst:169 +#: ../../source/tutorial-quickstart-huggingface.rst:188 msgid "" -"The :code:`get_parameters` function lets the server get the client's " -"parameters. Inversely, the :code:`set_parameters` function allows the " -"server to send its parameters to the client. Finally, the :code:`fit` " -"function trains the model locally for the client, and the " -":code:`evaluate` function tests the model locally and returns the " -"relevant metrics." +"In addition to loading the pretrained model weights and architecture, we " +"also include two utility functions to perform both training (i.e. " +"``train()``) and evaluation (i.e. ``test()``) using the above model. " +"These functions should look fairly familiar if you have some prior " +"experience with PyTorch. Note these functions do not have anything " +"specific to Flower. That being said, the training function will normally " +"be called, as we'll see later, from a Flower client passing its own data." 
+" In summary, your clients can use standard training/testing functions to " +"perform local training or evaluation:" msgstr "" -"La fonction :code:`get_parameters` permet au serveur d'obtenir les " -"paramètres du client. Inversement, la fonction :code:`set_parameters` " -"permet au serveur d'envoyer ses paramètres au client. Enfin, la fonction " -":code:`fit` forme le modèle localement pour le client, et la fonction " -":code:`evaluate` teste le modèle localement et renvoie les mesures " -"correspondantes." -#: ../../source/tutorial-quickstart-huggingface.rst:175 -msgid "Starting the server" -msgstr "Démarrer le serveur" +#: ../../source/tutorial-quickstart-huggingface.rst:228 +#: ../../source/tutorial-quickstart-mlx.rst:199 +#: ../../source/tutorial-quickstart-pytorch.rst:224 +#: ../../source/tutorial-quickstart-tensorflow.rst:168 +#, fuzzy +msgid "The ClientApp" +msgstr "client" -#: ../../source/tutorial-quickstart-huggingface.rst:177 +#: ../../source/tutorial-quickstart-huggingface.rst:230 msgid "" -"Now that we have a way to instantiate clients, we need to create our " -"server in order to aggregate the results. Using Flower, this can be done " -"very easily by first choosing a strategy (here, we are using " -":code:`FedAvg`, which will define the global weights as the average of " -"all the clients' weights at each round) and then using the " -":code:`flwr.server.start_server` function:" +"The main changes we have to make to use 🤗 Hugging Face with Flower will " +"be found in the ``get_weights()`` and ``set_weights()`` functions. Under " +"the hood, the ``transformers`` library uses PyTorch, which means we can " +"reuse the ``get_weights()`` and ``set_weights()`` code that we defined in" +" the :doc:`Quickstart PyTorch ` tutorial. As" +" a reminder, in ``get_weights()``, PyTorch model parameters are extracted" +" and represented as a list of NumPy arrays. The ``set_weights()`` " +"function that's the opposite: given a list of NumPy arrays it applies " +"them to an existing PyTorch model. Doing this in fairly easy in PyTorch." msgstr "" -"Maintenant que nous avons un moyen d'instancier les clients, nous devons " -"créer notre serveur afin d'agréger les résultats. Avec Flower, cela peut " -"être fait très facilement en choisissant d'abord une stratégie (ici, nous" -" utilisons :code:`FedAvg`, qui définira les poids globaux comme la " -"moyenne des poids de tous les clients à chaque tour) et en utilisant " -"ensuite la fonction :code:`flwr.server.start_server` :" -#: ../../source/tutorial-quickstart-huggingface.rst:205 +#: ../../source/tutorial-quickstart-huggingface.rst:241 +#: ../../source/tutorial-quickstart-pytorch.rst:234 msgid "" -"The :code:`weighted_average` function is there to provide a way to " -"aggregate the metrics distributed amongst the clients (basically this " -"allows us to display a nice average accuracy and loss for every round)." +"The specific implementation of ``get_weights()`` and ``set_weights()`` " +"depends on the type of models you use. The ones shown below work for a " +"wide range of PyTorch models but you might need to adjust them if you " +"have more exotic model architectures." msgstr "" -"La fonction :code:`weighted_average` est là pour fournir un moyen " -"d'agréger les mesures réparties entre les clients (en gros, cela nous " -"permet d'afficher une belle moyenne de précision et de perte pour chaque " -"tour)." 
- -#: ../../source/tutorial-quickstart-huggingface.rst:209 -msgid "Putting everything together" -msgstr "Tout assembler" -#: ../../source/tutorial-quickstart-huggingface.rst:211 -msgid "We can now start client instances using:" -msgstr "Nous pouvons maintenant démarrer des instances de clients en utilisant :" +#: ../../source/tutorial-quickstart-huggingface.rst:257 +#: ../../source/tutorial-quickstart-pytorch.rst:250 +msgid "" +"The rest of the functionality is directly inspired by the centralized " +"case. The ``fit()`` method in the client trains the model using the local" +" dataset. Similarly, the ``evaluate()`` method is used to evaluate the " +"model received on a held-out validation set that the client might have:" +msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:221 +#: ../../source/tutorial-quickstart-huggingface.rst:283 msgid "" -"And they will be able to connect to the server and start the federated " -"training." -msgstr "Et ils pourront se connecter au serveur et démarrer la formation fédérée." +"Finally, we can construct a ``ClientApp`` using the ``FlowerClient`` " +"defined above by means of a ``client_fn()`` callback. Note that the " +"`context` enables you to get access to hyperparameters defined in your " +"``pyproject.toml`` to configure the run. In this tutorial we access the " +"``local-epochs`` setting to control the number of epochs a ``ClientApp`` " +"will perform when running the ``fit()`` method. You could define " +"additional hyperparameters in ``pyproject.toml`` and access them here." +msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:223 +#: ../../source/tutorial-quickstart-huggingface.rst:316 +#: ../../source/tutorial-quickstart-mlx.rst:361 +#: ../../source/tutorial-quickstart-pytorch.rst:307 +#: ../../source/tutorial-quickstart-tensorflow.rst:232 #, fuzzy +msgid "The ServerApp" +msgstr "serveur" + +#: ../../source/tutorial-quickstart-huggingface.rst:318 msgid "" -"If you want to check out everything put together, you should check out " -"the `full code example `_ ." +"To construct a ``ServerApp`` we define a ``server_fn()`` callback with an" +" identical signature to that of ``client_fn()`` but the return type is " +"|serverappcomponents|_ as opposed to a |client|_. In this example we use " +"the `FedAvg` strategy. To it we pass a randomly initialized model that " +"will serve as the global model to federate. Note that the value of " +"``fraction_fit`` is read from the run config. You can find the default " +"value defined in the ``pyproject.toml``." msgstr "" -"Si tu veux voir tout ce qui est mis ensemble, tu devrais consulter " -"l'exemple de code complet : " -"[https://github.com/adap/flower/tree/main/examples/quickstart-" -"huggingface](https://github.com/adap/flower/tree/main/examples" -"/quickstart-huggingface)." -#: ../../source/tutorial-quickstart-huggingface.rst:226 +#: ../../source/tutorial-quickstart-huggingface.rst:356 msgid "" -"Of course, this is a very basic example, and a lot can be added or " -"modified, it was just to showcase how simply we could federate a Hugging " -"Face workflow using Flower." +"Congratulations! You've successfully built and run your first federated " +"learning system for an LLM." msgstr "" -"Bien sûr, c'est un exemple très basique, et beaucoup de choses peuvent " -"être ajoutées ou modifiées, il s'agissait juste de montrer avec quelle " -"simplicité on pouvait fédérer un flux de travail Hugging Face à l'aide de" -" Flower."
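To make the ``client_fn()``/``ClientApp`` and ``server_fn()``/``ServerApp`` wiring described above concrete, here is a compact, self-contained sketch. The run-config keys (``local-epochs``, ``num-server-rounds``, ``fraction-fit``) are the ones mentioned in the text; the toy ``FlowerClient`` below stands in for the real model-backed client, and in a generated project the two apps would normally live in separate ``client_app.py`` and ``server_app.py`` modules.

```python
import numpy as np
from flwr.client import ClientApp, NumPyClient
from flwr.common import Context, ndarrays_to_parameters
from flwr.server import ServerApp, ServerAppComponents, ServerConfig
from flwr.server.strategy import FedAvg


class FlowerClient(NumPyClient):
    """Toy client: a single NumPy array stands in for the real model weights."""

    def __init__(self, local_epochs: int):
        self.local_epochs = local_epochs
        self.weights = np.zeros(3)

    def fit(self, parameters, config):
        self.weights = parameters[0] + 1.0  # pretend to train for `local_epochs`
        return [self.weights], 1, {}

    def evaluate(self, parameters, config):
        return float(np.abs(parameters[0]).sum()), 1, {"accuracy": 0.0}


def client_fn(context: Context):
    # Values from [tool.flwr.app.config] in pyproject.toml arrive via the run config
    local_epochs = int(context.run_config.get("local-epochs", 1))
    return FlowerClient(local_epochs).to_client()


client_app = ClientApp(client_fn=client_fn)


def server_fn(context: Context):
    num_rounds = int(context.run_config.get("num-server-rounds", 3))
    fraction_fit = float(context.run_config.get("fraction-fit", 0.5))
    # Pass initial global model parameters to the strategy (here: zeros)
    strategy = FedAvg(
        fraction_fit=fraction_fit,
        initial_parameters=ndarrays_to_parameters([np.zeros(3)]),
    )
    return ServerAppComponents(strategy=strategy, config=ServerConfig(num_rounds=num_rounds))


server_app = ServerApp(server_fn=server_fn)
```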
-#: ../../source/tutorial-quickstart-huggingface.rst:229 +#: ../../source/tutorial-quickstart-huggingface.rst:361 msgid "" -"Note that in this example we used :code:`PyTorch`, but we could have very" -" well used :code:`TensorFlow`." +"Check the source code of the extended version of this tutorial in " +"|quickstart_hf_link|_ in the Flower GitHub repository. For a " +"comprehensive example of a federated fine-tuning of an LLM with Flower, " +"refer to the |flowertune|_ example in the Flower GitHub repository." msgstr "" -"Notez que dans cet exemple, nous avons utilisé :code:`PyTorch`, mais nous" -" aurions très bien pu utiliser :code:`TensorFlow`." #: ../../source/tutorial-quickstart-ios.rst:-1 msgid "" @@ -24387,12 +24894,12 @@ msgid "" "using Flower to train a neural network on MNIST." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:5 +#: ../../source/tutorial-quickstart-ios.rst:4 #, fuzzy msgid "Quickstart iOS" msgstr "Démarrage rapide XGBoost" -#: ../../source/tutorial-quickstart-ios.rst:10 +#: ../../source/tutorial-quickstart-ios.rst:9 #, fuzzy msgid "" "In this tutorial we will learn how to train a Neural Network on MNIST " @@ -24413,7 +24920,7 @@ msgstr "" "tout exécuter au sein d'un `virtualenv `_." -#: ../../source/tutorial-quickstart-ios.rst:15 +#: ../../source/tutorial-quickstart-ios.rst:17 #, fuzzy msgid "" "Our example consists of one Python *server* and two iPhone *clients* that" @@ -24422,7 +24929,7 @@ msgstr "" "Notre exemple consiste en un *serveur* et deux *clients* ayant tous le " "même modèle." -#: ../../source/tutorial-quickstart-ios.rst:17 +#: ../../source/tutorial-quickstart-ios.rst:20 #, fuzzy msgid "" "*Clients* are responsible for generating individual weight updates for " @@ -24438,7 +24945,7 @@ msgstr "" "cette version améliorée du modèle à chaque *client*. Un cycle complet de " "mises à jour de poids s'appelle un *round*." -#: ../../source/tutorial-quickstart-ios.rst:21 +#: ../../source/tutorial-quickstart-ios.rst:26 #, fuzzy msgid "" "Now that we have a rough idea of what is going on, let's get started to " @@ -24449,18 +24956,17 @@ msgstr "" "commençons. Nous devons d'abord installer Flower. Tu peux le faire en " "exécutant :" -#: ../../source/tutorial-quickstart-ios.rst:27 +#: ../../source/tutorial-quickstart-ios.rst:33 msgid "Or Poetry:" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:34 -#: ../../source/tutorial-quickstart-scikitlearn.rst:40 -#: ../../source/tutorial-quickstart-tensorflow.rst:29 -#: ../../source/tutorial-quickstart-xgboost.rst:55 +#: ../../source/tutorial-quickstart-ios.rst:40 +#: ../../source/tutorial-quickstart-scikitlearn.rst:43 +#: ../../source/tutorial-quickstart-xgboost.rst:65 msgid "Flower Client" msgstr "Client de la fleur" -#: ../../source/tutorial-quickstart-ios.rst:36 +#: ../../source/tutorial-quickstart-ios.rst:42 msgid "" "Now that we have all our dependencies installed, let's run a simple " "distributed training using CoreML as our local training pipeline and " @@ -24469,92 +24975,88 @@ msgid "" "the Swift SDK. The client implementation can be seen below:" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:72 +#: ../../source/tutorial-quickstart-ios.rst:80 msgid "" -"Let's create a new application project in Xcode and add :code:`flwr` as a" -" dependency in your project. For our application, we will store the logic" -" of our app in :code:`FLiOSModel.swift` and the UI elements in " -":code:`ContentView.swift`. We will focus more on :code:`FLiOSModel.swift`" -" in this quickstart. 
Please refer to the `full code example " +"Let's create a new application project in Xcode and add ``flwr`` as a " +"dependency in your project. For our application, we will store the logic " +"of our app in ``FLiOSModel.swift`` and the UI elements in " +"``ContentView.swift``. We will focus more on ``FLiOSModel.swift`` in this" +" quickstart. Please refer to the `full code example " "`_ to learn more " "about the app." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:75 -msgid "Import Flower and CoreML related packages in :code:`FLiOSModel.swift`:" +#: ../../source/tutorial-quickstart-ios.rst:86 +msgid "Import Flower and CoreML related packages in ``FLiOSModel.swift``:" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:83 +#: ../../source/tutorial-quickstart-ios.rst:94 msgid "" "Then add the mlmodel to the project simply by drag-and-drop, the mlmodel " "will be bundled inside the application during deployment to your iOS " "device. We need to pass the url to access mlmodel and run CoreML machine " "learning processes, it can be retrieved by calling the function " -":code:`Bundle.main.url`. For the MNIST dataset, we need to preprocess it " -"into :code:`MLBatchProvider` object. The preprocessing is done inside " -":code:`DataLoader.swift`." +"``Bundle.main.url``. For the MNIST dataset, we need to preprocess it into" +" ``MLBatchProvider`` object. The preprocessing is done inside " +"``DataLoader.swift``." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:99 +#: ../../source/tutorial-quickstart-ios.rst:112 msgid "" "Since CoreML does not allow the model parameters to be seen before " "training, and accessing the model parameters during or after the training" " can only be done by specifying the layer name, we need to know this " "information beforehand, through looking at the model specification, which" " are written as proto files. The implementation can be seen in " -":code:`MLModelInspect`." +"``MLModelInspect``." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:102 +#: ../../source/tutorial-quickstart-ios.rst:118 msgid "" "After we have all of the necessary information, let's create our Flower " "client." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:117 +#: ../../source/tutorial-quickstart-ios.rst:133 msgid "" "Then start the Flower gRPC client and start communicating to the server " -"by passing our Flower client to the function :code:`startFlwrGRPC`." +"by passing our Flower client to the function ``startFlwrGRPC``." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:124 +#: ../../source/tutorial-quickstart-ios.rst:141 msgid "" -"That's it for the client. We only have to implement :code:`Client` or " -"call the provided :code:`MLFlwrClient` and call :code:`startFlwrGRPC()`. " -"The attribute :code:`hostname` and :code:`port` tells the client which " -"server to connect to. This can be done by entering the hostname and port " -"in the application before clicking the start button to start the " -"federated learning process." +"That's it for the client. We only have to implement ``Client`` or call " +"the provided ``MLFlwrClient`` and call ``startFlwrGRPC()``. The attribute" +" ``hostname`` and ``port`` tells the client which server to connect to. " +"This can be done by entering the hostname and port in the application " +"before clicking the start button to start the federated learning process." 
msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:129 -#: ../../source/tutorial-quickstart-scikitlearn.rst:167 -#: ../../source/tutorial-quickstart-tensorflow.rst:98 -#: ../../source/tutorial-quickstart-xgboost.rst:341 +#: ../../source/tutorial-quickstart-ios.rst:148 +#: ../../source/tutorial-quickstart-scikitlearn.rst:179 +#: ../../source/tutorial-quickstart-xgboost.rst:358 msgid "Flower Server" msgstr "Serveur de Flower" -#: ../../source/tutorial-quickstart-ios.rst:131 -#: ../../source/tutorial-quickstart-tensorflow.rst:100 +#: ../../source/tutorial-quickstart-ios.rst:150 +#, fuzzy msgid "" "For simple workloads we can start a Flower server and leave all the " "configuration possibilities at their default values. In a file named " -":code:`server.py`, import Flower and start the server:" +"``server.py``, import Flower and start the server:" msgstr "" "Pour les charges de travail simples, nous pouvons démarrer un serveur " "Flower et laisser toutes les possibilités de configuration à leurs " "valeurs par défaut. Dans un fichier nommé :code:`server.py`, importe " "Flower et démarre le serveur :" -#: ../../source/tutorial-quickstart-ios.rst:142 -#: ../../source/tutorial-quickstart-scikitlearn.rst:230 -#: ../../source/tutorial-quickstart-tensorflow.rst:112 +#: ../../source/tutorial-quickstart-ios.rst:161 +#: ../../source/tutorial-quickstart-scikitlearn.rst:254 msgid "Train the model, federated!" msgstr "Entraîne le modèle, fédéré !" -#: ../../source/tutorial-quickstart-ios.rst:144 -#: ../../source/tutorial-quickstart-tensorflow.rst:114 -#: ../../source/tutorial-quickstart-xgboost.rst:567 +#: ../../source/tutorial-quickstart-ios.rst:163 +#: ../../source/tutorial-quickstart-xgboost.rst:590 msgid "" "With both client and server ready, we can now run everything and see " "federated learning in action. FL systems usually have a server and " @@ -24565,7 +25067,7 @@ msgstr "" "généralement un serveur et plusieurs clients. Nous devons donc commencer " "par démarrer le serveur :" -#: ../../source/tutorial-quickstart-ios.rst:152 +#: ../../source/tutorial-quickstart-ios.rst:171 msgid "" "Once the server is running we can start the clients in different " "terminals. Build and run the client through your Xcode, one through Xcode" @@ -24575,13 +25077,13 @@ msgid "" "simulator-or-on-a-device>`_." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:156 +#: ../../source/tutorial-quickstart-ios.rst:177 #, fuzzy msgid "" "Congratulations! You've successfully built and run your first federated " "learning system in your ios device. The full `source code " "`_ for this " -"example can be found in :code:`examples/ios`." +"example can be found in ``examples/ios``." msgstr "" "Félicitations ! Tu as réussi à construire et à faire fonctionner ton " "premier système d'apprentissage fédéré. Le code source complet " @@ -24595,11 +25097,11 @@ msgid "" "with Jax to train a linear regression model on a scikit-learn dataset." msgstr "" -#: ../../source/tutorial-quickstart-jax.rst:5 +#: ../../source/tutorial-quickstart-jax.rst:4 msgid "Quickstart JAX" msgstr "Démarrage rapide de JAX" -#: ../../source/tutorial-quickstart-jax.rst:10 +#: ../../source/tutorial-quickstart-jax.rst:9 #, fuzzy msgid "" "This tutorial will show you how to use Flower to build a federated " @@ -24626,41 +25128,44 @@ msgstr "" " Ensuite, nous nous appuyons sur le code d'entraînement centralisé pour " "exécuter l'entraînement de manière fédérée." 
-#: ../../source/tutorial-quickstart-jax.rst:16 +#: ../../source/tutorial-quickstart-jax.rst:20 +#, fuzzy msgid "" "Before we start building our JAX example, we need install the packages " -":code:`jax`, :code:`jaxlib`, :code:`scikit-learn`, and :code:`flwr`:" +"``jax``, ``jaxlib``, ``scikit-learn``, and ``flwr``:" msgstr "" "Avant de commencer à construire notre exemple JAX, nous devons installer " "les paquets :code:`jax`, :code:`jaxlib`, :code:`scikit-learn`, et " ":code:`flwr` :" -#: ../../source/tutorial-quickstart-jax.rst:24 +#: ../../source/tutorial-quickstart-jax.rst:28 msgid "Linear Regression with JAX" msgstr "Régression linéaire avec JAX" -#: ../../source/tutorial-quickstart-jax.rst:26 +#: ../../source/tutorial-quickstart-jax.rst:30 +#, fuzzy msgid "" "We begin with a brief description of the centralized training code based " -"on a :code:`Linear Regression` model. If you want a more in-depth " -"explanation of what's going on then have a look at the official `JAX " -"documentation `_." +"on a ``Linear Regression`` model. If you want a more in-depth explanation" +" of what's going on then have a look at the official `JAX documentation " +"`_." msgstr "" "Nous commençons par une brève description du code d'entraînement " "centralisé basé sur un modèle :code:`Régression linéaire`. Si tu veux une" " explication plus approfondie de ce qui se passe, jette un coup d'œil à " "la documentation officielle `JAX `_." -#: ../../source/tutorial-quickstart-jax.rst:29 +#: ../../source/tutorial-quickstart-jax.rst:34 +#, fuzzy msgid "" -"Let's create a new file called :code:`jax_training.py` with all the " +"Let's create a new file called ``jax_training.py`` with all the " "components required for a traditional (centralized) linear regression " -"training. First, the JAX packages :code:`jax` and :code:`jaxlib` need to " -"be imported. In addition, we need to import :code:`sklearn` since we use " -":code:`make_regression` for the dataset and :code:`train_test_split` to " -"split the dataset into a training and test set. You can see that we do " -"not yet import the :code:`flwr` package for federated learning. This will" -" be done later." +"training. First, the JAX packages ``jax`` and ``jaxlib`` need to be " +"imported. In addition, we need to import ``sklearn`` since we use " +"``make_regression`` for the dataset and ``train_test_split`` to split the" +" dataset into a training and test set. You can see that we do not yet " +"import the ``flwr`` package for federated learning. This will be done " +"later." msgstr "" "Créons un nouveau fichier appelé :code:`jax_training.py` avec tous les " "composants nécessaires pour un apprentissage traditionnel (centralisé) de" @@ -24672,29 +25177,30 @@ msgstr "" "n'avons pas encore importé le paquet :code:`flwr` pour l'apprentissage " "fédéré, ce qui sera fait plus tard." -#: ../../source/tutorial-quickstart-jax.rst:43 -msgid "" -"The :code:`load_data()` function loads the mentioned training and test " -"sets." +#: ../../source/tutorial-quickstart-jax.rst:51 +#, fuzzy +msgid "The ``load_data()`` function loads the mentioned training and test sets." msgstr "" "La fonction :code:`load_data()` charge les ensembles d'entraînement et de" " test mentionnés." -#: ../../source/tutorial-quickstart-jax.rst:53 +#: ../../source/tutorial-quickstart-jax.rst:63 +#, fuzzy msgid "" -"The model architecture (a very simple :code:`Linear Regression` model) is" -" defined in :code:`load_model()`." 
+"The model architecture (a very simple ``Linear Regression`` model) is " +"defined in ``load_model()``." msgstr "" "L'architecture du modèle (un modèle :code:`Régression linéaire` très " "simple) est définie dans :code:`load_model()`." -#: ../../source/tutorial-quickstart-jax.rst:65 +#: ../../source/tutorial-quickstart-jax.rst:73 +#, fuzzy msgid "" -"We now need to define the training (function :code:`train()`), which " -"loops over the training set and measures the loss (function " -":code:`loss_fn()`) for each batch of training examples. The loss function" -" is separate since JAX takes derivatives with a :code:`grad()` function " -"(defined in the :code:`main()` function and called in :code:`train()`)." +"We now need to define the training (function ``train()``), which loops " +"over the training set and measures the loss (function ``loss_fn()``) for " +"each batch of training examples. The loss function is separate since JAX " +"takes derivatives with a ``grad()`` function (defined in the ``main()`` " +"function and called in ``train()``)." msgstr "" "Nous devons maintenant définir l'entraînement (fonction :code:`train()`)," " qui boucle sur l'ensemble d'entraînement et mesure la perte (fonction " @@ -24703,22 +25209,24 @@ msgstr "" ":code:`grad()` (définie dans la fonction :code:`main()` et appelée dans " ":code:`train()`)." -#: ../../source/tutorial-quickstart-jax.rst:83 +#: ../../source/tutorial-quickstart-jax.rst:95 +#, fuzzy msgid "" -"The evaluation of the model is defined in the function " -":code:`evaluation()`. The function takes all test examples and measures " -"the loss of the linear regression model." +"The evaluation of the model is defined in the function ``evaluation()``. " +"The function takes all test examples and measures the loss of the linear " +"regression model." msgstr "" "L'évaluation du modèle est définie dans la fonction :code:`evaluation()`." " La fonction prend tous les exemples de test et mesure la perte du modèle" " de régression linéaire." -#: ../../source/tutorial-quickstart-jax.rst:94 +#: ../../source/tutorial-quickstart-jax.rst:107 +#, fuzzy msgid "" "Having defined the data loading, model architecture, training, and " "evaluation we can put everything together and train our model using JAX. " -"As already mentioned, the :code:`jax.grad()` function is defined in " -":code:`main()` and passed to :code:`train()`." +"As already mentioned, the ``jax.grad()`` function is defined in " +"``main()`` and passed to ``train()``." msgstr "" "Après avoir défini le chargement des données, l'architecture du modèle, " "l'entraînement et l'évaluation, nous pouvons tout assembler et entraîner " @@ -24726,13 +25234,13 @@ msgstr "" "fonction :code:`jax.grad()` est définie dans :code:`main()` et transmise " "à :code:`train()`." -#: ../../source/tutorial-quickstart-jax.rst:111 +#: ../../source/tutorial-quickstart-jax.rst:126 msgid "You can now run your (centralized) JAX linear regression workload:" msgstr "" "Tu peux maintenant exécuter ta charge de travail (centralisée) de " "régression linéaire JAX :" -#: ../../source/tutorial-quickstart-jax.rst:117 +#: ../../source/tutorial-quickstart-jax.rst:132 msgid "" "So far this should all look fairly familiar if you've used JAX before. " "Let's take the next step and use what we've built to create a simple " @@ -24743,20 +25251,21 @@ msgstr "" "avons construit pour créer un simple système d'apprentissage fédéré " "composé d'un serveur et de deux clients." 
-#: ../../source/tutorial-quickstart-jax.rst:121 +#: ../../source/tutorial-quickstart-jax.rst:137 msgid "JAX meets Flower" msgstr "JAX rencontre Flower" -#: ../../source/tutorial-quickstart-jax.rst:123 +#: ../../source/tutorial-quickstart-jax.rst:139 +#, fuzzy msgid "" "The concept of federating an existing workload is always the same and " "easy to understand. We have to start a *server* and then use the code in " -":code:`jax_training.py` for the *clients* that are connected to the " -"*server*. The *server* sends model parameters to the clients. The " -"*clients* run the training and update the parameters. The updated " -"parameters are sent back to the *server*, which averages all received " -"parameter updates. This describes one round of the federated learning " -"process, and we repeat this for multiple rounds." +"``jax_training.py`` for the *clients* that are connected to the *server*." +" The *server* sends model parameters to the clients. The *clients* run " +"the training and update the parameters. The updated parameters are sent " +"back to the *server*, which averages all received parameter updates. This" +" describes one round of the federated learning process, and we repeat " +"this for multiple rounds." msgstr "" "Le concept de fédération d'une charge de travail existante est toujours " "le même et facile à comprendre. Nous devons démarrer un *serveur*, puis " @@ -24768,12 +25277,13 @@ msgstr "" "un tour du processus d'apprentissage fédéré, et nous répétons cette " "opération pour plusieurs tours." -#: ../../source/tutorial-quickstart-jax.rst:145 +#: ../../source/tutorial-quickstart-jax.rst:167 +#, fuzzy msgid "" -"Finally, we will define our *client* logic in :code:`client.py` and build" -" upon the previously defined JAX training in :code:`jax_training.py`. Our" -" *client* needs to import :code:`flwr`, but also :code:`jax` and " -":code:`jaxlib` to update the parameters on our JAX model:" +"Finally, we will define our *client* logic in ``client.py`` and build " +"upon the previously defined JAX training in ``jax_training.py``. Our " +"*client* needs to import ``flwr``, but also ``jax`` and ``jaxlib`` to " +"update the parameters on our JAX model:" msgstr "" "Enfin, nous allons définir la logique de notre *client* dans " ":code:`client.py` et nous appuyer sur la formation JAX définie " @@ -24781,18 +25291,18 @@ msgstr "" ":code:`flwr`, mais aussi :code:`jax` et :code:`jaxlib` pour mettre à jour" " les paramètres de notre modèle JAX :" -#: ../../source/tutorial-quickstart-jax.rst:160 +#: ../../source/tutorial-quickstart-jax.rst:182 +#, fuzzy msgid "" "Implementing a Flower *client* basically means implementing a subclass of" -" either :code:`flwr.client.Client` or :code:`flwr.client.NumPyClient`. " -"Our implementation will be based on :code:`flwr.client.NumPyClient` and " -"we'll call it :code:`FlowerClient`. :code:`NumPyClient` is slightly " -"easier to implement than :code:`Client` if you use a framework with good " -"NumPy interoperability (like JAX) because it avoids some of the " -"boilerplate that would otherwise be necessary. :code:`FlowerClient` needs" -" to implement four methods, two methods for getting/setting model " -"parameters, one method for training the model, and one method for testing" -" the model:" +" either ``flwr.client.Client`` or ``flwr.client.NumPyClient``. Our " +"implementation will be based on ``flwr.client.NumPyClient`` and we'll " +"call it ``FlowerClient``. 
``NumPyClient`` is slightly easier to implement" +" than ``Client`` if you use a framework with good NumPy interoperability " +"(like JAX) because it avoids some of the boilerplate that would otherwise" +" be necessary. ``FlowerClient`` needs to implement four methods, two " +"methods for getting/setting model parameters, one method for training the" +" model, and one method for testing the model:" msgstr "" "L'implémentation d'un *client* Flower signifie essentiellement " "l'implémentation d'une sous-classe de :code:`flwr.client.Client` ou " @@ -24806,43 +25316,46 @@ msgstr "" "paramètres du modèle, une méthode pour former le modèle, et une méthode " "pour tester le modèle :" -#: ../../source/tutorial-quickstart-jax.rst:165 -msgid ":code:`set_parameters (optional)`" +#: ../../source/tutorial-quickstart-jax.rst:194 +#, fuzzy +msgid "``set_parameters (optional)``" msgstr ":code:`set_parameters (optional)`" -#: ../../source/tutorial-quickstart-jax.rst:167 -msgid "transform parameters to NumPy :code:`ndarray`'s" +#: ../../source/tutorial-quickstart-jax.rst:193 +#, fuzzy +msgid "transform parameters to NumPy ``ndarray``'s" msgstr "transforme les paramètres en NumPy :code:`ndarray`'s" -#: ../../source/tutorial-quickstart-jax.rst:174 +#: ../../source/tutorial-quickstart-jax.rst:203 msgid "get the updated local model parameters and return them to the server" msgstr "" "récupère les paramètres du modèle local mis à jour et les renvoie au " "serveur" -#: ../../source/tutorial-quickstart-jax.rst:178 +#: ../../source/tutorial-quickstart-jax.rst:208 msgid "return the local loss to the server" msgstr "renvoie la perte locale au serveur" -#: ../../source/tutorial-quickstart-jax.rst:180 +#: ../../source/tutorial-quickstart-jax.rst:210 +#, fuzzy msgid "" "The challenging part is to transform the JAX model parameters from " -":code:`DeviceArray` to :code:`NumPy ndarray` to make them compatible with" -" `NumPyClient`." +"``DeviceArray`` to ``NumPy ndarray`` to make them compatible with " +"`NumPyClient`." msgstr "" "La partie la plus difficile consiste à transformer les paramètres du " "modèle JAX de :code:`DeviceArray` en :code:`NumPy ndarray` pour les " "rendre compatibles avec `NumPyClient`." -#: ../../source/tutorial-quickstart-jax.rst:182 +#: ../../source/tutorial-quickstart-jax.rst:213 +#, fuzzy msgid "" -"The two :code:`NumPyClient` methods :code:`fit` and :code:`evaluate` make" -" use of the functions :code:`train()` and :code:`evaluate()` previously " -"defined in :code:`jax_training.py`. So what we really do here is we tell " -"Flower through our :code:`NumPyClient` subclass which of our already " -"defined functions to call for training and evaluation. We included type " -"annotations to give you a better understanding of the data types that get" -" passed around." +"The two ``NumPyClient`` methods ``fit`` and ``evaluate`` make use of the " +"functions ``train()`` and ``evaluate()`` previously defined in " +"``jax_training.py``. So what we really do here is we tell Flower through " +"our ``NumPyClient`` subclass which of our already defined functions to " +"call for training and evaluation. We included type annotations to give " +"you a better understanding of the data types that get passed around." msgstr "" "Les deux méthodes :code:`NumPyClient` :code:`fit` et :code:`evaluate` " "utilisent les fonctions :code:`train()` et :code:`evaluate()` définies " @@ -24853,11 +25366,11 @@ msgstr "" "annotations de type pour te donner une meilleure compréhension des types " "de données qui sont transmis." 
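Building on the ``jax_training.py`` sketch above, the four ``NumPyClient`` methods described here could look roughly as follows; the conversion between JAX arrays and NumPy ``ndarray``\s happens in ``get_parameters()``/``set_parameters()``. Treat this as a sketch: the real ``client.py`` in the example differs in detail.

```python
# client.py -- a Flower NumPyClient wrapping the JAX training code (sketch)
import flwr as fl
import jax
import jax.numpy as jnp
import numpy as np

import jax_training  # the centralized sketch shown earlier


class FlowerClient(fl.client.NumPyClient):
    def __init__(self):
        self.train_x, self.test_x, self.train_y, self.test_y = jax_training.load_data()
        self.grad_fn = jax.grad(jax_training.loss_fn)
        self.params = jax_training.load_model((self.train_x.shape[1],))

    def get_parameters(self, config):
        # Transform JAX arrays into NumPy ndarrays for transport to the server
        return [np.asarray(self.params["w"]), np.asarray(self.params["b"])]

    def set_parameters(self, parameters):
        # The reverse direction: NumPy ndarrays back into JAX model parameters
        self.params["w"] = jnp.asarray(parameters[0])
        self.params["b"] = jnp.asarray(parameters[1])

    def fit(self, parameters, config):
        self.set_parameters(parameters)
        self.params, loss, num_examples = jax_training.train(
            self.params, self.grad_fn, self.train_x, self.train_y
        )
        return self.get_parameters(config), num_examples, {"loss": loss}

    def evaluate(self, parameters, config):
        self.set_parameters(parameters)
        loss, num_examples = jax_training.evaluation(self.params, self.test_x, self.test_y)
        return loss, num_examples, {"loss": loss}


if __name__ == "__main__":
    fl.client.start_client(
        server_address="0.0.0.0:8080", client=FlowerClient().to_client()
    )
```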
-#: ../../source/tutorial-quickstart-jax.rst:251 +#: ../../source/tutorial-quickstart-jax.rst:286 msgid "Having defined the federation process, we can run it." msgstr "Après avoir défini le processus de fédération, nous pouvons l'exécuter." -#: ../../source/tutorial-quickstart-jax.rst:280 +#: ../../source/tutorial-quickstart-jax.rst:315 msgid "" "in each window (make sure that the server is still running before you do " "so) and see your JAX project run federated learning across two clients. " @@ -24867,7 +25380,7 @@ msgstr "" "d'exécution avant de le faire) et tu verras que ton projet JAX exécute " "l'apprentissage fédéré sur deux clients. Félicitations !" -#: ../../source/tutorial-quickstart-jax.rst:285 +#: ../../source/tutorial-quickstart-jax.rst:321 #, fuzzy msgid "" "The source code of this example was improved over time and can be found " @@ -24881,7 +25394,7 @@ msgstr "" "Notre exemple est quelque peu simplifié à l'extrême car les deux clients " "chargent le même jeu de données." -#: ../../source/tutorial-quickstart-jax.rst:288 +#: ../../source/tutorial-quickstart-jax.rst:325 msgid "" "You're now prepared to explore this topic further. How about using a more" " sophisticated model or using a different dataset? How about adding more " @@ -24891,12 +25404,12 @@ msgstr "" " modèle plus sophistiqué ou un ensemble de données différent ? Pourquoi " "ne pas ajouter d'autres clients ?" -#: ../../source/tutorial-quickstart-mlx.rst:5 +#: ../../source/tutorial-quickstart-mlx.rst:4 #, fuzzy msgid "Quickstart MLX" msgstr "Démarrage rapide de JAX" -#: ../../source/tutorial-quickstart-mlx.rst:7 +#: ../../source/tutorial-quickstart-mlx.rst:6 #, fuzzy msgid "" "In this federated learning tutorial we will learn how to train simple MLP" @@ -24908,7 +25421,7 @@ msgstr "" "tout exécuter au sein d'un `virtualenv `_." -#: ../../source/tutorial-quickstart-mlx.rst:12 +#: ../../source/tutorial-quickstart-mlx.rst:10 msgid "" "Let's use `flwr new` to create a complete Flower+MLX project. It will " "generate all the files needed to run, by default with the Simulation " @@ -24920,68 +25433,24 @@ msgid "" "api/flwr_datasets.partitioner.IidPartitioner.html#flwr_datasets.partitioner.IidPartitioner>`_." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:19 -#: ../../source/tutorial-quickstart-pytorch.rst:19 -#, fuzzy -msgid "" -"Now that we have a rough idea of what this example is about, let's get " -"started. First, install Flower in your new environment:" -msgstr "" -"Maintenant que nous avons une idée approximative de ce qui se passe, " -"commençons. Nous devons d'abord installer Flower. Tu peux le faire en " -"lançant :" - -#: ../../source/tutorial-quickstart-mlx.rst:27 +#: ../../source/tutorial-quickstart-mlx.rst:25 msgid "" "Then, run the command below. You will be prompted to select of the " "available templates (choose ``MLX``), give a name to your project, and " "type in your developer name:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:35 -#: ../../source/tutorial-quickstart-pytorch.rst:35 -msgid "" -"After running it you'll notice a new directory with your project name has" -" been created. 
It should have the following structure:" -msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:49 -#: ../../source/tutorial-quickstart-pytorch.rst:49 -msgid "" -"If you haven't yet installed the project and its dependencies, you can do" -" so by:" -msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:57 +#: ../../source/tutorial-quickstart-mlx.rst:53 msgid "To run the project do:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:64 -#: ../../source/tutorial-quickstart-pytorch.rst:64 -msgid "With default arguments you will see an output like this one:" -msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:106 +#: ../../source/tutorial-quickstart-mlx.rst:102 msgid "" "You can also override the parameters defined in " "``[tool.flwr.app.config]`` section in the ``pyproject.toml`` like this:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:114 -#: ../../source/tutorial-quickstart-pytorch.rst:113 -msgid "" -"What follows is an explanation of each component in the project you just " -"created: dataset partition, the model, defining the ``ClientApp`` and " -"defining the ``ServerApp``." -msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:120 -#: ../../source/tutorial-quickstart-pytorch.rst:119 -#, fuzzy -msgid "The Data" -msgstr "Chargement des données" - -#: ../../source/tutorial-quickstart-mlx.rst:122 +#: ../../source/tutorial-quickstart-mlx.rst:116 msgid "" "We will use `Flower Datasets `_ to " "easily download and partition the `MNIST` dataset. In this example you'll" @@ -24992,32 +25461,20 @@ msgid "" "api/flwr_datasets.partitioner.html>`_ available in Flower Datasets:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:164 -#: ../../source/tutorial-quickstart-pytorch.rst:157 -#, fuzzy -msgid "The Model" -msgstr "Entraîne le modèle" - -#: ../../source/tutorial-quickstart-mlx.rst:166 +#: ../../source/tutorial-quickstart-mlx.rst:157 msgid "" "We define the model as in the `centralized MLX example " "`_, it's a " "simple MLP:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:190 +#: ../../source/tutorial-quickstart-mlx.rst:180 msgid "" "We also define some utility functions to test our model and to iterate " "over batches." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:210 -#: ../../source/tutorial-quickstart-pytorch.rst:234 -#, fuzzy -msgid "The ClientApp" -msgstr "client" - -#: ../../source/tutorial-quickstart-mlx.rst:212 +#: ../../source/tutorial-quickstart-mlx.rst:201 msgid "" "The main changes we have to make to use `MLX` with `Flower` will be found" " in the ``get_params()`` and ``set_params()`` functions. Indeed, MLX " @@ -25026,17 +25483,17 @@ msgid "" "messages to work)." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:218 +#: ../../source/tutorial-quickstart-mlx.rst:206 msgid "The way MLX stores its parameters is as follows:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:231 +#: ../../source/tutorial-quickstart-mlx.rst:219 msgid "" "Therefore, to get our list of ``np.array`` objects, we need to extract " "each array and convert them into a NumPy array:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:240 +#: ../../source/tutorial-quickstart-mlx.rst:228 msgid "" "For the ``set_params()`` function, we perform the reverse operation. 
We " "receive a list of NumPy arrays and want to convert them into MLX " @@ -25044,24 +25501,24 @@ msgid "" "them to the `weight` and `bias` keys of each layer dict:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:255 +#: ../../source/tutorial-quickstart-mlx.rst:243 msgid "" "The rest of the functionality is directly inspired by the centralized " "case. The ``fit()`` method in the client trains the model using the local" " dataset:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:272 +#: ../../source/tutorial-quickstart-mlx.rst:259 msgid "" "Here, after updating the parameters, we perform the training as in the " "centralized case, and return the new parameters." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:275 +#: ../../source/tutorial-quickstart-mlx.rst:262 msgid "And for the ``evaluate()`` method of the client:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:285 +#: ../../source/tutorial-quickstart-mlx.rst:272 msgid "" "We also begin by updating the parameters with the ones sent by the " "server, and then we compute the loss and accuracy using the functions " @@ -25069,12 +25526,12 @@ msgid "" "the `MLP` model as well as other components such as the optimizer." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:290 +#: ../../source/tutorial-quickstart-mlx.rst:277 #, fuzzy msgid "Putting everything together we have:" msgstr "Tout assembler" -#: ../../source/tutorial-quickstart-mlx.rst:344 +#: ../../source/tutorial-quickstart-mlx.rst:331 msgid "" "Finally, we can construct a ``ClientApp`` using the ``FlowerClient`` " "defined above by means of a ``client_fn()`` callback. Note that " @@ -25085,13 +25542,7 @@ msgid "" "method." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:376 -#: ../../source/tutorial-quickstart-pytorch.rst:321 -#, fuzzy -msgid "The ServerApp" -msgstr "serveur" - -#: ../../source/tutorial-quickstart-mlx.rst:378 +#: ../../source/tutorial-quickstart-mlx.rst:363 msgid "" "To construct a ``ServerApp``, we define a ``server_fn()`` callback with " "an identical signature to that of ``client_fn()``, but the return type is" @@ -25102,14 +25553,15 @@ msgid "" "``FedAvg`` strategy." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:402 -#: ../../source/tutorial-quickstart-pytorch.rst:360 +#: ../../source/tutorial-quickstart-mlx.rst:386 +#: ../../source/tutorial-quickstart-pytorch.rst:344 +#: ../../source/tutorial-quickstart-tensorflow.rst:266 msgid "" "Congratulations! You've successfully built and run your first federated " "learning system." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:407 +#: ../../source/tutorial-quickstart-mlx.rst:390 #, fuzzy msgid "" "Check the `source code `_." -#: ../../source/tutorial-quickstart-pytorch.rst:12 +#: ../../source/tutorial-quickstart-pytorch.rst:11 msgid "" "Let's use `flwr new` to create a complete Flower+PyTorch project. It will" " generate all the files needed to run, by default with the Flower " @@ -25177,24 +25629,14 @@ msgid "" "api/flwr_datasets.partitioner.IidPartitioner.html#flwr_datasets.partitioner.IidPartitioner>`_." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:27 +#: ../../source/tutorial-quickstart-pytorch.rst:26 msgid "" "Then, run the command below. 
You will be prompted to select one of the " "available templates (choose ``PyTorch``), give a name to your project, " "and type in your developer name:" msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:57 -msgid "To run the project, do:" -msgstr "" - -#: ../../source/tutorial-quickstart-pytorch.rst:105 -msgid "" -"You can also override the parameters defined in the " -"``[tool.flwr.app.config]`` section in ``pyproject.toml`` like this:" -msgstr "" - -#: ../../source/tutorial-quickstart-pytorch.rst:121 +#: ../../source/tutorial-quickstart-pytorch.rst:117 msgid "" "This tutorial uses `Flower Datasets `_ " "to easily download and partition the `CIFAR-10` dataset. In this example " @@ -25208,13 +25650,13 @@ msgid "" " that correspond to their data partition." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:159 +#: ../../source/tutorial-quickstart-pytorch.rst:152 msgid "" "We defined a simple Convolutional Neural Network (CNN), but feel free to " "replace it with a more sophisticated model if you'd like:" msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:184 +#: ../../source/tutorial-quickstart-pytorch.rst:177 msgid "" "In addition to defining the model architecture, we also include two " "utility functions to perform both training (i.e. ``train()``) and " @@ -25227,7 +25669,7 @@ msgid "" "training or evaluation:" msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:236 +#: ../../source/tutorial-quickstart-pytorch.rst:226 msgid "" "The main changes we have to make to use `PyTorch` with `Flower` will be " "found in the ``get_weights()`` and ``set_weights()`` functions. In " @@ -25237,23 +25679,7 @@ msgid "" "PyTorch model. Doing this in fairly easy in PyTorch." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:245 -msgid "" -"The specific implementation of ``get_weights()`` and ``set_weights()`` " -"depends on the type of models you use. The ones shown below work for a " -"wide range of PyTorch models but you might need to adjust them if you " -"have more exotic model architectures." -msgstr "" - -#: ../../source/tutorial-quickstart-pytorch.rst:261 -msgid "" -"The rest of the functionality is directly inspired by the centralized " -"case. The ``fit()`` method in the client trains the model using the local" -" dataset. Similarly, the ``evaluate()`` method is used to evaluate the " -"model received on a held-out validation set that the client might have:" -msgstr "" - -#: ../../source/tutorial-quickstart-pytorch.rst:294 +#: ../../source/tutorial-quickstart-pytorch.rst:282 msgid "" "Finally, we can construct a ``ClientApp`` using the ``FlowerClient`` " "defined above by means of a ``client_fn()`` callback. Note that the " @@ -25264,7 +25690,7 @@ msgid "" "additioinal hyperparameters in ``pyproject.toml`` and access them here." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:323 +#: ../../source/tutorial-quickstart-pytorch.rst:309 msgid "" "To construct a ``ServerApp`` we define a ``server_fn()`` callback with an" " identical signature to that of ``client_fn()`` but the return type is " @@ -25278,7 +25704,7 @@ msgid "" "``pyproject.toml``." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:365 +#: ../../source/tutorial-quickstart-pytorch.rst:348 #, fuzzy msgid "" "Check the `source code `_ de cet exemple se trouve dans :code:`examples" "/quickstart-mxnet`." 
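The ``train()``/``test()`` utilities that the PyTorch quickstart above refers to are standard PyTorch loops; a minimal sketch is shown below. The ``img``/``label`` batch keys and the SGD hyperparameters are assumptions about the generated template, so adjust them to your own dataloaders.

```python
import torch
import torch.nn as nn


def train(net, trainloader, epochs, device):
    """Train `net` for `epochs` epochs and return the average training loss."""
    net.to(device)
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.SGD(net.parameters(), lr=0.01, momentum=0.9)
    net.train()
    running_loss = 0.0
    for _ in range(epochs):
        for batch in trainloader:
            images, labels = batch["img"].to(device), batch["label"].to(device)
            optimizer.zero_grad()
            loss = criterion(net(images), labels)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
    return running_loss / (epochs * len(trainloader))


def test(net, testloader, device):
    """Evaluate `net` and return average loss and accuracy."""
    net.to(device)
    criterion = nn.CrossEntropyLoss()
    net.eval()
    correct, total_loss = 0, 0.0
    with torch.no_grad():
        for batch in testloader:
            images, labels = batch["img"].to(device), batch["label"].to(device)
            outputs = net(images)
            total_loss += criterion(outputs, labels).item()
            correct += (outputs.argmax(dim=1) == labels).sum().item()
    return total_loss / len(testloader), correct / len(testloader.dataset)
```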
-#: ../../source/tutorial-quickstart-pytorch.rst:372 +#: ../../source/tutorial-quickstart-pytorch.rst:354 +#: ../../source/tutorial-quickstart-tensorflow.rst:278 #, fuzzy msgid "Video tutorial" msgstr "Tutoriel" -#: ../../source/tutorial-quickstart-pytorch.rst:376 +#: ../../source/tutorial-quickstart-pytorch.rst:358 msgid "" "The video shown below shows how to setup a PyTorch + Flower project using" " our previously recommended APIs. A new video tutorial will be released " "that shows the new APIs (as the content above does)" msgstr "" -#: ../../source/tutorial-quickstart-pytorch-lightning.rst:-1 -msgid "" -"Check out this Federated Learning quickstart tutorial for using Flower " -"with PyTorch Lightning to train an Auto Encoder model on MNIST." -msgstr "" - -#: ../../source/tutorial-quickstart-pytorch-lightning.rst:5 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:4 msgid "Quickstart PyTorch Lightning" msgstr "Démarrage rapide de PyTorch Lightning" -#: ../../source/tutorial-quickstart-pytorch-lightning.rst:10 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:6 #, fuzzy msgid "" -"Let's build a horizontal federated learning system using PyTorch " -"Lightning and Flower!" +"In this federated learning tutorial we will learn how to train an " +"AutoEncoder model on MNIST using Flower and PyTorch Lightning. It is " +"recommended to create a virtual environment and run everything within a " +":doc:`virtualenv `." +msgstr "" +"Tout d'abord, il est recommandé de créer un environnement virtuel et de " +"tout exécuter au sein d'un `virtualenv `_." + +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:19 +msgid "" +"This will create a new directory called `quickstart-pytorch-lightning` " +"containing the following files:" +msgstr "" + +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:42 +msgid "" +"By default, Flower Simulation Engine will be started and it will create a" +" federation of 4 nodes using `FedAvg `_ " +"as the aggregation strategy. The dataset will be partitioned using Flower" +" Dataset's `IidPartitioner `_." +" To run the project, do:" msgstr "" -"Construisons un système d'apprentissage fédéré en utilisant PyTorch " -"Lightning et Flower !" -#: ../../source/tutorial-quickstart-pytorch-lightning.rst:12 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:93 +msgid "" +"Each simulated `ClientApp` (two per round) will also log a summary of " +"their local training process. Expect this output to be similar to:" +msgstr "" + +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:115 #, fuzzy msgid "" -"Please refer to the `full code example " -"`_ to learn more." +"Check the `source code `_ of this tutorial in ``examples" +"/quickstart-pytorch-lightning`` in the Flower GitHub repository." msgstr "" -"Réfère-toi à l'exemple de code complet " -"`_ pour en savoir plus." +"Félicitations ! Tu as réussi à construire et à faire fonctionner ton " +"premier système d'apprentissage fédéré. Le code source complet " +"`_ de cet exemple se trouve dans :code:`examples" +"/quickstart-mxnet`." #: ../../source/tutorial-quickstart-scikitlearn.rst:-1 msgid "" @@ -25339,14 +25788,15 @@ msgid "" "with scikit-learn to train a linear regression model." 
msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:5 +#: ../../source/tutorial-quickstart-scikitlearn.rst:4 msgid "Quickstart scikit-learn" msgstr "Démarrage rapide de scikit-learn" -#: ../../source/tutorial-quickstart-scikitlearn.rst:10 +#: ../../source/tutorial-quickstart-scikitlearn.rst:9 +#, fuzzy msgid "" -"In this tutorial, we will learn how to train a :code:`Logistic " -"Regression` model on MNIST using Flower and scikit-learn." +"In this tutorial, we will learn how to train a ``Logistic Regression`` " +"model on MNIST using Flower and scikit-learn." msgstr "" "Dans ce tutoriel, nous allons apprendre à former un :code:`modèle de " "régression logistique` sur MNIST en utilisant Flower et scikit-learn." @@ -25361,7 +25811,7 @@ msgstr "" "dans ce `virtualenv `_." -#: ../../source/tutorial-quickstart-scikitlearn.rst:14 +#: ../../source/tutorial-quickstart-scikitlearn.rst:15 msgid "" "Our example consists of one *server* and two *clients* all having the " "same model." @@ -25369,7 +25819,7 @@ msgstr "" "Notre exemple consiste en un *serveur* et deux *clients* ayant tous le " "même modèle." -#: ../../source/tutorial-quickstart-scikitlearn.rst:16 +#: ../../source/tutorial-quickstart-scikitlearn.rst:17 msgid "" "*Clients* are responsible for generating individual model parameter " "updates for the model based on their local datasets. These updates are " @@ -25385,7 +25835,7 @@ msgstr "" "version améliorée du modèle à chaque *client*. Un cycle complet de mises " "à jour des paramètres s'appelle un *round*." -#: ../../source/tutorial-quickstart-scikitlearn.rst:20 +#: ../../source/tutorial-quickstart-scikitlearn.rst:23 msgid "" "Now that we have a rough idea of what is going on, let's get started. We " "first need to install Flower. You can do this by running:" @@ -25394,22 +25844,23 @@ msgstr "" "commençons. Nous devons d'abord installer Flower. Tu peux le faire en " "lançant :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:26 +#: ../../source/tutorial-quickstart-scikitlearn.rst:30 #, fuzzy msgid "Since we want to use scikit-learn, let's go ahead and install it:" msgstr "Puisque nous voulons utiliser scikt-learn, allons-y et installons-le :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:32 +#: ../../source/tutorial-quickstart-scikitlearn.rst:36 msgid "Or simply install all dependencies using Poetry:" msgstr "Ou installe simplement toutes les dépendances à l'aide de Poetry :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:42 +#: ../../source/tutorial-quickstart-scikitlearn.rst:45 +#, fuzzy msgid "" "Now that we have all our dependencies installed, let's run a simple " "distributed training with two clients and one server. However, before " "setting up the client and server, we will define all functionalities that" -" we need for our federated learning setup within :code:`utils.py`. The " -":code:`utils.py` contains different functions defining all the machine " +" we need for our federated learning setup within ``utils.py``. 
The " +"``utils.py`` contains different functions defining all the machine " "learning basics:" msgstr "" "Maintenant que toutes nos dépendances sont installées, exécutons une " @@ -25420,40 +25871,45 @@ msgstr "" "contient différentes fonctions définissant toutes les bases de " "l'apprentissage automatique :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:45 -msgid ":code:`get_model_parameters()`" +#: ../../source/tutorial-quickstart-scikitlearn.rst:51 +#, fuzzy +msgid "``get_model_parameters()``" msgstr ":code:`get_model_parameters()`" -#: ../../source/tutorial-quickstart-scikitlearn.rst:46 -msgid "Returns the parameters of a :code:`sklearn` LogisticRegression model" +#: ../../source/tutorial-quickstart-scikitlearn.rst:52 +#, fuzzy +msgid "Returns the parameters of a ``sklearn`` LogisticRegression model" msgstr "" "Renvoie les paramètres d'un modèle de régression logistique " ":code:`sklearn`" -#: ../../source/tutorial-quickstart-scikitlearn.rst:47 -msgid ":code:`set_model_params()`" +#: ../../source/tutorial-quickstart-scikitlearn.rst:53 +#, fuzzy +msgid "``set_model_params()``" msgstr ":code:`set_model_params()`" -#: ../../source/tutorial-quickstart-scikitlearn.rst:48 +#: ../../source/tutorial-quickstart-scikitlearn.rst:54 #, fuzzy -msgid "Sets the parameters of a :code:`sklearn` LogisticRegression model" +msgid "Sets the parameters of a ``sklearn`` LogisticRegression model" msgstr "Définit les paramètres d'un modèle de régression logistique :code:`sklean`" -#: ../../source/tutorial-quickstart-scikitlearn.rst:49 -msgid ":code:`set_initial_params()`" +#: ../../source/tutorial-quickstart-scikitlearn.rst:56 +#, fuzzy +msgid "``set_initial_params()``" msgstr ":code:`set_initial_params()`" -#: ../../source/tutorial-quickstart-scikitlearn.rst:50 +#: ../../source/tutorial-quickstart-scikitlearn.rst:56 msgid "Initializes the model parameters that the Flower server will ask for" msgstr "Initialise les paramètres du modèle que le serveur de Flower demandera" -#: ../../source/tutorial-quickstart-scikitlearn.rst:52 +#: ../../source/tutorial-quickstart-scikitlearn.rst:58 +#, fuzzy msgid "" -"Please check out :code:`utils.py` `here " +"Please check out ``utils.py`` `here " "`_ for more details. The pre-defined functions are used in" -" the :code:`client.py` and imported. The :code:`client.py` also requires " -"to import several packages such as Flower and scikit-learn:" +" the ``client.py`` and imported. The ``client.py`` also requires to " +"import several packages such as Flower and scikit-learn:" msgstr "" "Tu peux consulter :code:`utils.py` `ici " "`_. The " -":code:`FederatedDataset.load_partition()` method loads the partitioned " -"training set for each partition ID defined in the :code:`--partition-id` " +"``FederatedDataset.load_partition()`` method loads the partitioned " +"training set for each partition ID defined in the ``--partition-id`` " "argument." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:95 +#: ../../source/tutorial-quickstart-scikitlearn.rst:106 +#, fuzzy msgid "" "Next, the logistic regression model is defined and initialized with " -":code:`utils.set_initial_params()`." +"``utils.set_initial_params()``." msgstr "" "Ensuite, le modèle de régression logistique est défini et initialisé avec" " :code:`utils.set_initial_params()`." -#: ../../source/tutorial-quickstart-scikitlearn.rst:107 +#: ../../source/tutorial-quickstart-scikitlearn.rst:119 +#, fuzzy msgid "" "The Flower server interacts with clients through an interface called " -":code:`Client`. 
When the server selects a particular client for training," -" it sends training instructions over the network. The client receives " -"those instructions and calls one of the :code:`Client` methods to run " -"your code (i.e., to fit the logistic regression we defined earlier)." +"``Client``. When the server selects a particular client for training, it " +"sends training instructions over the network. The client receives those " +"instructions and calls one of the ``Client`` methods to run your code " +"(i.e., to fit the logistic regression we defined earlier)." msgstr "" "Le serveur Flower interagit avec les clients par le biais d'une interface" " appelée :code:`Client`. Lorsque le serveur sélectionne un client " @@ -25496,13 +25954,13 @@ msgstr "" "méthodes :code:`Client` pour exécuter ton code (c'est-à-dire pour ajuster" " la régression logistique que nous avons définie plus tôt)." -#: ../../source/tutorial-quickstart-scikitlearn.rst:113 +#: ../../source/tutorial-quickstart-scikitlearn.rst:124 +#, fuzzy msgid "" -"Flower provides a convenience class called :code:`NumPyClient` which " -"makes it easier to implement the :code:`Client` interface when your " -"workload uses scikit-learn. Implementing :code:`NumPyClient` usually " -"means defining the following methods (:code:`set_parameters` is optional " -"though):" +"Flower provides a convenience class called ``NumPyClient`` which makes it" +" easier to implement the ``Client`` interface when your workload uses " +"scikit-learn. Implementing ``NumPyClient`` usually means defining the " +"following methods (``set_parameters`` is optional though):" msgstr "" "Flower fournit une classe de commodité appelée :code:`NumPyClient` qui " "facilite la mise en œuvre de l'interface :code:`Client` lorsque ta charge" @@ -25510,15 +25968,16 @@ msgstr "" "signifie généralement définir les méthodes suivantes " "(:code:`set_parameters` est cependant facultatif) :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:119 +#: ../../source/tutorial-quickstart-scikitlearn.rst:130 msgid "return the model weight as a list of NumPy ndarrays" msgstr "renvoie le poids du modèle sous la forme d'une liste de ndarrays NumPy" -#: ../../source/tutorial-quickstart-scikitlearn.rst:120 -msgid ":code:`set_parameters` (optional)" +#: ../../source/tutorial-quickstart-scikitlearn.rst:132 +#, fuzzy +msgid "``set_parameters`` (optional)" msgstr ":code:`set_parameters` (optionnel)" -#: ../../source/tutorial-quickstart-scikitlearn.rst:121 +#: ../../source/tutorial-quickstart-scikitlearn.rst:132 msgid "" "update the local model weights with the parameters received from the " "server" @@ -25526,51 +25985,53 @@ msgstr "" "mettre à jour les poids du modèle local avec les paramètres reçus du " "serveur" -#: ../../source/tutorial-quickstart-scikitlearn.rst:122 -msgid "is directly imported with :code:`utils.set_model_params()`" +#: ../../source/tutorial-quickstart-scikitlearn.rst:133 +#, fuzzy +msgid "is directly imported with ``utils.set_model_params()``" msgstr "est directement importé avec :code:`utils.set_model_params()`" -#: ../../source/tutorial-quickstart-scikitlearn.rst:124 +#: ../../source/tutorial-quickstart-scikitlearn.rst:135 msgid "set the local model weights" msgstr "fixe les poids du modèle local" -#: ../../source/tutorial-quickstart-scikitlearn.rst:125 +#: ../../source/tutorial-quickstart-scikitlearn.rst:136 msgid "train the local model" msgstr "entraîne le modèle local" -#: ../../source/tutorial-quickstart-scikitlearn.rst:126 +#: 
../../source/tutorial-quickstart-scikitlearn.rst:137 #, fuzzy msgid "return the updated local model weights" msgstr "recevoir les poids du modèle local mis à jour" -#: ../../source/tutorial-quickstart-scikitlearn.rst:128 +#: ../../source/tutorial-quickstart-scikitlearn.rst:139 msgid "test the local model" msgstr "teste le modèle local" -#: ../../source/tutorial-quickstart-scikitlearn.rst:130 +#: ../../source/tutorial-quickstart-scikitlearn.rst:141 msgid "The methods can be implemented in the following way:" msgstr "Les méthodes peuvent être mises en œuvre de la manière suivante :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:153 +#: ../../source/tutorial-quickstart-scikitlearn.rst:163 +#, fuzzy msgid "" -"We can now create an instance of our class :code:`MnistClient` and add " -"one line to actually run this client:" +"We can now create an instance of our class ``MnistClient`` and add one " +"line to actually run this client:" msgstr "" "Nous pouvons maintenant créer une instance de notre classe " ":code:`MnistClient` et ajouter une ligne pour exécuter ce client :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:160 +#: ../../source/tutorial-quickstart-scikitlearn.rst:170 #, fuzzy msgid "" -"That's it for the client. We only have to implement :code:`Client` or " -":code:`NumPyClient` and call :code:`fl.client.start_client()`. If you " -"implement a client of type :code:`NumPyClient` you'll need to first call " -"its :code:`to_client()` method. The string :code:`\"0.0.0.0:8080\"` tells" -" the client which server to connect to. In our case we can run the server" -" and the client on the same machine, therefore we use " -":code:`\"0.0.0.0:8080\"`. If we run a truly federated workload with the " -"server and clients running on different machines, all that needs to " -"change is the :code:`server_address` we pass to the client." +"That's it for the client. We only have to implement ``Client`` or " +"``NumPyClient`` and call ``fl.client.start_client()``. If you implement a" +" client of type ``NumPyClient`` you'll need to first call its " +"``to_client()`` method. The string ``\"0.0.0.0:8080\"`` tells the client " +"which server to connect to. In our case we can run the server and the " +"client on the same machine, therefore we use ``\"0.0.0.0:8080\"``. If we " +"run a truly federated workload with the server and clients running on " +"different machines, all that needs to change is the ``server_address`` we" +" pass to the client." msgstr "" "C'est tout pour le client. Il nous suffit d'implémenter :code:`Client` ou" " :code:`NumPyClient` et d'appeler :code:`fl.client.start_client()`. La " @@ -25582,7 +26043,7 @@ msgstr "" "machines différentes, tout ce qui doit changer est :code:`server_address`" " que nous transmettons au client." -#: ../../source/tutorial-quickstart-scikitlearn.rst:169 +#: ../../source/tutorial-quickstart-scikitlearn.rst:181 msgid "" "The following Flower server is a little bit more advanced and returns an " "evaluation function for the server-side evaluation. First, we import " @@ -25593,19 +26054,20 @@ msgstr "" " à nouveau toutes les bibliothèques requises telles que Flower et scikit-" "learn." 
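For readers skimming these strings, the scikit-learn client they describe can be sketched as follows. This is only an illustrative sketch, not the example's actual ``client.py``: it substitutes ``sklearn.datasets.load_digits`` for MNIST, initialises the model with a plain ``fit()`` rather than the example's ``utils.set_initial_params()``, and uses arbitrary hyperparameters::

    import flwr as fl
    from sklearn.datasets import load_digits
    from sklearn.linear_model import LogisticRegression
    from sklearn.metrics import log_loss
    from sklearn.model_selection import train_test_split

    # Stand-in data: the real example loads an MNIST partition with Flower Datasets.
    X, y = load_digits(return_X_y=True)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

    # warm_start=True keeps the weights between rounds; the real example
    # instead initialises zero weights via utils.set_initial_params().
    model = LogisticRegression(max_iter=100, warm_start=True)
    model.fit(X_train, y_train)


    class MnistClient(fl.client.NumPyClient):
        def get_parameters(self, config):
            # Return the model weights as a list of NumPy ndarrays
            return [model.coef_, model.intercept_]

        def fit(self, parameters, config):
            # Update the local model with the weights received from the server,
            # then train on the local data
            model.coef_, model.intercept_ = parameters
            model.fit(X_train, y_train)
            return self.get_parameters(config={}), len(X_train), {}

        def evaluate(self, parameters, config):
            # Evaluate the received global model on the local test split
            model.coef_, model.intercept_ = parameters
            loss = log_loss(y_test, model.predict_proba(X_test), labels=model.classes_)
            return loss, len(X_test), {"accuracy": model.score(X_test, y_test)}


    # "0.0.0.0:8080" assumes server and client run on the same machine,
    # as in the quickstart text above.
    fl.client.start_client(
        server_address="0.0.0.0:8080",
        client=MnistClient().to_client(),
    )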
-#: ../../source/tutorial-quickstart-scikitlearn.rst:172 -msgid ":code:`server.py`, import Flower and start the server:" +#: ../../source/tutorial-quickstart-scikitlearn.rst:185 +#, fuzzy +msgid "``server.py``, import Flower and start the server:" msgstr ":code:`server.py`, importe Flower et démarre le serveur :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:185 +#: ../../source/tutorial-quickstart-scikitlearn.rst:198 #, fuzzy msgid "" -"The number of federated learning rounds is set in :code:`fit_round()` and" -" the evaluation is defined in :code:`get_evaluate_fn()`. The evaluation " -"function is called after each federated learning round and gives you " -"information about loss and accuracy. Note that we also make use of Flower" -" Datasets here to load the test split of the MNIST dataset for server-" -"side evaluation." +"The number of federated learning rounds is set in ``fit_round()`` and the" +" evaluation is defined in ``get_evaluate_fn()``. The evaluation function " +"is called after each federated learning round and gives you information " +"about loss and accuracy. Note that we also make use of Flower Datasets " +"here to load the test split of the MNIST dataset for server-side " +"evaluation." msgstr "" "Le nombre de tours d'apprentissage fédéré est défini dans " ":code:`fit_round()` et l'évaluation est définie dans " @@ -25613,15 +26075,16 @@ msgstr "" "chaque tour d'apprentissage fédéré et te donne des informations sur la " "perte et la précision." -#: ../../source/tutorial-quickstart-scikitlearn.rst:213 +#: ../../source/tutorial-quickstart-scikitlearn.rst:228 +#, fuzzy msgid "" -"The :code:`main` contains the server-side parameter initialization " -":code:`utils.set_initial_params()` as well as the aggregation strategy " -":code:`fl.server.strategy:FedAvg()`. The strategy is the default one, " +"The ``main`` contains the server-side parameter initialization " +"``utils.set_initial_params()`` as well as the aggregation strategy " +"``fl.server.strategy:FedAvg()``. The strategy is the default one, " "federated averaging (or FedAvg), with two clients and evaluation after " "each federated learning round. The server can be started with the command" -" :code:`fl.server.start_server(server_address=\"0.0.0.0:8080\", " -"strategy=strategy, config=fl.server.ServerConfig(num_rounds=3))`." +" ``fl.server.start_server(server_address=\"0.0.0.0:8080\", " +"strategy=strategy, config=fl.server.ServerConfig(num_rounds=3))``." msgstr "" "Le :code:`main` contient l'initialisation des paramètres côté serveur " ":code:`utils.set_initial_params()` ainsi que la stratégie d'agrégation " @@ -25631,7 +26094,7 @@ msgstr "" " commande :code:`fl.server.start_server(server_address=\"0.0.0.0:8080\", " "strategy=strategy, config=fl.server.ServerConfig(num_rounds=3))`." -#: ../../source/tutorial-quickstart-scikitlearn.rst:232 +#: ../../source/tutorial-quickstart-scikitlearn.rst:256 msgid "" "With both client and server ready, we can now run everything and see " "federated learning in action. Federated learning systems usually have a " @@ -25643,9 +26106,8 @@ msgstr "" "fédéré ont généralement un serveur et plusieurs clients. 
Nous devons donc" " commencer par lancer le serveur :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:239 -#: ../../source/tutorial-quickstart-tensorflow.rst:122 -#: ../../source/tutorial-quickstart-xgboost.rst:575 +#: ../../source/tutorial-quickstart-scikitlearn.rst:264 +#: ../../source/tutorial-quickstart-xgboost.rst:598 msgid "" "Once the server is running we can start the clients in different " "terminals. Open a new terminal and start the first client:" @@ -25654,14 +26116,13 @@ msgstr "" "dans différents terminaux. Ouvre un nouveau terminal et démarre le " "premier client :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:246 -#: ../../source/tutorial-quickstart-tensorflow.rst:129 -#: ../../source/tutorial-quickstart-xgboost.rst:582 +#: ../../source/tutorial-quickstart-scikitlearn.rst:271 +#: ../../source/tutorial-quickstart-xgboost.rst:605 msgid "Open another terminal and start the second client:" msgstr "Ouvre un autre terminal et démarre le deuxième client :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:252 -#: ../../source/tutorial-quickstart-xgboost.rst:588 +#: ../../source/tutorial-quickstart-scikitlearn.rst:277 +#: ../../source/tutorial-quickstart-xgboost.rst:611 msgid "" "Each client will have its own dataset. You should now see how the " "training does in the very first terminal (the one that started the " @@ -25671,13 +26132,14 @@ msgstr "" "voir comment la formation se déroule dans le tout premier terminal (celui" " qui a démarré le serveur) :" -#: ../../source/tutorial-quickstart-scikitlearn.rst:286 +#: ../../source/tutorial-quickstart-scikitlearn.rst:311 +#, fuzzy msgid "" "Congratulations! You've successfully built and run your first federated " "learning system. The full `source code " "`_ for this example can be found in :code:`examples/sklearn-logreg-" -"mnist`." +"mnist>`_ for this example can be found in ``examples/sklearn-logreg-" +"mnist``." msgstr "" "Félicitations ! Tu as réussi à construire et à faire fonctionner ton " "premier système d'apprentissage fédéré. Le code source complet " @@ -25688,144 +26150,118 @@ msgstr "" #: ../../source/tutorial-quickstart-tensorflow.rst:-1 msgid "" "Check out this Federated Learning quickstart tutorial for using Flower " -"with TensorFlow to train a MobilNetV2 model on CIFAR-10." +"with TensorFlow to train a CNN model on CIFAR-10." msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:5 +#: ../../source/tutorial-quickstart-tensorflow.rst:4 msgid "Quickstart TensorFlow" msgstr "Démarrage rapide de TensorFlow" -#: ../../source/tutorial-quickstart-tensorflow.rst:13 -msgid "Let's build a federated learning system in less than 20 lines of code!" -msgstr "" -"Construisons un système d'apprentissage fédéré en moins de 20 lignes de " -"code !" 
- -#: ../../source/tutorial-quickstart-tensorflow.rst:15 -msgid "Before Flower can be imported we have to install it:" -msgstr "Avant de pouvoir importer une fleur, nous devons l'installer :" - -#: ../../source/tutorial-quickstart-tensorflow.rst:21 +#: ../../source/tutorial-quickstart-tensorflow.rst:6 +#, fuzzy msgid "" -"Since we want to use the Keras API of TensorFlow (TF), we have to install" -" TF as well:" -msgstr "" -"Comme nous voulons utiliser l'API Keras de TensorFlow (TF), nous devons " -"également installer TF :" - -#: ../../source/tutorial-quickstart-tensorflow.rst:31 -msgid "Next, in a file called :code:`client.py`, import Flower and TensorFlow:" +"In this tutorial we will learn how to train a Convolutional Neural " +"Network on CIFAR-10 using the Flower framework and TensorFlow. First of " +"all, it is recommended to create a virtual environment and run everything" +" within a :doc:`virtualenv `." msgstr "" -"Ensuite, dans un fichier appelé :code:`client.py`, importe Flower et " -"TensorFlow :" +"Tout d'abord, il est recommandé de créer un environnement virtuel et de " +"tout exécuter au sein d'un `virtualenv `_." -#: ../../source/tutorial-quickstart-tensorflow.rst:38 +#: ../../source/tutorial-quickstart-tensorflow.rst:11 msgid "" -"We use the Keras utilities of TF to load CIFAR10, a popular colored image" -" classification dataset for machine learning. The call to " -":code:`tf.keras.datasets.cifar10.load_data()` downloads CIFAR10, caches " -"it locally, and then returns the entire training and test set as NumPy " -"ndarrays." +"Let's use `flwr new` to create a complete Flower+TensorFlow project. It " +"will generate all the files needed to run, by default with the Flower " +"Simulation Engine, a federation of 10 nodes using `FedAvg " +"`_. The " +"dataset will be partitioned using Flower Dataset's `IidPartitioner " +"`_." msgstr "" -"Nous utilisons les utilitaires Keras de TF pour charger CIFAR10, un " -"ensemble de données de classification d'images colorées populaire pour " -"l'apprentissage automatique. L'appel à " -":code:`tf.keras.datasets.cifar10.load_data()` télécharge CIFAR10, le met " -"en cache localement, puis renvoie l'ensemble d'entraînement et de test " -"sous forme de NumPy ndarrays." -#: ../../source/tutorial-quickstart-tensorflow.rst:47 +#: ../../source/tutorial-quickstart-tensorflow.rst:26 msgid "" -"Next, we need a model. For the purpose of this tutorial, we use " -"MobilNetV2 with 10 output classes:" +"Then, run the command below. You will be prompted to select one of the " +"available templates (choose ``TensorFlow``), give a name to your project," +" and type in your developer name:" msgstr "" -"Ensuite, nous avons besoin d'un modèle. Pour les besoins de ce tutoriel, " -"nous utilisons MobilNetV2 avec 10 classes de sortie :" -#: ../../source/tutorial-quickstart-tensorflow.rst:54 +#: ../../source/tutorial-quickstart-tensorflow.rst:114 msgid "" -"The Flower server interacts with clients through an interface called " -":code:`Client`. When the server selects a particular client for training," -" it sends training instructions over the network. The client receives " -"those instructions and calls one of the :code:`Client` methods to run " -"your code (i.e., to train the neural network we defined earlier)." +"This tutorial uses `Flower Datasets `_ " +"to easily download and partition the `CIFAR-10` dataset. In this example " +"you'll make use of the `IidPartitioner `_" +" to generate `num_partitions` partitions. 
You can choose `other " +"partitioners `_ available in Flower Datasets. Each " +"``ClientApp`` will call this function to create the ``NumPy`` arrays that" +" correspond to their data partition." msgstr "" -"Le serveur Flower interagit avec les clients par le biais d'une interface" -" appelée :code:`Client`. Lorsque le serveur sélectionne un client " -"particulier pour la formation, il envoie des instructions de formation " -"sur le réseau. Le client reçoit ces instructions et appelle l'une des " -"méthodes :code:`Client` pour exécuter ton code (c'est-à-dire pour former " -"le réseau neuronal que nous avons défini plus tôt)." -#: ../../source/tutorial-quickstart-tensorflow.rst:60 +#: ../../source/tutorial-quickstart-tensorflow.rst:141 msgid "" -"Flower provides a convenience class called :code:`NumPyClient` which " -"makes it easier to implement the :code:`Client` interface when your " -"workload uses Keras. The :code:`NumPyClient` interface defines three " -"methods which can be implemented in the following way:" +"Next, we need a model. We defined a simple Convolutional Neural Network " +"(CNN), but feel free to replace it with a more sophisticated model if " +"you'd like:" msgstr "" -"Flower fournit une classe de commodité appelée :code:`NumPyClient` qui " -"facilite la mise en œuvre de l'interface :code:`Client` lorsque ta charge" -" de travail utilise Keras. L'interface :code:`NumPyClient` définit trois " -"méthodes qui peuvent être mises en œuvre de la manière suivante :" -#: ../../source/tutorial-quickstart-tensorflow.rst:82 +#: ../../source/tutorial-quickstart-tensorflow.rst:170 msgid "" -"We can now create an instance of our class :code:`CifarClient` and add " -"one line to actually run this client:" +"With `TensorFlow`, we can use the built-in ``get_weights()`` and " +"``set_weights()`` functions, which simplifies the implementation with " +"`Flower`. The rest of the functionality in the ClientApp is directly " +"inspired by the centralized case. The ``fit()`` method in the client " +"trains the model using the local dataset. Similarly, the ``evaluate()`` " +"method is used to evaluate the model received on a held-out validation " +"set that the client might have:" msgstr "" -"Nous pouvons maintenant créer une instance de notre classe " -":code:`CifarClient` et ajouter une ligne pour exécuter ce client :" -#: ../../source/tutorial-quickstart-tensorflow.rst:90 -#, fuzzy +#: ../../source/tutorial-quickstart-tensorflow.rst:203 msgid "" -"That's it for the client. We only have to implement :code:`Client` or " -":code:`NumPyClient` and call :code:`fl.client.start_client()`. If you " -"implement a client of type :code:`NumPyClient` you'll need to first call " -"its :code:`to_client()` method. The string :code:`\"[::]:8080\"` tells " -"the client which server to connect to. In our case we can run the server " -"and the client on the same machine, therefore we use " -":code:`\"[::]:8080\"`. If we run a truly federated workload with the " -"server and clients running on different machines, all that needs to " -"change is the :code:`server_address` we point the client at." +"Finally, we can construct a ``ClientApp`` using the ``FlowerClient`` " +"defined above by means of a ``client_fn()`` callback. Note that the " +"`context` enables you to get access to hyperparameters defined in your " +"``pyproject.toml`` to configure the run. 
For example, in this tutorial we" +" access the `local-epochs` setting to control the number of epochs a " +"``ClientApp`` will perform when running the ``fit()`` method, in addition" +" to `batch-size`. You could define additional hyperparameters in " +"``pyproject.toml`` and access them here." msgstr "" -"C'est tout pour le client. Il nous suffit d'implémenter :code:`Client` ou" -" :code:`NumPyClient` et d'appeler :code:`fl.client.start_client()`. La " -"chaîne :code:`\"[: :]:8080\"` indique au client à quel serveur se " -"connecter. Dans notre cas, nous pouvons exécuter le serveur et le client " -"sur la même machine, c'est pourquoi nous utilisons :code:`\"[: " -":]:8080\"`. Si nous exécutons une charge de travail véritablement fédérée" -" avec le serveur et les clients fonctionnant sur des machines " -"différentes, tout ce qui doit changer est l'adresse " -":code:`server_address` vers laquelle nous dirigeons le client." - -#: ../../source/tutorial-quickstart-tensorflow.rst:135 -msgid "Each client will have its own dataset." -msgstr "Chaque client aura son propre ensemble de données." -#: ../../source/tutorial-quickstart-tensorflow.rst:137 +#: ../../source/tutorial-quickstart-tensorflow.rst:234 msgid "" -"You should now see how the training does in the very first terminal (the " -"one that started the server):" +"To construct a ``ServerApp`` we define a ``server_fn()`` callback with an" +" identical signature to that of ``client_fn()`` but the return type is " +"`ServerAppComponents `_ as " +"opposed to a `Client `_. In this example we use the " +"`FedAvg`. To it we pass a randomly initialized model that will serve as " +"the global model to federate." msgstr "" -"Tu devrais maintenant voir comment la formation se déroule dans le tout " -"premier terminal (celui qui a démarré le serveur) :" -#: ../../source/tutorial-quickstart-tensorflow.rst:169 +#: ../../source/tutorial-quickstart-tensorflow.rst:270 #, fuzzy msgid "" -"Congratulations! You've successfully built and run your first federated " -"learning system. The full `source code " -"`_ for this can be found in :code:`examples" -"/quickstart-tensorflow/client.py`." +"Check the source code of the extended version of this tutorial in " +"|quickstart_tf_link|_ in the Flower GitHub repository." msgstr "" "Félicitations ! Tu as réussi à construire et à faire fonctionner ton " -"premier système d'apprentissage fédéré. Le `code source complet " +"premier système d'apprentissage fédéré. Le code source complet " "`_ pour cela se trouve dans :code:`examples" -"/quickstart-tensorflow/client.py`." +"mxnet/client.py>`_ de cet exemple se trouve dans :code:`examples" +"/quickstart-mxnet`." + +#: ../../source/tutorial-quickstart-tensorflow.rst:282 +msgid "" +"The video shown below shows how to setup a TensorFlow + Flower project " +"using our previously recommended APIs. A new video tutorial will be " +"released that shows the new APIs (as the content above does)" +msgstr "" #: ../../source/tutorial-quickstart-xgboost.rst:-1 msgid "" @@ -25833,16 +26269,16 @@ msgid "" "with XGBoost to train classification models on trees." 
msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:5 +#: ../../source/tutorial-quickstart-xgboost.rst:4 msgid "Quickstart XGBoost" msgstr "Démarrage rapide XGBoost" -#: ../../source/tutorial-quickstart-xgboost.rst:14 +#: ../../source/tutorial-quickstart-xgboost.rst:13 #, fuzzy msgid "Federated XGBoost" msgstr "Formation fédérée" -#: ../../source/tutorial-quickstart-xgboost.rst:16 +#: ../../source/tutorial-quickstart-xgboost.rst:15 msgid "" "EXtreme Gradient Boosting (**XGBoost**) is a robust and efficient " "implementation of gradient-boosted decision tree (**GBDT**), that " @@ -25852,19 +26288,19 @@ msgid "" "concurrently, unlike the sequential approach taken by GBDT." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:20 +#: ../../source/tutorial-quickstart-xgboost.rst:21 msgid "" "Often, for tabular data on medium-sized datasets with fewer than 10k " "training examples, XGBoost surpasses the results of deep learning " "techniques." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:23 +#: ../../source/tutorial-quickstart-xgboost.rst:25 #, fuzzy msgid "Why federated XGBoost?" msgstr "Qu'est-ce que l'apprentissage fédéré ?" -#: ../../source/tutorial-quickstart-xgboost.rst:25 +#: ../../source/tutorial-quickstart-xgboost.rst:27 msgid "" "Indeed, as the demand for data privacy and decentralized learning grows, " "there's an increasing requirement to implement federated XGBoost systems " @@ -25872,7 +26308,7 @@ msgid "" "detection." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:27 +#: ../../source/tutorial-quickstart-xgboost.rst:31 msgid "" "Federated learning ensures that raw data remains on the local device, " "making it an attractive approach for sensitive domains where data " @@ -25881,10 +26317,10 @@ msgid "" "solution for these specific challenges." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:30 +#: ../../source/tutorial-quickstart-xgboost.rst:36 msgid "" "In this tutorial we will learn how to train a federated XGBoost model on " -"HIGGS dataset using Flower and :code:`xgboost` package. We use a simple " +"HIGGS dataset using Flower and ``xgboost`` package. We use a simple " "example (`full code xgboost-quickstart " "`_)" " with two *clients* and one *server* to demonstrate how federated XGBoost" @@ -25893,11 +26329,11 @@ msgid "" "comprehensive>`_) to run various experiments." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:37 +#: ../../source/tutorial-quickstart-xgboost.rst:46 msgid "Environment Setup" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:39 +#: ../../source/tutorial-quickstart-xgboost.rst:48 #, fuzzy msgid "" "First of all, it is recommended to create a virtual environment and run " @@ -25908,20 +26344,20 @@ msgstr "" "tout exécuter au sein d'un `virtualenv `_." -#: ../../source/tutorial-quickstart-xgboost.rst:41 +#: ../../source/tutorial-quickstart-xgboost.rst:51 msgid "" "We first need to install Flower and Flower Datasets. 
You can do this by " "running :" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:47 +#: ../../source/tutorial-quickstart-xgboost.rst:57 #, fuzzy msgid "" -"Since we want to use :code:`xgboost` package to build up XGBoost trees, " -"let's go ahead and install :code:`xgboost`:" +"Since we want to use ``xgboost`` package to build up XGBoost trees, let's" +" go ahead and install ``xgboost``:" msgstr "Puisque nous voulons utiliser scikt-learn, allons-y et installons-le :" -#: ../../source/tutorial-quickstart-xgboost.rst:57 +#: ../../source/tutorial-quickstart-xgboost.rst:67 msgid "" "*Clients* are responsible for generating individual weight-updates for " "the model based on their local datasets. Now that we have all our " @@ -25929,131 +26365,129 @@ msgid "" "clients and one server." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:60 +#: ../../source/tutorial-quickstart-xgboost.rst:71 #, fuzzy msgid "" -"In a file called :code:`client.py`, import xgboost, Flower, Flower " -"Datasets and other related functions:" +"In a file called ``client.py``, import xgboost, Flower, Flower Datasets " +"and other related functions:" msgstr "" "Dans un fichier appelé :code:`client.py`, importe Flower et les paquets " "liés à PyTorch :" -#: ../../source/tutorial-quickstart-xgboost.rst:87 +#: ../../source/tutorial-quickstart-xgboost.rst:99 msgid "Dataset partition and hyper-parameter selection" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:89 +#: ../../source/tutorial-quickstart-xgboost.rst:101 msgid "" "Prior to local training, we require loading the HIGGS dataset from Flower" " Datasets and conduct data partitioning for FL:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:102 +#: ../../source/tutorial-quickstart-xgboost.rst:115 msgid "" "In this example, we split the dataset into 30 partitions with uniform " -"distribution (:code:`IidPartitioner(num_partitions=30)`). Then, we load " -"the partition for the given client based on :code:`partition_id`:" +"distribution (``IidPartitioner(num_partitions=30)``). Then, we load the " +"partition for the given client based on ``partition_id``:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:121 +#: ../../source/tutorial-quickstart-xgboost.rst:135 msgid "" "After that, we do train/test splitting on the given partition (client's " -"local data), and transform data format for :code:`xgboost` package." +"local data), and transform data format for ``xgboost`` package." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:134 +#: ../../source/tutorial-quickstart-xgboost.rst:149 msgid "" -"The functions of :code:`train_test_split` and " -":code:`transform_dataset_to_dmatrix` are defined as below:" +"The functions of ``train_test_split`` and " +"``transform_dataset_to_dmatrix`` are defined as below:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:158 +#: ../../source/tutorial-quickstart-xgboost.rst:174 msgid "Finally, we define the hyper-parameters used for XGBoost training." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:174 +#: ../../source/tutorial-quickstart-xgboost.rst:190 msgid "" -"The :code:`num_local_round` represents the number of iterations for local" -" tree boost. We use CPU for the training in default. One can shift it to " -"GPU by setting :code:`tree_method` to :code:`gpu_hist`. We use AUC as " -"evaluation metric." +"The ``num_local_round`` represents the number of iterations for local " +"tree boost. We use CPU for the training in default. 
One can shift it to " +"GPU by setting ``tree_method`` to ``gpu_hist``. We use AUC as evaluation " +"metric." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:181 +#: ../../source/tutorial-quickstart-xgboost.rst:195 msgid "Flower client definition for XGBoost" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:183 +#: ../../source/tutorial-quickstart-xgboost.rst:197 msgid "" "After loading the dataset we define the Flower client. We follow the " -"general rule to define :code:`XgbClient` class inherited from " -":code:`fl.client.Client`." +"general rule to define ``XgbClient`` class inherited from " +"``fl.client.Client``." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:205 +#: ../../source/tutorial-quickstart-xgboost.rst:219 msgid "" -"All required parameters defined above are passed to :code:`XgbClient`'s " +"All required parameters defined above are passed to ``XgbClient``'s " "constructor." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:207 +#: ../../source/tutorial-quickstart-xgboost.rst:221 msgid "" -"Then, we override :code:`get_parameters`, :code:`fit` and " -":code:`evaluate` methods insides :code:`XgbClient` class as follows." +"Then, we override ``get_parameters``, ``fit`` and ``evaluate`` methods " +"insides ``XgbClient`` class as follows." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:221 +#: ../../source/tutorial-quickstart-xgboost.rst:236 msgid "" "Unlike neural network training, XGBoost trees are not started from a " -"specified random weights. In this case, we do not use " -":code:`get_parameters` and :code:`set_parameters` to initialise model " -"parameters for XGBoost. As a result, let's return an empty tensor in " -":code:`get_parameters` when it is called by the server at the first " -"round." +"specified random weights. In this case, we do not use ``get_parameters`` " +"and ``set_parameters`` to initialise model parameters for XGBoost. As a " +"result, let's return an empty tensor in ``get_parameters`` when it is " +"called by the server at the first round." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:262 +#: ../../source/tutorial-quickstart-xgboost.rst:278 msgid "" -"In :code:`fit`, at the first round, we call :code:`xgb.train()` to build " -"up the first set of trees. From the second round, we load the global " -"model sent from server to new build Booster object, and then update model" -" weights on local training data with function :code:`local_boost` as " -"follows:" +"In ``fit``, at the first round, we call ``xgb.train()`` to build up the " +"first set of trees. From the second round, we load the global model sent " +"from server to new build Booster object, and then update model weights on" +" local training data with function ``local_boost`` as follows:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:281 +#: ../../source/tutorial-quickstart-xgboost.rst:298 msgid "" -"Given :code:`num_local_round`, we update trees by calling " -":code:`bst_input.update` method. After training, the last " -":code:`N=num_local_round` trees will be extracted to send to the server." +"Given ``num_local_round``, we update trees by calling " +"``bst_input.update`` method. After training, the last " +"``N=num_local_round`` trees will be extracted to send to the server." 
msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:313 +#: ../../source/tutorial-quickstart-xgboost.rst:330 msgid "" -"In :code:`evaluate`, after loading the global model, we call " -":code:`bst.eval_set` function to conduct evaluation on valid set. The AUC" -" value will be returned." +"In ``evaluate``, after loading the global model, we call ``bst.eval_set``" +" function to conduct evaluation on valid set. The AUC value will be " +"returned." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:316 +#: ../../source/tutorial-quickstart-xgboost.rst:333 #, fuzzy msgid "" -"Now, we can create an instance of our class :code:`XgbClient` and add one" -" line to actually run this client:" +"Now, we can create an instance of our class ``XgbClient`` and add one " +"line to actually run this client:" msgstr "" "Nous pouvons maintenant créer une instance de notre classe " ":code:`MnistClient` et ajouter une ligne pour exécuter ce client :" -#: ../../source/tutorial-quickstart-xgboost.rst:332 +#: ../../source/tutorial-quickstart-xgboost.rst:350 #, fuzzy msgid "" -"That's it for the client. We only have to implement :code:`Client` and " -"call :code:`fl.client.start_client()`. The string :code:`\"[::]:8080\"` " -"tells the client which server to connect to. In our case we can run the " -"server and the client on the same machine, therefore we use " -":code:`\"[::]:8080\"`. If we run a truly federated workload with the " -"server and clients running on different machines, all that needs to " -"change is the :code:`server_address` we point the client at." +"That's it for the client. We only have to implement ``Client`` and call " +"``fl.client.start_client()``. The string ``\"[::]:8080\"`` tells the " +"client which server to connect to. In our case we can run the server and " +"the client on the same machine, therefore we use ``\"[::]:8080\"``. If we" +" run a truly federated workload with the server and clients running on " +"different machines, all that needs to change is the ``server_address`` we" +" point the client at." msgstr "" "C'est tout pour le client. Il nous suffit d'implémenter :code:`Client` ou" " :code:`NumPyClient` et d'appeler :code:`fl.client.start_client()`. La " @@ -26065,7 +26499,7 @@ msgstr "" "différentes, tout ce qui doit changer est l'adresse " ":code:`server_address` vers laquelle nous dirigeons le client." -#: ../../source/tutorial-quickstart-xgboost.rst:343 +#: ../../source/tutorial-quickstart-xgboost.rst:360 #, fuzzy msgid "" "These updates are then sent to the *server* which will aggregate them to " @@ -26079,90 +26513,88 @@ msgstr "" "cette version améliorée du modèle à chaque *client*. Un cycle complet de " "mises à jour de poids s'appelle un *round*." -#: ../../source/tutorial-quickstart-xgboost.rst:346 +#: ../../source/tutorial-quickstart-xgboost.rst:364 #, fuzzy msgid "" -"In a file named :code:`server.py`, import Flower and FedXgbBagging from " -":code:`flwr.server.strategy`." +"In a file named ``server.py``, import Flower and FedXgbBagging from " +"``flwr.server.strategy``." msgstr "" "Dans un fichier appelé :code:`client.py`, importe Flower et les paquets " "liés au MXNet :" -#: ../../source/tutorial-quickstart-xgboost.rst:348 +#: ../../source/tutorial-quickstart-xgboost.rst:367 msgid "We first define a strategy for XGBoost bagging aggregation." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:380 +#: ../../source/tutorial-quickstart-xgboost.rst:401 msgid "" -"We use two clients for this example. 
An " -":code:`evaluate_metrics_aggregation` function is defined to collect and " -"wighted average the AUC values from clients. The :code:`config_func` " -"function is to return the current FL round number to client's " -":code:`fit()` and :code:`evaluate()` methods." +"We use two clients for this example. An ``evaluate_metrics_aggregation`` " +"function is defined to collect and wighted average the AUC values from " +"clients. The ``config_func`` function is to return the current FL round " +"number to client's ``fit()`` and ``evaluate()`` methods." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:384 +#: ../../source/tutorial-quickstart-xgboost.rst:406 #, fuzzy msgid "Then, we start the server:" msgstr "Démarrer le serveur" -#: ../../source/tutorial-quickstart-xgboost.rst:396 +#: ../../source/tutorial-quickstart-xgboost.rst:418 msgid "Tree-based bagging aggregation" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:398 +#: ../../source/tutorial-quickstart-xgboost.rst:420 msgid "" "You must be curious about how bagging aggregation works. Let's look into " "the details." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:400 +#: ../../source/tutorial-quickstart-xgboost.rst:422 msgid "" -"In file :code:`flwr.server.strategy.fedxgb_bagging.py`, we define " -":code:`FedXgbBagging` inherited from :code:`flwr.server.strategy.FedAvg`." -" Then, we override the :code:`aggregate_fit`, :code:`aggregate_evaluate` " -"and :code:`evaluate` methods as follows:" +"In file ``flwr.server.strategy.fedxgb_bagging.py``, we define " +"``FedXgbBagging`` inherited from ``flwr.server.strategy.FedAvg``. Then, " +"we override the ``aggregate_fit``, ``aggregate_evaluate`` and " +"``evaluate`` methods as follows:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:496 +#: ../../source/tutorial-quickstart-xgboost.rst:519 msgid "" -"In :code:`aggregate_fit`, we sequentially aggregate the clients' XGBoost " -"trees by calling :code:`aggregate()` function:" +"In ``aggregate_fit``, we sequentially aggregate the clients' XGBoost " +"trees by calling ``aggregate()`` function:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:555 +#: ../../source/tutorial-quickstart-xgboost.rst:579 msgid "" "In this function, we first fetch the number of trees and the number of " "parallel trees for the current and previous model by calling " -":code:`_get_tree_nums`. Then, the fetched information will be aggregated." -" After that, the trees (containing model weights) are aggregated to " +"``_get_tree_nums``. Then, the fetched information will be aggregated. " +"After that, the trees (containing model weights) are aggregated to " "generate a new tree model." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:560 +#: ../../source/tutorial-quickstart-xgboost.rst:584 msgid "" "After traversal of all clients' models, a new global model is generated, " "followed by the serialisation, and sending back to each client." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:565 +#: ../../source/tutorial-quickstart-xgboost.rst:588 msgid "Launch Federated XGBoost!" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:641 +#: ../../source/tutorial-quickstart-xgboost.rst:664 msgid "" "Congratulations! You've successfully built and run your first federated " -"XGBoost system. The AUC values can be checked in " -":code:`metrics_distributed`. One can see that the average AUC increases " -"over FL rounds." +"XGBoost system. The AUC values can be checked in ``metrics_distributed``." 
+" One can see that the average AUC increases over FL rounds." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:646 +#: ../../source/tutorial-quickstart-xgboost.rst:668 #, fuzzy msgid "" "The full `source code `_ for this example can be found in :code:`examples" -"/xgboost-quickstart`." +"/xgboost-quickstart/>`_ for this example can be found in ``examples" +"/xgboost-quickstart``." msgstr "" "Félicitations ! Tu as réussi à construire et à faire fonctionner ton " "premier système d'apprentissage fédéré. Le code source complet " @@ -26170,11 +26602,11 @@ msgstr "" "mxnet/client.py>`_ de cet exemple se trouve dans :code:`examples" "/quickstart-mxnet`." -#: ../../source/tutorial-quickstart-xgboost.rst:650 +#: ../../source/tutorial-quickstart-xgboost.rst:673 msgid "Comprehensive Federated XGBoost" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:652 +#: ../../source/tutorial-quickstart-xgboost.rst:675 msgid "" "Now that you have known how federated XGBoost work with Flower, it's time" " to run some more comprehensive experiments by customising the " @@ -26187,12 +26619,12 @@ msgid "" "client cohorts in a resource-aware manner. Let's take a look!" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:659 +#: ../../source/tutorial-quickstart-xgboost.rst:685 #, fuzzy msgid "Cyclic training" msgstr "Formation centralisée" -#: ../../source/tutorial-quickstart-xgboost.rst:661 +#: ../../source/tutorial-quickstart-xgboost.rst:687 msgid "" "In addition to bagging aggregation, we offer a cyclic training scheme, " "which performs FL in a client-by-client fashion. Instead of aggregating " @@ -26202,183 +26634,181 @@ msgid "" "for next round's boosting." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:665 -msgid "" -"To do this, we first customise a :code:`ClientManager` in " -":code:`server_utils.py`:" +#: ../../source/tutorial-quickstart-xgboost.rst:693 +msgid "To do this, we first customise a ``ClientManager`` in ``server_utils.py``:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:705 +#: ../../source/tutorial-quickstart-xgboost.rst:733 msgid "" -"The customised :code:`ClientManager` samples all available clients in " -"each FL round based on the order of connection to the server. Then, we " -"define a new strategy :code:`FedXgbCyclic` in " -":code:`flwr.server.strategy.fedxgb_cyclic.py`, in order to sequentially " +"The customised ``ClientManager`` samples all available clients in each FL" +" round based on the order of connection to the server. Then, we define a " +"new strategy ``FedXgbCyclic`` in " +"``flwr.server.strategy.fedxgb_cyclic.py``, in order to sequentially " "select only one client in given round and pass the received model to next" " client." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:746 +#: ../../source/tutorial-quickstart-xgboost.rst:775 msgid "" -"Unlike the original :code:`FedAvg`, we don't perform aggregation here. " +"Unlike the original ``FedAvg``, we don't perform aggregation here. " "Instead, we just make a copy of the received client model as global model" -" by overriding :code:`aggregate_fit`." +" by overriding ``aggregate_fit``." 
msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:749 +#: ../../source/tutorial-quickstart-xgboost.rst:778 msgid "" -"Also, the customised :code:`configure_fit` and :code:`configure_evaluate`" -" methods ensure the clients to be sequentially selected given FL round:" +"Also, the customised ``configure_fit`` and ``configure_evaluate`` methods" +" ensure the clients to be sequentially selected given FL round:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:813 +#: ../../source/tutorial-quickstart-xgboost.rst:840 msgid "Customised data partitioning" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:815 +#: ../../source/tutorial-quickstart-xgboost.rst:842 msgid "" -"In :code:`dataset.py`, we have a function :code:`instantiate_partitioner`" -" to instantiate the data partitioner based on the given " -":code:`num_partitions` and :code:`partitioner_type`. Currently, we " -"provide four supported partitioner type to simulate the uniformity/non-" -"uniformity in data quantity (uniform, linear, square, exponential)." +"In ``dataset.py``, we have a function ``instantiate_partitioner`` to " +"instantiate the data partitioner based on the given ``num_partitions`` " +"and ``partitioner_type``. Currently, we provide four supported " +"partitioner type to simulate the uniformity/non-uniformity in data " +"quantity (uniform, linear, square, exponential)." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:846 +#: ../../source/tutorial-quickstart-xgboost.rst:873 #, fuzzy msgid "Customised centralised/distributed evaluation" msgstr "Évaluation centralisée" -#: ../../source/tutorial-quickstart-xgboost.rst:848 +#: ../../source/tutorial-quickstart-xgboost.rst:875 msgid "" "To facilitate centralised evaluation, we define a function in " -":code:`server_utils.py`:" +"``server_utils.py``:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:880 +#: ../../source/tutorial-quickstart-xgboost.rst:907 msgid "" "This function returns a evaluation function which instantiates a " -":code:`Booster` object and loads the global model weights to it. The " -"evaluation is conducted by calling :code:`eval_set()` method, and the " -"tested AUC value is reported." +"``Booster`` object and loads the global model weights to it. The " +"evaluation is conducted by calling ``eval_set()`` method, and the tested " +"AUC value is reported." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:883 +#: ../../source/tutorial-quickstart-xgboost.rst:911 msgid "" "As for distributed evaluation on the clients, it's same as the quick-" -"start example by overriding the :code:`evaluate()` method insides the " -":code:`XgbClient` class in :code:`client_utils.py`." +"start example by overriding the ``evaluate()`` method insides the " +"``XgbClient`` class in ``client_utils.py``." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:887 +#: ../../source/tutorial-quickstart-xgboost.rst:916 #, fuzzy msgid "Flower simulation" msgstr "Simulation de moniteur" -#: ../../source/tutorial-quickstart-xgboost.rst:888 +#: ../../source/tutorial-quickstart-xgboost.rst:918 msgid "" -"We also provide an example code (:code:`sim.py`) to use the simulation " +"We also provide an example code (``sim.py``) to use the simulation " "capabilities of Flower to simulate federated XGBoost training on either a" " single machine or a cluster of machines." 
msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:922 +#: ../../source/tutorial-quickstart-xgboost.rst:954 msgid "" -"After importing all required packages, we define a :code:`main()` " -"function to perform the simulation process:" +"After importing all required packages, we define a ``main()`` function to" +" perform the simulation process:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:977 +#: ../../source/tutorial-quickstart-xgboost.rst:1010 msgid "" "We first load the dataset and perform data partitioning, and the pre-" -"processed data is stored in a :code:`list`. After the simulation begins, " -"the clients won't need to pre-process their partitions again." +"processed data is stored in a ``list``. After the simulation begins, the " +"clients won't need to pre-process their partitions again." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:980 +#: ../../source/tutorial-quickstart-xgboost.rst:1014 msgid "Then, we define the strategies and other hyper-parameters:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1031 +#: ../../source/tutorial-quickstart-xgboost.rst:1065 msgid "" "After that, we start the simulation by calling " -":code:`fl.simulation.start_simulation`:" +"``fl.simulation.start_simulation``:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1051 +#: ../../source/tutorial-quickstart-xgboost.rst:1085 msgid "" -"One of key parameters for :code:`start_simulation` is :code:`client_fn` " -"which returns a function to construct a client. We define it as follows:" +"One of key parameters for ``start_simulation`` is ``client_fn`` which " +"returns a function to construct a client. We define it as follows:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1094 +#: ../../source/tutorial-quickstart-xgboost.rst:1126 msgid "Arguments parser" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1096 +#: ../../source/tutorial-quickstart-xgboost.rst:1128 msgid "" -"In :code:`utils.py`, we define the arguments parsers for clients, server " -"and simulation, allowing users to specify different experimental " -"settings. Let's first see the sever side:" +"In ``utils.py``, we define the arguments parsers for clients, server and " +"simulation, allowing users to specify different experimental settings. " +"Let's first see the sever side:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1142 +#: ../../source/tutorial-quickstart-xgboost.rst:1175 msgid "" "This allows user to specify training strategies / the number of total " "clients / FL rounds / participating clients / clients for evaluation, and" -" evaluation fashion. Note that with :code:`--centralised-eval`, the sever" -" will do centralised evaluation and all functionalities for client " +" evaluation fashion. Note that with ``--centralised-eval``, the sever " +"will do centralised evaluation and all functionalities for client " "evaluation will be disabled." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1146 +#: ../../source/tutorial-quickstart-xgboost.rst:1180 msgid "Then, the argument parser on client side:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1200 +#: ../../source/tutorial-quickstart-xgboost.rst:1234 msgid "" "This defines various options for client data partitioning. Besides, " "clients also have an option to conduct evaluation on centralised test set" -" by setting :code:`--centralised-eval`, as well as an option to perform " -"scaled learning rate based on the number of clients by setting :code" -":`--scaled-lr`." 
+" by setting ``--centralised-eval``, as well as an option to perform " +"scaled learning rate based on the number of clients by setting " +"``--scaled-lr``." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1204 +#: ../../source/tutorial-quickstart-xgboost.rst:1239 msgid "We also have an argument parser for simulation:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1282 +#: ../../source/tutorial-quickstart-xgboost.rst:1317 msgid "This integrates all arguments for both client and server sides." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1285 +#: ../../source/tutorial-quickstart-xgboost.rst:1320 #, fuzzy msgid "Example commands" msgstr "Exemples de PyTorch" -#: ../../source/tutorial-quickstart-xgboost.rst:1287 +#: ../../source/tutorial-quickstart-xgboost.rst:1322 msgid "" "To run a centralised evaluated experiment with bagging strategy on 5 " "clients with exponential distribution for 50 rounds, we first start the " "server as below:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1294 +#: ../../source/tutorial-quickstart-xgboost.rst:1329 #, fuzzy msgid "Then, on each client terminal, we start the clients:" msgstr "Ouvre un autre terminal et démarre le deuxième client :" -#: ../../source/tutorial-quickstart-xgboost.rst:1300 +#: ../../source/tutorial-quickstart-xgboost.rst:1335 msgid "To run the same experiment with Flower simulation:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1306 +#: ../../source/tutorial-quickstart-xgboost.rst:1341 #, fuzzy msgid "" "The full `code `_ for this comprehensive example can be found in" -" :code:`examples/xgboost-comprehensive`." +" ``examples/xgboost-comprehensive``." msgstr "" "Félicitations ! Tu as réussi à construire et à faire fonctionner ton " "premier système d'apprentissage fédéré. Le code source complet " @@ -28377,7 +28807,7 @@ msgstr "" "chose d'autre, comme la régression linéaire classique." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:41 -msgid "|e5918c1c06a4434bbe4bf49235e40059|" +msgid "|3a7aceef05f0421794726ac54aaf12fd|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:109 @@ -28396,7 +28826,7 @@ msgstr "" " Go." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:53 -msgid "|c0165741bd1944f09ec55ce49032377d|" +msgid "|d741075f8e624331b42c0746f7d258a0|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:111 @@ -28427,7 +28857,7 @@ msgstr "" "chanson." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:67 -msgid "|0a0ac9427ac7487b8e52d75ed514f04e|" +msgid "|8fc92d668bcb42b8bda55143847f2329|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:113 @@ -28448,7 +28878,7 @@ msgstr "" " données pour la même tâche." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:79 -msgid "|5defee3ea4ca40d99fcd3e4ea045be25|" +msgid "|1c705d833a024f22adcaeb8ae3d13b0b|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:115 @@ -28469,7 +28899,7 @@ msgstr "" "cloud." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:91 -msgid "|74f26ca701254d3db57d7899bd91eb55|" +msgid "|77a037b546a84262b608e04bc82a2c96|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:117 @@ -28490,7 +28920,7 @@ msgstr "" "appuyés." 
#: ../../source/tutorial-series-what-is-federated-learning.ipynb:103 -msgid "|bda79f21f8154258a40e5766b2634ad7|" +msgid "|f568e24c9fb0435690ac628210a4be96|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:119 @@ -28515,7 +28945,7 @@ msgstr "" " sur un serveur centralisé." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:138 -msgid "|89d30862e62e4f9989e193483a08680a|" +msgid "|a7bf029981514e2593aa3a2b48c9d76a|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:173 @@ -28534,7 +28964,7 @@ msgstr "" "suffisantes pour former un bon modèle." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:150 -msgid "|77e9918671c54b4f86e01369c0785ce8|" +msgid "|3f645ad807f84be8b1f8f3267173939c|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:175 @@ -28756,7 +29186,7 @@ msgstr "" "partir d'un point de contrôle précédemment sauvegardé." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:210 -msgid "|7e4ccef37cc94148a067107b34eb7447|" +msgid "|a06a9dbd603f45819afd8e8cfc3c4b8f|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:307 @@ -28791,7 +29221,7 @@ msgstr "" "rendements décroissants." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:225 -msgid "|28e47e4cded14479a0846c8e5f22c872|" +msgid "|edcf9a04d96e42608fd01a333375febe|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:309 @@ -28824,7 +29254,7 @@ msgstr "" "données locales, ou même de quelques étapes (mini-batchs)." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:240 -msgid "|4b8c5d1afa144294b76ffc76e4658a38|" +msgid "|3dae22fe797043968e2b7aa7073c78bd|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:311 @@ -28855,7 +29285,7 @@ msgstr "" " l'entraînement local." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:255 -msgid "|9dbdb3a0f6cb4a129fac863eaa414c17|" +msgid "|ba178f75267d4ad8aa7363f20709195f|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:313 @@ -28914,7 +29344,7 @@ msgstr "" "times as much as each of the 100 examples." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:273 -msgid "|81749d0ac0834c36a83bd38f433fea31|" +msgid "|c380c750bfd2444abce039a1c6fa8e60|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:315 @@ -29057,7 +29487,7 @@ msgstr "" "quel cadre de ML et n'importe quel langage de programmation." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:334 -msgid "|ed9aae51da70428eab7eef32f21e819e|" +msgid "|e7cec00a114b48359935c6510595132e|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:340 @@ -30268,16 +30698,6 @@ msgstr "" #~ "typically display information on your " #~ "terminal as follows:" #~ msgstr "" -#~ "contenant des informations pertinentes, " -#~ "notamment : le niveau du message " -#~ "de journal (par exemple :code:`INFO`, " -#~ ":code:`DEBUG`), un horodatage, la ligne " -#~ "à partir de laquelle l'enregistrement a" -#~ " eu lieu, ainsi que le message " -#~ "de journal lui-même. De cette " -#~ "façon, le logger afficherait typiquement " -#~ "des informations sur ton terminal comme" -#~ " suit :" #~ msgid "Saving log to file" #~ msgstr "Enregistrement du journal dans un fichier" @@ -30298,22 +30718,6 @@ msgstr "" #~ "`_" #~ " function. 
For example:" #~ msgstr "" -#~ "Par défaut, le journal de Flower " -#~ "est affiché dans le terminal à " -#~ "partir duquel tu as lancé ta " -#~ "charge de travail d'apprentissage fédéré. " -#~ "Cela s'applique à la fois à la " -#~ "fédération basée sur gRPC (c'est-à-dire " -#~ "lorsque tu fais :code:`fl.server.start_server`) " -#~ "et à l'utilisation du " -#~ ":code:`VirtualClientEngine` (c'est-à-dire lorsque tu" -#~ " fais :code:`fl.simulation.start_simulation`). Dans " -#~ "certaines situations, tu peux vouloir " -#~ "sauvegarder ce journal sur le disque." -#~ " Tu peux le faire en appelant " -#~ "la fonction `fl.common.logger.configure() " -#~ "`_." -#~ " Par exemple :" #~ msgid "" #~ "With the above, Flower will record " @@ -30384,24 +30788,6 @@ msgstr "" #~ "should you wish to backup or " #~ "analyze the logs somewhere else." #~ msgstr "" -#~ "La fonction :code:`fl.common.logger.configure` " -#~ "permet également de spécifier un hôte" -#~ " vers lequel les journaux peuvent " -#~ "être envoyés (via :code:`POST`) par " -#~ "l'intermédiaire d'un :code:`logging.handler.HTTPHandler`" -#~ " natif de Python. Il s'agit d'une " -#~ "fonction particulièrement utile dans les " -#~ "charges de travail d'apprentissage fédéré " -#~ "basées sur :code:`gRPC` où la collecte" -#~ " des journaux de toutes les entités" -#~ " (c'est-à-dire le serveur et les " -#~ "clients) pourrait s'avérer fastidieuse. Notez" -#~ " que dans la simulation Flower, le" -#~ " serveur affiche automatiquement tous les" -#~ " journaux. Vous pouvez toujours spécifier" -#~ " un :code:`HTTPHandler` si vous souhaitez" -#~ " sauvegarder ou analyser les journaux " -#~ "à un autre endroit." #~ msgid "Enable SSL connections" #~ msgstr "Collecte centralisée des données" @@ -31300,24 +31686,9 @@ msgstr "" #~ "`_ to " #~ "learn more about the app." #~ msgstr "" -#~ "Créons un nouveau projet d'application " -#~ "dans Xcode et ajoutons :code:`flwr` " -#~ "comme dépendance dans ton projet. Pour" -#~ " notre application, nous stockerons la " -#~ "logique de notre application dans " -#~ ":code:`FLiOSModel.swift` et les éléments de" -#~ " l'interface utilisateur dans " -#~ ":code:`ContentView.swift`.Nous nous concentrerons " -#~ "davantage sur :code:`FLiOSModel.swift` dans ce" -#~ " quickstart. N'hésite pas à te " -#~ "référer à l'`exemple de code complet " -#~ "`_ pour" -#~ " en savoir plus sur l'application." #~ msgid "Import Flower and CoreML related packages in :code:`FLiOSModel.swift`:" #~ msgstr "" -#~ "Importe les paquets liés à Flower " -#~ "et CoreML dans :code:`FLiOSModel.swift` :" #~ msgid "" #~ "Then add the mlmodel to the " @@ -31334,20 +31705,6 @@ msgstr "" #~ " preprocessing is done inside " #~ ":code:`DataLoader.swift`." #~ msgstr "" -#~ "Ensuite, ajoute le mlmodel au projet " -#~ "simplement par glisser-déposer, le " -#~ "mlmodel sera regroupé à l'intérieur de" -#~ " l'application lors du déploiement sur " -#~ "ton appareil iOS. Nous devons passer " -#~ "l'url pour accéder au mlmodel et " -#~ "exécuter les processus d'apprentissage " -#~ "automatique CoreML, elle peut être " -#~ "récupérée en appelant la fonction " -#~ ":code:`Bundle.main.url`. Pour l'ensemble de " -#~ "données MNIST, nous devons le prétraiter" -#~ " dans l'objet :code:`MLBatchProvider`. Le " -#~ "prétraitement est effectué à l'intérieur " -#~ "de :code:`DataLoader.swift`." #~ msgid "" #~ "Since CoreML does not allow the " @@ -31375,10 +31732,6 @@ msgstr "" #~ " by passing our Flower client to " #~ "the function :code:`startFlwrGRPC`." 
#~ msgstr "" -#~ "Lance ensuite le client Flower gRPC " -#~ "et commence à communiquer avec le " -#~ "serveur en passant notre client Flower" -#~ " à la fonction :code:`startFlwrGRPC`." #~ msgid "" #~ "That's it for the client. We only" @@ -31393,17 +31746,6 @@ msgstr "" #~ "button to start the federated learning" #~ " process." #~ msgstr "" -#~ "C'est tout pour le client. Il nous" -#~ " suffit d'implémenter :code:`Client` ou " -#~ "d'appeler le :code:`MLFlwrClient` fourni et" -#~ " d'appeler :code:`startFlwrGRPC()`. L'attribut " -#~ ":code:`hostname` et :code:`port` indique au" -#~ " client à quel serveur se connecter." -#~ " Pour ce faire, il suffit d'entrer" -#~ " le nom d'hôte et le port dans" -#~ " l'application avant de cliquer sur " -#~ "le bouton de démarrage pour lancer " -#~ "le processus d'apprentissage fédéré." #~ msgid "" #~ "Once the server is running we can" @@ -36805,3 +37147,1285 @@ msgstr "" #~ msgid "|c00bf2750bc24d229737a0fe1395f0fc|" #~ msgstr "" +#~ msgid "run\\_client\\_app" +#~ msgstr "" + +#~ msgid "run\\_supernode" +#~ msgstr "flower-superlink" + +#~ msgid "Retrieve the corresponding layout by the string key." +#~ msgstr "" + +#~ msgid "" +#~ "When there isn't an exact match, " +#~ "all the existing keys in the " +#~ "layout map will be treated as a" +#~ " regex and map against the input " +#~ "key again. The first match will be" +#~ " returned, based on the key insertion" +#~ " order. Return None if there isn't" +#~ " any match found." +#~ msgstr "" + +#~ msgid "the string key as the query for the layout." +#~ msgstr "" + +#~ msgid "Corresponding layout based on the query." +#~ msgstr "" + +#~ msgid "run\\_server\\_app" +#~ msgstr "" + +#~ msgid "run\\_superlink" +#~ msgstr "flower-superlink" + +#~ msgid "" +#~ ":py:obj:`start_simulation `\\" +#~ " \\(\\*\\, client\\_fn\\, num\\_clients\\)" +#~ msgstr "" + +#~ msgid "" +#~ "A function creating `Client` instances. " +#~ "The function must have the signature " +#~ "`client_fn(context: Context). It should return" +#~ " a single client instance of type " +#~ "`Client`. Note that the created client" +#~ " instances are ephemeral and will " +#~ "often be destroyed after a single " +#~ "method invocation. Since client instances " +#~ "are not long-lived, they should " +#~ "not attempt to carry state over " +#~ "method invocations. Any state required " +#~ "by the instance (model, dataset, " +#~ "hyperparameters, ...) should be (re-)created" +#~ " in either the call to `client_fn`" +#~ " or the call to any of the " +#~ "client methods (e.g., load evaluation " +#~ "data in the `evaluate` method itself)." +#~ msgstr "" + +#~ msgid "The total number of clients in this simulation." +#~ msgstr "" + +#~ msgid "" +#~ "UNSUPPORTED, WILL BE REMOVED. USE " +#~ "`num_clients` INSTEAD. List `client_id`s for" +#~ " each client. This is only required" +#~ " if `num_clients` is not set. Setting" +#~ " both `num_clients` and `clients_ids` with" +#~ " `len(clients_ids)` not equal to " +#~ "`num_clients` generates an error. Using " +#~ "this argument will raise an error." +#~ msgstr "" + +#~ msgid "" +#~ "CPU and GPU resources for a single" +#~ " client. Supported keys are `num_cpus` " +#~ "and `num_gpus`. To understand the GPU" +#~ " utilization caused by `num_gpus`, as " +#~ "well as using custom resources, please" +#~ " consult the Ray documentation." +#~ msgstr "" + +#~ msgid "" +#~ "Optionally specify the type of actor " +#~ "to use. 
The actor object, which " +#~ "persists throughout the simulation, will " +#~ "be the process in charge of " +#~ "executing a ClientApp wrapping input " +#~ "argument `client_fn`." +#~ msgstr "" + +#~ msgid "" +#~ "If you want to create your own " +#~ "Actor classes, you might need to " +#~ "pass some input argument. You can " +#~ "use this dictionary for such purpose." +#~ msgstr "" + +#~ msgid "" +#~ "(default: \"DEFAULT\") Optional string " +#~ "(\"DEFAULT\" or \"SPREAD\") for the VCE" +#~ " to choose in which node the " +#~ "actor is placed. If you are an " +#~ "advanced user needed more control you" +#~ " can use lower-level scheduling " +#~ "strategies to pin actors to specific " +#~ "compute nodes (e.g. via " +#~ "NodeAffinitySchedulingStrategy). Please note this" +#~ " is an advanced feature. For all " +#~ "details, please refer to the Ray " +#~ "documentation: https://docs.ray.io/en/latest/ray-" +#~ "core/scheduling/index.html" +#~ msgstr "" + +#~ msgid "" +#~ "Check out this Federated Learning " +#~ "quickstart tutorial for using Flower " +#~ "with FastAI to train a vision " +#~ "model on CIFAR-10." +#~ msgstr "" + +#~ msgid "Let's build a federated learning system using fastai and Flower!" +#~ msgstr "" +#~ "Construisons un système d'apprentissage fédéré" +#~ " en utilisant fastai et Flower !" + +#~ msgid "" +#~ "Please refer to the `full code " +#~ "example `_ to learn more." +#~ msgstr "" +#~ "Réfère-toi à l'exemple de code " +#~ "complet `_ pour en savoir plus." + +#~ msgid "" +#~ "Check out this Federating Learning " +#~ "quickstart tutorial for using Flower " +#~ "with HuggingFace Transformers in order " +#~ "to fine-tune an LLM." +#~ msgstr "" + +#~ msgid "" +#~ "Let's build a federated learning system" +#~ " using Hugging Face Transformers and " +#~ "Flower!" +#~ msgstr "" +#~ "Construisons un système d'apprentissage fédéré" +#~ " à l'aide des transformateurs Hugging " +#~ "Face et de Flower !" + +#~ msgid "Dependencies" +#~ msgstr "Dépendances" + +#~ msgid "" +#~ "To follow along this tutorial you " +#~ "will need to install the following " +#~ "packages: :code:`datasets`, :code:`evaluate`, " +#~ ":code:`flwr`, :code:`torch`, and " +#~ ":code:`transformers`. This can be done " +#~ "using :code:`pip`:" +#~ msgstr "" +#~ "Pour suivre ce tutoriel, tu devras " +#~ "installer les paquets suivants : " +#~ ":code:`datasets`, :code:`evaluate`, :code:`flwr`, " +#~ ":code:`torch`, et :code:`transformers`. Cela " +#~ "peut être fait en utilisant :code:`pip`" +#~ " :" + +#~ msgid "Standard Hugging Face workflow" +#~ msgstr "Flux de travail standard pour le visage" + +#~ msgid "Handling the data" +#~ msgstr "Traitement des données" + +#~ msgid "" +#~ "To fetch the IMDB dataset, we will" +#~ " use Hugging Face's :code:`datasets` " +#~ "library. We then need to tokenize " +#~ "the data and create :code:`PyTorch` " +#~ "dataloaders, this is all done in " +#~ "the :code:`load_data` function:" +#~ msgstr "" +#~ "Pour récupérer le jeu de données " +#~ "IMDB, nous utiliserons la bibliothèque " +#~ ":code:`datasets` de Hugging Face. Nous " +#~ "devons ensuite tokeniser les données et" +#~ " créer des :code:`PyTorch` dataloaders, ce" +#~ " qui est fait dans la fonction " +#~ ":code:`load_data` :" + +#~ msgid "Training and testing the model" +#~ msgstr "Former et tester le modèle" + +#~ msgid "" +#~ "Once we have a way of creating " +#~ "our trainloader and testloader, we can" +#~ " take care of the training and " +#~ "testing. 
This is very similar to " +#~ "any :code:`PyTorch` training or testing " +#~ "loop:" +#~ msgstr "" +#~ "Une fois que nous avons trouvé un" +#~ " moyen de créer notre trainloader et" +#~ " notre testloader, nous pouvons nous " +#~ "occuper de l'entraînement et du test." +#~ " C'est très similaire à n'importe " +#~ "quelle boucle d'entraînement ou de test" +#~ " :code:`PyTorch` :" + +#~ msgid "Creating the model itself" +#~ msgstr "Créer le modèle lui-même" + +#~ msgid "" +#~ "To create the model itself, we " +#~ "will just load the pre-trained " +#~ "distillBERT model using Hugging Face’s " +#~ ":code:`AutoModelForSequenceClassification` :" +#~ msgstr "" +#~ "Pour créer le modèle lui-même, " +#~ "nous allons simplement charger le modèle" +#~ " distillBERT pré-entraîné en utilisant le" +#~ " :code:`AutoModelForSequenceClassification` de Hugging" +#~ " Face :" + +#~ msgid "Creating the IMDBClient" +#~ msgstr "Création du client IMDBC" + +#~ msgid "" +#~ "To federate our example to multiple " +#~ "clients, we first need to write " +#~ "our Flower client class (inheriting from" +#~ " :code:`flwr.client.NumPyClient`). This is very" +#~ " easy, as our model is a " +#~ "standard :code:`PyTorch` model:" +#~ msgstr "" +#~ "Pour fédérer notre exemple à plusieurs" +#~ " clients, nous devons d'abord écrire " +#~ "notre classe de client Flower (héritant" +#~ " de :code:`flwr.client.NumPyClient`). C'est très" +#~ " facile, car notre modèle est un " +#~ "modèle :code:`PyTorch` standard :" + +#~ msgid "" +#~ "The :code:`get_parameters` function lets the" +#~ " server get the client's parameters. " +#~ "Inversely, the :code:`set_parameters` function " +#~ "allows the server to send its " +#~ "parameters to the client. Finally, the" +#~ " :code:`fit` function trains the model " +#~ "locally for the client, and the " +#~ ":code:`evaluate` function tests the model " +#~ "locally and returns the relevant " +#~ "metrics." +#~ msgstr "" +#~ "La fonction :code:`get_parameters` permet au" +#~ " serveur d'obtenir les paramètres du " +#~ "client. Inversement, la fonction " +#~ ":code:`set_parameters` permet au serveur " +#~ "d'envoyer ses paramètres au client. " +#~ "Enfin, la fonction :code:`fit` forme le" +#~ " modèle localement pour le client, et" +#~ " la fonction :code:`evaluate` teste le " +#~ "modèle localement et renvoie les mesures" +#~ " correspondantes." + +#~ msgid "Starting the server" +#~ msgstr "Démarrer le serveur" + +#~ msgid "" +#~ "Now that we have a way to " +#~ "instantiate clients, we need to create" +#~ " our server in order to aggregate " +#~ "the results. Using Flower, this can " +#~ "be done very easily by first " +#~ "choosing a strategy (here, we are " +#~ "using :code:`FedAvg`, which will define " +#~ "the global weights as the average " +#~ "of all the clients' weights at " +#~ "each round) and then using the " +#~ ":code:`flwr.server.start_server` function:" +#~ msgstr "" +#~ "Maintenant que nous avons un moyen " +#~ "d'instancier les clients, nous devons " +#~ "créer notre serveur afin d'agréger les" +#~ " résultats. 
Avec Flower, cela peut " +#~ "être fait très facilement en choisissant" +#~ " d'abord une stratégie (ici, nous " +#~ "utilisons :code:`FedAvg`, qui définira les " +#~ "poids globaux comme la moyenne des " +#~ "poids de tous les clients à chaque" +#~ " tour) et en utilisant ensuite la " +#~ "fonction :code:`flwr.server.start_server` :" + +#~ msgid "" +#~ "The :code:`weighted_average` function is there" +#~ " to provide a way to aggregate " +#~ "the metrics distributed amongst the " +#~ "clients (basically this allows us to " +#~ "display a nice average accuracy and " +#~ "loss for every round)." +#~ msgstr "" +#~ "La fonction :code:`weighted_average` est là" +#~ " pour fournir un moyen d'agréger les" +#~ " mesures réparties entre les clients " +#~ "(en gros, cela nous permet d'afficher" +#~ " une belle moyenne de précision et" +#~ " de perte pour chaque tour)." + +#~ msgid "Putting everything together" +#~ msgstr "Tout assembler" + +#~ msgid "We can now start client instances using:" +#~ msgstr "" +#~ "Nous pouvons maintenant démarrer des " +#~ "instances de clients en utilisant :" + +#~ msgid "" +#~ "And they will be able to connect" +#~ " to the server and start the " +#~ "federated training." +#~ msgstr "" +#~ "Et ils pourront se connecter au " +#~ "serveur et démarrer la formation " +#~ "fédérée." + +#~ msgid "" +#~ "If you want to check out " +#~ "everything put together, you should " +#~ "check out the `full code example " +#~ "`_ ." +#~ msgstr "" +#~ "Si tu veux voir tout ce qui " +#~ "est mis ensemble, tu devrais consulter" +#~ " l'exemple de code complet : " +#~ "[https://github.com/adap/flower/tree/main/examples/quickstart-" +#~ "huggingface](https://github.com/adap/flower/tree/main/examples" +#~ "/quickstart-huggingface)." + +#~ msgid "" +#~ "Of course, this is a very basic" +#~ " example, and a lot can be " +#~ "added or modified, it was just to" +#~ " showcase how simply we could " +#~ "federate a Hugging Face workflow using" +#~ " Flower." +#~ msgstr "" +#~ "Bien sûr, c'est un exemple très " +#~ "basique, et beaucoup de choses peuvent" +#~ " être ajoutées ou modifiées, il " +#~ "s'agissait juste de montrer avec quelle" +#~ " simplicité on pouvait fédérer un " +#~ "flux de travail Hugging Face à " +#~ "l'aide de Flower." + +#~ msgid "" +#~ "Note that in this example we used" +#~ " :code:`PyTorch`, but we could have " +#~ "very well used :code:`TensorFlow`." +#~ msgstr "" +#~ "Notez que dans cet exemple, nous " +#~ "avons utilisé :code:`PyTorch`, mais nous " +#~ "aurions très bien pu utiliser " +#~ ":code:`TensorFlow`." + +#~ msgid "" +#~ "Check out this Federated Learning " +#~ "quickstart tutorial for using Flower " +#~ "with PyTorch Lightning to train an " +#~ "Auto Encoder model on MNIST." +#~ msgstr "" + +#~ msgid "" +#~ "Let's build a horizontal federated " +#~ "learning system using PyTorch Lightning " +#~ "and Flower!" +#~ msgstr "" +#~ "Construisons un système d'apprentissage fédéré" +#~ " en utilisant PyTorch Lightning et " +#~ "Flower !" + +#~ msgid "" +#~ "Please refer to the `full code " +#~ "example `_ to learn " +#~ "more." +#~ msgstr "" +#~ "Réfère-toi à l'exemple de code " +#~ "complet `_ pour en " +#~ "savoir plus." + +#~ msgid "" +#~ "Check out this Federated Learning " +#~ "quickstart tutorial for using Flower " +#~ "with TensorFlow to train a MobilNetV2" +#~ " model on CIFAR-10." +#~ msgstr "" + +#~ msgid "Let's build a federated learning system in less than 20 lines of code!" 
+#~ msgstr "" +#~ "Construisons un système d'apprentissage fédéré" +#~ " en moins de 20 lignes de code" +#~ " !" + +#~ msgid "Before Flower can be imported we have to install it:" +#~ msgstr "Avant de pouvoir importer une fleur, nous devons l'installer :" + +#~ msgid "" +#~ "Since we want to use the Keras " +#~ "API of TensorFlow (TF), we have to" +#~ " install TF as well:" +#~ msgstr "" +#~ "Comme nous voulons utiliser l'API Keras" +#~ " de TensorFlow (TF), nous devons " +#~ "également installer TF :" + +#~ msgid "Next, in a file called :code:`client.py`, import Flower and TensorFlow:" +#~ msgstr "" +#~ "Ensuite, dans un fichier appelé " +#~ ":code:`client.py`, importe Flower et " +#~ "TensorFlow :" + +#~ msgid "" +#~ "We use the Keras utilities of TF" +#~ " to load CIFAR10, a popular colored" +#~ " image classification dataset for machine" +#~ " learning. The call to " +#~ ":code:`tf.keras.datasets.cifar10.load_data()` downloads " +#~ "CIFAR10, caches it locally, and then " +#~ "returns the entire training and test " +#~ "set as NumPy ndarrays." +#~ msgstr "" +#~ "Nous utilisons les utilitaires Keras de" +#~ " TF pour charger CIFAR10, un ensemble" +#~ " de données de classification d'images " +#~ "colorées populaire pour l'apprentissage " +#~ "automatique. L'appel à " +#~ ":code:`tf.keras.datasets.cifar10.load_data()` télécharge " +#~ "CIFAR10, le met en cache localement, " +#~ "puis renvoie l'ensemble d'entraînement et " +#~ "de test sous forme de NumPy " +#~ "ndarrays." + +#~ msgid "" +#~ "Next, we need a model. For the " +#~ "purpose of this tutorial, we use " +#~ "MobilNetV2 with 10 output classes:" +#~ msgstr "" +#~ "Ensuite, nous avons besoin d'un modèle." +#~ " Pour les besoins de ce tutoriel, " +#~ "nous utilisons MobilNetV2 avec 10 " +#~ "classes de sortie :" + +#~ msgid "" +#~ "The Flower server interacts with clients" +#~ " through an interface called " +#~ ":code:`Client`. When the server selects " +#~ "a particular client for training, it " +#~ "sends training instructions over the " +#~ "network. The client receives those " +#~ "instructions and calls one of the " +#~ ":code:`Client` methods to run your code" +#~ " (i.e., to train the neural network" +#~ " we defined earlier)." +#~ msgstr "" +#~ "Le serveur Flower interagit avec les " +#~ "clients par le biais d'une interface " +#~ "appelée :code:`Client`. Lorsque le serveur " +#~ "sélectionne un client particulier pour " +#~ "la formation, il envoie des instructions" +#~ " de formation sur le réseau. Le " +#~ "client reçoit ces instructions et " +#~ "appelle l'une des méthodes :code:`Client` " +#~ "pour exécuter ton code (c'est-à-dire " +#~ "pour former le réseau neuronal que " +#~ "nous avons défini plus tôt)." + +#~ msgid "" +#~ "Flower provides a convenience class " +#~ "called :code:`NumPyClient` which makes it " +#~ "easier to implement the :code:`Client` " +#~ "interface when your workload uses Keras." +#~ " The :code:`NumPyClient` interface defines " +#~ "three methods which can be implemented" +#~ " in the following way:" +#~ msgstr "" +#~ "Flower fournit une classe de commodité" +#~ " appelée :code:`NumPyClient` qui facilite " +#~ "la mise en œuvre de l'interface " +#~ ":code:`Client` lorsque ta charge de " +#~ "travail utilise Keras. 
L'interface " +#~ ":code:`NumPyClient` définit trois méthodes qui" +#~ " peuvent être mises en œuvre de " +#~ "la manière suivante :" + +#~ msgid "" +#~ "We can now create an instance of" +#~ " our class :code:`CifarClient` and add " +#~ "one line to actually run this " +#~ "client:" +#~ msgstr "" +#~ "Nous pouvons maintenant créer une " +#~ "instance de notre classe :code:`CifarClient`" +#~ " et ajouter une ligne pour exécuter" +#~ " ce client :" + +#~ msgid "" +#~ "That's it for the client. We only" +#~ " have to implement :code:`Client` or " +#~ ":code:`NumPyClient` and call " +#~ ":code:`fl.client.start_client()`. If you implement" +#~ " a client of type :code:`NumPyClient` " +#~ "you'll need to first call its " +#~ ":code:`to_client()` method. The string " +#~ ":code:`\"[::]:8080\"` tells the client which" +#~ " server to connect to. In our " +#~ "case we can run the server and " +#~ "the client on the same machine, " +#~ "therefore we use :code:`\"[::]:8080\"`. If " +#~ "we run a truly federated workload " +#~ "with the server and clients running " +#~ "on different machines, all that needs" +#~ " to change is the :code:`server_address`" +#~ " we point the client at." +#~ msgstr "" +#~ "C'est tout pour le client. Il nous" +#~ " suffit d'implémenter :code:`Client` ou " +#~ ":code:`NumPyClient` et d'appeler " +#~ ":code:`fl.client.start_client()`. La chaîne " +#~ ":code:`\"[: :]:8080\"` indique au client " +#~ "à quel serveur se connecter. Dans " +#~ "notre cas, nous pouvons exécuter le " +#~ "serveur et le client sur la même" +#~ " machine, c'est pourquoi nous utilisons " +#~ ":code:`\"[: :]:8080\"`. Si nous exécutons " +#~ "une charge de travail véritablement " +#~ "fédérée avec le serveur et les " +#~ "clients fonctionnant sur des machines " +#~ "différentes, tout ce qui doit changer" +#~ " est l'adresse :code:`server_address` vers " +#~ "laquelle nous dirigeons le client." + +#~ msgid "Each client will have its own dataset." +#~ msgstr "Chaque client aura son propre ensemble de données." + +#~ msgid "" +#~ "You should now see how the " +#~ "training does in the very first " +#~ "terminal (the one that started the " +#~ "server):" +#~ msgstr "" +#~ "Tu devrais maintenant voir comment la" +#~ " formation se déroule dans le tout" +#~ " premier terminal (celui qui a " +#~ "démarré le serveur) :" + +#~ msgid "" +#~ "Congratulations! You've successfully built and" +#~ " run your first federated learning " +#~ "system. The full `source code " +#~ "`_ for this can be " +#~ "found in :code:`examples/quickstart-" +#~ "tensorflow/client.py`." +#~ msgstr "" +#~ "Félicitations ! Tu as réussi à " +#~ "construire et à faire fonctionner ton" +#~ " premier système d'apprentissage fédéré. Le" +#~ " `code source complet " +#~ "`_ pour cela se trouve" +#~ " dans :code:`examples/quickstart-tensorflow/client.py`." 
+ +#~ msgid "|e5918c1c06a4434bbe4bf49235e40059|" +#~ msgstr "" + +#~ msgid "|c0165741bd1944f09ec55ce49032377d|" +#~ msgstr "" + +#~ msgid "|0a0ac9427ac7487b8e52d75ed514f04e|" +#~ msgstr "" + +#~ msgid "|5defee3ea4ca40d99fcd3e4ea045be25|" +#~ msgstr "" + +#~ msgid "|74f26ca701254d3db57d7899bd91eb55|" +#~ msgstr "" + +#~ msgid "|bda79f21f8154258a40e5766b2634ad7|" +#~ msgstr "" + +#~ msgid "|89d30862e62e4f9989e193483a08680a|" +#~ msgstr "" + +#~ msgid "|77e9918671c54b4f86e01369c0785ce8|" +#~ msgstr "" + +#~ msgid "|7e4ccef37cc94148a067107b34eb7447|" +#~ msgstr "" + +#~ msgid "|28e47e4cded14479a0846c8e5f22c872|" +#~ msgstr "" + +#~ msgid "|4b8c5d1afa144294b76ffc76e4658a38|" +#~ msgstr "" + +#~ msgid "|9dbdb3a0f6cb4a129fac863eaa414c17|" +#~ msgstr "" + +#~ msgid "|81749d0ac0834c36a83bd38f433fea31|" +#~ msgstr "" + +#~ msgid "|ed9aae51da70428eab7eef32f21e819e|" +#~ msgstr "" + +#~ msgid "|e87b69b2ada74ea49412df16f4a0b9cc|" +#~ msgstr "" + +#~ msgid "|33cacb7d985c4906b348515c1a5cd993|" +#~ msgstr "" + +#~ msgid "|cc080a555947492fa66131dc3a967603|" +#~ msgstr "" + +#~ msgid "|085c3e0fb8664c6aa06246636524b20b|" +#~ msgstr "" + +#~ msgid "|bfe69c74e48c45d49b50251c38c2a019|" +#~ msgstr "" + +#~ msgid "|ebbecd651f0348d99c6511ea859bf4ca|" +#~ msgstr "" + +#~ msgid "|163117eb654a4273babba413cf8065f5|" +#~ msgstr "" + +#~ msgid "|452ac3ba453b4cd1be27be1ba7560d64|" +#~ msgstr "" + +#~ msgid "|f403fcd69e4e44409627e748b404c086|" +#~ msgstr "" + +#~ msgid "|4b00fe63870145968f8443619a792a42|" +#~ msgstr "" + +#~ msgid "|368378731066486fa4397e89bc6b870c|" +#~ msgstr "" + +#~ msgid "|a66aa83d85bf4ffba7ed660b718066da|" +#~ msgstr "" + +#~ msgid "|82324b9af72a4582a81839d55caab767|" +#~ msgstr "" + +#~ msgid "|fbf2da0da3cc4f8ab3b3eff852d80c41|" +#~ msgstr "" + +#~ msgid "" +#~ "Install `xz` (to install different " +#~ "Python versions) and `pandoc` to build" +#~ " the docs::" +#~ msgstr "" + +#~ msgid "" +#~ "Ensure you system (Ubuntu 22.04+) is " +#~ "up-to-date, and you have all " +#~ "necessary packages::" +#~ msgstr "" + +#~ msgid "" +#~ "Let's create the Python environment for" +#~ " all-things Flower. If you wish " +#~ "to use :code:`pyenv`, we provide two " +#~ "convenience scripts that you can use." +#~ " If you prefer using something else" +#~ " than :code:`pyenv`, create a new " +#~ "environment, activate and skip to the" +#~ " last point where all packages are" +#~ " installed." +#~ msgstr "" + +#~ msgid "" +#~ "If in a hurry, bypass the hook " +#~ "using ``--no-verify`` with the ``git " +#~ "commit`` command. ::" +#~ msgstr "" + +#~ msgid "" +#~ "Flower's documentation uses `Sphinx " +#~ "`_. There's no " +#~ "convenience script to re-build the " +#~ "documentation yet, but it's pretty " +#~ "easy::" +#~ msgstr "" + +#~ msgid "" +#~ "Some quickstart examples may have " +#~ "limitations or requirements that prevent " +#~ "them from running on every environment." +#~ " For more information, please see " +#~ "`Limitations`_." +#~ msgstr "" + +#~ msgid "" +#~ "Change the application code. 
For " +#~ "example, change the ``seed`` in " +#~ "``quickstart_docker/task.py`` to ``43`` and " +#~ "save it:" +#~ msgstr "" + +#~ msgid ":code:`fit`" +#~ msgstr ":code:`fit`" + +#~ msgid "" +#~ "\\small\n" +#~ "\\frac{∆ \\times \\sqrt{2 \\times " +#~ "\\log\\left(\\frac{1.25}{\\delta}\\right)}}{\\epsilon}\n" +#~ "\n" +#~ msgstr "" + +#~ msgid "Enable node authentication in :code:`SuperLink`" +#~ msgstr "" + +#~ msgid "" +#~ "To enable node authentication, first you" +#~ " need to configure SSL/TLS connections " +#~ "to secure the SuperLink<>SuperNode " +#~ "communication. You can find the complete" +#~ " guide `here `_. After " +#~ "configuring secure connections, you can " +#~ "enable client authentication in a " +#~ "long-running Flower :code:`SuperLink`. Use " +#~ "the following terminal command to start" +#~ " a Flower :code:`SuperNode` that has " +#~ "both secure connections and node " +#~ "authentication enabled:" +#~ msgstr "" + +#~ msgid "" +#~ "The first flag :code:`--auth-list-" +#~ "public-keys` expects a path to a " +#~ "CSV file storing all known node " +#~ "public keys. You need to store all" +#~ " known node public keys that are " +#~ "allowed to participate in a federation" +#~ " in one CSV file (:code:`.csv`)." +#~ msgstr "" + +#~ msgid "" +#~ "The second and third flags :code" +#~ ":`--auth-superlink-private-key` and :code" +#~ ":`--auth-superlink-public-key` expect paths" +#~ " to the server's private and public" +#~ " keys. For development purposes, you " +#~ "can generate a private and public " +#~ "key pair using :code:`ssh-keygen -t " +#~ "ecdsa -b 384`." +#~ msgstr "" + +#~ msgid "Enable node authentication in :code:`SuperNode`" +#~ msgstr "" + +#~ msgid "" +#~ "Similar to the long-running Flower " +#~ "server (:code:`SuperLink`), you can easily " +#~ "enable node authentication in the " +#~ "long-running Flower client (:code:`SuperNode`)." +#~ " Use the following terminal command " +#~ "to start an authenticated :code:`SuperNode`:" +#~ msgstr "" + +#~ msgid "" +#~ "The :code:`--auth-supernode-private-key` " +#~ "flag expects a path to the node's" +#~ " private key file and the :code" +#~ ":`--auth-supernode-public-key` flag expects" +#~ " a path to the node's public " +#~ "key file. For development purposes, you" +#~ " can generate a private and public" +#~ " key pair using :code:`ssh-keygen -t" +#~ " ecdsa -b 384`." +#~ msgstr "" + +#~ msgid "" +#~ "You should now have learned how to" +#~ " start a long-running Flower server" +#~ " (:code:`SuperLink`) and client " +#~ "(:code:`SuperNode`) with node authentication " +#~ "enabled. You should also know the " +#~ "significance of the private key and " +#~ "store it safely to minimize security " +#~ "risks." +#~ msgstr "" + +#~ msgid "" +#~ "If you have not added ``conda-" +#~ "forge`` to your channels, you will " +#~ "first need to run the following::" +#~ msgstr "" + +#~ msgid "" +#~ "Once the ``conda-forge`` channel has " +#~ "been enabled, ``flwr`` can be installed" +#~ " with ``conda``::" +#~ msgstr "" + +#~ msgid "or with ``mamba``::" +#~ msgstr "" + +#~ msgid "" +#~ "For central DP with server-side " +#~ "clipping, there are two :code:`Strategy` " +#~ "classes that act as wrappers around " +#~ "the actual :code:`Strategy` instance (for " +#~ "example, :code:`FedAvg`). The two wrapper " +#~ "classes are " +#~ ":code:`DifferentialPrivacyServerSideFixedClipping` and " +#~ ":code:`DifferentialPrivacyServerSideAdaptiveClipping` for " +#~ "fixed and adaptive clipping." 
+#~ msgstr "" + +#~ msgid "" +#~ "The code sample below enables the " +#~ ":code:`FedAvg` strategy to use server-" +#~ "side fixed clipping using the " +#~ ":code:`DifferentialPrivacyServerSideFixedClipping` wrapper " +#~ "class. The same approach can be " +#~ "used with " +#~ ":code:`DifferentialPrivacyServerSideAdaptiveClipping` by " +#~ "adjusting the corresponding input parameters." +#~ msgstr "" + +#~ msgid "" +#~ "For central DP with client-side " +#~ "clipping, the server sends the clipping" +#~ " value to selected clients on each" +#~ " round. Clients can use existing " +#~ "Flower :code:`Mods` to perform the " +#~ "clipping. Two mods are available for " +#~ "fixed and adaptive client-side clipping:" +#~ " :code:`fixedclipping_mod` and " +#~ ":code:`adaptiveclipping_mod` with corresponding " +#~ "server-side wrappers " +#~ ":code:`DifferentialPrivacyClientSideFixedClipping` and " +#~ ":code:`DifferentialPrivacyClientSideAdaptiveClipping`." +#~ msgstr "" + +#~ msgid "" +#~ "The code sample below enables the " +#~ ":code:`FedAvg` strategy to use differential" +#~ " privacy with client-side fixed " +#~ "clipping using both the " +#~ ":code:`DifferentialPrivacyClientSideFixedClipping` wrapper " +#~ "class and, on the client, " +#~ ":code:`fixedclipping_mod`:" +#~ msgstr "" + +#~ msgid "" +#~ "In addition to the server-side " +#~ "strategy wrapper, the :code:`ClientApp` needs" +#~ " to configure the matching " +#~ ":code:`fixedclipping_mod` to perform the " +#~ "client-side clipping:" +#~ msgstr "" + +#~ msgid "Below is a code example that shows how to use :code:`LocalDpMod`:" +#~ msgstr "" + +#~ msgid "" +#~ "Note that since version :code:`1.11.0`, " +#~ ":code:`flower-server-app` no longer " +#~ "supports passing a reference to a " +#~ "`ServerApp` attribute. Instead, you need " +#~ "to pass the path to Flower app " +#~ "via the argument :code:`--app`. This is" +#~ " the path to a directory containing" +#~ " a `pyproject.toml`. You can create a" +#~ " valid Flower app by executing " +#~ ":code:`flwr new` and following the " +#~ "prompt." +#~ msgstr "" + +#~ msgid "" +#~ "Since CoreML does not allow the " +#~ "model parameters to be seen before " +#~ "training, and accessing the model " +#~ "parameters during or after the training" +#~ " can only be done by specifying " +#~ "the layer name, we need to know" +#~ " this information beforehand, through " +#~ "looking at the model specification, " +#~ "which are written as proto files. " +#~ "The implementation can be seen in " +#~ ":code:`MLModelInspect`." +#~ msgstr "" + +#~ msgid "" +#~ "Prior to local training, we need " +#~ "to load the MNIST dataset, a " +#~ "popular image classification dataset of " +#~ "handwritten digits for machine learning, " +#~ "and partition the dataset for FL. " +#~ "This can be conveniently achieved using" +#~ " `Flower Datasets `_." +#~ " The :code:`FederatedDataset.load_partition()` method" +#~ " loads the partitioned training set " +#~ "for each partition ID defined in " +#~ "the :code:`--partition-id` argument." +#~ msgstr "" + +#~ msgid "" +#~ "In this tutorial we will learn how" +#~ " to train a federated XGBoost model" +#~ " on HIGGS dataset using Flower and" +#~ " :code:`xgboost` package. We use a " +#~ "simple example (`full code xgboost-" +#~ "quickstart `_) with two *clients* " +#~ "and one *server* to demonstrate how " +#~ "federated XGBoost works, and then we " +#~ "dive into a more complex example " +#~ "(`full code xgboost-comprehensive " +#~ "`_) to run various experiments." 
+#~ msgstr "" + +#~ msgid "" +#~ "In this example, we split the " +#~ "dataset into 30 partitions with uniform" +#~ " distribution (:code:`IidPartitioner(num_partitions=30)`)." +#~ " Then, we load the partition for " +#~ "the given client based on " +#~ ":code:`partition_id`:" +#~ msgstr "" + +#~ msgid "" +#~ "After that, we do train/test splitting" +#~ " on the given partition (client's " +#~ "local data), and transform data format" +#~ " for :code:`xgboost` package." +#~ msgstr "" + +#~ msgid "" +#~ "The functions of :code:`train_test_split` and" +#~ " :code:`transform_dataset_to_dmatrix` are defined " +#~ "as below:" +#~ msgstr "" + +#~ msgid "" +#~ "The :code:`num_local_round` represents the " +#~ "number of iterations for local tree " +#~ "boost. We use CPU for the training" +#~ " in default. One can shift it " +#~ "to GPU by setting :code:`tree_method` to" +#~ " :code:`gpu_hist`. We use AUC as " +#~ "evaluation metric." +#~ msgstr "" + +#~ msgid "" +#~ "After loading the dataset we define " +#~ "the Flower client. We follow the " +#~ "general rule to define :code:`XgbClient` " +#~ "class inherited from :code:`fl.client.Client`." +#~ msgstr "" + +#~ msgid "" +#~ "All required parameters defined above " +#~ "are passed to :code:`XgbClient`'s constructor." +#~ msgstr "" + +#~ msgid "" +#~ "Then, we override :code:`get_parameters`, " +#~ ":code:`fit` and :code:`evaluate` methods " +#~ "insides :code:`XgbClient` class as follows." +#~ msgstr "" + +#~ msgid "" +#~ "Unlike neural network training, XGBoost " +#~ "trees are not started from a " +#~ "specified random weights. In this case," +#~ " we do not use :code:`get_parameters` " +#~ "and :code:`set_parameters` to initialise model" +#~ " parameters for XGBoost. As a result," +#~ " let's return an empty tensor in " +#~ ":code:`get_parameters` when it is called " +#~ "by the server at the first round." +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`fit`, at the first round, " +#~ "we call :code:`xgb.train()` to build up" +#~ " the first set of trees. From " +#~ "the second round, we load the " +#~ "global model sent from server to " +#~ "new build Booster object, and then " +#~ "update model weights on local training" +#~ " data with function :code:`local_boost` as" +#~ " follows:" +#~ msgstr "" + +#~ msgid "" +#~ "Given :code:`num_local_round`, we update trees" +#~ " by calling :code:`bst_input.update` method. " +#~ "After training, the last " +#~ ":code:`N=num_local_round` trees will be " +#~ "extracted to send to the server." +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`evaluate`, after loading the " +#~ "global model, we call :code:`bst.eval_set` " +#~ "function to conduct evaluation on valid" +#~ " set. The AUC value will be " +#~ "returned." +#~ msgstr "" + +#~ msgid "" +#~ "We use two clients for this " +#~ "example. An :code:`evaluate_metrics_aggregation` " +#~ "function is defined to collect and " +#~ "wighted average the AUC values from " +#~ "clients. The :code:`config_func` function is" +#~ " to return the current FL round " +#~ "number to client's :code:`fit()` and " +#~ ":code:`evaluate()` methods." +#~ msgstr "" + +#~ msgid "" +#~ "In file :code:`flwr.server.strategy.fedxgb_bagging.py`," +#~ " we define :code:`FedXgbBagging` inherited " +#~ "from :code:`flwr.server.strategy.FedAvg`. 
Then, we" +#~ " override the :code:`aggregate_fit`, " +#~ ":code:`aggregate_evaluate` and :code:`evaluate` " +#~ "methods as follows:" +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`aggregate_fit`, we sequentially " +#~ "aggregate the clients' XGBoost trees by" +#~ " calling :code:`aggregate()` function:" +#~ msgstr "" + +#~ msgid "" +#~ "In this function, we first fetch " +#~ "the number of trees and the number" +#~ " of parallel trees for the current" +#~ " and previous model by calling " +#~ ":code:`_get_tree_nums`. Then, the fetched " +#~ "information will be aggregated. After " +#~ "that, the trees (containing model " +#~ "weights) are aggregated to generate a" +#~ " new tree model." +#~ msgstr "" + +#~ msgid "" +#~ "Congratulations! You've successfully built and" +#~ " run your first federated XGBoost " +#~ "system. The AUC values can be " +#~ "checked in :code:`metrics_distributed`. One " +#~ "can see that the average AUC " +#~ "increases over FL rounds." +#~ msgstr "" + +#~ msgid "" +#~ "To do this, we first customise a" +#~ " :code:`ClientManager` in :code:`server_utils.py`:" +#~ msgstr "" + +#~ msgid "" +#~ "The customised :code:`ClientManager` samples " +#~ "all available clients in each FL " +#~ "round based on the order of " +#~ "connection to the server. Then, we " +#~ "define a new strategy :code:`FedXgbCyclic` " +#~ "in :code:`flwr.server.strategy.fedxgb_cyclic.py`, in " +#~ "order to sequentially select only one" +#~ " client in given round and pass " +#~ "the received model to next client." +#~ msgstr "" + +#~ msgid "" +#~ "Unlike the original :code:`FedAvg`, we " +#~ "don't perform aggregation here. Instead, " +#~ "we just make a copy of the " +#~ "received client model as global model" +#~ " by overriding :code:`aggregate_fit`." +#~ msgstr "" + +#~ msgid "" +#~ "Also, the customised :code:`configure_fit` and" +#~ " :code:`configure_evaluate` methods ensure the" +#~ " clients to be sequentially selected " +#~ "given FL round:" +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`dataset.py`, we have a " +#~ "function :code:`instantiate_partitioner` to " +#~ "instantiate the data partitioner based " +#~ "on the given :code:`num_partitions` and " +#~ ":code:`partitioner_type`. Currently, we provide " +#~ "four supported partitioner type to " +#~ "simulate the uniformity/non-uniformity in " +#~ "data quantity (uniform, linear, square, " +#~ "exponential)." +#~ msgstr "" + +#~ msgid "" +#~ "To facilitate centralised evaluation, we " +#~ "define a function in :code:`server_utils.py`:" +#~ msgstr "" + +#~ msgid "" +#~ "This function returns a evaluation " +#~ "function which instantiates a :code:`Booster`" +#~ " object and loads the global model" +#~ " weights to it. The evaluation is " +#~ "conducted by calling :code:`eval_set()` " +#~ "method, and the tested AUC value " +#~ "is reported." +#~ msgstr "" + +#~ msgid "" +#~ "As for distributed evaluation on the " +#~ "clients, it's same as the quick-" +#~ "start example by overriding the " +#~ ":code:`evaluate()` method insides the " +#~ ":code:`XgbClient` class in :code:`client_utils.py`." +#~ msgstr "" + +#~ msgid "" +#~ "We also provide an example code " +#~ "(:code:`sim.py`) to use the simulation " +#~ "capabilities of Flower to simulate " +#~ "federated XGBoost training on either a" +#~ " single machine or a cluster of " +#~ "machines." 
+#~ msgstr "" + +#~ msgid "" +#~ "After importing all required packages, " +#~ "we define a :code:`main()` function to" +#~ " perform the simulation process:" +#~ msgstr "" + +#~ msgid "" +#~ "We first load the dataset and " +#~ "perform data partitioning, and the " +#~ "pre-processed data is stored in a " +#~ ":code:`list`. After the simulation begins, " +#~ "the clients won't need to pre-" +#~ "process their partitions again." +#~ msgstr "" + +#~ msgid "" +#~ "After that, we start the simulation " +#~ "by calling :code:`fl.simulation.start_simulation`:" +#~ msgstr "" + +#~ msgid "" +#~ "One of key parameters for " +#~ ":code:`start_simulation` is :code:`client_fn` which" +#~ " returns a function to construct a" +#~ " client. We define it as follows:" +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`utils.py`, we define the " +#~ "arguments parsers for clients, server " +#~ "and simulation, allowing users to " +#~ "specify different experimental settings. Let's" +#~ " first see the sever side:" +#~ msgstr "" + +#~ msgid "" +#~ "This allows user to specify training " +#~ "strategies / the number of total " +#~ "clients / FL rounds / participating " +#~ "clients / clients for evaluation, and" +#~ " evaluation fashion. Note that with " +#~ ":code:`--centralised-eval`, the sever will " +#~ "do centralised evaluation and all " +#~ "functionalities for client evaluation will " +#~ "be disabled." +#~ msgstr "" + +#~ msgid "" +#~ "This defines various options for client" +#~ " data partitioning. Besides, clients also" +#~ " have an option to conduct evaluation" +#~ " on centralised test set by setting" +#~ " :code:`--centralised-eval`, as well as " +#~ "an option to perform scaled learning " +#~ "rate based on the number of " +#~ "clients by setting :code:`--scaled-lr`." 
+#~ msgstr "" + +#~ msgid "|b8714c45b74b4d8fb008e2ebb3bc1d44|" +#~ msgstr "" + +#~ msgid "|75f1561efcfd422ea67d28d1513120dc|" +#~ msgstr "" + +#~ msgid "|6a1f51b235304558a9bdaaabfc93b8d2|" +#~ msgstr "" + +#~ msgid "|35e70dab1fb544af9aa3a9c09c4f9797|" +#~ msgstr "" + +#~ msgid "|d7efb5705dd3467f991ed23746824a07|" +#~ msgstr "" + +#~ msgid "|94e7b021c7b540bfbedf7f082a41ff87|" +#~ msgstr "" + +#~ msgid "|a80714782dde439ab73936518f91fc3c|" +#~ msgstr "" + +#~ msgid "|c62080ca6197473da57d191c8225a9d9|" +#~ msgstr "" + +#~ msgid "|21a8f1e6a5b14a7bbb8559979d0e8a2b|" +#~ msgstr "" + +#~ msgid "|c310f2a22f7b4917bf42775aae7a1c09|" +#~ msgstr "" + +#~ msgid "|a0c5b43401194535a8460bcf02e65f9a|" +#~ msgstr "" + +#~ msgid "|aabfdbd5564e41a790f8ea93cc21a444|" +#~ msgstr "" + +#~ msgid "|c9cc8f160fa647b09e742fe4dc8edb54|" +#~ msgstr "" + +#~ msgid "|7e83aad011cd4907b2f02f907c6922e9|" +#~ msgstr "" + +#~ msgid "|4627c2bb6cc443ae9e079f81f33c9dd9|" +#~ msgstr "" + +#~ msgid "|131af8322dc5466b827afd24be98f8c0|" +#~ msgstr "" + +#~ msgid "|f92920b87f3a40179bf7ddd0b6144c53|" +#~ msgstr "" + +#~ msgid "|d62da263071d45a496f543e41fce3a19|" +#~ msgstr "" + +#~ msgid "|ad851971645b4e1fbf8d15bcc0b2ee11|" +#~ msgstr "" + +#~ msgid "|929e9a6de6b34edb8488e644e2bb5221|" +#~ msgstr "" + +#~ msgid "|404cf9c9e8d64784a55646c0f9479cbc|" +#~ msgstr "" + +#~ msgid "|b021ff9d25814458b1e631f8985a648b|" +#~ msgstr "" + +#~ msgid "|e6ca84e1df244f238288a768352678e5|" +#~ msgstr "" + +#~ msgid "|39c2422082554a21963baffb33a0d057|" +#~ msgstr "" + +#~ msgid "|07ecf5fcd6814e88906accec6fa0fbfb|" +#~ msgstr "" + +#~ msgid "|57e78c0ca8a94ba5a64a04b1f2280e55|" +#~ msgstr "" + +#~ msgid "|9819b40e59ee40a4921e1244e8c99bac|" +#~ msgstr "" + +#~ msgid "|797bf279c4894b5ead31dc9b0534ed62|" +#~ msgstr "" + diff --git a/doc/locales/ko/LC_MESSAGES/framework-docs.po b/doc/locales/ko/LC_MESSAGES/framework-docs.po index db201f613126..4cdf1c565be6 100644 --- a/doc/locales/ko/LC_MESSAGES/framework-docs.po +++ b/doc/locales/ko/LC_MESSAGES/framework-docs.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: Flower main\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-09-15 09:09+0200\n" +"POT-Creation-Date: 2024-09-27 00:30+0000\n" "PO-Revision-Date: 2024-08-23 13:09+0000\n" "Last-Translator: Seulki Yun \n" "Language: ko\n" @@ -17,7 +17,7 @@ msgstr "" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.15.0\n" +"Generated-By: Babel 2.16.0\n" #: ../../source/contributor-explanation-public-and-private-apis.rst:2 msgid "Public and private APIs" @@ -62,23 +62,23 @@ msgid "" "or not by reading the Flower source code." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:22 +#: ../../source/contributor-explanation-public-and-private-apis.rst:23 #, fuzzy msgid "Flower public API" msgstr "Flower ClientApp." -#: ../../source/contributor-explanation-public-and-private-apis.rst:24 +#: ../../source/contributor-explanation-public-and-private-apis.rst:25 msgid "Flower has a well-defined public API. Let's look at this in more detail." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:28 +#: ../../source/contributor-explanation-public-and-private-apis.rst:29 msgid "" "Every component that is reachable by recursively following " "``__init__.__all__`` starting from the root package (``flwr``) is part of" " the public API." 
msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:30 +#: ../../source/contributor-explanation-public-and-private-apis.rst:32 msgid "" "If you want to determine whether a component " "(class/function/generator/...) is part of the public API or not, you need" @@ -86,13 +86,13 @@ msgid "" "src/py/flwr`` to look at the Python sub-packages contained ``flwr``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:43 +#: ../../source/contributor-explanation-public-and-private-apis.rst:46 msgid "" "Contrast this with the definition of ``__all__`` in the root " "``src/py/flwr/__init__.py``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:55 +#: ../../source/contributor-explanation-public-and-private-apis.rst:59 msgid "" "You can see that ``flwr`` has six subpackages (``cli``, ``client``, " "``common``, ``proto``, ``server``, ``simulation``), but only four of them" @@ -100,7 +100,7 @@ msgid "" "``simulation``)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:57 +#: ../../source/contributor-explanation-public-and-private-apis.rst:63 msgid "" "What does this mean? It means that ``client``, ``common``, ``server`` and" " ``simulation`` are part of the public API, but ``cli`` and ``proto`` are" @@ -111,21 +111,21 @@ msgid "" "even be removed completely." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:62 +#: ../../source/contributor-explanation-public-and-private-apis.rst:70 msgid "Therefore, as a Flower user:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:64 +#: ../../source/contributor-explanation-public-and-private-apis.rst:72 msgid "``from flwr import client`` ✅ Ok, you're importing a public API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:65 +#: ../../source/contributor-explanation-public-and-private-apis.rst:73 msgid "" "``from flwr import proto`` ❌ Not recommended, you're importing a private " "API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:67 +#: ../../source/contributor-explanation-public-and-private-apis.rst:75 msgid "" "What about components that are nested deeper in the hierarchy? Let's look" " at Flower strategies to see another typical pattern. Flower strategies " @@ -134,7 +134,7 @@ msgid "" "``src/py/flwr/server/strategy/__init__.py``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:81 +#: ../../source/contributor-explanation-public-and-private-apis.rst:91 msgid "" "What's notable here is that all strategies are implemented in dedicated " "modules (e.g., ``fedavg.py``). In ``__init__.py``, we *import* the " @@ -146,33 +146,33 @@ msgid "" "the public API (as long as we update the import path in ``__init__.py``)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:86 +#: ../../source/contributor-explanation-public-and-private-apis.rst:99 msgid "Therefore:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:88 +#: ../../source/contributor-explanation-public-and-private-apis.rst:101 msgid "" "``from flwr.server.strategy import FedAvg`` ✅ Ok, you're importing a " "class that is part of the public API." 
msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:89 +#: ../../source/contributor-explanation-public-and-private-apis.rst:103 msgid "" "``from flwr.server.strategy import fedavg`` ❌ Not recommended, you're " "importing a private module." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:91 +#: ../../source/contributor-explanation-public-and-private-apis.rst:106 msgid "" "This approach is also implemented in the tooling that automatically " "builds API reference docs." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:94 +#: ../../source/contributor-explanation-public-and-private-apis.rst:110 msgid "Flower public API of private packages" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:96 +#: ../../source/contributor-explanation-public-and-private-apis.rst:112 msgid "" "We also use this to define the public API of private subpackages. Public," " in this context, means the API that other ``flwr`` subpackages should " @@ -180,14 +180,14 @@ msgid "" "not exported via ``src/py/flwr/server/__init__.py``'s ``__all__``)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:100 +#: ../../source/contributor-explanation-public-and-private-apis.rst:117 msgid "" "Still, the private sub-package ``flwr.server.driver`` defines a " "\"public\" API using ``__all__`` in " "``src/py/flwr/server/driver/__init__.py``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:114 +#: ../../source/contributor-explanation-public-and-private-apis.rst:132 msgid "" "The interesting part is that both ``GrpcDriver`` and ``InMemoryDriver`` " "are never used by Flower framework users, only by other parts of the " @@ -199,7 +199,7 @@ msgid "" "``InMemoryDriver`` class definition)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:117 +#: ../../source/contributor-explanation-public-and-private-apis.rst:140 msgid "" "This is because ``flwr.server.driver`` defines a public interface for " "other ``flwr`` subpackages. This allows codeowners of " @@ -234,16 +234,16 @@ msgid "" "development environment." msgstr "시작하기 전에, 로컬 개발 환경에서 몇 가지 전제 조건을 충족해야 합니다." -#: ../../source/contributor-how-to-build-docker-images.rst:12 +#: ../../source/contributor-how-to-build-docker-images.rst:13 #, fuzzy msgid "Clone the ``flower`` repository." msgstr "플라워 레포지토리를 클론합니다." -#: ../../source/contributor-how-to-build-docker-images.rst:18 +#: ../../source/contributor-how-to-build-docker-images.rst:19 msgid "Verify the Docker daemon is running." msgstr "Docker 데몬이 실행 중인지 확인하십시오." -#: ../../source/contributor-how-to-build-docker-images.rst:20 +#: ../../source/contributor-how-to-build-docker-images.rst:21 msgid "" "The build instructions that assemble the images are located in the " "respective Dockerfiles. You can find them in the subdirectories of " @@ -252,7 +252,7 @@ msgstr "" "이미지들을 조합하는 빌드 명령어들은 해당 Dockerfile에 있습니다. \"src/docker\" 의 하위 디렉토리에서 찾을 수 " "있습니다." -#: ../../source/contributor-how-to-build-docker-images.rst:23 +#: ../../source/contributor-how-to-build-docker-images.rst:24 msgid "" "Flower Docker images are configured via build arguments. Through build " "arguments, we can make the creation of images more flexible. For example," @@ -267,141 +267,141 @@ msgstr "" " 있습니다. 일부 빌드 전달인자들은 기본값이며, 이미지를 빌드할 때 지정해야 합니다. 각 이미지에 사용할 수 있는 모든 빌드 " "전달인자는 아래 표 중에 있습니다." 
-#: ../../source/contributor-how-to-build-docker-images.rst:30 +#: ../../source/contributor-how-to-build-docker-images.rst:32 #, fuzzy msgid "Building the Base Image" msgstr "기본 이미지 빌드" -#: ../../source/contributor-how-to-build-docker-images.rst:36 -#: ../../source/contributor-how-to-build-docker-images.rst:98 +#: ../../source/contributor-how-to-build-docker-images.rst:38 +#: ../../source/contributor-how-to-build-docker-images.rst:104 msgid "Build argument" msgstr "빌드 전달인자" -#: ../../source/contributor-how-to-build-docker-images.rst:37 -#: ../../source/contributor-how-to-build-docker-images.rst:99 +#: ../../source/contributor-how-to-build-docker-images.rst:39 +#: ../../source/contributor-how-to-build-docker-images.rst:105 msgid "Description" msgstr "설명" -#: ../../source/contributor-how-to-build-docker-images.rst:38 -#: ../../source/contributor-how-to-build-docker-images.rst:100 +#: ../../source/contributor-how-to-build-docker-images.rst:40 +#: ../../source/contributor-how-to-build-docker-images.rst:106 msgid "Required" msgstr "필수" -#: ../../source/contributor-how-to-build-docker-images.rst:39 -#: ../../source/contributor-how-to-build-docker-images.rst:101 -#: ../../source/docker/persist-superlink-state.rst:18 -#: ../../source/docker/pin-version.rst:11 +#: ../../source/contributor-how-to-build-docker-images.rst:41 +#: ../../source/contributor-how-to-build-docker-images.rst:107 +#: ../../source/docker/persist-superlink-state.rst:19 +#: ../../source/docker/pin-version.rst:12 #: ../../source/docker/set-environment-variables.rst:8 msgid "Example" msgstr "예시" -#: ../../source/contributor-how-to-build-docker-images.rst:40 +#: ../../source/contributor-how-to-build-docker-images.rst:42 msgid "``DISTRO``" msgstr "``DISTRO``" -#: ../../source/contributor-how-to-build-docker-images.rst:41 +#: ../../source/contributor-how-to-build-docker-images.rst:43 msgid "The Linux distribution to use as the base image." msgstr "기본 이미지 사용을 위한 Linux 배포판." -#: ../../source/contributor-how-to-build-docker-images.rst:42 -#: ../../source/contributor-how-to-build-docker-images.rst:46 -#: ../../source/contributor-how-to-build-docker-images.rst:50 -#: ../../source/contributor-how-to-build-docker-images.rst:66 -#: ../../source/contributor-how-to-build-docker-images.rst:70 -#: ../../source/contributor-how-to-build-docker-images.rst:104 +#: ../../source/contributor-how-to-build-docker-images.rst:44 +#: ../../source/contributor-how-to-build-docker-images.rst:48 +#: ../../source/contributor-how-to-build-docker-images.rst:52 +#: ../../source/contributor-how-to-build-docker-images.rst:68 +#: ../../source/contributor-how-to-build-docker-images.rst:75 +#: ../../source/contributor-how-to-build-docker-images.rst:110 msgid "No" msgstr "아니오" -#: ../../source/contributor-how-to-build-docker-images.rst:43 +#: ../../source/contributor-how-to-build-docker-images.rst:45 msgid "``ubuntu``" msgstr "``ubuntu``" -#: ../../source/contributor-how-to-build-docker-images.rst:44 +#: ../../source/contributor-how-to-build-docker-images.rst:46 msgid "``DISTRO_VERSION``" msgstr "``DISTRO_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:45 +#: ../../source/contributor-how-to-build-docker-images.rst:47 msgid "Version of the Linux distribution." msgstr "Linux 배포판 버전." 
-#: ../../source/contributor-how-to-build-docker-images.rst:47 +#: ../../source/contributor-how-to-build-docker-images.rst:49 msgid ":substitution-code:`|ubuntu_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:48 +#: ../../source/contributor-how-to-build-docker-images.rst:50 msgid "``PYTHON_VERSION``" msgstr "``PYTHON_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:49 +#: ../../source/contributor-how-to-build-docker-images.rst:51 msgid "Version of ``python`` to be installed." msgstr "설치 된 ``python`` 버전." -#: ../../source/contributor-how-to-build-docker-images.rst:51 +#: ../../source/contributor-how-to-build-docker-images.rst:53 msgid "``3.11`` or ``3.11.1``" msgstr "``3.11`` 또는 ``3.11.1``" -#: ../../source/contributor-how-to-build-docker-images.rst:52 +#: ../../source/contributor-how-to-build-docker-images.rst:54 msgid "``PIP_VERSION``" msgstr "``PIP_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:53 +#: ../../source/contributor-how-to-build-docker-images.rst:55 msgid "Version of ``pip`` to be installed." msgstr "설치 된 ``pip`` 버전." -#: ../../source/contributor-how-to-build-docker-images.rst:54 -#: ../../source/contributor-how-to-build-docker-images.rst:58 -#: ../../source/contributor-how-to-build-docker-images.rst:62 -#: ../../source/contributor-how-to-build-docker-images.rst:108 +#: ../../source/contributor-how-to-build-docker-images.rst:56 +#: ../../source/contributor-how-to-build-docker-images.rst:60 +#: ../../source/contributor-how-to-build-docker-images.rst:64 +#: ../../source/contributor-how-to-build-docker-images.rst:114 msgid "Yes" msgstr "예" -#: ../../source/contributor-how-to-build-docker-images.rst:55 +#: ../../source/contributor-how-to-build-docker-images.rst:57 msgid ":substitution-code:`|pip_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:56 +#: ../../source/contributor-how-to-build-docker-images.rst:58 msgid "``SETUPTOOLS_VERSION``" msgstr "``SETUPTOOLS_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:57 +#: ../../source/contributor-how-to-build-docker-images.rst:59 msgid "Version of ``setuptools`` to be installed." msgstr "설치 된 ``setuptools`` 버전." -#: ../../source/contributor-how-to-build-docker-images.rst:59 +#: ../../source/contributor-how-to-build-docker-images.rst:61 #, fuzzy msgid ":substitution-code:`|setuptools_version|`" msgstr "``SETUPTOOLS_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:60 +#: ../../source/contributor-how-to-build-docker-images.rst:62 msgid "``FLWR_VERSION``" msgstr "``FLWR_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:61 +#: ../../source/contributor-how-to-build-docker-images.rst:63 msgid "Version of Flower to be installed." msgstr "설치 된 Flower 버전." -#: ../../source/contributor-how-to-build-docker-images.rst:63 +#: ../../source/contributor-how-to-build-docker-images.rst:65 msgid ":substitution-code:`|stable_flwr_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:64 +#: ../../source/contributor-how-to-build-docker-images.rst:66 msgid "``FLWR_PACKAGE``" msgstr "``FLWR_PACKAGE``" -#: ../../source/contributor-how-to-build-docker-images.rst:65 +#: ../../source/contributor-how-to-build-docker-images.rst:67 msgid "The Flower package to be installed." msgstr "설치 할 Flower 패키지." 
-#: ../../source/contributor-how-to-build-docker-images.rst:67 +#: ../../source/contributor-how-to-build-docker-images.rst:69 msgid "``flwr`` or ``flwr-nightly``" msgstr "``flwr`` 또는 ``flwr-nightly``" -#: ../../source/contributor-how-to-build-docker-images.rst:68 +#: ../../source/contributor-how-to-build-docker-images.rst:70 #, fuzzy msgid "``FLWR_VERSION_REF``" msgstr "``FLWR_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:69 +#: ../../source/contributor-how-to-build-docker-images.rst:71 msgid "" "A `direct reference " "`_를 참조하세요." -#: ../../source/contributor-how-to-contribute-translations.rst:29 +#: ../../source/contributor-how-to-contribute-translations.rst:28 msgid "" "Once you are signed in to Weblate, you can navigate to the `Flower " "Framework project `_로 이동할 수 " "있습니다. 여기에서 웹사이트에 있는 다양한 기존 언어들을 확인할 수 있습니다." -#: ../../source/contributor-how-to-contribute-translations.rst:34 +#: ../../source/contributor-how-to-contribute-translations.rst:32 msgid "" "Once you have selected the language you want to contribute to, you should" " see a similar interface to this:" msgstr "기여하고자 하는 언어를 선택하면, 다음과 같은 인터페이스가 나타납니다:" -#: ../../source/contributor-how-to-contribute-translations.rst:39 +#: ../../source/contributor-how-to-contribute-translations.rst:37 msgid "" "The most straight forward option here is to click on the ``Translate`` " "button on the top right (in the ``Translation status`` section). This " @@ -564,11 +564,11 @@ msgstr "" "여기서 가장 간단한 옵션은 오른쪽 상단(``Translation status`` 부분)에 있는 ``Translate`` 버튼을 " "클릭하는 것 입니다. 번역되지 않은 문장에 대한 번역 인터페이스로 자동으로 이동합니다." -#: ../../source/contributor-how-to-contribute-translations.rst:43 +#: ../../source/contributor-how-to-contribute-translations.rst:41 msgid "This is what the interface looks like:" msgstr "인터페이스는 다음과 같습니다:" -#: ../../source/contributor-how-to-contribute-translations.rst:47 +#: ../../source/contributor-how-to-contribute-translations.rst:45 msgid "" "You input your translation in the text box at the top and then, once you " "are happy with it, you either press ``Save and continue`` (to save the " @@ -582,7 +582,7 @@ msgstr "" "볼 수 있도록 번역을 제안 항목에 추가), ``Skip``(아무것도 저장하지 않고 다음 미번역 문장으로 이동) 중 하나를 선택하면 " "됩니다." -#: ../../source/contributor-how-to-contribute-translations.rst:54 +#: ../../source/contributor-how-to-contribute-translations.rst:51 msgid "" "In order to help with the translations, you can see on the bottom the " "``Nearby strings``, the ``Comments`` (from other contributors), the " @@ -593,14 +593,14 @@ msgstr "" "번역에 도움을 주기위해 하단에서 `주변 문자열``, ``의견``(다른 기여자의), ``자동 제안``(기계 번역의), ``다른 " "언어``의 번역 및 해당 문장의 번역``히스토리``를 볼 수 있습니다." -#: ../../source/contributor-how-to-contribute-translations.rst:59 +#: ../../source/contributor-how-to-contribute-translations.rst:56 msgid "" "On the right, under the ``String information`` section, you can also " "click the link under ``Source string location`` in order to view the " "source of the doc file containing the string." msgstr "오른쪽의 ``문자열 정보``에서 ``원본 문자열 위치``를 클릭하여 해당 문장이 포함된 문서의 파일 소스를 볼 수도 있습니다." -#: ../../source/contributor-how-to-contribute-translations.rst:63 +#: ../../source/contributor-how-to-contribute-translations.rst:60 msgid "" "For more information about translating using Weblate, you can check out " "this `in-depth guide " @@ -609,11 +609,11 @@ msgstr "" "Weblate를 통한 번역에 대한 자세한 정보는 `in-depth guide " "`_를 확인하세요." 
-#: ../../source/contributor-how-to-contribute-translations.rst:67 +#: ../../source/contributor-how-to-contribute-translations.rst:64 msgid "Add new languages" msgstr "새 언어 추가" -#: ../../source/contributor-how-to-contribute-translations.rst:69 +#: ../../source/contributor-how-to-contribute-translations.rst:66 msgid "" "If you want to add a new language, you will first have to contact us, " "either on `Slack `_, or by opening an issue" @@ -637,17 +637,17 @@ msgstr "" "위해 VSCode Remote Containers 확장을 사용하고 있습니다. 그것이 무엇인지 알아보기 위해 다음 인용문을 " "읽어보세요:" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:7 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:8 +#, fuzzy msgid "" "The Visual Studio Code Remote - Containers extension lets you use a " "Docker container as a fully-featured development environment. It allows " "you to open any folder inside (or mounted into) a container and take " "advantage of Visual Studio Code's full feature set. A " -":code:`devcontainer.json` file in your project tells VS Code how to " -"access (or create) a development container with a well-defined tool and " -"runtime stack. This container can be used to run an application or to " -"separate tools, libraries, or runtimes needed for working with a " -"codebase." +"``devcontainer.json`` file in your project tells VS Code how to access " +"(or create) a development container with a well-defined tool and runtime " +"stack. This container can be used to run an application or to separate " +"tools, libraries, or runtimes needed for working with a codebase." msgstr "" "Visual Studio Code Remote - 컨테이너 확장을 사용하면 Docker 컨테이너를 모든 기능을 갖춘 개발 환경으로 " "사용할 수 있습니다. 이 확장 기능을 사용하면 컨테이너 내부(또는 컨테이너에 마운트된)의 모든 폴더를 열고 Visual Studio" @@ -655,7 +655,7 @@ msgstr "" "도구와 런타임 스택을 사용하여 개발 컨테이너에 액세스(또는 생성)하는 방법을 VS Code에 알려줍니다. 이 컨테이너는 " "애플리케이션을 실행하거나 코드베이스 작업에 필요한 도구, 라이브러리 또는 런타임을 분리하는 데 사용할 수 있습니다." -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:9 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:16 msgid "" "Workspace files are mounted from the local file system or copied or " "cloned into the container. Extensions are installed and run inside the " @@ -667,24 +667,25 @@ msgstr "" "실행되며, 도구, 플랫폼 및 파일 시스템에 완전한 접근 권한을 갖습니다. 이는 다른 컨테이너에 연결하는 것만으로 전체 개발 환경을 " "원활하게 전환할 수 있음을 의미합니다." -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:11 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:22 msgid "" "Source: `Official VSCode documentation " "`_" msgstr "출처 : 공식 VSCode 문서" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:15 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:26 msgid "Getting started" msgstr "시작하기" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:17 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:28 +#, fuzzy msgid "" -"Configuring and setting up the :code:`Dockerfile` as well the " -"configuration for the devcontainer can be a bit more involved. The good " -"thing is you don't have to do it. Usually it should be enough to install " -"`Docker `_ on your system and " -"ensure its available on your command line. Additionally, install the " -"`VSCode Containers Extension `_ on your system and ensure its" +" available on your command line. Additionally, install the `VSCode " +"Containers Extension `_." msgstr "" "`Dockerfile`을 설정하고 구성하는 것과 개발 컨테이너 구성은 약간 복잡할 수 있습니다. 
다행히도, 이를 직접 할 필요는 " @@ -692,7 +693,7 @@ msgstr "" "설치하고 커맨드 라인에서 사용할 수 있는지 확인하는 것으로 충분합니다. 추가로 `VSCode Containers Extension " "`_을 설치하세요." -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:19 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:35 msgid "" "Now you should be good to go. When starting VSCode, it will ask you to " "run in the container environment and - if you confirm - automatically " @@ -705,13 +706,13 @@ msgstr "" " 것입니다. VSCode에 수동으로 개발 컨테이너를 사용하도록 지시하려면, 확장을 설치한 후, VSCode 창의 왼쪽 하단에 있는 " "초록색 부을 클릭하고 *(Re)Open Folder in Container* 옵션을 선택하세요." -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:21 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:41 msgid "" "In some cases your setup might be more involved. For those cases consult " "the following sources:" msgstr "경우에 따라 설정이 더 복잡할 수도 있습니다. 이러한 경우에는 다음 소스를 참조하세요:" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:23 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:44 msgid "" "`Developing inside a Container " "`_" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:24 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:46 msgid "" "`Remote development in Containers " "`_" @@ -748,13 +749,13 @@ msgstr "" "다음, 재설치하세요(``poetry 설치``이전에 ``poetry.lock`` (``rm poetry.lock``)를 제거하는 것을" " 잊지 마세요)." -#: ../../source/contributor-how-to-install-development-versions.rst:12 +#: ../../source/contributor-how-to-install-development-versions.rst:14 msgid "" "``flwr = { version = \"1.0.0a0\", allow-prereleases = true }`` (without " "extras)" msgstr "``flwr = { version = \"1.0.0a0\", allow-prereleases = true }`` (extras 제외)" -#: ../../source/contributor-how-to-install-development-versions.rst:13 +#: ../../source/contributor-how-to-install-development-versions.rst:15 msgid "" "``flwr = { version = \"1.0.0a0\", allow-prereleases = true, extras = " "[\"simulation\"] }`` (with extras)" @@ -762,17 +763,17 @@ msgstr "" "``flwr = { version = \"1.0.0a0\", allow-prereleases = true, extras = " "[\"simulation\"] }`` (extras 포함)" -#: ../../source/contributor-how-to-install-development-versions.rst:15 +#: ../../source/contributor-how-to-install-development-versions.rst:18 msgid "" "Install ``flwr`` from a local copy of the Flower source code via " "``pyproject.toml``:" msgstr "``pyproject.toml``을 통해 Flower 소스 코드의 로컬 복사본에서 ``flwr``을 설치하세요:" -#: ../../source/contributor-how-to-install-development-versions.rst:17 +#: ../../source/contributor-how-to-install-development-versions.rst:20 msgid "``flwr = { path = \"../../\", develop = true }`` (without extras)" msgstr "``flwr = { path = \"../../\", develop = true }`` (extras 제외)" -#: ../../source/contributor-how-to-install-development-versions.rst:18 +#: ../../source/contributor-how-to-install-development-versions.rst:21 msgid "" "``flwr = { path = \"../../\", develop = true, extras = [\"simulation\"] " "}`` (with extras)" @@ -780,11 +781,11 @@ msgstr "" "``flwr = { path = \"../../\", develop = true, extras = [\"simulation\"] " "}`` (extras 포함)" -#: ../../source/contributor-how-to-install-development-versions.rst:20 +#: ../../source/contributor-how-to-install-development-versions.rst:23 msgid "Install ``flwr`` from a local wheel file via ``pyproject.toml``:" msgstr "``pyproject.toml``을 통해 로컬 wheel file에서 ``flwr``을 설치하세요:" -#: ../../source/contributor-how-to-install-development-versions.rst:22 +#: 
../../source/contributor-how-to-install-development-versions.rst:25 msgid "" "``flwr = { path = \"../../dist/flwr-1.8.0-py3-none-any.whl\" }`` (without" " extras)" @@ -792,7 +793,7 @@ msgstr "" "``flwr = { path = \"../../dist/flwr-1.8.0-py3-none-any.whl\" }`` (extras " "제외)" -#: ../../source/contributor-how-to-install-development-versions.rst:23 +#: ../../source/contributor-how-to-install-development-versions.rst:26 msgid "" "``flwr = { path = \"../../dist/flwr-1.8.0-py3-none-any.whl\", extras = " "[\"simulation\"] }`` (with extras)" @@ -800,7 +801,7 @@ msgstr "" "``flwr = { path = \"../../dist/flwr-1.8.0-py3-none-any.whl\", extras = " "[\"simulation\"] }`` (extras 포함)" -#: ../../source/contributor-how-to-install-development-versions.rst:25 +#: ../../source/contributor-how-to-install-development-versions.rst:29 msgid "" "Please refer to the Poetry documentation for further details: `Poetry " "Dependency Specification `_" -#: ../../source/contributor-how-to-install-development-versions.rst:28 +#: ../../source/contributor-how-to-install-development-versions.rst:33 msgid "Using pip (recommended on Colab)" msgstr "pip 사용하기(Colab에서 권장)" -#: ../../source/contributor-how-to-install-development-versions.rst:30 +#: ../../source/contributor-how-to-install-development-versions.rst:35 msgid "Install a ``flwr`` pre-release from PyPI:" msgstr "PyPI에서 ``flwr`` 사전 릴리즈를 설치하기:" -#: ../../source/contributor-how-to-install-development-versions.rst:32 +#: ../../source/contributor-how-to-install-development-versions.rst:37 msgid "``pip install -U --pre flwr`` (without extras)" msgstr "``pip install -U --pre flwr`` (extras 제외)" -#: ../../source/contributor-how-to-install-development-versions.rst:33 +#: ../../source/contributor-how-to-install-development-versions.rst:38 msgid "``pip install -U --pre 'flwr[simulation]'`` (with extras)" msgstr "``pip install -U --pre 'flwr[simulation]'`` (extras 포함)" -#: ../../source/contributor-how-to-install-development-versions.rst:35 +#: ../../source/contributor-how-to-install-development-versions.rst:40 msgid "" "Python packages can be installed from git repositories. Use one of the " "following commands to install the Flower directly from GitHub." @@ -833,17 +834,17 @@ msgstr "" "Python 패키지는 git 저장소에서 설치할 수 있습니다. 다음 명령어 중 하나를 사용하여 GitHub에서 직접 Flower를 " "설치하세요." 
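A compact sketch of how the Poetry-based variants above are applied in practice (assuming ``pyproject.toml`` already contains one of the ``flwr = { ... }`` dependency lines quoted in these entries, and remembering the earlier warning to delete ``poetry.lock`` before reinstalling):

    # pyproject.toml already points at the pre-release, the local source copy, or the wheel
    rm poetry.lock
    poetry install

The pip-based and GitHub-based commands are listed verbatim in the entries that follow.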
-#: ../../source/contributor-how-to-install-development-versions.rst:37 +#: ../../source/contributor-how-to-install-development-versions.rst:43 msgid "Install ``flwr`` from the default GitHub branch (``main``):" msgstr "기본 GitHub branch (``main``)에서 ``flwr`` 를 설치하기:" -#: ../../source/contributor-how-to-install-development-versions.rst:39 +#: ../../source/contributor-how-to-install-development-versions.rst:45 msgid "" "``pip install flwr@git+https://github.com/adap/flower.git`` (without " "extras)" msgstr "``pip install flwr@git+https://github.com/adap/flower.git`` (extras 제외)" -#: ../../source/contributor-how-to-install-development-versions.rst:40 +#: ../../source/contributor-how-to-install-development-versions.rst:46 msgid "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git'``" " (with extras)" @@ -851,11 +852,11 @@ msgstr "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git'``" " (extras 포함)" -#: ../../source/contributor-how-to-install-development-versions.rst:42 +#: ../../source/contributor-how-to-install-development-versions.rst:49 msgid "Install ``flwr`` from a specific GitHub branch (``branch-name``):" msgstr "특정 GitHub branch (``branch-name``)에서 ``flwr`` 설치하기:" -#: ../../source/contributor-how-to-install-development-versions.rst:44 +#: ../../source/contributor-how-to-install-development-versions.rst:51 msgid "" "``pip install flwr@git+https://github.com/adap/flower.git@branch-name`` " "(without extras)" @@ -863,7 +864,7 @@ msgstr "" "``pip install flwr@git+https://github.com/adap/flower.git@branch-name`` " "(extras 제외)" -#: ../../source/contributor-how-to-install-development-versions.rst:45 +#: ../../source/contributor-how-to-install-development-versions.rst:53 msgid "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git" "@branch-name'`` (with extras)" @@ -871,11 +872,11 @@ msgstr "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git" "@branch-name'`` (extras 포함)" -#: ../../source/contributor-how-to-install-development-versions.rst:49 +#: ../../source/contributor-how-to-install-development-versions.rst:57 msgid "Open Jupyter Notebooks on Google Colab" msgstr "Google Colab에서 Jupyter Notebooks 열기" -#: ../../source/contributor-how-to-install-development-versions.rst:51 +#: ../../source/contributor-how-to-install-development-versions.rst:59 msgid "" "Open the notebook ``doc/source/tutorial-series-get-started-with-flower-" "pytorch.ipynb``:" @@ -883,7 +884,7 @@ msgstr "" "``doc/source/tutorial-series-get-started-with-flower-" "pytorch.ipynb``notebook을 엽니다:" -#: ../../source/contributor-how-to-install-development-versions.rst:53 +#: ../../source/contributor-how-to-install-development-versions.rst:61 msgid "" "https://colab.research.google.com/github/adap/flower/blob/main/doc/source" "/tutorial-series-get-started-with-flower-pytorch.ipynb" @@ -891,7 +892,7 @@ msgstr "" "https://colab.research.google.com/github/adap/flower/blob/main/doc/source" "/tutorial-series-get-started-with-flower-pytorch.ipynb" -#: ../../source/contributor-how-to-install-development-versions.rst:55 +#: ../../source/contributor-how-to-install-development-versions.rst:63 msgid "" "Open a development version of the same notebook from branch `branch-name`" " by changing ``main`` to ``branch-name`` (right after ``blob``):" @@ -899,7 +900,7 @@ msgstr "" "``main``을 ``branch-name``(``blob`` 바로 뒤)으로 변경하여 동일한 notebook의 개발 버전을 브랜치 " "`branch-name`에서 엽니다 :" -#: ../../source/contributor-how-to-install-development-versions.rst:57 +#: 
../../source/contributor-how-to-install-development-versions.rst:66 msgid "" "https://colab.research.google.com/github/adap/flower/blob/branch-" "name/doc/source/tutorial-series-get-started-with-flower-pytorch.ipynb" @@ -907,21 +908,21 @@ msgstr "" "https://colab.research.google.com/github/adap/flower/blob/branch-" "name/doc/source/tutorial-series-get-started-with-flower-pytorch.ipynb" -#: ../../source/contributor-how-to-install-development-versions.rst:59 +#: ../../source/contributor-how-to-install-development-versions.rst:68 msgid "Install a `whl` on Google Colab:" msgstr "Google Colab에서 `whl` 설치하기:" -#: ../../source/contributor-how-to-install-development-versions.rst:61 +#: ../../source/contributor-how-to-install-development-versions.rst:70 msgid "" "In the vertical icon grid on the left hand side, select ``Files`` > " "``Upload to session storage``" msgstr "왼쪽의 수직 아이콘 그리드에서 ``Files`` > ``Upload to session storage``를 선택하세요" -#: ../../source/contributor-how-to-install-development-versions.rst:62 +#: ../../source/contributor-how-to-install-development-versions.rst:72 msgid "Upload the whl (e.g., ``flwr-1.8.0-py3-none-any.whl``)" msgstr "whl (예:``flwr-1.8.0-py3-none-any.whl``)을 업로드하세요" -#: ../../source/contributor-how-to-install-development-versions.rst:63 +#: ../../source/contributor-how-to-install-development-versions.rst:73 msgid "" "Change ``!pip install -q 'flwr[simulation]' torch torchvision " "matplotlib`` to ``!pip install -q 'flwr-1.8.0-py3-none-" @@ -941,11 +942,11 @@ msgid "" "change in the future." msgstr "이 문서는 현재 릴리즈 과정을 설명합니다. 이는 앞으로 변경될 수도 있습니다." -#: ../../source/contributor-how-to-release-flower.rst:7 +#: ../../source/contributor-how-to-release-flower.rst:8 msgid "During the release" msgstr "릴리즈 동안에" -#: ../../source/contributor-how-to-release-flower.rst:9 +#: ../../source/contributor-how-to-release-flower.rst:10 msgid "" "The version number of a release is stated in ``pyproject.toml``. To " "release a new version of Flower, the following things need to happen (in " @@ -954,7 +955,7 @@ msgstr "" "릴리즈의 버전 번호는 ``pyproject.toml``에 명시되어 있습니다. Flower의 새 버전을 릴리즈하려면 다음 작업이 " "순서대로 수행되어야 합니다:" -#: ../../source/contributor-how-to-release-flower.rst:11 +#: ../../source/contributor-how-to-release-flower.rst:13 msgid "" "Run ``python3 src/py/flwr_tool/update_changelog.py `` in " "order to add every new change to the changelog (feel free to make manual " @@ -964,7 +965,7 @@ msgstr "" "src/py/flwr_tool/update_changelog.py ``을 실행합니다 (변경 로그가 " "만족스러워질 때까지 수동으로 변경해도 됩니다)." -#: ../../source/contributor-how-to-release-flower.rst:12 +#: ../../source/contributor-how-to-release-flower.rst:16 msgid "" "Once the changelog has been updated with all the changes, run ``./dev" "/prepare-release-changelog.sh v``, where ```` " @@ -978,7 +979,7 @@ msgstr "" "버전 번호입니다 (앞에 ``v``가 추가된 것을 주의하세요). 이 명령어는 변경 로그의 ``Unreleased``헤더를 해당 버전과" " 현재 날짜로 교체하고, 기여자들에게 감사 메시지가 추가됩니다. 이러한 변경 사항으로 pull request합니다." -#: ../../source/contributor-how-to-release-flower.rst:13 +#: ../../source/contributor-how-to-release-flower.rst:22 msgid "" "Once the pull request is merged, tag the release commit with the version " "number as soon as the PR is merged: ``git tag v`` (notice " @@ -990,93 +991,93 @@ msgstr "" "v`` (버전 번호 앞에 ``v``가 추가된 것을 확인), 그 다음 ``git push --tags``. " "이렇게 하면 올바른 아티팩트와 변경 로그의 관련 부분이 포함된 초안 릴리즈가 GitHub에 생성됩니다." -#: ../../source/contributor-how-to-release-flower.rst:14 +#: ../../source/contributor-how-to-release-flower.rst:26 msgid "Check the draft release on GitHub, and if everything is good, publish it." 
msgstr "GitHub에서 릴리즈 초안을 확인하고, 모든 것이 양호하면 게시하세요." -#: ../../source/contributor-how-to-release-flower.rst:17 +#: ../../source/contributor-how-to-release-flower.rst:29 msgid "After the release" msgstr "릴리즈 후에" -#: ../../source/contributor-how-to-release-flower.rst:19 +#: ../../source/contributor-how-to-release-flower.rst:31 msgid "Create a pull request which contains the following changes:" msgstr "다음 변경 사항이 포함된 pull request를 만듭니다:" -#: ../../source/contributor-how-to-release-flower.rst:21 +#: ../../source/contributor-how-to-release-flower.rst:33 msgid "Increase the minor version in ``pyproject.toml`` by one." msgstr "``pyproject.toml``의 마이너 버전을 하나씩 늘립니다." -#: ../../source/contributor-how-to-release-flower.rst:22 +#: ../../source/contributor-how-to-release-flower.rst:34 msgid "Update all files which contain the current version number if necessary." msgstr "필요한 경우 현재 버전 번호가 포함된 모든 파일을 업데이트합니다." -#: ../../source/contributor-how-to-release-flower.rst:23 +#: ../../source/contributor-how-to-release-flower.rst:35 msgid "Add a new ``Unreleased`` section in ``changelog.md``." msgstr "``changelog.md``에 ``Unreleased`` 섹션을 새로 추가합니다." -#: ../../source/contributor-how-to-release-flower.rst:25 +#: ../../source/contributor-how-to-release-flower.rst:37 msgid "" "Merge the pull request on the same day (i.e., before a new nightly " "release gets published to PyPI)." msgstr "pull request를 같은 날(즉, 새로운 nightly 릴리즈가 PyPI에 게시되기 전에) 병합하세요." -#: ../../source/contributor-how-to-release-flower.rst:28 +#: ../../source/contributor-how-to-release-flower.rst:41 msgid "Publishing a pre-release" msgstr "사전 릴리즈 게시" -#: ../../source/contributor-how-to-release-flower.rst:31 +#: ../../source/contributor-how-to-release-flower.rst:44 msgid "Pre-release naming" msgstr "사전 릴리즈 이름" -#: ../../source/contributor-how-to-release-flower.rst:33 +#: ../../source/contributor-how-to-release-flower.rst:46 msgid "" "PyPI supports pre-releases (alpha, beta, release candidate). Pre-releases" " MUST use one of the following naming patterns:" msgstr "PyPI는 사전 릴리즈(알파, 베타, 릴리스 후보)를 지원합니다. 
사전 릴리즈는 반드시 다음 명명 패턴 중 하나를 사용해야 합니다:" -#: ../../source/contributor-how-to-release-flower.rst:35 +#: ../../source/contributor-how-to-release-flower.rst:49 msgid "Alpha: ``MAJOR.MINOR.PATCHaN``" msgstr "Alpha: ``MAJOR.MINOR.PATCHaN``" -#: ../../source/contributor-how-to-release-flower.rst:36 +#: ../../source/contributor-how-to-release-flower.rst:50 msgid "Beta: ``MAJOR.MINOR.PATCHbN``" msgstr "Beta: ``MAJOR.MINOR.PATCHbN``" -#: ../../source/contributor-how-to-release-flower.rst:37 +#: ../../source/contributor-how-to-release-flower.rst:51 msgid "Release candidate (RC): ``MAJOR.MINOR.PATCHrcN``" msgstr "Release candidate (RC): ``MAJOR.MINOR.PATCHrcN``" -#: ../../source/contributor-how-to-release-flower.rst:39 +#: ../../source/contributor-how-to-release-flower.rst:53 msgid "Examples include:" msgstr "예시:" -#: ../../source/contributor-how-to-release-flower.rst:41 +#: ../../source/contributor-how-to-release-flower.rst:55 msgid "``1.0.0a0``" msgstr "``1.0.0a0``" -#: ../../source/contributor-how-to-release-flower.rst:42 +#: ../../source/contributor-how-to-release-flower.rst:56 msgid "``1.0.0b0``" msgstr "``1.0.0b0``" -#: ../../source/contributor-how-to-release-flower.rst:43 +#: ../../source/contributor-how-to-release-flower.rst:57 msgid "``1.0.0rc0``" msgstr "``1.0.0rc0``" -#: ../../source/contributor-how-to-release-flower.rst:44 +#: ../../source/contributor-how-to-release-flower.rst:58 msgid "``1.0.0rc1``" msgstr "``1.0.0rc1``" -#: ../../source/contributor-how-to-release-flower.rst:46 +#: ../../source/contributor-how-to-release-flower.rst:60 msgid "" "This is in line with PEP-440 and the recommendations from the Python " "Packaging Authority (PyPA):" msgstr "이는 PEP-440 및 Python Packaging Authority (PyPA)의 권장 사항과 일치합니다:" -#: ../../source/contributor-how-to-release-flower.rst:49 +#: ../../source/contributor-how-to-release-flower.rst:63 msgid "`PEP-440 `_" msgstr "`PEP-440 `_" -#: ../../source/contributor-how-to-release-flower.rst:50 +#: ../../source/contributor-how-to-release-flower.rst:64 msgid "" "`PyPA Choosing a versioning scheme " "`_" -#: ../../source/contributor-how-to-release-flower.rst:52 +#: ../../source/contributor-how-to-release-flower.rst:67 msgid "" "Note that the approach defined by PyPA is not compatible with SemVer " "2.0.0 spec, for details consult the `Semantic Versioning Specification " @@ -1096,15 +1097,15 @@ msgstr "" "관리 사양 `_ (특히 항목 11이 " "우선순위)을 참조하세요." -#: ../../source/contributor-how-to-release-flower.rst:55 +#: ../../source/contributor-how-to-release-flower.rst:73 msgid "Pre-release classification" msgstr "사전 릴리즈 분류" -#: ../../source/contributor-how-to-release-flower.rst:57 +#: ../../source/contributor-how-to-release-flower.rst:75 msgid "Should the next pre-release be called alpha, beta, or release candidate?" msgstr "다음 사전 릴리즈를 알파, 베타 또는 릴리스 후보라고 불러야 하나요?" 
-#: ../../source/contributor-how-to-release-flower.rst:59 +#: ../../source/contributor-how-to-release-flower.rst:77 msgid "" "RC: feature complete, no known issues (apart from issues that are " "classified as \"won't fix\" for the next stable release) - if no issues " @@ -1113,11 +1114,11 @@ msgstr "" "RC: 기능 완료, 알려진 문제 없음(다음 stable 릴리즈에서 \"수정되지 않음\"으로 분류된 문제 제외) - 문제가 나타나지 " "않으면 다음 stable 릴리즈가 됩니다" -#: ../../source/contributor-how-to-release-flower.rst:60 +#: ../../source/contributor-how-to-release-flower.rst:80 msgid "Beta: feature complete, allowed to have known issues" msgstr "베타: 기능 완료, 알려진 문제 발생 가능" -#: ../../source/contributor-how-to-release-flower.rst:61 +#: ../../source/contributor-how-to-release-flower.rst:81 msgid "Alpha: not feature complete, allowed to have known issues" msgstr "알파: 기능 미완성, 알려진 문제가 있을 수 있음" @@ -1135,12 +1136,12 @@ msgstr "" "가상 환경 내에서 파이썬 설정을 실행하는 것이 좋습니다. 이 가이드에서는 pyenv virtualenv, poetry 또는 " "Anaconda를 사용하여 가상 환경을 만드는 세 가지 예제를 보여줍니다. 안내를 따르거나 원하는 설정을 선택할 수 있습니다." -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:9 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:10 msgid "Python Version" msgstr "Python 버전" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:11 -#: ../../source/how-to-install-flower.rst:8 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:12 +#: ../../source/how-to-install-flower.rst:7 msgid "" "Flower requires at least `Python 3.9 `_, " "but `Python 3.10 `_ or above is " @@ -1149,7 +1150,7 @@ msgstr "" "Flower는 `Python 3.9 `_이상이 필요하지만, `Python " "3.10 `_이상을 권장합니다." -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:14 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:17 msgid "" "Due to a known incompatibility with `ray " "`_, we currently recommend utilizing at " @@ -1160,11 +1161,11 @@ msgstr "" " 시뮬레이션을 실행할 때는 최대 `Python 3.11 `_을 사용하는 것이" " 좋습니다." -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:19 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:22 msgid "Virtualenv with Pyenv/Virtualenv" msgstr "Pyenv/Virtualenv를 사용한 가상 환경" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:21 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:24 msgid "" "One of the recommended virtual environment is `pyenv " "`_/`virtualenv `_입니다. 자세한 내용은 `Flower " "examples `_를 참조하세요." -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:23 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:29 msgid "" "Once Pyenv is set up, you can use it to install `Python Version 3.10 " "`_ or above:" @@ -1183,19 +1184,19 @@ msgstr "" "Pyenv가 설정되면 이를 사용하여 'Python 버전 3.10 `_ " "이상'을 설치할 수 있습니다:" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:29 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:36 msgid "Create the virtualenv with:" msgstr "가상 환경을 만듭니다:" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:36 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:42 msgid "Activate the virtualenv by running the following command:" msgstr "다음 명령을 실행하여 가상 환경을 활성화합니다:" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:44 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:49 msgid "Virtualenv with Poetry" msgstr "Poetry를 사용한 가상 환경" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:46 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:51 msgid "" "The Flower examples are based on `Poetry `_ to manage dependencies. 
After installing Poetry you " @@ -1204,21 +1205,22 @@ msgstr "" "Flower examples은 의존성을 관리하기 위해 `Poetry `_를 기반으로 합니다. Poetry를 설치한 후 가상 환경을 생성하기만 하면 됩니다:" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:52 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:58 msgid "" "If you open a new terminal you can activate the previously created " "virtual environment with the following command:" msgstr "새 터미널을 열면 다음 명령을 사용하여 이전에 생성한 가상 환경을 활성화할 수 있습니다:" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:60 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:66 msgid "Virtualenv with Anaconda" msgstr "Anaconda를 사용한 가상 환경" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:62 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:68 +#, fuzzy msgid "" "If you prefer to use Anaconda for your virtual environment then install " "and setup the `conda `_ package. After setting it up you can " +"/user-guide/install/index.html>`_ package. After setting it up you can " "create a virtual environment with:" msgstr "" "가상 환경에서 Anaconda를 사용하려면 `conda " @@ -1226,15 +1228,15 @@ msgstr "" "guide/install/index.html>`_ 패키지를 설치 및 설정하세요. 설정 후 다음을 사용하여 가상 환경을 만들 수 " "있습니다:" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:68 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:76 msgid "and activate the virtual environment with:" msgstr "그 후 가상 환경을 활성화합니다:" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:76 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:83 msgid "And then?" msgstr "그다음은?" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:78 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:85 msgid "" "As soon as you created your virtual environment you clone one of the " "`Flower examples `_." @@ -1246,11 +1248,11 @@ msgstr "" msgid "Write documentation" msgstr "문서 작성" -#: ../../source/contributor-how-to-write-documentation.rst:6 +#: ../../source/contributor-how-to-write-documentation.rst:5 msgid "Project layout" msgstr "프로젝트 레이아웃" -#: ../../source/contributor-how-to-write-documentation.rst:8 +#: ../../source/contributor-how-to-write-documentation.rst:7 msgid "" "The Flower documentation lives in the ``doc`` directory. The Sphinx-based" " documentation system supports both reStructuredText (``.rst`` files) and" @@ -1260,7 +1262,7 @@ msgstr "" "텍스트(``.rst`` 파일)와 Markdown(``.md`` 파일)을 모두 지원합니다." #: ../../source/contributor-how-to-write-documentation.rst:10 -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:169 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:193 msgid "" "Note that, in order to build the documentation locally (with ``poetry run" " make html``, like described below), `Pandoc " @@ -1270,20 +1272,20 @@ msgstr "" "로컬에서 문서를 작성하려면(아래 설명과 같이 ``poetry run make html``로) `Pandoc " "`_이 시스템에 설치되어 있어야 합니다." 
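Taken together, the local documentation build that this note refers to (and that the next entries spell out step by step) amounts to a short command sequence; Pandoc is assumed to be installed already, as required above:

    cd doc                   # the documentation lives in doc/
    poetry run make html     # build the Sphinx docs
    # open doc/build/html/index.html in a browser to check the result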
-#: ../../source/contributor-how-to-write-documentation.rst:14 +#: ../../source/contributor-how-to-write-documentation.rst:15 msgid "Edit an existing page" msgstr "기존 페이지 편집" -#: ../../source/contributor-how-to-write-documentation.rst:16 +#: ../../source/contributor-how-to-write-documentation.rst:17 msgid "Edit an existing ``.rst`` (or ``.md``) file under ``doc/source/``" msgstr "doc/source/``에서 기존 ``.rst``(또는 ``.md``) 파일을 편집합니다" -#: ../../source/contributor-how-to-write-documentation.rst:17 +#: ../../source/contributor-how-to-write-documentation.rst:18 #: ../../source/contributor-how-to-write-documentation.rst:27 msgid "Compile the docs: ``cd doc``, then ``poetry run make html``" msgstr "문서를 컴파일합니다: ``cd doc``, ``poetry run make html`` 순으로 컴파일합니다" -#: ../../source/contributor-how-to-write-documentation.rst:18 +#: ../../source/contributor-how-to-write-documentation.rst:19 #: ../../source/contributor-how-to-write-documentation.rst:28 msgid "Open ``doc/build/html/index.html`` in the browser to check the result" msgstr "브라우저에서 ``doc/build/html/index.html``을 열어 결과를 확인합니다" @@ -1318,11 +1320,11 @@ msgstr "" "Flower에 대한 기여를 환영합니다! 하지만 어디서부터 시작해야 할지 알기란 쉽지 않습니다. 그래서 저희는 여러분의 PR이 " "Flower 코드베이스에 채택될 가능성을 높이기 위해 어디서부터 시작해야 하는지 몇 가지 권장 사항을 정리해 보았습니다." -#: ../../source/contributor-ref-good-first-contributions.rst:11 +#: ../../source/contributor-ref-good-first-contributions.rst:9 msgid "Where to start" msgstr "시작 위치" -#: ../../source/contributor-ref-good-first-contributions.rst:13 +#: ../../source/contributor-ref-good-first-contributions.rst:11 msgid "" "Until the Flower core library matures it will be easier to get PR's " "accepted if they only touch non-core areas of the codebase. Good " @@ -1331,23 +1333,23 @@ msgstr "" "Flower 코어 라이브러리가 완성될 때까지는 코드베이스의 비핵심 영역만 건드리는 것이 PR을 승인받기가 더 쉬울 것입니다. " "시작하기에 좋은 후보자는 다음과 같습니다:" -#: ../../source/contributor-ref-good-first-contributions.rst:17 +#: ../../source/contributor-ref-good-first-contributions.rst:14 msgid "Documentation: What's missing? What could be expressed more clearly?" msgstr "문서: 무엇이 누락되었나요? 무엇을 더 명확하게 표현할 수 있을까요?" -#: ../../source/contributor-ref-good-first-contributions.rst:18 +#: ../../source/contributor-ref-good-first-contributions.rst:15 msgid "Baselines: See below." msgstr "Baselines: 아래를 참조하세요." -#: ../../source/contributor-ref-good-first-contributions.rst:19 +#: ../../source/contributor-ref-good-first-contributions.rst:16 msgid "Examples: See below." msgstr "예시: 아래를 참조하세요." -#: ../../source/contributor-ref-good-first-contributions.rst:23 +#: ../../source/contributor-ref-good-first-contributions.rst:19 msgid "Request for Flower Baselines" msgstr "Flower Baselines 요청" -#: ../../source/contributor-ref-good-first-contributions.rst:25 +#: ../../source/contributor-ref-good-first-contributions.rst:21 msgid "" "If you are not familiar with Flower Baselines, you should probably check-" "out our `contributing guide for baselines " @@ -1357,7 +1359,7 @@ msgstr "" "`_를 " "확인해보세요." -#: ../../source/contributor-ref-good-first-contributions.rst:27 +#: ../../source/contributor-ref-good-first-contributions.rst:25 msgid "" "You should then check out the open `issues " "`_" @@ -1370,7 +1372,7 @@ msgstr "" " baseline 요청을 확인해야 합니다. 작업하고 싶은 기준선을 찾았지만 담당자가 없는 경우, 자유롭게 자신에게 할당하고 작업을 " "시작하세요!" -#: ../../source/contributor-ref-good-first-contributions.rst:31 +#: ../../source/contributor-ref-good-first-contributions.rst:30 msgid "" "Otherwise, if you don't find a baseline you'd like to work on, be sure to" " open a new issue with the baseline request template!" 
@@ -1416,12 +1418,13 @@ msgstr "" "때문에 다이어그램과 추상화가 실제로는 정확하지 않을 수 있습니다. SecAgg 프로토콜은 SecAgg+ 프로토콜의 특수한 경우로 " "간주할 수 있습니다." -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:8 -msgid "The :code:`SecAgg+` abstraction" +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:9 +#, fuzzy +msgid "The ``SecAgg+`` abstraction" msgstr "The :code:`SecAgg+` 추상화" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:10 -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:161 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:11 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:163 msgid "" "In this implementation, each client will be assigned with a unique index " "(int) for secure aggregation, and thus many python dictionaries used have" @@ -1430,18 +1433,19 @@ msgstr "" "구현에서는 각 클라이언트에 secure aggregation를 위한 고유 인덱스(int)가 할당되므로 사용되는 많은 파이썬 " "dictionaries에는 ClientProxy 타입이 아닌 int 타입의 키가 있습니다." -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:65 -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:198 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:67 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:204 msgid "" "The Flower server will execute and process received results in the " "following order:" msgstr "Flower 서버는 수신된 결과를 다음 순서로 실행하고 처리합니다:" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:159 -msgid "The :code:`LightSecAgg` abstraction" +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:161 +#, fuzzy +msgid "The ``LightSecAgg`` abstraction" msgstr "The :code:`LightSecAgg` 추상" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:271 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:277 msgid "Types" msgstr "타입" @@ -1455,7 +1459,7 @@ msgid "" "are not used to contributing to GitHub projects." msgstr "이 가이드는 Flower에 참여하고 싶지만 GitHub 프로젝트에 기여하는 데 익숙하지 않은 분들을 위한 것입니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:6 +#: ../../source/contributor-tutorial-contribute-on-github.rst:7 msgid "" "If you're familiar with how contributing on GitHub works, you can " "directly checkout our :doc:`getting started guide for contributors " @@ -1464,15 +1468,15 @@ msgstr "" "깃허브에서 기여하는 방식에 익숙하다면 :doc:`기여자를 위한 시작 가이드`를 직접 확인하세요." -#: ../../source/contributor-tutorial-contribute-on-github.rst:10 +#: ../../source/contributor-tutorial-contribute-on-github.rst:12 msgid "Setting up the repository" msgstr "레포지토리 설정하기" -#: ../../source/contributor-tutorial-contribute-on-github.rst:12 +#: ../../source/contributor-tutorial-contribute-on-github.rst:29 msgid "**Create a GitHub account and setup Git**" msgstr "**GitHub 계정을 만들고 Git을 설정합니다**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:13 +#: ../../source/contributor-tutorial-contribute-on-github.rst:15 msgid "" "Git is a distributed version control tool. This allows for an entire " "codebase's history to be stored and every developer's machine. It is a " @@ -1484,7 +1488,7 @@ msgstr "" "컴퓨터에 설치해야 하는 소프트웨어로, 이 `가이드 `_를 따라 설정할 수 있습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:16 +#: ../../source/contributor-tutorial-contribute-on-github.rst:21 msgid "" "GitHub, itself, is a code hosting platform for version control and " "collaboration. It allows for everyone to collaborate and work from " @@ -1493,13 +1497,13 @@ msgstr "" "GitHub는 그 자체로 버전 관리 및 협업을 위한 코드 호스팅 플랫폼입니다. 누구나 원격 레포지토리에서 어디서든 협업하고 작업할 " "수 있습니다." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:18 +#: ../../source/contributor-tutorial-contribute-on-github.rst:25 msgid "" "If you haven't already, you will need to create an account on `GitHub " "`_." msgstr "아직 계정을 만들지 않았다면 `GitHub `_에서 계정을 만들어야 합니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:20 +#: ../../source/contributor-tutorial-contribute-on-github.rst:28 msgid "" "The idea behind the generic Git and GitHub workflow boils down to this: " "you download code from a remote repository on GitHub, make changes " @@ -1509,14 +1513,15 @@ msgstr "" "일반적인 Git 및 GitHub 워크플로우의 기본 개념은 다음과 같이 요약됩니다. GitHub의 원격 레포지토리에서 코드를 " "다운로드하고 로컬에서 변경한 후 Git을 사용하여 추적한 다음 새 기록을 다시 GitHub에 업로드하는 것입니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:23 +#: ../../source/contributor-tutorial-contribute-on-github.rst:42 msgid "**Forking the Flower repository**" msgstr "**Flower 레포지토리 포크하기**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:24 +#: ../../source/contributor-tutorial-contribute-on-github.rst:32 +#, fuzzy msgid "" "A fork is a personal copy of a GitHub repository. To create one for " -"Flower, you must navigate to ``_ (while " +"Flower, you must navigate to https://github.com/adap/flower (while " "connected to your GitHub account) and click the ``Fork`` button situated " "on the top right of the page." msgstr "" @@ -1524,7 +1529,7 @@ msgstr "" "``_로 이동하여(GitHub 계정에 연결된 상태에서) 페이지 오른쪽 " "상단에 있는 ``포크`` 버튼을 클릭해야 합니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:29 +#: ../../source/contributor-tutorial-contribute-on-github.rst:38 msgid "" "You can change the name if you want, but this is not necessary as this " "version of Flower will be yours and will sit inside your own account " @@ -1534,11 +1539,11 @@ msgstr "" "원하는 경우 이름을 변경할 수 있지만, 이 버전의 Flower는 자신의 계정(즉, 자신의 리포지토리 목록)에 위치하게 되므로 변경할" " 필요는 없습니다. 만들기가 완료되면 왼쪽 상단에Flower 버전이 표시되는 것을 볼 수 있습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:34 +#: ../../source/contributor-tutorial-contribute-on-github.rst:59 msgid "**Cloning your forked repository**" msgstr "**포크된 레포지토리 클론하기**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:35 +#: ../../source/contributor-tutorial-contribute-on-github.rst:45 msgid "" "The next step is to download the forked repository on your machine to be " "able to make changes to it. On your forked repository page, you should " @@ -1548,27 +1553,27 @@ msgstr "" "다음 단계는 컴퓨터에서 포크된 레포지토리를 변경할 수 있도록 다운로드하는 것입니다. 포크된 포지토리 페이지에서 먼저 오른쪽의 " "``Code`` 버튼을 클릭하면 레포지토리의 HTTPS 링크를 복사할 수 있습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:41 +#: ../../source/contributor-tutorial-contribute-on-github.rst:52 msgid "" "Once you copied the \\, you can open a terminal on your machine, " "navigate to the place you want to download the repository to and type:" msgstr "\\를 복사한 후에는 컴퓨터에서 터미널을 열고 레포지토리를 다운로드할 위치로 이동하여 입력하면 됩니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:47 +#: ../../source/contributor-tutorial-contribute-on-github.rst:59 msgid "" "This will create a ``flower/`` (or the name of your fork if you renamed " "it) folder in the current working directory." msgstr "현재 작업 디렉터리에``flower/``(또는 포크 이름을 변경한 경우 포크 이름) 폴더가 생성됩니다." 
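The clone command itself is elided from this excerpt; a plausible form of it, with a placeholder GitHub username, is:

    # <username> stands for your own GitHub account (the owner of the fork)
    git clone https://github.com/<username>/flower.git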
-#: ../../source/contributor-tutorial-contribute-on-github.rst:49 +#: ../../source/contributor-tutorial-contribute-on-github.rst:78 msgid "**Add origin**" msgstr "**origin 추가**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:50 +#: ../../source/contributor-tutorial-contribute-on-github.rst:62 msgid "You can then go into the repository folder:" msgstr "그런 다음 레포지토리 폴더로 이동할 수 있습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:56 +#: ../../source/contributor-tutorial-contribute-on-github.rst:68 msgid "" "And here we will need to add an origin to our repository. The origin is " "the \\ of the remote fork repository. To obtain it, we can do as " @@ -1578,27 +1583,27 @@ msgstr "" "여기에 레포지토리에 origin을 추가해야 합니다. origin은 원격 포크 레포지토리의 \\입니다. origin을 " "얻으려면 앞서 설명한 대로 GitHub 계정의 포크 레포지토리로 이동하여 링크를 복사하면 됩니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:61 +#: ../../source/contributor-tutorial-contribute-on-github.rst:75 msgid "" "Once the \\ is copied, we can type the following command in our " "terminal:" msgstr "\\ 이 복사되면 터미널에 다음 명령을 입력하면 됩니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:68 +#: ../../source/contributor-tutorial-contribute-on-github.rst:102 msgid "**Add upstream**" msgstr "**Upstream 추가하기**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:69 +#: ../../source/contributor-tutorial-contribute-on-github.rst:81 msgid "" "Now we will add an upstream address to our repository. Still in the same " "directory, we must run the following command:" msgstr "이제 레포지토리에 upstream 주소를 추가하겠습니다. 여전히 같은 디렉터리에서 다음 명령을 실행해야 합니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:76 +#: ../../source/contributor-tutorial-contribute-on-github.rst:88 msgid "The following diagram visually explains what we did in the previous steps:" msgstr "다음 다이어그램은 이전 단계에서 수행한 작업을 시각적으로 설명합니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:80 +#: ../../source/contributor-tutorial-contribute-on-github.rst:92 msgid "" "The upstream is the GitHub remote address of the parent repository (in " "this case Flower), i.e. the one we eventually want to contribute to and " @@ -1610,17 +1615,17 @@ msgstr "" " 기록이 필요한 레포지토리입니다. origin은 우리가 만든 포크된 레포지토리의 GitHub 원격 주소, 즉 우리 계정에 있는 " "사본(포크)입니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:84 +#: ../../source/contributor-tutorial-contribute-on-github.rst:97 msgid "" "To make sure our local version of the fork is up-to-date with the latest " "changes from the Flower repository, we can execute the following command:" msgstr "로컬 버전의 포크가 Flower 레포지토리의 최신 변경 사항으로 최신 상태인지 확인하려면 다음 명령을 실행하면 됩니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:93 +#: ../../source/contributor-tutorial-contribute-on-github.rst:105 msgid "Setting up the coding environment" msgstr "코딩 환경 설정" -#: ../../source/contributor-tutorial-contribute-on-github.rst:95 +#: ../../source/contributor-tutorial-contribute-on-github.rst:107 msgid "" "This can be achieved by following this :doc:`getting started guide for " "contributors ` (note " @@ -1631,50 +1636,50 @@ msgstr "" "as-a-contributor>'를 참조하세요(레포지토리를 복제할 필요는 없습니다). 코드를 작성하고 테스트할 수 있게 되면 드디어" " 변경을 시작할 수 있습니다!" 
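The remote setup described in the entries above boils down to two addresses. The literal commands are not part of this excerpt, so the following is a plausible reconstruction rather than the guide's exact text:

    cd flower                                                    # enter the cloned repository
    # 'origin' is the URL of your fork; git clone usually configures it already
    git remote add origin https://github.com/<username>/flower.git
    # 'upstream' is the parent Flower repository you eventually contribute to
    git remote add upstream https://github.com/adap/flower.git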
-#: ../../source/contributor-tutorial-contribute-on-github.rst:100 +#: ../../source/contributor-tutorial-contribute-on-github.rst:113 msgid "Making changes" msgstr "변경하기" -#: ../../source/contributor-tutorial-contribute-on-github.rst:102 +#: ../../source/contributor-tutorial-contribute-on-github.rst:115 msgid "" "Before making any changes make sure you are up-to-date with your " "repository:" msgstr "변경하기 전에 레포지토리를 최신 상태로 유지하세요:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:108 +#: ../../source/contributor-tutorial-contribute-on-github.rst:121 msgid "And with Flower's repository:" msgstr "Flower의 레포지토리도 있습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:114 +#: ../../source/contributor-tutorial-contribute-on-github.rst:134 msgid "**Create a new branch**" msgstr "**새 브랜치 만들기**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:115 +#: ../../source/contributor-tutorial-contribute-on-github.rst:128 msgid "" "To make the history cleaner and easier to work with, it is good practice " "to create a new branch for each feature/project that needs to be " "implemented." msgstr "히스토리를 더 깔끔하고 작업하기 쉽게 만들려면 구현해야 하는 각 기능/프로젝트에 대해 새 브랜치를 만드는 것이 좋습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:118 +#: ../../source/contributor-tutorial-contribute-on-github.rst:131 msgid "" "To do so, just run the following command inside the repository's " "directory:" msgstr "이렇게 하려면 레포지토리 디렉토리에서 다음 명령을 실행하면 됩니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:124 +#: ../../source/contributor-tutorial-contribute-on-github.rst:136 msgid "**Make changes**" msgstr "**변경하기**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:125 +#: ../../source/contributor-tutorial-contribute-on-github.rst:137 msgid "Write great code and create wonderful changes using your favorite editor!" msgstr "선호하는 편집기를 사용하여 멋진 코드를 작성하고 훌륭한 변화를 만들어 보세요!" -#: ../../source/contributor-tutorial-contribute-on-github.rst:127 +#: ../../source/contributor-tutorial-contribute-on-github.rst:149 msgid "**Test and format your code**" msgstr "**코드 테스트 및 서식 지정**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:128 +#: ../../source/contributor-tutorial-contribute-on-github.rst:139 msgid "" "Don't forget to test and format your code! Otherwise your code won't be " "able to be merged into the Flower repository. This is done so the " @@ -1683,57 +1688,60 @@ msgstr "" "코드를 테스트하고 서식을 지정하는 것을 잊지 마세요! 그렇지 않으면 코드를 Flower 레포지토리에 병합할 수 없습니다. 이는 " "코드베이스가 일관성을 유지하고 이해하기 쉽도록 하기 위한 것입니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:131 +#: ../../source/contributor-tutorial-contribute-on-github.rst:143 msgid "To do so, we have written a few scripts that you can execute:" msgstr "이를 위해 실행할 수 있는 몇 가지 스크립트를 작성했습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:140 +#: ../../source/contributor-tutorial-contribute-on-github.rst:162 msgid "**Stage changes**" msgstr "**변경사항 스테이징**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:141 +#: ../../source/contributor-tutorial-contribute-on-github.rst:152 msgid "" "Before creating a commit that will update your history, you must specify " "to Git which files it needs to take into account." msgstr "기록을 업데이트할 커밋을 만들기 전에 어떤 파일을 고려해야 하는지 Git에 지정해야 합니다." 
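A sketch of the update, branch, edit, and check loop covered by these entries (the branch name is illustrative, and the helper script names are assumptions, since the literal commands are not included in this excerpt):

    git pull origin main        # bring your fork's main branch up to date
    git pull upstream main      # and sync it with the Flower repository
    git checkout -b doc-fixes   # one branch per feature/fix; the name is illustrative
    # ...make your changes with your favourite editor...
    ./dev/format.sh             # hypothetical formatting helper
    ./dev/test.sh               # hypothetical test helper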
-#: ../../source/contributor-tutorial-contribute-on-github.rst:143 +#: ../../source/contributor-tutorial-contribute-on-github.rst:155 msgid "This can be done with:" msgstr "이 작업을 수행할 수 있습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:149 +#: ../../source/contributor-tutorial-contribute-on-github.rst:161 +#, fuzzy msgid "" "To check which files have been modified compared to the last version " "(last commit) and to see which files are staged for commit, you can use " -"the :code:`git status` command." +"the ``git status`` command." msgstr "" "마지막 버전(마지막 커밋)과 비교하여 수정된 파일을 확인하고 커밋을 위해 스테이징된 파일을 확인하려면 :code:`git " "status` 명령을 사용하면 됩니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:152 +#: ../../source/contributor-tutorial-contribute-on-github.rst:173 msgid "**Commit changes**" msgstr "**변경사항 커밋**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:153 +#: ../../source/contributor-tutorial-contribute-on-github.rst:165 +#, fuzzy msgid "" -"Once you have added all the files you wanted to commit using :code:`git " -"add`, you can finally create your commit using this command:" +"Once you have added all the files you wanted to commit using ``git add``," +" you can finally create your commit using this command:" msgstr ":code:`git add`를 사용하여 커밋하려는 모든 파일을 추가한 후, 마지막으로 이 명령을 사용하여 커밋을 생성할 수 있습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:159 +#: ../../source/contributor-tutorial-contribute-on-github.rst:172 +#, fuzzy msgid "" "The \\ is there to explain to others what the commit " "does. It should be written in an imperative style and be concise. An " -"example would be :code:`git commit -m \"Add images to README\"`." +"example would be ``git commit -m \"Add images to README\"``." msgstr "" "커밋의 내용을 다른 사람에게 설명하기 위해 \\가 있습니다. 명령형 스타일로 작성해야 하며 간결해야" " 합니다. 예를 들면 :code:`git commit -m \"Add images to README\"`." -#: ../../source/contributor-tutorial-contribute-on-github.rst:162 +#: ../../source/contributor-tutorial-contribute-on-github.rst:185 msgid "**Push the changes to the fork**" msgstr "**변경 사항을 포크에 푸시**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:163 +#: ../../source/contributor-tutorial-contribute-on-github.rst:176 msgid "" "Once we have committed our changes, we have effectively updated our local" " history, but GitHub has no way of knowing this unless we push our " @@ -1742,41 +1750,41 @@ msgstr "" "변경 사항을 커밋하면 로컬 히스토리를 효과적으로 업데이트한 것이지만, 변경 사항을 원본의 원격 주소로 푸시하지 않는 한 " "GitHub는 이를 알 방법이 없습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:170 +#: ../../source/contributor-tutorial-contribute-on-github.rst:184 msgid "" "Once this is done, you will see on the GitHub that your forked repo was " "updated with the changes you have made." msgstr "이 작업이 완료되면 변경한 내용으로 포크된 레포지토리가 업데이트된 것을 GitHub에서 확인할 수 있습니다." 
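Putting the staging, commit, and push steps together (file and branch names are placeholders; the commit message is the example given above):

    git add README.md                      # stage the files you changed
    git status                             # confirm what is staged for the commit
    git commit -m "Add images to README"   # imperative, concise message
    git push origin doc-fixes              # upload the new history to your fork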
-#: ../../source/contributor-tutorial-contribute-on-github.rst:174 +#: ../../source/contributor-tutorial-contribute-on-github.rst:188 msgid "Creating and merging a pull request (PR)" msgstr "pull request(PR) 만들기 및 병합하기" -#: ../../source/contributor-tutorial-contribute-on-github.rst:176 +#: ../../source/contributor-tutorial-contribute-on-github.rst:226 msgid "**Create the PR**" msgstr "**PR 만들기**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:177 +#: ../../source/contributor-tutorial-contribute-on-github.rst:191 msgid "" "Once you have pushed changes, on the GitHub webpage of your repository " "you should see the following message:" msgstr "변경 사항을 푸시하고 나면 레포지토리의 GitHub 웹페이지에 다음 메시지가 표시됩니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:181 +#: ../../source/contributor-tutorial-contribute-on-github.rst:196 msgid "Otherwise you can always find this option in the ``Branches`` page." msgstr "그렇지 않으면 언제든지 ``Branches`` 페이지에서 이 옵션을 찾을 수 있습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:183 +#: ../../source/contributor-tutorial-contribute-on-github.rst:198 msgid "" "Once you click the ``Compare & pull request`` button, you should see " "something similar to this:" msgstr "``Compare & pull request`` 버튼을 클릭하면 이와 비슷한 화면이 표시됩니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:187 +#: ../../source/contributor-tutorial-contribute-on-github.rst:203 msgid "At the top you have an explanation of which branch will be merged where:" msgstr "상단에는 어느 지점이 어디에 병합될 것인지에 대한 설명이 있습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:191 +#: ../../source/contributor-tutorial-contribute-on-github.rst:207 msgid "" "In this example you can see that the request is to merge the branch " "``doc-fixes`` from my forked repository to branch ``main`` from the " @@ -1785,7 +1793,7 @@ msgstr "" "이 예제에서는 내 포크된 레포지토리의 ``doc-fixes`` 브랜치를 Flower 레포지토리의 ``main`` 브랜치에 병합하라는" " 요청을 볼 수 있습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:193 +#: ../../source/contributor-tutorial-contribute-on-github.rst:210 msgid "" "The title should be changed to adhere to the :ref:`pr_title_format` " "guidelines, otherwise it won't be possible to merge the PR. So in this " @@ -1794,7 +1802,7 @@ msgstr "" "제목은 :ref:`pr_title_format` 가이드라인을 준수하도록 변경해야 하며, 그렇지 않으면 PR을 병합할 수 없습니다. " "따라서 이 경우 올바른 제목은 ``docs(framework:skip) Fix typos``이 될 수 있습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:196 +#: ../../source/contributor-tutorial-contribute-on-github.rst:214 msgid "" "The input box in the middle is there for you to describe what your PR " "does and to link it to existing issues. We have placed comments (that " @@ -1804,11 +1812,11 @@ msgstr "" "가운데에 있는 입력 상자는 PR의 기능을 설명하고 기존 이슈에 연결할 수 있는 곳입니다. 프로세스를 안내하기 위해 코멘트(PR이 " "열리면 렌더링되지 않음)를 배치했습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:199 +#: ../../source/contributor-tutorial-contribute-on-github.rst:218 msgid "It is important to follow the instructions described in comments." msgstr "코멘트에 설명된 지침을 따르는 것이 중요합니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:201 +#: ../../source/contributor-tutorial-contribute-on-github.rst:220 msgid "" "At the bottom you will find the button to open the PR. This will notify " "reviewers that a new PR has been opened and that they should look over it" @@ -1817,94 +1825,94 @@ msgstr "" "하단에는 PR을 여는 버튼이 있습니다. 이렇게 하면 검토자에게 새 PR이 열렸으며 병합하거나 변경을 요청하기 위해 검토해야 함을 " "알립니다." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:204 +#: ../../source/contributor-tutorial-contribute-on-github.rst:224 msgid "" "If your PR is not yet ready for review, and you don't want to notify " "anyone, you have the option to create a draft pull request:" msgstr "PR이 아직 검토할 준비가 되지 않았고 다른 사람에게 알리고 싶지 않은 경우 pull request 초안을 만드는 옵션이 있습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:208 +#: ../../source/contributor-tutorial-contribute-on-github.rst:230 msgid "**Making new changes**" msgstr "**new changes 만들기**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:209 +#: ../../source/contributor-tutorial-contribute-on-github.rst:229 msgid "" "Once the PR has been opened (as draft or not), you can still push new " "commits to it the same way we did before, by making changes to the branch" " associated with the PR." msgstr "PR이 초안으로 열렸든 아니든, PR과 연결된 브랜치를 변경하여 이전과 같은 방식으로 새 커밋을 푸시할 수 있습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:211 +#: ../../source/contributor-tutorial-contribute-on-github.rst:253 msgid "**Review the PR**" msgstr "**PR 검토하기**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:212 +#: ../../source/contributor-tutorial-contribute-on-github.rst:233 msgid "" "Once the PR has been opened or once the draft PR has been marked as " "ready, a review from code owners will be automatically requested:" msgstr "PR이 열리거나 초안 PR이 준비됨으로 표시되면 코드 소유자의 검토가 자동으로 요청됩니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:216 +#: ../../source/contributor-tutorial-contribute-on-github.rst:238 msgid "" "Code owners will then look into the code, ask questions, request changes " "or validate the PR." msgstr "그러면 코드 소유자는 코드를 살펴보고, 질문하고, 변경을 요청하거나 PR의 유효성을 검사합니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:218 +#: ../../source/contributor-tutorial-contribute-on-github.rst:241 msgid "Merging will be blocked if there are ongoing requested changes." msgstr "진행 중인 변경 요청이 있는 경우 병합이 차단됩니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:222 +#: ../../source/contributor-tutorial-contribute-on-github.rst:245 msgid "" "To resolve them, just push the necessary changes to the branch associated" " with the PR:" msgstr "이를 해결하려면 PR과 연결된 브랜치에 필요한 변경 사항을 푸시하면 됩니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:226 +#: ../../source/contributor-tutorial-contribute-on-github.rst:250 msgid "And resolve the conversation:" msgstr "그리고 소통을 통해 해결하세요:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:230 +#: ../../source/contributor-tutorial-contribute-on-github.rst:254 msgid "" "Once all the conversations have been resolved, you can re-request a " "review." msgstr "모든 대화가 해결되면 검토를 다시 요청할 수 있습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:233 +#: ../../source/contributor-tutorial-contribute-on-github.rst:274 msgid "**Once the PR is merged**" msgstr "**PR이 병합되면**" -#: ../../source/contributor-tutorial-contribute-on-github.rst:234 +#: ../../source/contributor-tutorial-contribute-on-github.rst:256 msgid "" "If all the automatic tests have passed and reviewers have no more changes" " to request, they can approve the PR and merge it." msgstr "모든 자동 테스트가 통과되고 검토자가 더 이상 요청할 변경 사항이 없는 경우 PR을 승인하고 병합할 수 있습니다." 
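The post-merge clean-up described in the entries that follow looks roughly like this (the commands themselves are elided from this excerpt, so treat this as a reconstruction):

    git checkout main
    git branch -d doc-fixes    # delete the merged branch locally (name is illustrative)
    git pull upstream main     # bring the merged changes into your local main
    git push origin main       # and update your fork on GitHub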
-#: ../../source/contributor-tutorial-contribute-on-github.rst:238 +#: ../../source/contributor-tutorial-contribute-on-github.rst:261 msgid "" "Once it is merged, you can delete the branch on GitHub (a button should " "appear to do so) and also delete it locally by doing:" msgstr "병합이 완료되면 GitHub에서 브랜치를 삭제할 수 있으며(삭제 버튼이 표시되어야 함), 로컬에서도 삭제할 수 있습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:245 +#: ../../source/contributor-tutorial-contribute-on-github.rst:269 msgid "Then you should update your forked repository by doing:" msgstr "그런 다음 다음을 수행하여 포크된 레포지토리를 업데이트해야 합니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:254 +#: ../../source/contributor-tutorial-contribute-on-github.rst:277 msgid "Example of first contribution" msgstr "첫 번째 기여의 예" -#: ../../source/contributor-tutorial-contribute-on-github.rst:257 +#: ../../source/contributor-tutorial-contribute-on-github.rst:280 msgid "Problem" msgstr "문제" -#: ../../source/contributor-tutorial-contribute-on-github.rst:259 +#: ../../source/contributor-tutorial-contribute-on-github.rst:282 msgid "" "For our documentation, we've started to use the `Diàtaxis framework " "`_." msgstr "저희 문서에는 'Diàtaxis 프레임워크 `_'를 사용하기 시작했습니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:261 +#: ../../source/contributor-tutorial-contribute-on-github.rst:285 msgid "" "Our \"How to\" guides should have titles that continue the sentence \"How" " to …\", for example, \"How to upgrade to Flower 1.0\"." @@ -1912,19 +1920,19 @@ msgstr "" "'How to' 가이드의 제목은 \"How to …\"라는 문장을 이어가는 제목이어야 합니다(예: \"How to upgrade " "to Flower 1.0\")." -#: ../../source/contributor-tutorial-contribute-on-github.rst:263 +#: ../../source/contributor-tutorial-contribute-on-github.rst:288 msgid "" "Most of our guides do not follow this new format yet, and changing their " "title is (unfortunately) more involved than one might think." msgstr "대부분의 가이드는 아직 이 새로운 형식을 따르지 않으며, 안타깝게도 제목을 변경하는 작업은 생각보다 복잡합니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:265 +#: ../../source/contributor-tutorial-contribute-on-github.rst:291 msgid "" "This issue is about changing the title of a doc from present continuous " "to present simple." msgstr "이번 이슈는 문서 제목을 현재 연속형에서 현재 단순형으로 변경하는 것에 관한 것입니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:267 +#: ../../source/contributor-tutorial-contribute-on-github.rst:294 msgid "" "Let's take the example of \"Saving Progress\" which we changed to \"Save " "Progress\". Does this pass our check?" @@ -1932,19 +1940,19 @@ msgstr "" "\"How to saving progress\"을 \"How to save progress\"으로 변경한 예를 들어 보겠습니다. " "이것이 우리의 점검을 통과했나요?" -#: ../../source/contributor-tutorial-contribute-on-github.rst:269 +#: ../../source/contributor-tutorial-contribute-on-github.rst:297 msgid "Before: \"How to saving progress\" ❌" msgstr "Before: \"How to saving progress\" ❌" -#: ../../source/contributor-tutorial-contribute-on-github.rst:271 +#: ../../source/contributor-tutorial-contribute-on-github.rst:299 msgid "After: \"How to save progress\" ✅" msgstr "After: \"How to save progress\" ✅" -#: ../../source/contributor-tutorial-contribute-on-github.rst:274 +#: ../../source/contributor-tutorial-contribute-on-github.rst:302 msgid "Solution" msgstr "해결법" -#: ../../source/contributor-tutorial-contribute-on-github.rst:276 +#: ../../source/contributor-tutorial-contribute-on-github.rst:304 msgid "" "This is a tiny change, but it'll allow us to test your end-to-end setup. 
" "After cloning and setting up the Flower repo, here's what you should do:" @@ -1952,17 +1960,17 @@ msgstr "" "이것은 사소한 변경이지만 end-to-end 설정을 테스트할 수 있습니다. Flower 레포지토리를 복제하고 설정한 후에는 다음과 " "같이 하세요:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:278 +#: ../../source/contributor-tutorial-contribute-on-github.rst:307 msgid "Find the source file in ``doc/source``" msgstr "``doc/source``에서 소스 파일을 찾습니다" -#: ../../source/contributor-tutorial-contribute-on-github.rst:279 +#: ../../source/contributor-tutorial-contribute-on-github.rst:308 msgid "" "Make the change in the ``.rst`` file (beware, the dashes under the title " "should be the same length as the title itself)" msgstr "``.rst`` 파일에서 변경합니다(제목 아래의 대시는 제목 자체의 길이와 같아야 합니다)" -#: ../../source/contributor-tutorial-contribute-on-github.rst:280 +#: ../../source/contributor-tutorial-contribute-on-github.rst:310 msgid "" "Build the docs and `check the result `_" @@ -1970,11 +1978,11 @@ msgstr "" "문서를 빌드하고 '결과 확인 `_'합니다" -#: ../../source/contributor-tutorial-contribute-on-github.rst:283 +#: ../../source/contributor-tutorial-contribute-on-github.rst:314 msgid "Rename file" msgstr "파일 이름 바꾸기" -#: ../../source/contributor-tutorial-contribute-on-github.rst:285 +#: ../../source/contributor-tutorial-contribute-on-github.rst:316 msgid "" "You might have noticed that the file name still reflects the old wording." " If we just change the file, then we break all existing links to it - it " @@ -1984,19 +1992,19 @@ msgstr "" "파일 이름에 여전히 이전 문구가 반영되어 있는 것을 보셨을 것입니다. 파일만 변경하면 파일에 대한 기존 링크가 모두 끊어지는데, " "링크를 끊으면 검색 엔진 순위에 영향을 줄 수 있으므로 이를 방지하는 것이 **매우 중요**합니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:288 +#: ../../source/contributor-tutorial-contribute-on-github.rst:320 msgid "Here's how to change the file name:" msgstr "파일 이름을 변경하는 방법은 다음과 같습니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:290 +#: ../../source/contributor-tutorial-contribute-on-github.rst:322 msgid "Change the file name to ``save-progress.rst``" msgstr "파일 이름을 ``save-progress.rst``로 변경합니다" -#: ../../source/contributor-tutorial-contribute-on-github.rst:291 +#: ../../source/contributor-tutorial-contribute-on-github.rst:323 msgid "Add a redirect rule to ``doc/source/conf.py``" msgstr "'doc/source/conf.py'에 리디렉션 규칙을 추가합니다" -#: ../../source/contributor-tutorial-contribute-on-github.rst:293 +#: ../../source/contributor-tutorial-contribute-on-github.rst:325 msgid "" "This will cause a redirect from ``saving-progress.html`` to ``save-" "progress.html``, old links will continue to work." @@ -2004,11 +2012,11 @@ msgstr "" "이렇게 하면 ``saving-progress.html``에서 ``save-progress.html``로 리디렉션되며, 이전 링크는 " "계속 작동합니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:296 +#: ../../source/contributor-tutorial-contribute-on-github.rst:329 msgid "Apply changes in the index file" msgstr "인덱스 파일에 변경 사항 적용" -#: ../../source/contributor-tutorial-contribute-on-github.rst:298 +#: ../../source/contributor-tutorial-contribute-on-github.rst:331 msgid "" "For the lateral navigation bar to work properly, it is very important to " "update the ``index.rst`` file as well. This is where we define the whole " @@ -2017,39 +2025,39 @@ msgstr "" "횡방향 내비게이션 바가 제대로 작동하려면 ``index.rst`` 파일도 업데이트하는 것이 매우 중요합니다. 이 파일은 탐색 모음의" " 전체 배열을 정의하는 곳입니다." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:301 +#: ../../source/contributor-tutorial-contribute-on-github.rst:335 msgid "Find and modify the file name in ``index.rst``" msgstr "``index.rst``에서 파일 이름을 찾아 수정합니다" -#: ../../source/contributor-tutorial-contribute-on-github.rst:304 +#: ../../source/contributor-tutorial-contribute-on-github.rst:338 msgid "Open PR" msgstr "PR 열기" -#: ../../source/contributor-tutorial-contribute-on-github.rst:306 +#: ../../source/contributor-tutorial-contribute-on-github.rst:340 msgid "" "Commit the changes (commit messages are always imperative: \"Do " "something\", in this case \"Change …\")" msgstr "변경 사항을 커밋합니다(커밋 메시지는 항상 필수 메시지입니다:\"Do something\"(이 경우 는 \"Change …\" )" -#: ../../source/contributor-tutorial-contribute-on-github.rst:307 +#: ../../source/contributor-tutorial-contribute-on-github.rst:342 msgid "Push the changes to your fork" msgstr "변경 사항을 포크에 푸시합니다" -#: ../../source/contributor-tutorial-contribute-on-github.rst:308 +#: ../../source/contributor-tutorial-contribute-on-github.rst:343 msgid "" "Open a PR (as shown above) with title ``docs(framework) Update how-to " "guide title``" msgstr "``docs(framework) Update how-to guide title`` 제목으로 PR(위와 같이)을 엽니다" -#: ../../source/contributor-tutorial-contribute-on-github.rst:309 +#: ../../source/contributor-tutorial-contribute-on-github.rst:344 msgid "Wait for it to be approved!" msgstr "승인될 때까지 기다리세요!" -#: ../../source/contributor-tutorial-contribute-on-github.rst:310 +#: ../../source/contributor-tutorial-contribute-on-github.rst:345 msgid "Congrats! 🥳 You're now officially a Flower contributor!" msgstr "축하합니다! 이제 공식적으로 Flower 기여자가 되셨습니다!" -#: ../../source/contributor-tutorial-contribute-on-github.rst:314 +#: ../../source/contributor-tutorial-contribute-on-github.rst:348 #: ../../source/tutorial-series-build-a-strategy-from-scratch-pytorch.ipynb:573 #: ../../source/tutorial-series-customize-the-client-pytorch.ipynb:1012 #: ../../source/tutorial-series-get-started-with-flower-pytorch.ipynb:811 @@ -2058,41 +2066,42 @@ msgstr "축하합니다! 이제 공식적으로 Flower 기여자가 되셨습니 msgid "Next steps" msgstr "다음 단계" -#: ../../source/contributor-tutorial-contribute-on-github.rst:316 +#: ../../source/contributor-tutorial-contribute-on-github.rst:350 msgid "" "Once you have made your first PR, and want to contribute more, be sure to" " check out the following :" msgstr "첫 번째 PR을 작성하고 더 많은 기여를 하고 싶다면 다음을 확인하세요:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:318 +#: ../../source/contributor-tutorial-contribute-on-github.rst:353 +#, fuzzy msgid "" ":doc:`Good first contributions `, where you should particularly look into the " -":code:`baselines` contributions." +"``baselines`` contributions." msgstr "" ":doc:`훌륭한 첫 번째 기여 `, 특히 " ":code:`baselines` 기여를 살펴봐야 합니다." 
-#: ../../source/contributor-tutorial-contribute-on-github.rst:322 +#: ../../source/contributor-tutorial-contribute-on-github.rst:357 #: ../../source/fed/0000-20200102-fed-template.md:60 msgid "Appendix" msgstr "부록" -#: ../../source/contributor-tutorial-contribute-on-github.rst:327 +#: ../../source/contributor-tutorial-contribute-on-github.rst:362 msgid "PR title format" msgstr "PR 제목 형식" -#: ../../source/contributor-tutorial-contribute-on-github.rst:329 +#: ../../source/contributor-tutorial-contribute-on-github.rst:364 msgid "We enforce the following PR title format:" msgstr "다음과 같은 PR 제목 형식을 적용합니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:335 +#: ../../source/contributor-tutorial-contribute-on-github.rst:370 msgid "" "(or ``(:skip) `` to ignore the PR in the " "changelog)" msgstr "(또는 ``(:skip) ``를 사용하면 변경 로그에서 PR을 무시합니다.)" -#: ../../source/contributor-tutorial-contribute-on-github.rst:337 +#: ../../source/contributor-tutorial-contribute-on-github.rst:372 msgid "" "Where ```` needs to be in ``{ci, fix, feat, docs, refactor, " "break}``, ```` should be in ``{framework, baselines, datasets, " @@ -2105,49 +2114,49 @@ msgstr "" "':skip' 플래그를 사용해야 하는 여러 프로젝트를 수정하는 경우}``로 입력해야 하며, ````는 대문자로 " "시작해야 합니다." -#: ../../source/contributor-tutorial-contribute-on-github.rst:341 +#: ../../source/contributor-tutorial-contribute-on-github.rst:377 msgid "Valid examples:" msgstr "유효한 예시입니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:343 +#: ../../source/contributor-tutorial-contribute-on-github.rst:379 msgid "``feat(framework) Add flwr build CLI command``" msgstr "``feat(framework) Add flwr build CLI command``" -#: ../../source/contributor-tutorial-contribute-on-github.rst:344 +#: ../../source/contributor-tutorial-contribute-on-github.rst:380 msgid "``refactor(examples:skip) Improve quickstart-pytorch logging``" msgstr "``refactor(examples:skip) Improve quickstart-pytorch logging``" -#: ../../source/contributor-tutorial-contribute-on-github.rst:345 +#: ../../source/contributor-tutorial-contribute-on-github.rst:381 msgid "``ci(*:skip) Enforce PR title format``" msgstr "``ci(*:skip) Enforce PR title format``" -#: ../../source/contributor-tutorial-contribute-on-github.rst:347 +#: ../../source/contributor-tutorial-contribute-on-github.rst:383 msgid "Invalid examples:" msgstr "잘못된 예시입니다:" -#: ../../source/contributor-tutorial-contribute-on-github.rst:349 +#: ../../source/contributor-tutorial-contribute-on-github.rst:385 msgid "``feat(framework): Add flwr build CLI command`` (extra ``:``)" msgstr "``feat(framework): Add flwr build CLI command`` ( ``:``제외)" -#: ../../source/contributor-tutorial-contribute-on-github.rst:350 +#: ../../source/contributor-tutorial-contribute-on-github.rst:386 msgid "" "``feat(*) Add flwr build CLI command`` (missing ``skip`` flag along with " "``*``)" msgstr "``feat(*) Add flwr build CLI command`` (``skip`` flag와 함께 ``*``누락)" -#: ../../source/contributor-tutorial-contribute-on-github.rst:351 +#: ../../source/contributor-tutorial-contribute-on-github.rst:387 msgid "``feat(skip) Add flwr build CLI command`` (missing ````)" msgstr "``feat(skip) Add flwr build CLI command`` (````누락)" -#: ../../source/contributor-tutorial-contribute-on-github.rst:352 +#: ../../source/contributor-tutorial-contribute-on-github.rst:388 msgid "``feat(framework) add flwr build CLI command`` (non capitalised verb)" msgstr "``feat(framework) add flwr build CLI command`` (대문자로 표기되지 않은 동사)" -#: ../../source/contributor-tutorial-contribute-on-github.rst:353 +#: 
../../source/contributor-tutorial-contribute-on-github.rst:389 msgid "``feat(framework) Add flwr build CLI command.`` (dot at the end)" msgstr "``feat(framework) Add flwr build CLI command.`` (끝에 마침표)" -#: ../../source/contributor-tutorial-contribute-on-github.rst:354 +#: ../../source/contributor-tutorial-contribute-on-github.rst:390 msgid "``Add flwr build CLI command.`` (missing ``()``)" msgstr "``Add flwr build CLI command.`` ( ``()``누락)" @@ -2157,7 +2166,9 @@ msgstr "기여자로 시작하기" #: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:5 #: ../../source/docker/run-as-subprocess.rst:11 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:12 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:16 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:18 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:13 #: ../../source/docker/tutorial-quickstart-docker.rst:11 msgid "Prerequisites" msgstr "전제 조건" @@ -2179,8 +2190,9 @@ msgid "(Optional) `pyenv-virtualenv ` msgstr "(선택 사항) `pyenv-virtualenv `_" #: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:12 +#, fuzzy msgid "" -"Flower uses :code:`pyproject.toml` to manage dependencies and configure " +"Flower uses ``pyproject.toml`` to manage dependencies and configure " "development tools (the ones which support it). Poetry is a build tool " "which supports `PEP 517 `_." msgstr "" @@ -2188,11 +2200,11 @@ msgstr "" ":code:`pyproject.toml`을 사용합니다. Poetry는 `PEP 517 " "`_을 지원하는 빌드 도구입니다." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:18 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:17 msgid "Developer Machine Setup" msgstr "개발자 머신 설정" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:21 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:20 #, fuzzy msgid "Preliminaries" msgstr "사전 준비" @@ -2211,105 +2223,112 @@ msgid "" "installation actions to add `brew` to your PATH." msgstr "`homebrew `_를 설치합니다. 설치 후 `brew`를 PATH에 추가하는 작업을 잊지 마세요." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:28 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:29 +#, fuzzy msgid "" "Install `xz` (to install different Python versions) and `pandoc` to build" -" the docs::" +" the docs:" msgstr "xz`(다른 Python 버전을 설치하려면)와 `pandoc`을 설치하여 문서를 빌드합니다::" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:34 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:36 msgid "For Ubuntu" msgstr "Ubuntu의 경우" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:35 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:38 +#, fuzzy msgid "" "Ensure you system (Ubuntu 22.04+) is up-to-date, and you have all " -"necessary packages::" +"necessary packages:" msgstr "시스템(우분투 22.04 이상)이 최신 상태이고 필요한 패키지가 모두 설치되어 있는지 확인하세요:" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:44 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:47 msgid "Create Flower Dev Environment" msgstr "Flower 개발 환경 만들기" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:46 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:49 +#, fuzzy msgid "" -"1. Clone the `Flower repository `_ from " -"GitHub::" +"Clone the `Flower repository `_ from " +"GitHub:" msgstr "1. 
GitHub: 에서 ``Flower 레포지토리 `_를 복제합니다::" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:52 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:56 +#, fuzzy msgid "" "Let's create the Python environment for all-things Flower. If you wish to" -" use :code:`pyenv`, we provide two convenience scripts that you can use. " -"If you prefer using something else than :code:`pyenv`, create a new " -"environment, activate and skip to the last point where all packages are " -"installed." +" use ``pyenv``, we provide two convenience scripts that you can use. If " +"you prefer using something else than ``pyenv``, create a new environment," +" activate and skip to the last point where all packages are installed." msgstr "" "Flower의 모든 것을 위한 파이썬 환경을 만들어 보겠습니다.:code:`pyenv`를 사용하고자 하는 경우 사용할 수 있는 두 " "가지 편의 스크립트를 제공합니다.:code:`pyenv`가 아닌 다른 것을 사용하려면 새 환경을 생성하고 활성화한 후 모든 패키지가" " 설치된 마지막 지점으로 건너뛰세요." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:54 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:61 +#, fuzzy msgid "" -"If you don't have :code:`pyenv` installed, the following script that will" -" install it, set it up, and create the virtual environment (with " -":code:`Python 3.9.20` by default)::" +"If you don't have ``pyenv`` installed, the following script that will " +"install it, set it up, and create the virtual environment (with ``Python " +"3.9.20`` by default):" msgstr "" ":code:`pyenv`가 설치되어 있지 않은 경우 다음 스크립트를 사용하여 설치, 설정 및 가상 환경을 생성합니다(기본적으로 " ":code:`Python 3.9.20` 사용):" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:58 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:68 +#, fuzzy msgid "" -"If you already have :code:`pyenv` installed (along with the :code:`pyenv-" -"virtualenv` plugin), you can use the following convenience script (with " -":code:`Python 3.9.20` by default)::" +"If you already have ``pyenv`` installed (along with the ``pyenv-" +"virtualenv`` plugin), you can use the following convenience script (with " +"``Python 3.9.20`` by default):" msgstr "" ":code:`pyenv`가 이미 설치되어 있는 경우( :code:`pyenv-virtualenv` 플러그인과 함께) 다음과 같은 " "편의 스크립트를 사용할 수 있습니다(기본적으로 코드:`Python 3.9.20` 사용):" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:62 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:75 +#, fuzzy msgid "" -"3. Install the Flower package in development mode (think :code:`pip " -"install -e`) along with all necessary dependencies::" +"3. Install the Flower package in development mode (think ``pip install " +"-e``) along with all necessary dependencies:" msgstr "" "3. 필요한 모든 dependencies와 함께 개발 모드에서 Flower 패키지를 설치합니다(예:code:`pip install " "-e`)::" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:69 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:83 msgid "Convenience Scripts" msgstr "편의 스크립트" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:71 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:85 +#, fuzzy msgid "" "The Flower repository contains a number of convenience scripts to make " -"recurring development tasks easier and less error-prone. See the " -":code:`/dev` subdirectory for a full list. The following scripts are " -"amongst the most important ones:" +"recurring development tasks easier and less error-prone. See the ``/dev``" +" subdirectory for a full list. 
The following scripts are amongst the most" +" important ones:" msgstr "" "Flower 레포지토리에는 반복적인 개발 작업을 더 쉽고 오류를 줄이기 위한 여러 가지 편의 스크립트가 포함되어 있습니다. 전체 " "목록은 :code:`/dev` 하위 디렉터리를 참조하세요. 다음 스크립트는 가장 중요한 스크립트 중 하나입니다:" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:77 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:90 msgid "Create/Delete Virtual Environment" msgstr "가상 환경 생성/삭제" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:85 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:98 msgid "Compile ProtoBuf Definitions" msgstr "ProtoBuf 정의 컴파일" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:92 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:105 msgid "Auto-Format Code" msgstr "자동 포맷 코드" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:99 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:112 msgid "Run Linters and Tests" msgstr "린터 및 테스트 실행" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:106 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:119 msgid "Add a pre-commit hook" msgstr "사전 커밋 훅 추가" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:108 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:121 msgid "" "Developers may integrate a pre-commit hook into their workflow utilizing " "the `pre-commit `_ library. The pre-" @@ -2320,90 +2339,95 @@ msgstr "" " 워크플로에 통합할 수 있습니다. 사전 커밋 훅은 두 가지 기본 작업을 실행하도록 구성됩니다:``./dev/format.sh`` 및" " ``./dev/test.sh`` 스크립트." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:110 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:125 msgid "There are multiple ways developers can use this:" msgstr "개발자가 이것을 사용할 수 있는 여러가지 방법이 있습니다:" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:112 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:127 msgid "Install the pre-commit hook to your local git directory by simply running:" msgstr "간단하게 실행하여 로컬 git 디렉터리에 사전 커밋 훅을 설치하세요:" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:118 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:133 msgid "" "Each ``git commit`` will trigger the execution of formatting and " "linting/test scripts." msgstr "각 ``git 커밋``은 포맷 및 린팅/테스트 스크립트의 실행을 트리거합니다." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:119 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:135 +#, fuzzy msgid "" "If in a hurry, bypass the hook using ``--no-verify`` with the ``git " -"commit`` command. ::" +"commit`` command." msgstr "급한 경우 ``git commit`` 명령과 함께 `--no-verify``를 사용하여 훅을 넘기세요:" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:124 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:142 msgid "" "For developers who prefer not to install the hook permanently, it is " "possible to execute a one-time check prior to committing changes by using" " the following command:" msgstr "훅을 영구적으로 설치하지 않으려는 개발자의 경우 다음 명령을 사용하여 변경 사항을 커밋하기 전에 일회성 검사를 실행할 수 있습니다:" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:130 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:149 msgid "" "This executes the formatting and linting checks/tests on all the files " "without modifying the default behavior of ``git commit``." 
msgstr "이렇게 하면 ``git commit``의 기본 동작을 수정하지 않고 모든 파일에 대해 포맷 및 린팅 검사/테스트를 실행합니다." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:133 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:153 msgid "Run Github Actions (CI) locally" msgstr "로컬에서 Github Action(CI) 실행하기" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:135 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:155 +#, fuzzy msgid "" "Developers could run the full set of Github Actions workflows under their" " local environment by using `Act `_. " "Please refer to the installation instructions under the linked repository" -" and run the next command under Flower main cloned repository folder::" +" and run the next command under Flower main cloned repository folder:" msgstr "" "개발자는 `Act `_를 사용하여 로컬 환경에서 전체 Github " "Actions 워크플로우 세트를 실행할 수 있습니다. 링크된 레포지토리 아래의 설치 지침을 참조하여 Flower 메인 클론 " "레포지토리 폴더 아래에서 다음 명령을 실행하세요::" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:142 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:164 msgid "" "The Flower default workflow would run by setting up the required Docker " "machines underneath." msgstr "Flower 기본 워크플로우는 아래에 필요한 Docker 머신을 설정하여 실행합니다." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:147 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:168 msgid "Build Release" msgstr "릴리즈 빌드" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:149 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:170 +#, fuzzy msgid "" "Flower uses Poetry to build releases. The necessary command is wrapped in" -" a simple script::" +" a simple script:" msgstr "Flower는 Poetry를 사용하여 릴리즈를 빌드합니다. 필요한 명령은 간단한 스크립트로 래핑됩니다::" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:154 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:177 +#, fuzzy msgid "" -"The resulting :code:`.whl` and :code:`.tar.gz` releases will be stored in" -" the :code:`/dist` subdirectory." +"The resulting ``.whl`` and ``.tar.gz`` releases will be stored in the " +"``/dist`` subdirectory." msgstr "결과물인 :code:`.whl` 및 :code:`.tar.gz` 릴리즈는 :code:`/dist` 하위 디렉터리에 저장됩니다." -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:159 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:181 msgid "Build Documentation" msgstr "문서 빌드" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:161 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:183 +#, fuzzy msgid "" "Flower's documentation uses `Sphinx `_. " "There's no convenience script to re-build the documentation yet, but it's" -" pretty easy::" +" pretty easy:" msgstr "" "Flower의 문서는 `Sphinx `_를 사용합니다. 아직 문서를 다시 작성할" " 수 있는 편리한 스크립트는 없지만 다음과 같이 쉽게 작성할 수 있습니다:" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:167 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:191 msgid "This will generate HTML documentation in ``doc/build/html``." msgstr "그러면 ``doc/build/html``에 HTML 문서가 생성됩니다." @@ -2459,7 +2483,7 @@ msgstr "" "사용자 ID를 변경하려면 ``sudo chown -R 49999:49999 certificates/*``를 실행하면 됩니다." 
#: ../../source/docker/enable-tls.rst:23 -#: ../../source/docker/persist-superlink-state.rst:14 +#: ../../source/docker/persist-superlink-state.rst:15 msgid "" "If you later want to delete the directory, you can change the user ID " "back to the current user ID by running ``sudo chown -R $USER:$(id -gn) " @@ -2482,22 +2506,22 @@ msgstr "" msgid "Understanding the command" msgstr "" -#: ../../source/docker/enable-tls.rst:44 ../../source/docker/enable-tls.rst:91 +#: ../../source/docker/enable-tls.rst:45 ../../source/docker/enable-tls.rst:92 #: ../../source/docker/enable-tls.rst:125 #: ../../source/docker/tutorial-quickstart-docker.rst:66 #: ../../source/docker/tutorial-quickstart-docker.rst:103 -#: ../../source/docker/tutorial-quickstart-docker.rst:213 -#: ../../source/docker/tutorial-quickstart-docker.rst:300 +#: ../../source/docker/tutorial-quickstart-docker.rst:217 +#: ../../source/docker/tutorial-quickstart-docker.rst:305 #, fuzzy msgid "``docker run``: This tells Docker to run a container from an image." msgstr "``docker run``: 새 Docker 컨테이너를 실행하는 명령입니다." -#: ../../source/docker/enable-tls.rst:45 ../../source/docker/enable-tls.rst:92 +#: ../../source/docker/enable-tls.rst:46 ../../source/docker/enable-tls.rst:93 #: ../../source/docker/enable-tls.rst:126 #: ../../source/docker/tutorial-quickstart-docker.rst:67 #: ../../source/docker/tutorial-quickstart-docker.rst:104 -#: ../../source/docker/tutorial-quickstart-docker.rst:214 -#: ../../source/docker/tutorial-quickstart-docker.rst:301 +#: ../../source/docker/tutorial-quickstart-docker.rst:218 +#: ../../source/docker/tutorial-quickstart-docker.rst:306 msgid "``--rm``: Remove the container once it is stopped or the command exits." msgstr "" @@ -2602,12 +2626,12 @@ msgstr "" msgid "the network." msgstr "" -#: ../../source/docker/enable-tls.rst:71 +#: ../../source/docker/enable-tls.rst:72 #, fuzzy msgid "SuperNode" msgstr "run\\_supernode" -#: ../../source/docker/enable-tls.rst:73 +#: ../../source/docker/enable-tls.rst:74 #, fuzzy msgid "" "Assuming that the ``ca.crt`` certificate already exists locally, we can " @@ -2618,7 +2642,7 @@ msgstr "" "디렉터리에 마운트할 수 있습니다. 이렇게 하면 SuperNode가 컨테이너 내의 인증서에 액세스할 수 있습니다. 컨테이너를 시작할 " "때 ``--root-certificates`` 플래그를 사용하세요." -#: ../../source/docker/enable-tls.rst:78 +#: ../../source/docker/enable-tls.rst:79 msgid "" "If you're generating self-signed certificates and the ``ca.crt`` " "certificate doesn't exist on the SuperNode, you can copy it over after " @@ -2733,16 +2757,16 @@ msgstr "" msgid "Getting Started" msgstr "시작하기" -#: ../../source/docker/index.rst:20 +#: ../../source/docker/index.rst:19 msgid "Running in Production" msgstr "" -#: ../../source/docker/index.rst:29 +#: ../../source/docker/index.rst:28 #, fuzzy msgid "Advanced Options" msgstr "고급 Docker 옵션" -#: ../../source/docker/index.rst:41 +#: ../../source/docker/index.rst:40 #, fuzzy msgid "Run Flower using Docker Compose" msgstr "Docker를 사용하여 Flower 실행" @@ -2768,7 +2792,7 @@ msgid "" " on your host system and a name for the database file." msgstr "" -#: ../../source/docker/persist-superlink-state.rst:10 +#: ../../source/docker/persist-superlink-state.rst:11 msgid "" "By default, the SuperLink container runs with a non-root user called " "``app`` with the user ID ``49999``. It is recommended to create a new " @@ -2776,7 +2800,7 @@ msgid "" "the mounted directory has the proper permissions." 
msgstr "" -#: ../../source/docker/persist-superlink-state.rst:20 +#: ../../source/docker/persist-superlink-state.rst:21 #, fuzzy msgid "" "In the example below, we create a new directory called ``state``, change " @@ -2789,7 +2813,7 @@ msgstr "" "``state`` 디렉터리를 컨테이너의 ``/app/state`` 디렉터리에 마운트하도록 지시합니다. 또한 " "``--database`` 플래그를 사용하여 데이터베이스 파일의 이름을 지정합니다." -#: ../../source/docker/persist-superlink-state.rst:35 +#: ../../source/docker/persist-superlink-state.rst:36 #, fuzzy msgid "" "As soon as the SuperLink starts, the file ``state.db`` is created in the " @@ -2822,18 +2846,18 @@ msgstr "" "태그 뒤에 있는 이미지가 업데이트될 수 있습니다. 이러한 업데이트에는 일반적으로 Flower의 기능을 변경해서는 안 되는 시스템 " "의존성에 대한 보안 업데이트가 포함됩니다. 그러나 항상 동일한 이미지를 사용하려면 태그 대신 이미지의 해시를 지정할 수 있습니다." -#: ../../source/docker/pin-version.rst:13 +#: ../../source/docker/pin-version.rst:14 #, fuzzy msgid "" "The following command returns the current image digest referenced by the " ":substitution-code:`superlink:|stable_flwr_version|` tag:" msgstr "다음 명령은 ``superlink:1.8.0`` 태그가 참조하는 현재 이미지 해시를 반환합니다:" -#: ../../source/docker/pin-version.rst:22 +#: ../../source/docker/pin-version.rst:23 msgid "This will output" msgstr "" -#: ../../source/docker/pin-version.rst:29 +#: ../../source/docker/pin-version.rst:30 #, fuzzy msgid "Next, we can pin the digest when running a new SuperLink container:" msgstr "다음으로, 새 SuperLink 컨테이너를 실행할 때 해시를 고정할 수 있습니다:" @@ -2890,7 +2914,7 @@ msgstr "" "Docker 이미지 빌드 과정에서 루트 사용자로 전환하여 누락된 시스템 의존성을 설치하려면 Dockerfile 내에서 ``USER " "root`` 지시어를 사용할 수 있습니다." -#: ../../source/docker/run-as-root-user.rst:29 +#: ../../source/docker/run-as-root-user.rst:30 #, fuzzy msgid "SuperNode Dockerfile" msgstr "SuperNode Dockerfile 만들기" @@ -2917,12 +2941,12 @@ msgid "" "done by extending the SuperNode image:" msgstr "" -#: ../../source/docker/run-as-subprocess.rst:16 +#: ../../source/docker/run-as-subprocess.rst:17 #, fuzzy msgid "Dockerfile.supernode" msgstr "Flower SuperNode" -#: ../../source/docker/run-as-subprocess.rst:30 +#: ../../source/docker/run-as-subprocess.rst:31 #, fuzzy msgid "" "Next, build the SuperNode Docker image by running the following command " @@ -2941,247 +2965,660 @@ msgid "" " the SuperNode to execute the ClientApp as a subprocess:" msgstr "" -#: ../../source/docker/set-environment-variables.rst:2 -#, fuzzy -msgid "Set Environment Variables" -msgstr "환경 변수 설정" - -#: ../../source/docker/set-environment-variables.rst:4 -#, fuzzy -msgid "" -"To set a variable inside a Docker container, you can use the ``-e " -"=`` flag. Multiple ``-e`` flags can be used to set multiple " -"environment variables for a container." -msgstr "Docker 컨테이너 내에서 변수를 설정하려면 ``-e =`` 플래그를 사용하면 됩니다." - -#: ../../source/docker/tutorial-quickstart-docker.rst:2 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:2 #, fuzzy -msgid "Quickstart with Docker" +msgid "Run Flower Quickstart Examples with Docker Compose" msgstr "빠른 시작 튜토리얼" -#: ../../source/docker/tutorial-quickstart-docker.rst:4 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:4 msgid "" -"This quickstart aims to guide you through the process of containerizing a" -" Flower project and running it end to end using Docker on your local " -"machine." +"Flower provides a set of `quickstart examples " +"`_ to help you get " +"started with the framework. These examples are designed to demonstrate " +"the capabilities of Flower and by default run using the Simulation " +"Engine. This guide demonstrates how to run them using Flower's Deployment" +" Engine via Docker Compose." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:7 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:12 msgid "" -"This tutorial does not use production-ready settings, so you can focus on" -" understanding the basic workflow that uses the minimum configurations." +"Some quickstart examples may have limitations or requirements that " +"prevent them from running on every environment. For more information, " +"please see Limitations_." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:14 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:18 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:15 #: ../../source/docker/tutorial-quickstart-docker.rst:13 #, fuzzy msgid "Before you start, make sure that:" msgstr "시작하기 전에 Docker daemon이 실행 중인지 확인하세요:" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:16 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:20 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:22 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:17 #: ../../source/docker/tutorial-quickstart-docker.rst:15 msgid "The ``flwr`` CLI is :doc:`installed <../how-to-install-flower>` locally." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:17 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:21 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:18 #: ../../source/docker/tutorial-quickstart-docker.rst:16 #, fuzzy msgid "The Docker daemon is running." msgstr "Docker 데몬이 실행 중인지 확인하십시오." -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:21 -#: ../../source/docker/tutorial-quickstart-docker.rst:19 -msgid "Step 1: Set Up" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:22 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:19 +msgid "Docker Compose is `installed `_." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:31 -#: ../../source/docker/tutorial-quickstart-docker.rst:21 -msgid "Create a new Flower project (PyTorch):" -msgstr "" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:25 +#, fuzzy +msgid "Run the Quickstart Example" +msgstr "예시 요청" -#: ../../source/docker/tutorial-quickstart-docker.rst:39 -msgid "Create a new Docker bridge network called ``flwr-network``:" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:27 +msgid "" +"Clone the quickstart example you like to run. For example, ``quickstart-" +"pytorch``:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:45 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:35 msgid "" -"User-defined networks, such as ``flwr-network``, enable IP resolution of " -"container names, a feature absent in the default bridge network. This " -"simplifies quickstart example by avoiding the need to determine host IP " -"first." 
+"Download the `compose.yml " +"`_" +" file into the example directory:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:50 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:44 #, fuzzy -msgid "Step 2: Start the SuperLink" -msgstr "서버(SuperLink)" +msgid "Build and start the services using the following command:" +msgstr "다음 명령을 실행하여 가상 환경을 활성화합니다:" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:60 -#: ../../source/docker/tutorial-quickstart-docker.rst:52 -msgid "Open your terminal and run:" -msgstr "" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:50 +#, fuzzy +msgid "" +"Append the following lines to the end of the ``pyproject.toml`` file and " +"save it:" +msgstr "``pyproject.toml``에 다음 버전 제약 조건을 설정했는지 확인하세요:" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "Understand the command" -msgstr "" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:52 +#: ../../source/docker/tutorial-quickstart-docker.rst:324 +#, fuzzy +msgid "pyproject.toml" +msgstr "또는 ``pyproject.toml``:" -#: ../../source/docker/tutorial-quickstart-docker.rst +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:61 msgid "" -"``-p 9091:9091 -p 9092:9092``: Map port ``9091`` and ``9092`` of the " -"container to the same port of" +"You can customize the string that follows ``tool.flwr.federations.`` to " +"fit your needs. However, please note that the string cannot contain a dot" +" (``.``)." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "the host machine, allowing other services to access the Driver API on" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:64 +msgid "" +"In this example, ``local-deployment`` has been used. Just remember to " +"replace ``local-deployment`` with your chosen name in both the " +"``tool.flwr.federations.`` string and the corresponding ``flwr run .`` " +"command." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "``http://localhost:9091`` and the Fleet API on ``http://localhost:9092``." +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:68 +#, fuzzy +msgid "Run the example:" +msgstr "전체 코드 예제" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:74 +msgid "Follow the logs of the SuperExec service:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:71 -#: ../../source/docker/tutorial-quickstart-docker.rst:108 -#: ../../source/docker/tutorial-quickstart-docker.rst:215 -#: ../../source/docker/tutorial-quickstart-docker.rst:304 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:80 msgid "" -"``--network flwr-network``: Make the container join the network named " -"``flwr-network``." +"That is all it takes! You can monitor the progress of the run through the" +" logs of the SuperExec." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:72 -msgid "``--name superlink``: Assign the name ``superlink`` to the container." 
+#: ../../source/docker/run-quickstart-examples-docker-compose.rst:84 +msgid "Run a Different Quickstart Example" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:73 -#: ../../source/docker/tutorial-quickstart-docker.rst:110 -#: ../../source/docker/tutorial-quickstart-docker.rst:216 -#: ../../source/docker/tutorial-quickstart-docker.rst:306 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:86 msgid "" -"``--detach``: Run the container in the background, freeing up the " -"terminal." +"To run a different quickstart example, such as ``quickstart-tensorflow``," +" first, shut down the Docker Compose services of the current example:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "" -"tag of the image. The tag :substitution-code:`|stable_flwr_version|` " -"represents a :doc:`specific version ` of the image." +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:93 +msgid "After that, you can repeat the steps above." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "" -"``--insecure``: This flag tells the container to operate in an insecure " -"mode, allowing" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:96 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:102 +#, fuzzy +msgid "Limitations" +msgstr "동기" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:101 +#, fuzzy +msgid "Quickstart Example" +msgstr "빠른 시작" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:103 +#, fuzzy +msgid "quickstart-fastai" +msgstr "빠른 시작 튜토리얼" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:104 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:106 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:115 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:117 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:121 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:123 +#: ../../source/ref-changelog.md:33 ../../source/ref-changelog.md:399 +#: ../../source/ref-changelog.md:676 ../../source/ref-changelog.md:740 +#: ../../source/ref-changelog.md:798 ../../source/ref-changelog.md:867 +#: ../../source/ref-changelog.md:929 +msgid "None" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "unencrypted communication." +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:105 +#, fuzzy +msgid "quickstart-huggingface" +msgstr "빠른 시작 튜토리얼" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:107 +#, fuzzy +msgid "quickstart-jax" +msgstr "빠른 시작" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:108 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:110 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:125 +msgid "" +"The example has not yet been updated to work with the latest ``flwr`` " +"version." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:80 -msgid "Step 3: Start the SuperNode" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:109 +#, fuzzy +msgid "quickstart-mlcube" +msgstr "빠른 시작" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:111 +#, fuzzy +msgid "quickstart-mlx" +msgstr "빠른 시작" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:112 +msgid "" +"`Requires to run on macOS with Apple Silicon `_." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:82 -msgid "Start two SuperNode containers." +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:114 +#, fuzzy +msgid "quickstart-monai" +msgstr "빠른 시작" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:116 +#, fuzzy +msgid "quickstart-pandas" +msgstr "빠른 시작 튜토리얼" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:118 +msgid "quickstart-pytorch-lightning" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:84 -msgid "Start the first container:" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:119 +msgid "" +"Requires an older pip version that is not supported by the Flower Docker " +"images." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "``-p 9094:9094``: Map port ``9094`` of the container to the same port of" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:120 +#, fuzzy +msgid "quickstart-pytorch" +msgstr "빠른 시작 튜토리얼" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:122 +msgid "quickstart-sklearn-tabular" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "the host machine, allowing other services to access the SuperNode API on" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:124 +#, fuzzy +msgid "quickstart-tabnet" +msgstr "빠른 시작 튜토리얼" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:126 +#, fuzzy +msgid "quickstart-tensorflow" +msgstr "빠른 시작 튜토리얼" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:127 +msgid "Only runs on AMD64." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "``http://localhost:9094``." +#: ../../source/docker/set-environment-variables.rst:2 +#, fuzzy +msgid "Set Environment Variables" +msgstr "환경 변수 설정" + +#: ../../source/docker/set-environment-variables.rst:4 +#, fuzzy +msgid "" +"To set a variable inside a Docker container, you can use the ``-e " +"=`` flag. Multiple ``-e`` flags can be used to set multiple " +"environment variables for a container." +msgstr "Docker 컨테이너 내에서 변수를 설정하려면 ``-e =`` 플래그를 사용하면 됩니다." + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:2 +#, fuzzy +msgid "Deploy Flower on Multiple Machines with Docker Compose" +msgstr "빠른 시작 튜토리얼" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:4 +msgid "" +"This guide will help you set up a Flower project on multiple machines " +"using Docker Compose." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:109 -msgid "``--name supernode-1``: Assign the name ``supernode-1`` to the container." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:7 +msgid "" +"You will learn how to run the Flower client and server components on two " +"separate machines, with Flower configured to use TLS encryption and " +"persist SuperLink state across restarts. A server consists of a SuperLink" +" and ``SuperExec``. For more details about the Flower architecture, refer" +" to the :doc:`../explanation-flower-architecture` explainer page." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:13 msgid "" -"``flwr/supernode:|stable_flwr_version|``: This is the name of the image " -"to be run and the specific tag" +"This guide assumes you have completed the :doc:`tutorial-quickstart-" +"docker-compose` tutorial. 
It is highly recommended that you follow and " +"understand the contents of that tutorial before proceeding with this " +"guide." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "of the image." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:20 +msgid "Before you begin, make sure you have the following prerequisites:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:23 +msgid "The Docker daemon is running on your local machine and the remote machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:24 msgid "" -"``--superlink superlink:9092``: Connect to the SuperLink's Fleet API at " -"the address" +"Docker Compose V2 is installed on both your local machine and the remote " +"machine." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "``superlink:9092``." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:25 +msgid "You can connect to the remote machine from your local machine." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "" -"``--node-config \"partition-id=0 num-partitions=2\"``: Set the partition " -"ID to ``0`` and the" +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:26 +msgid "Ports ``9091`` and ``9093`` are accessible on the remote machine." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "number of partitions to ``2`` for the SuperNode configuration." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:30 +msgid "" +"The guide uses the |quickstart_sklearn_tabular|_ example as an example " +"project." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:32 msgid "" -"``--supernode-address 0.0.0.0:9094``: Set the address and port number " -"that the SuperNode" +"If your project has a different name or location, please remember to " +"adjust the commands/paths accordingly." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "is listening on." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:36 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:22 +#: ../../source/docker/tutorial-quickstart-docker.rst:19 +msgid "Step 1: Set Up" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "" -"``--isolation process``: Tells the SuperNode that the ClientApp is " -"created by separate" +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:38 +msgid "Clone the Flower repository and change to the ``distributed`` directory:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst -msgid "independent process. The SuperNode does not attempt to create it." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:45 +msgid "Get the IP address from the remote machine and save it for later." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:124 -msgid "Start the second container:" +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:46 +msgid "" +"Use the ``certs.yml`` Compose file to generate your own self-signed " +"certificates. If you have certificates, you can continue with Step 2." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:142 -msgid "Step 4: Start the ClientApp" +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:51 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:221 +msgid "These certificates should be used only for development purposes." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:144 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:53 msgid "" -"The ClientApp Docker image comes with a pre-installed version of Flower " -"and serves as a base for building your own ClientApp image. In order to " -"install the FAB dependencies, you will need to create a Dockerfile that " -"extends the ClientApp image and installs the required dependencies." +"For production environments, you may have to use dedicated services to " +"obtain your certificates." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:148 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:56 msgid "" -"Create a ClientApp Dockerfile called ``Dockerfile.clientapp`` and paste " -"the following code into it:" +"First, set the environment variables ``SUPERLINK_IP`` and " +"``SUPEREXEC_IP`` with the IP address from the remote machine. For " +"example, if the IP is ``192.168.2.33``, execute:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:150 -#, fuzzy -msgid "Dockerfile.clientapp" -msgstr "flower 클라이언트 앱" +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:65 +msgid "Next, generate the self-signed certificates:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:72 +msgid "Step 2: Copy the Server Compose Files" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:74 +msgid "" +"Use the method that works best for you to copy the ``server`` directory, " +"the certificates, and your Flower project to the remote machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:77 +msgid "For example, you can use ``scp`` to copy the directories:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:87 +msgid "Step 3: Start the Flower Server Components" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:89 +msgid "" +"Log into the remote machine using ``ssh`` and run the following command " +"to start the SuperLink and SuperExec services:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:102 +msgid "" +"The Path of the ``PROJECT_DIR`` should be relative to the location of the" +" ``server`` Docker Compose files." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:105 +msgid "Go back to your terminal on your local machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:108 +#, fuzzy +msgid "Step 4: Start the Flower Client Components" +msgstr "서버(SuperLink)" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:110 +msgid "" +"On your local machine, run the following command to start the client " +"components:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:120 +msgid "" +"The Path of the ``PROJECT_DIR`` should be relative to the location of the" +" ``client`` Docker Compose files." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:124 +#, fuzzy +msgid "Step 5: Run Your Flower Project" +msgstr "Flower SuperNode를 실행합니다." 
+ +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:126 +msgid "" +"Specify the remote SuperExec IP addresses and the path to the root " +"certificate in the ``[tool.flwr.federations.remote-superexec]`` table in " +"the ``pyproject.toml`` file. Here, we have named our remote federation " +"``remote-superexec``:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:130 +msgid "examples/quickstart-sklearn-tabular/pyproject.toml" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:139 +msgid "" +"The Path of the ``root-certificates`` should be relative to the location " +"of the ``pyproject.toml`` file." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:142 +msgid "To run the project, execute:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:148 +msgid "" +"That's it! With these steps, you've set up Flower on two separate " +"machines and are ready to start using it." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:152 +msgid "Step 6: Clean Up" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:154 +#, fuzzy +msgid "Shut down the Flower client components:" +msgstr "Flower 클라이언트 앱을 실행합니다." + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:161 +msgid "Shut down the Flower server components and delete the SuperLink state:" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:2 +#, fuzzy +msgid "Quickstart with Docker" +msgstr "빠른 시작 튜토리얼" + +#: ../../source/docker/tutorial-quickstart-docker.rst:4 +msgid "" +"This quickstart aims to guide you through the process of containerizing a" +" Flower project and running it end to end using Docker on your local " +"machine." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:7 +msgid "" +"This tutorial does not use production-ready settings, so you can focus on" +" understanding the basic workflow that uses the minimum configurations." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:32 +#: ../../source/docker/tutorial-quickstart-docker.rst:21 +msgid "Create a new Flower project (PyTorch):" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:39 +msgid "Create a new Docker bridge network called ``flwr-network``:" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:45 +msgid "" +"User-defined networks, such as ``flwr-network``, enable IP resolution of " +"container names, a feature absent in the default bridge network. This " +"simplifies quickstart example by avoiding the need to determine host IP " +"first." 
+msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:50 +#, fuzzy +msgid "Step 2: Start the SuperLink" +msgstr "서버(SuperLink)" + +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:62 +#: ../../source/docker/tutorial-quickstart-docker.rst:52 +msgid "Open your terminal and run:" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker-compose.rst +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "Understand the command" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "" +"``-p 9091:9091 -p 9092:9092``: Map port ``9091`` and ``9092`` of the " +"container to the same port of" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "the host machine, allowing other services to access the Driver API on" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "``http://localhost:9091`` and the Fleet API on ``http://localhost:9092``." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:71 +#: ../../source/docker/tutorial-quickstart-docker.rst:108 +#: ../../source/docker/tutorial-quickstart-docker.rst:219 +#: ../../source/docker/tutorial-quickstart-docker.rst:309 +msgid "" +"``--network flwr-network``: Make the container join the network named " +"``flwr-network``." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:72 +msgid "``--name superlink``: Assign the name ``superlink`` to the container." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:73 +#: ../../source/docker/tutorial-quickstart-docker.rst:110 +#: ../../source/docker/tutorial-quickstart-docker.rst:220 +#: ../../source/docker/tutorial-quickstart-docker.rst:311 +msgid "" +"``--detach``: Run the container in the background, freeing up the " +"terminal." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "" +"tag of the image. The tag :substitution-code:`|stable_flwr_version|` " +"represents a :doc:`specific version ` of the image." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "" +"``--insecure``: This flag tells the container to operate in an insecure " +"mode, allowing" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "unencrypted communication." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:80 +msgid "Step 3: Start the SuperNode" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:82 +msgid "Start two SuperNode containers." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:84 +msgid "Start the first container:" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "``-p 9094:9094``: Map port ``9094`` of the container to the same port of" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "the host machine, allowing other services to access the SuperNode API on" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "``http://localhost:9094``." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:109 +msgid "``--name supernode-1``: Assign the name ``supernode-1`` to the container." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "" +"``flwr/supernode:|stable_flwr_version|``: This is the name of the image " +"to be run and the specific tag" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "of the image." 
+msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "" +"``--superlink superlink:9092``: Connect to the SuperLink's Fleet API at " +"the address" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "``superlink:9092``." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "" +"``--node-config \"partition-id=0 num-partitions=2\"``: Set the partition " +"ID to ``0`` and the" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "number of partitions to ``2`` for the SuperNode configuration." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "" +"``--supernode-address 0.0.0.0:9094``: Set the address and port number " +"that the SuperNode" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "is listening on." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "" +"``--isolation process``: Tells the SuperNode that the ClientApp is " +"created by separate" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst +msgid "independent process. The SuperNode does not attempt to create it." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:124 +msgid "Start the second container:" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:142 +msgid "Step 4: Start the ClientApp" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:144 +msgid "" +"The ClientApp Docker image comes with a pre-installed version of Flower " +"and serves as a base for building your own ClientApp image. In order to " +"install the FAB dependencies, you will need to create a Dockerfile that " +"extends the ClientApp image and installs the required dependencies." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:149 +msgid "" +"Create a ClientApp Dockerfile called ``Dockerfile.clientapp`` and paste " +"the following code into it:" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:152 +#, fuzzy +msgid "Dockerfile.clientapp" +msgstr "flower 클라이언트 앱" #: ../../source/docker/tutorial-quickstart-docker.rst #, fuzzy @@ -3263,7 +3700,7 @@ msgstr "" msgid "the default command run when the container is started." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:184 +#: ../../source/docker/tutorial-quickstart-docker.rst:186 msgid "" "Note that `flwr `__ is already installed " "in the ``flwr/clientapp`` base image, so only other package dependencies " @@ -3272,7 +3709,7 @@ msgid "" "after it has been copied into the Docker image (see line 5)." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:189 +#: ../../source/docker/tutorial-quickstart-docker.rst:192 #, fuzzy msgid "" "Next, build the ClientApp Docker image by running the following command " @@ -3281,7 +3718,7 @@ msgstr "" "다음으로, Docker파일과 ServerApp 코드가 있는 디렉터리에서 다음 명령을 실행하여 ServerApp Docker 이미지를" " 빌드합니다." -#: ../../source/docker/tutorial-quickstart-docker.rst:198 +#: ../../source/docker/tutorial-quickstart-docker.rst:201 #, fuzzy msgid "" "The image name was set as ``flwr_clientapp`` with the tag ``0.0.1``. " @@ -3291,7 +3728,7 @@ msgstr "" "이미지에``flwr_serverapp``이라는 이름을 붙이고 ``0.0.1``이라는 태그를 붙였습니다. 여기서 선택한 값은 예시일 " "뿐이라는 점을 기억하세요. 필요에 따라 변경할 수 있습니다." -#: ../../source/docker/tutorial-quickstart-docker.rst:201 +#: ../../source/docker/tutorial-quickstart-docker.rst:205 msgid "Start the first ClientApp container:" msgstr "" @@ -3312,34 +3749,34 @@ msgstr "" msgid "``supernode-1:9094``." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:222 +#: ../../source/docker/tutorial-quickstart-docker.rst:226 msgid "Start the second ClientApp container:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:233 +#: ../../source/docker/tutorial-quickstart-docker.rst:237 msgid "Step 5: Start the SuperExec" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:235 +#: ../../source/docker/tutorial-quickstart-docker.rst:239 #, fuzzy msgid "" "The procedure for building and running a SuperExec image is almost " "identical to the ClientApp image." msgstr "ServerApp 이미지를 빌드하고 실행하는 절차는 SuperNode 이미지와 거의 동일합니다." -#: ../../source/docker/tutorial-quickstart-docker.rst:237 +#: ../../source/docker/tutorial-quickstart-docker.rst:242 msgid "" "Similar to the ClientApp image, you will need to create a Dockerfile that" " extends the SuperExec image and installs the required FAB dependencies." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:240 +#: ../../source/docker/tutorial-quickstart-docker.rst:245 msgid "" "Create a SuperExec Dockerfile called ``Dockerfile.superexec`` and paste " "the following code in:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:242 +#: ../../source/docker/tutorial-quickstart-docker.rst:248 msgid "Dockerfile.superexec" msgstr "" @@ -3369,13 +3806,13 @@ msgstr "" msgid "``flwr.superexec.deployment:executor`` executor to run the ServerApps." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:277 +#: ../../source/docker/tutorial-quickstart-docker.rst:283 msgid "" "Afterward, in the directory that holds the Dockerfile, execute this " "Docker command to build the SuperExec image:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:285 +#: ../../source/docker/tutorial-quickstart-docker.rst:290 msgid "Start the SuperExec container:" msgstr "" @@ -3389,7 +3826,7 @@ msgid "" "``http://localhost:9093``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:305 +#: ../../source/docker/tutorial-quickstart-docker.rst:310 msgid "``--name superexec``: Assign the name ``superexec`` to the container." msgstr "" @@ -3410,82 +3847,78 @@ msgstr "" msgid "connect to the SuperLink running on port ``9091``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:315 +#: ../../source/docker/tutorial-quickstart-docker.rst:320 msgid "Step 6: Run the Quickstart Project" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:317 +#: ../../source/docker/tutorial-quickstart-docker.rst:322 #, fuzzy msgid "Add the following lines to the ``pyproject.toml``:" msgstr "``pyproject.toml``에 다음 버전 제약 조건을 설정했는지 확인하세요:" -#: ../../source/docker/tutorial-quickstart-docker.rst:319 -#, fuzzy -msgid "pyproject.toml" -msgstr "또는 ``pyproject.toml``:" - -#: ../../source/docker/tutorial-quickstart-docker.rst:326 +#: ../../source/docker/tutorial-quickstart-docker.rst:331 msgid "Run the ``quickstart-docker`` project by executing the command:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:332 +#: ../../source/docker/tutorial-quickstart-docker.rst:337 msgid "Follow the SuperExec logs to track the execution of the run:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:339 +#: ../../source/docker/tutorial-quickstart-docker.rst:344 msgid "Step 7: Update the Application" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:341 +#: ../../source/docker/tutorial-quickstart-docker.rst:346 msgid "" -"Change the application code. 
For example, change the ``seed`` in " +"Change the application code. For example, change the ``seed`` in " "``quickstart_docker/task.py`` to ``43`` and save it:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:344 +#: ../../source/docker/tutorial-quickstart-docker.rst:349 msgid "quickstart_docker/task.py" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:351 +#: ../../source/docker/tutorial-quickstart-docker.rst:356 #, fuzzy msgid "Stop the current ClientApp containers:" msgstr "현재 클라이언트 속성입니다." -#: ../../source/docker/tutorial-quickstart-docker.rst:357 +#: ../../source/docker/tutorial-quickstart-docker.rst:362 #, fuzzy msgid "Rebuild the FAB and ClientApp image:" msgstr "기본 이미지 빌드" -#: ../../source/docker/tutorial-quickstart-docker.rst:363 +#: ../../source/docker/tutorial-quickstart-docker.rst:368 msgid "Launch two new ClientApp containers based on the newly built image:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:378 +#: ../../source/docker/tutorial-quickstart-docker.rst:383 msgid "Run the updated project:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:385 +#: ../../source/docker/tutorial-quickstart-docker.rst:390 msgid "Step 8: Clean Up" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:387 +#: ../../source/docker/tutorial-quickstart-docker.rst:392 msgid "Remove the containers and the bridge network:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:399 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:408 +#: ../../source/docker/tutorial-quickstart-docker.rst:404 #, fuzzy msgid "Where to Go Next" msgstr "시작 위치" -#: ../../source/docker/tutorial-quickstart-docker.rst:401 +#: ../../source/docker/tutorial-quickstart-docker.rst:406 msgid ":doc:`enable-tls`" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:402 +#: ../../source/docker/tutorial-quickstart-docker.rst:407 msgid ":doc:`persist-superlink-state`" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:403 +#: ../../source/docker/tutorial-quickstart-docker.rst:408 msgid ":doc:`tutorial-quickstart-docker-compose`" msgstr "" @@ -3508,183 +3941,179 @@ msgid "" "configuration that best suits your project's needs." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:18 -msgid "Docker Compose is `installed `_." -msgstr "" - -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:23 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:24 msgid "Clone the Docker Compose ``complete`` directory:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:37 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:38 msgid "" "Export the path of the newly created project. The path should be relative" " to the location of the Docker Compose files:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:44 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:45 msgid "" "Setting the ``PROJECT_DIR`` helps Docker Compose locate the " "``pyproject.toml`` file, allowing it to install dependencies in the " "SuperExec and SuperNode images correctly." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:48 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:49 #, fuzzy msgid "Step 2: Run Flower in Insecure Mode" msgstr "Flower SuperNode를 실행합니다." 
-#: ../../source/docker/tutorial-quickstart-docker-compose.rst:50 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:51 msgid "" "To begin, start Flower with the most basic configuration. In this setup, " "Flower will run without TLS and without persisting the state." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:55 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:56 msgid "" "Without TLS, the data sent between the services remains **unencrypted**. " "Use it only for development purposes." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:58 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:59 msgid "" "For production-oriented use cases, :ref:`enable TLS` for secure data" " transmission." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:68 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:179 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:70 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:184 #, fuzzy msgid "``docker compose``: The Docker command to run the Docker Compose tool." msgstr "``docker run``: 새 Docker 컨테이너를 실행하는 명령입니다." -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:69 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:180 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:71 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:185 msgid "" "``-f compose.yml``: Specify the YAML file that contains the basic Flower " "service definitions." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:70 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:185 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:72 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:190 msgid "" "``--build``: Rebuild the images for each service if they don't already " "exist." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:71 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:186 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:73 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:191 msgid "" "``-d``: Detach the containers from the terminal and run them in the " "background." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:74 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:76 msgid "Step 3: Run the Quickstart Project" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:76 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:78 msgid "" "Now that the Flower services have been started via Docker Compose, it is " "time to run the quickstart example." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:79 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:81 msgid "" "To ensure the ``flwr`` CLI connects to the SuperExec, you need to specify" " the SuperExec addresses in the ``pyproject.toml`` file." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:82 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:226 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:84 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:232 msgid "Add the following lines to the ``quickstart-compose/pyproject.toml``:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:84 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:228 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:86 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:234 msgid "quickstart-compose/pyproject.toml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:91 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:93 msgid "Execute the command to run the quickstart example:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:97 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:99 msgid "Monitor the SuperExec logs and wait for the summary to appear:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:104 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:106 msgid "Step 4: Update the Application" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:106 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:108 msgid "In the next step, change the application code." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:108 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:110 msgid "" "For example, go to the ``task.py`` file in the ``quickstart-" "compose/quickstart_compose/`` directory and add a ``print`` call in the " "``get_weights`` function:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:111 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:114 msgid "quickstart-compose/quickstart_compose/task.py" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:120 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:125 #, fuzzy msgid "Rebuild and restart the services." msgstr "이미 *서버*를 시작할 수 있습니다:" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:124 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:129 msgid "" "If you have modified the dependencies listed in your ``pyproject.toml`` " "file, it is essential to rebuild images." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:127 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:132 msgid "If you haven't made any changes, you can skip this step." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:129 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:134 msgid "Run the following command to rebuild and restart the services:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:135 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:140 msgid "Run the updated quickstart example:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:142 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:147 msgid "In the SuperExec logs, you should find the ``Get weights`` line:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:159 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:164 msgid "Step 5: Persisting the SuperLink State" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:161 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:166 msgid "" "In this step, Flower services are configured to persist the state of the " "SuperLink service, ensuring that it maintains its state even after a " "restart." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:166 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:171 msgid "" "When working with Docker Compose on Linux, you may need to create the " "``state`` directory first and change its ownership to ensure proper " "access and permissions." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:169 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:174 msgid "" "For more information, consult the following page: :doc:`persist-" "superlink-state`." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:171 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:220 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:176 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:226 msgid "Run the command:" msgstr "" @@ -3705,17 +4134,17 @@ msgid "" "rules>`_." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:188 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:241 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:362 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:193 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:247 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:375 msgid "Rerun the ``quickstart-compose`` project:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:194 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:199 msgid "Check the content of the ``state`` directory:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:201 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:206 msgid "" "You should see a ``state.db`` file in the ``state`` directory. If you " "restart the service, the state file will be used to restore the state " @@ -3723,124 +4152,125 @@ msgid "" "if the containers are stopped and started again." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:208 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:214 msgid "Step 6: Run Flower with TLS" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:210 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:216 msgid "" "To demonstrate how to enable TLS, generate self-signed certificates using" " the ``certs.yml`` Compose file." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:215 -msgid "These certificates should be used only for development purposes." -msgstr "" - -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:217 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:223 msgid "" "For production environments, use a service like `Let's Encrypt " "`_ to obtain your certificates." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:235 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:241 msgid "Restart the services with TLS enabled:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:249 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:255 msgid "Step 7: Add another SuperNode" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:251 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:257 msgid "" "You can add more SuperNodes and ClientApps by duplicating their " "definitions in the ``compose.yml`` file." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:254 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:260 msgid "" "Just give each new SuperNode and ClientApp service a unique service name " "like ``supernode-3``, ``clientapp-3``, etc." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:257 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:263 msgid "In ``compose.yml``, add the following:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:259 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:265 msgid "compose.yml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:303 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:316 msgid "" "If you also want to enable TLS for the new SuperNodes, duplicate the " "SuperNode definition for each new SuperNode service in the ``with-" "tls.yml`` file." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:306 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:319 msgid "" "Make sure that the names of the services match with the one in the " "``compose.yml`` file." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:308 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:321 msgid "In ``with-tls.yml``, add the following:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:310 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:323 msgid "with-tls.yml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:332 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:345 msgid "Step 8: Persisting the SuperLink State and Enabling TLS" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:334 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:347 msgid "" "To run Flower with persisted SuperLink state and enabled TLS, a slight " "change in the ``with-state.yml`` file is required:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:337 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:350 msgid "Comment out the lines 2-4 and uncomment the lines 5-9:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:339 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:352 msgid "with-state.yml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:356 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:369 #, fuzzy msgid "Restart the services:" msgstr "이미 *서버*를 시작할 수 있습니다:" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:370 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:383 msgid "Step 9: Merge Multiple Compose Files" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:372 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:385 msgid "" "You can merge multiple Compose files into a single file. For instance, if" " you wish to combine the basic configuration with the TLS configuration, " "execute the following command:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:380 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:394 msgid "" "This will merge the contents of ``compose.yml`` and ``with-tls.yml`` into" " a new file called ``my_compose.yml``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:384 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:398 msgid "Step 10: Clean Up" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:386 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:400 #, fuzzy msgid "Remove all services and volumes:" msgstr "R에서 모든 항목을 제거합니다." +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:410 +#, fuzzy +msgid ":doc:`run-quickstart-examples-docker-compose`" +msgstr "빠른 시작 튜토리얼" + #: ../../source/docker/use-a-different-version.rst:2 #, fuzzy msgid "Use a Different Flower Version" @@ -3855,7 +4285,7 @@ msgstr "" "다른 버전의 Flower를 사용하려면 태그를 변경하여 사용할 수 있습니다(예: Flower nightly). 사용 가능한 모든 " "버전은 `Docker Hub `__에 있습니다." -#: ../../source/docker/use-a-different-version.rst:9 +#: ../../source/docker/use-a-different-version.rst:10 #, fuzzy msgid "" "When using Flower nightly, the SuperLink nightly image must be paired " @@ -3890,34 +4320,35 @@ msgstr "" "연합식으로 ` 와 비교했을 때 몇 가지 사항만 " "변경 하면 됩니다." 
-#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:9 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:10 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:12 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:12 msgid "Centralized Training" msgstr "중앙 집중식 훈련" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:10 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:14 +#, fuzzy msgid "" "All files are revised based on :doc:`Example: PyTorch - From Centralized " "To Federated `. The only " -"thing to do is modifying the file called :code:`cifar.py`, revised part " -"is shown below:" +"thing to do is modifying the file called ``cifar.py``, revised part is " +"shown below:" msgstr "" "모든 파일은 :doc:`예제: 파이토치 -중앙 집중식에서 연합식으로 `를 기반으로 수정합니다. :code:`cifar.py`라는 파일을 수정하기만 하면 되며, 수정된 부분은 " "아래와 같습니다:" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:13 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:18 msgid "" "The model architecture defined in class Net() is added with Batch " "Normalization layers accordingly." msgstr "Net() 클래스에 정의된 모델 아키텍처는 그에 따라 배치 정규화 레이어가 추가됩니다." -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:41 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:157 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:47 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:171 msgid "You can now run your machine learning workload:" msgstr "이제 머신 러닝 워크로드를 실행할 수 있습니다:" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:47 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:53 msgid "" "So far this should all look fairly familiar if you've used PyTorch " "before. Let's take the next step and use what we've built to create a " @@ -3927,19 +4358,20 @@ msgstr "" "지금까지는 파이토치를 사용해 본 적이 있다면 상당히 익숙하게 보일 것입니다. 다음 단계로 넘어가서 우리가 구축한 것을 사용하여 " "FedBN 내에서 하나의 서버와 두 개의 클라이언트로 구성된 연합학습 시스템을 만들어 보겠습니다." -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:51 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:167 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:58 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:182 msgid "Federated Training" msgstr "연합 훈련" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:53 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:60 +#, fuzzy msgid "" "If you have read :doc:`Example: PyTorch - From Centralized To Federated " "`, the following parts are" -" easy to follow, only :code:`get_parameters` and :code:`set_parameters` " -"function in :code:`client.py` needed to revise. If not, please read the " -":doc:`Example: PyTorch - From Centralized To Federated `. first." +" easy to follow, only ``get_parameters`` and ``set_parameters`` function " +"in ``client.py`` needed to revise. If not, please read the :doc:`Example:" +" PyTorch - From Centralized To Federated `. first." msgstr "" ":doc:`예제: 파이토치 - 중앙 집중식에서 연합식으로 `를 읽었다면, 다음 부분은 쉽게 따라할 수 있으며 :code:`client.py`의 " @@ -3947,30 +4379,32 @@ msgstr "" ":doc:`예제: 파이토치 - 중앙 집중식에서 연합식으로 `를 먼저 읽어보세요." -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:56 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:66 +#, fuzzy msgid "" "Our example consists of one *server* and two *clients*. 
In FedBN, " -":code:`server.py` keeps unchanged, we can start the server directly." +"``server.py`` keeps unchanged, we can start the server directly." msgstr "" "이 예제는 하나의 *서버*와 두 개의 *클라이언트*로 구성됩니다. FedBN에서 :code:`server.py`는 변경되지 않고 " "그대로 유지되므로 서버를 바로 시작할 수 있습니다." -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:62 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:73 +#, fuzzy msgid "" -"Finally, we will revise our *client* logic by changing " -":code:`get_parameters` and :code:`set_parameters` in :code:`client.py`, " -"we will exclude batch normalization parameters from model parameter list " -"when sending to or receiving from the server." +"Finally, we will revise our *client* logic by changing ``get_parameters``" +" and ``set_parameters`` in ``client.py``, we will exclude batch " +"normalization parameters from model parameter list when sending to or " +"receiving from the server." msgstr "" "마지막으로, :code:`client.py`에서 :code:`get_parameters` 및 " ":code:`set_parameters`를 변경하여 *client* 로직을 수정할 것입니다. 서버로 보내거나 서버에서 받을 때 모델" " 파라미터 목록에서 배치 정규화 파라미터를 제외할 수 있습니다." -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:85 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:102 msgid "Now, you can now open two additional terminal windows and run" msgstr "이제 두 개의 터미널 창을 추가로 열고 다음을 실행할 수 있습니다" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:91 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:108 msgid "" "in each window (make sure that the server is still running before you do " "so) and see your (previously centralized) PyTorch project run federated " @@ -3979,13 +4413,13 @@ msgstr "" "를 입력하고(클릭하기 전에 서버가 계속 실행 중인지 확인하세요), (이전에 중앙 집중된) PyTorch 프로젝트가 두 클라이언트에서" " FedBN으로 연합 학습을 실행하는 것을 확인합니다. 축하합니다!" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:94 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:310 -#: ../../source/tutorial-quickstart-jax.rst:283 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:113 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:349 +#: ../../source/tutorial-quickstart-jax.rst:319 msgid "Next Steps" msgstr "다음 단계" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:96 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:115 msgid "" "The full source code for this example can be found `here " "`_을 " "참조하세요." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:15 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:18 +#, fuzzy msgid "" -"Let's create a new file called :code:`cifar.py` with all the components " +"Let's create a new file called ``cifar.py`` with all the components " "required for a traditional (centralized) training on CIFAR-10. First, all" -" required packages (such as :code:`torch` and :code:`torchvision`) need " -"to be imported. You can see that we do not import any package for " -"federated learning. You can keep all these imports as they are even when " -"we add the federated learning components at a later point." +" required packages (such as ``torch`` and ``torchvision``) need to be " +"imported. You can see that we do not import any package for federated " +"learning. You can keep all these imports as they are even when we add the" +" federated learning components at a later point." 
msgstr "" "CIFAR-10에 대한 기존 (중앙 집중식) 교육에 필요한 모든 구성 요소가 포함된 :code:`cifar.py`라는 새 파일을 " "생성해 보겠습니다. 먼저, 필요한 모든 패키지(예: :code:`torch` 및 :code:`torchvision`)를 가져와야 " "합니다. 연합 학습을 위한 패키지를 가져오지 않는 것을 확인 할 수 있습니. 나중에 연합 학습 구성 요소를 추가할 때에도 이러한 " "모든 가져오기를 그대로 유지할 수 있습니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:32 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:36 +#, fuzzy msgid "" "As already mentioned we will use the CIFAR-10 dataset for this machine " "learning workload. The model architecture (a very simple Convolutional " -"Neural Network) is defined in :code:`class Net()`." +"Neural Network) is defined in ``class Net()``." msgstr "" "이미 언급했듯이 이 머신 러닝 워크로드에는 CIFAR-10 데이터 세트를 사용합니다. 모델 아키텍처(매우 간단한 컨볼루션 신경망)는" " :code:`class Net()`에 정의되어 있습니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:56 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:62 +#, fuzzy msgid "" -"The :code:`load_data()` function loads the CIFAR-10 training and test " -"sets. The :code:`transform` normalized the data after loading." +"The ``load_data()`` function loads the CIFAR-10 training and test sets. " +"The ``transform`` normalized the data after loading." msgstr "" ":code:`load_data()` 함수는 CIFAR-10 훈련 및 테스트 세트를 로드합니다. :code:`transform`은 " "로드 후 데이터를 정규화합니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:74 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:84 +#, fuzzy msgid "" -"We now need to define the training (function :code:`train()`) which loops" -" over the training set, measures the loss, backpropagates it, and then " +"We now need to define the training (function ``train()``) which loops " +"over the training set, measures the loss, backpropagates it, and then " "takes one optimizer step for each batch of training examples." msgstr "" "이제 학습 집합을 반복하고, 손실을 측정하고, 이를 역전파한 다음 각 학습 예제 배치에 대해 하나의 최적화 단계를 수행하는 " "학습(함수 :code:`train()`)을 정의해야 합니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:76 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:88 +#, fuzzy msgid "" -"The evaluation of the model is defined in the function :code:`test()`. " -"The function loops over all test samples and measures the loss of the " -"model based on the test dataset." +"The evaluation of the model is defined in the function ``test()``. The " +"function loops over all test samples and measures the loss of the model " +"based on the test dataset." msgstr "" "모델 평가는 :code:`test()` 함수에 정의되어 있습니다. 이 함수는 모든 테스트 샘플을 반복하고 테스트 데이터 세트에 따라" " 모델의 손실을 측정합니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:136 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:149 msgid "" "Having defined the data loading, model architecture, training, and " "evaluation we can put everything together and train our CNN on CIFAR-10." msgstr "데이터 로딩, 모델 아키텍처, 훈련 및 평가를 정의했으면 모든 것을 종합하여 CIFAR-10에서 CNN을 훈련할 수 있습니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:163 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:177 msgid "" "So far, this should all look fairly familiar if you've used PyTorch " "before. Let's take the next step and use what we've built to create a " @@ -4099,7 +4538,7 @@ msgstr "" "지금까지는 파이토치를 사용해 본 적이 있다면 상당히 익숙하게 보일 것입니다. 다음 단계로 넘어가서 구축한 것을 사용하여 하나의 " "서버와 두 개의 클라이언트로 구성된 간단한 연합 학습 시스템을 만들어 보겠습니다." 
-#: ../../source/example-pytorch-from-centralized-to-federated.rst:169 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:184 msgid "" "The simple machine learning project discussed in the previous section " "trains the model on a single dataset (CIFAR-10), we call this centralized" @@ -4114,17 +4553,18 @@ msgstr "" "것입니다. 일반적으로 머신 러닝 워크로드를 연합 방식으로 실행하려면 대부분의 코드를 변경하고 모든 것을 처음부터 다시 설정해야 " "합니다. 이는 상당한 노력이 필요할 수 있습니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:173 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:191 msgid "" "However, with Flower you can evolve your pre-existing code into a " "federated learning setup without the need for a major rewrite." msgstr "하지만 Flower를 사용하면 대대적인 재작성 없이도 기존 코드를 연합 학습 설정으로 발전시킬 수 있습니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:175 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:194 +#, fuzzy msgid "" "The concept is easy to understand. We have to start a *server* and then " -"use the code in :code:`cifar.py` for the *clients* that are connected to " -"the *server*. The *server* sends model parameters to the clients. The " +"use the code in ``cifar.py`` for the *clients* that are connected to the " +"*server*. The *server* sends model parameters to the clients. The " "*clients* run the training and update the parameters. The updated " "parameters are sent back to the *server* which averages all received " "parameter updates. This describes one round of the federated learning " @@ -4135,46 +4575,48 @@ msgstr "" "업데이트된 파라미터는 *서버*로 다시 전송되며, *서버*는 수신된 모든 파라미터 업데이트의 평균을 구합니다. 이것은 연합 학습 " "프로세스의 한 라운드를 설명하며 여러 라운드에 걸쳐 이 과정을 반복합니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:181 -#: ../../source/tutorial-quickstart-jax.rst:129 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:201 +#: ../../source/tutorial-quickstart-jax.rst:147 +#, fuzzy msgid "" "Our example consists of one *server* and two *clients*. Let's set up " -":code:`server.py` first. The *server* needs to import the Flower package " -":code:`flwr`. Next, we use the :code:`start_server` function to start a " -"server and tell it to perform three rounds of federated learning." +"``server.py`` first. The *server* needs to import the Flower package " +"``flwr``. Next, we use the ``start_server`` function to start a server " +"and tell it to perform three rounds of federated learning." msgstr "" "이 예제는 하나의 *서버*와 두 개의 *클라이언트*로 구성됩니다. 먼저 :code:`server.py`를 설정해 보겠습니다. " "*server*는 Flower 패키지 :code:`flwr`를 가져와야 합니다. 다음으로, :code:`start_server` " "함수를 사용하여 서버를 시작하고 세 차례의 연합 학습을 수행하도록 지시합니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:191 -#: ../../source/tutorial-quickstart-jax.rst:139 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:215 +#: ../../source/tutorial-quickstart-jax.rst:161 msgid "We can already start the *server*:" msgstr "이미 *서버*를 시작할 수 있습니다:" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:197 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:221 +#, fuzzy msgid "" -"Finally, we will define our *client* logic in :code:`client.py` and build" -" upon the previously defined centralized training in :code:`cifar.py`. " -"Our *client* needs to import :code:`flwr`, but also :code:`torch` to " -"update the parameters on our PyTorch model:" +"Finally, we will define our *client* logic in ``client.py`` and build " +"upon the previously defined centralized training in ``cifar.py``. 
Our " +"*client* needs to import ``flwr``, but also ``torch`` to update the " +"parameters on our PyTorch model:" msgstr "" "마지막으로, :code:`client.py`에서 *client* 로직을 정의하고 :code:`cifar.py`에서 이전에 정의한 " "중앙 집중식 학습을 기반으로 구축합니다. *클라이언트*는 :code:`flwr`을 가져와야 하며, PyTorch 모델의 파라미터를 " "업데이트하기 위해 :code:`torch`도 가져와야 합니다:" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:213 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:238 +#, fuzzy msgid "" "Implementing a Flower *client* basically means implementing a subclass of" -" either :code:`flwr.client.Client` or :code:`flwr.client.NumPyClient`. " -"Our implementation will be based on :code:`flwr.client.NumPyClient` and " -"we'll call it :code:`CifarClient`. :code:`NumPyClient` is slightly easier" -" to implement than :code:`Client` if you use a framework with good NumPy " -"interoperability (like PyTorch or TensorFlow/Keras) because it avoids " -"some of the boilerplate that would otherwise be necessary. " -":code:`CifarClient` needs to implement four methods, two methods for " -"getting/setting model parameters, one method for training the model, and " -"one method for testing the model:" +" either ``flwr.client.Client`` or ``flwr.client.NumPyClient``. Our " +"implementation will be based on ``flwr.client.NumPyClient`` and we'll " +"call it ``CifarClient``. ``NumPyClient`` is slightly easier to implement " +"than ``Client`` if you use a framework with good NumPy interoperability " +"(like PyTorch or TensorFlow/Keras) because it avoids some of the " +"boilerplate that would otherwise be necessary. ``CifarClient`` needs to " +"implement four methods, two methods for getting/setting model parameters," +" one method for training the model, and one method for testing the model:" msgstr "" "Flower *클라이언트*를 구현한다는 것은 기본적으로 :code:`flwr.client.Client` 또는 " ":code:`flwr.client.NumPyClient`의 서브클래스를 구현하는 것을 의미합니다. 
우리의 구현은 " @@ -4184,112 +4626,119 @@ msgstr "" "code:`CifarClient`는 모델 파라미터를 가져오거나 설정하는 메서드 2개, 모델 학습을 위한 메서드 1개, 모델 테스트를" " 위한 메서드 1개 등 네 가지 메서드를 구현해야 합니다:" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:218 -msgid ":code:`set_parameters`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:249 +#, fuzzy +msgid "``set_parameters``" msgstr ":code:`set_parameters`" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:219 -#: ../../source/tutorial-quickstart-jax.rst:166 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:248 +#: ../../source/tutorial-quickstart-jax.rst:192 msgid "" "set the model parameters on the local model that are received from the " "server" msgstr "서버에서 수신한 로컬 모델의 모델 파라미터를 설정합니다" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:220 -#: ../../source/tutorial-quickstart-jax.rst:168 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:249 +#: ../../source/tutorial-quickstart-jax.rst:194 +#, fuzzy msgid "" -"loop over the list of model parameters received as NumPy " -":code:`ndarray`'s (think list of neural network layers)" +"loop over the list of model parameters received as NumPy ``ndarray``'s " +"(think list of neural network layers)" msgstr "(신경망 레이어 목록으로 생각하면 됩니다) NumPy :code:`ndarray`로 받은 모델 파라미터 목록에 대해 반복합니다" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:221 -#: ../../source/tutorial-quickstart-jax.rst:169 -#: ../../source/tutorial-quickstart-scikitlearn.rst:118 -msgid ":code:`get_parameters`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:252 +#: ../../source/tutorial-quickstart-jax.rst:197 +#: ../../source/tutorial-quickstart-scikitlearn.rst:129 +#, fuzzy +msgid "``get_parameters``" msgstr ":code:`get_parameters`" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:222 -#: ../../source/tutorial-quickstart-jax.rst:170 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:252 +#: ../../source/tutorial-quickstart-jax.rst:197 +#, fuzzy msgid "" -"get the model parameters and return them as a list of NumPy " -":code:`ndarray`'s (which is what :code:`flwr.client.NumPyClient` expects)" +"get the model parameters and return them as a list of NumPy ``ndarray``'s" +" (which is what ``flwr.client.NumPyClient`` expects)" msgstr "" "모델 매개변수를 가져와서 NumPy :code:`ndarray`의 목록으로 반환합니다(이는 " ":code:`flwr.client.NumPyClient`가 기대하는 바와 같습니다)" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:223 -#: ../../source/tutorial-quickstart-jax.rst:171 -#: ../../source/tutorial-quickstart-scikitlearn.rst:123 -msgid ":code:`fit`" -msgstr ":code:`fit`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:257 +#: ../../source/tutorial-quickstart-jax.rst:202 +#: ../../source/tutorial-quickstart-scikitlearn.rst:136 +#, fuzzy +msgid "``fit``" +msgstr "``DISTRO``" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:224 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:228 -#: ../../source/tutorial-quickstart-jax.rst:172 -#: ../../source/tutorial-quickstart-jax.rst:176 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:255 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:260 +#: ../../source/tutorial-quickstart-jax.rst:200 +#: ../../source/tutorial-quickstart-jax.rst:205 msgid "" "update the parameters of the local model with the parameters received " "from the server" msgstr "서버에서 받은 파라미터로 로컬 모델의 파라미터를 업데이트합니다" -#: 
../../source/example-pytorch-from-centralized-to-federated.rst:225 -#: ../../source/tutorial-quickstart-jax.rst:173 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:257 +#: ../../source/tutorial-quickstart-jax.rst:202 msgid "train the model on the local training set" msgstr "로컬 훈련 세트에서 모델을 훈련합니다" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:226 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:258 msgid "get the updated local model weights and return them to the server" msgstr "업데이트된 로컬 모델 가중치를 가져와 서버로 반환합니다" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:227 -#: ../../source/tutorial-quickstart-jax.rst:175 -#: ../../source/tutorial-quickstart-scikitlearn.rst:127 -msgid ":code:`evaluate`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:263 +#: ../../source/tutorial-quickstart-jax.rst:208 +#: ../../source/tutorial-quickstart-scikitlearn.rst:139 +#, fuzzy +msgid "``evaluate``" msgstr ":code:`evaluate`" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:229 -#: ../../source/tutorial-quickstart-jax.rst:177 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:262 +#: ../../source/tutorial-quickstart-jax.rst:207 msgid "evaluate the updated model on the local test set" msgstr "로컬 테스트 세트에서 업데이트된 모델을 평가합니다" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:230 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:263 msgid "return the local loss and accuracy to the server" msgstr "로컬 손실 및 정확도를 서버에 반환합니다" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:232 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:265 +#, fuzzy msgid "" -"The two :code:`NumPyClient` methods :code:`fit` and :code:`evaluate` make" -" use of the functions :code:`train()` and :code:`test()` previously " -"defined in :code:`cifar.py`. So what we really do here is we tell Flower " -"through our :code:`NumPyClient` subclass which of our already defined " -"functions to call for training and evaluation. We included type " -"annotations to give you a better understanding of the data types that get" -" passed around." +"The two ``NumPyClient`` methods ``fit`` and ``evaluate`` make use of the " +"functions ``train()`` and ``test()`` previously defined in ``cifar.py``. " +"So what we really do here is we tell Flower through our ``NumPyClient`` " +"subclass which of our already defined functions to call for training and " +"evaluation. We included type annotations to give you a better " +"understanding of the data types that get passed around." msgstr "" "두 개의 :code:`NumPyClient` 메서드인 :code:`fit`과 :code:`evaluate`는 이전에 " ":code:`cifar.py`에 정의된 함수인 :code:`train()`과 :code:`test()`를 활용합니다. 따라서 여기서" " 실제로 하는 일은 :code:`NumPyClient` 서브클래스를 통해 이미 정의된 함수 중 훈련과 평가를 위해 호출할 함수를 " "Flower에 알려주는 것입니다. 전달되는 데이터 유형을 더 잘 이해할 수 있도록 type annotations을 포함했습니다." -#: ../../source/example-pytorch-from-centralized-to-federated.rst:280 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:315 +#, fuzzy msgid "" "All that's left to do it to define a function that loads both model and " -"data, creates a :code:`CifarClient`, and starts this client. You load " -"your data and model by using :code:`cifar.py`. Start :code:`CifarClient` " -"with the function :code:`fl.client.start_client()` by pointing it at the " -"same IP address we used in :code:`server.py`:" +"data, creates a ``CifarClient``, and starts this client. 
You load your " +"data and model by using ``cifar.py``. Start ``CifarClient`` with the " +"function ``fl.client.start_client()`` by pointing it at the same IP " +"address we used in ``server.py``:" msgstr "" "이제 모델과 데이터를 모두 로드하는 함수를 정의하고, :code:`CifarClient`를 생성하고, 이 클라이언트를 시작하는 " "작업만 남았습니다. 코드:`cifar.py`를 사용하여 데이터와 모델을 로드합니다. :code:`server.py`에서 사용한 것과" " 동일한 IP 주소를 지정하여 :code:`fl.client.start_client()` 함수로 " ":code:`CifarClient`를 시작합니다:" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:301 -#: ../../source/tutorial-quickstart-jax.rst:274 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:338 +#: ../../source/tutorial-quickstart-jax.rst:309 msgid "And that's it. You can now open two additional terminal windows and run" msgstr "여기까지입니다. 이제 두 개의 터미널 창을 추가로 열고 다음을 실행할 수 있습니다" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:307 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:344 msgid "" "in each window (make sure that the server is running before you do so) " "and see your (previously centralized) PyTorch project run federated " @@ -4298,7 +4747,7 @@ msgstr "" "를 입력하고(그 전에 서버가 실행 중인지 확인하세요) (이전에는 중앙 집중식) PyTorch 프로젝트가 두 클라이언트에서 연합 " "학습을 실행하는 것을 확인합니다. 축하합니다!" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:312 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:351 msgid "" "The full source code for this example: `PyTorch: From Centralized To " "Federated (Code) `_를 확인하는 것이 좋습니다." -#: ../../source/how-to-authenticate-supernodes.rst:15 +#: ../../source/how-to-authenticate-supernodes.rst:20 msgid "" "This guide covers a preview feature that might change in future versions " "of Flower." msgstr "이 가이드에서는 향후 버전의 Flower에서 변경될 수 있는 미리보기 기능에 대해 설명합니다." -#: ../../source/how-to-authenticate-supernodes.rst:18 +#: ../../source/how-to-authenticate-supernodes.rst:24 msgid "" "For increased security, node authentication can only be used when " "encrypted connections (SSL/TLS) are enabled." msgstr "보안을 강화하기 위해 노드 인증은 암호화된 연결(SSL/TLS)을 사용하도록 설정한 경우에만 사용할 수 있습니다." -#: ../../source/how-to-authenticate-supernodes.rst:21 -msgid "Enable node authentication in :code:`SuperLink`" +#: ../../source/how-to-authenticate-supernodes.rst:28 +#, fuzzy +msgid "Enable node authentication in ``SuperLink``" msgstr ":code:`SuperLink`에서 노드 인증 활성화" -#: ../../source/how-to-authenticate-supernodes.rst:23 +#: ../../source/how-to-authenticate-supernodes.rst:30 +#, fuzzy msgid "" "To enable node authentication, first you need to configure SSL/TLS " "connections to secure the SuperLink<>SuperNode communication. You can " "find the complete guide `here `_. After configuring secure connections, you" -" can enable client authentication in a long-running Flower " -":code:`SuperLink`. Use the following terminal command to start a Flower " -":code:`SuperNode` that has both secure connections and node " -"authentication enabled:" +" can enable client authentication in a long-running Flower ``SuperLink``." +" Use the following terminal command to start a Flower ``SuperNode`` that " +"has both secure connections and node authentication enabled:" msgstr "" "노드 인증을 활성화하려면 먼저 SuperLink<>SuperNode 통신을 보호하기 위해 SSL/TLS 연결을 구성해야 합니다. " "전체 가이드는 `여기 `. 
Here's a nonsensical example that customizes :code:`FedAvg`" -" by adding a custom ``\"hello\": \"world\"`` configuration key/value pair" -" to the config dict of a *single client* (only the first client in the " -"list, the other clients in this round to not receive this \"special\" " -"config value):" +"strategies>`. Here's a nonsensical example that customizes ``FedAvg`` by " +"adding a custom ``\"hello\": \"world\"`` configuration key/value pair to " +"the config dict of a *single client* (only the first client in the list, " +"the other clients in this round to not receive this \"special\" config " +"value):" msgstr "" "이는 기존 전략을 사용자 지정하거나 :doc:`implementing a custom strategy from scratch " "`를 통해 수행할 수 있습니다. 다음은 사용자 지정 ``\"hello\"'를 " @@ -5942,27 +6405,29 @@ msgstr "" " 정보를 표시합니다:" #: ../../source/how-to-configure-logging.rst:13 +#, fuzzy msgid "" "containing relevant information including: log message level (e.g. " -":code:`INFO`, :code:`DEBUG`), a timestamp, the line where the logging " -"took place from, as well as the log message itself. In this way, the " -"logger would typically display information on your terminal as follows:" +"``INFO``, ``DEBUG``), a timestamp, the line where the logging took place " +"from, as well as the log message itself. In this way, the logger would " +"typically display information on your terminal as follows:" msgstr "" "로그 메시지 수준(예: :code:`INFO`, :code:`DEBUG`), 타임스탬프, 로깅이 발생한 줄, 로그 메시지 자체 등 " "관련 정보를 포함합니다. 이러한 방식으로 로거는 일반적으로 다음과 같은 정보를 터미널에 표시합니다:" -#: ../../source/how-to-configure-logging.rst:34 +#: ../../source/how-to-configure-logging.rst:35 msgid "Saving log to file" msgstr "파일에 로그 저장" -#: ../../source/how-to-configure-logging.rst:36 +#: ../../source/how-to-configure-logging.rst:37 +#, fuzzy msgid "" "By default, the Flower log is outputted to the terminal where you launch " "your Federated Learning workload from. This applies for both gRPC-based " -"federation (i.e. when you do :code:`fl.server.start_server`) and when " -"using the :code:`VirtualClientEngine` (i.e. when you do " -":code:`fl.simulation.start_simulation`). In some situations you might " -"want to save this log to disk. You can do so by calling the " +"federation (i.e. when you do ``fl.server.start_server``) and when using " +"the ``VirtualClientEngine`` (i.e. when you do " +"``fl.simulation.start_simulation``). In some situations you might want to" +" save this log to disk. You can do so by calling the " "`fl.common.logger.configure() " "`_" " function. For example:" @@ -5975,22 +6440,23 @@ msgstr "" "`_" " 함수를 호출하여 저장할 수 있습니다. 예를 들어:" -#: ../../source/how-to-configure-logging.rst:53 +#: ../../source/how-to-configure-logging.rst:59 +#, fuzzy msgid "" "With the above, Flower will record the log you see on your terminal to " -":code:`log.txt`. This file will be created in the same directory as were " -"you are running the code from. If we inspect we see the log above is also" -" recorded but prefixing with :code:`identifier` each line:" +"``log.txt``. This file will be created in the same directory as were you " +"are running the code from. If we inspect we see the log above is also " +"recorded but prefixing with ``identifier`` each line:" msgstr "" "위와 같이 하면 Flower는 터미널에 표시되는 로그를 :code:`log.txt`에 기록합니다. 이 파일은 코드를 실행한 " "디렉터리와 동일한 디렉터리에 생성됩니다. 
검사해보면 위의 로그도 기록되지만 각 줄 앞에 :code:`identifier` 접두사가 " "붙는 것을 확인할 수 있습니다:" -#: ../../source/how-to-configure-logging.rst:74 +#: ../../source/how-to-configure-logging.rst:81 msgid "Log your own messages" msgstr "나만의 메시지 기록" -#: ../../source/how-to-configure-logging.rst:76 +#: ../../source/how-to-configure-logging.rst:83 msgid "" "You might expand the information shown by default with the Flower logger " "by adding more messages relevant to your application. You can achieve " @@ -5999,27 +6465,27 @@ msgstr "" "애플리케이션과 관련된 메시지를 더 추가하여 Flower 로거에 기본적으로 표시되는 정보를 확장할 수 있습니다. 다음과 같이 쉽게 " "추가할 수 있습니다." -#: ../../source/how-to-configure-logging.rst:102 +#: ../../source/how-to-configure-logging.rst:114 msgid "" "In this way your logger will show, in addition to the default messages, " "the ones introduced by the clients as specified above." msgstr "이렇게 하면 로거에 기본 메시지 외에 위에서 지정한 대로 클라이언트가 소개한 메시지가 표시됩니다." -#: ../../source/how-to-configure-logging.rst:128 +#: ../../source/how-to-configure-logging.rst:140 msgid "Log to a remote service" msgstr "원격 서비스에 로그인" -#: ../../source/how-to-configure-logging.rst:130 +#: ../../source/how-to-configure-logging.rst:142 +#, fuzzy msgid "" -"The :code:`fl.common.logger.configure` function, also allows specifying a" -" host to which logs can be pushed (via :code:`POST`) through a native " -"Python :code:`logging.handler.HTTPHandler`. This is a particularly useful" -" feature in :code:`gRPC`-based Federated Learning workloads where " -"otherwise gathering logs from all entities (i.e. the server and the " -"clients) might be cumbersome. Note that in Flower simulation, the server " -"automatically displays all logs. You can still specify a " -":code:`HTTPHandler` should you wish to backup or analyze the logs " -"somewhere else." +"The ``fl.common.logger.configure`` function, also allows specifying a " +"host to which logs can be pushed (via ``POST``) through a native Python " +"``logging.handler.HTTPHandler``. This is a particularly useful feature in" +" ``gRPC``-based Federated Learning workloads where otherwise gathering " +"logs from all entities (i.e. the server and the clients) might be " +"cumbersome. Note that in Flower simulation, the server automatically " +"displays all logs. You can still specify a ``HTTPHandler`` should you " +"wish to backup or analyze the logs somewhere else." msgstr "" "또한 :code:`fl.common.logger.configure` 함수를 사용하면 네이티브 Python " ":code:`logging.handler.HTTPHandler`를 통해 로그를 푸시할 수 있는 호스트를 지정할 수 " @@ -6032,15 +6498,16 @@ msgid "Enable SSL connections" msgstr "SSL 연결 사용" #: ../../source/how-to-enable-ssl-connections.rst:4 +#, fuzzy msgid "" "This guide describes how to a SSL-enabled secure Flower server " -"(:code:`SuperLink`) can be started and how a Flower client " -"(:code:`SuperNode`) can establish a secure connections to it." +"(``SuperLink``) can be started and how a Flower client (``SuperNode``) " +"can establish a secure connections to it." msgstr "" "이 가이드에서는 SSL을 지원하는 보안 Flower 서버(:코드:`SuperLink`)를 시작하는 방법과 Flower " "클라이언트(:코드:`SuperNode`)가 이 서버에 보안 연결을 설정하는 방법을 설명합니다." -#: ../../source/how-to-enable-ssl-connections.rst:7 +#: ../../source/how-to-enable-ssl-connections.rst:8 msgid "" "A complete code example demonstrating a secure connection can be found " "`here `_'에서 확인할 수 있습니다." -#: ../../source/how-to-enable-ssl-connections.rst:10 +#: ../../source/how-to-enable-ssl-connections.rst:11 +#, fuzzy msgid "" -"The code example comes with a :code:`README.md` file which explains how " -"to start it. 
Although it is already SSL-enabled, it might be less " +"The code example comes with a ``README.md`` file which explains how to " +"start it. Although it is already SSL-enabled, it might be less " "descriptive on how it does so. Stick to this guide for a deeper " "introduction to the topic." msgstr "" @@ -6065,26 +6533,29 @@ msgid "Certificates" msgstr "인증서" #: ../../source/how-to-enable-ssl-connections.rst:18 +#, fuzzy msgid "" "Using SSL-enabled connections requires certificates to be passed to the " "server and client. For the purpose of this guide we are going to generate" " self-signed certificates. As this can become quite complex we are going " -"to ask you to run the script in :code:`examples/advanced-" -"tensorflow/certificates/generate.sh` with the following command sequence:" +"to ask you to run the script in ``examples/advanced-" +"tensorflow/certificates/generate.sh`` with the following command " +"sequence:" msgstr "" "SSL 사용 연결을 사용하려면 서버와 클라이언트에 인증서를 전달해야 합니다. 이 가이드에서는 자체 서명된 인증서를 생성하겠습니다. " "이 과정은 상당히 복잡할 수 있으므로 다음 명령 시퀀스를 사용하여 :code:`examples/advanced-" "tensorflow/certificates/generate.sh`에서 스크립트를 실행하도록 요청하겠습니다:" #: ../../source/how-to-enable-ssl-connections.rst:29 +#, fuzzy msgid "" -"This will generate the certificates in :code:`examples/advanced-" -"tensorflow/.cache/certificates`." +"This will generate the certificates in ``examples/advanced-" +"tensorflow/.cache/certificates``." msgstr "" "이렇게 하면 :code:`examples/advanced-tensorflow/.cache/certificates`에 인증서가 " "생성됩니다." -#: ../../source/how-to-enable-ssl-connections.rst:31 +#: ../../source/how-to-enable-ssl-connections.rst:32 msgid "" "The approach for generating SSL certificates in the context of this " "example can serve as an inspiration and starting point, but it should not" @@ -6098,40 +6569,41 @@ msgstr "" "됩니다. 프로덕션 환경용 인증서를 올바르게 생성하는 문제에 대해서는 다른 출처를 참조하세요. 중요하지 않은 프로토타이핑 또는 연구 " "프로젝트의 경우, 이 가이드에 언급된 스크립트를 사용하여 생성한 자체 서명 인증서를 사용하는 것으로 충분할 수 있습니다." -#: ../../source/how-to-enable-ssl-connections.rst:39 +#: ../../source/how-to-enable-ssl-connections.rst:40 msgid "Server (SuperLink)" msgstr "서버(SuperLink)" -#: ../../source/how-to-enable-ssl-connections.rst:41 +#: ../../source/how-to-enable-ssl-connections.rst:42 msgid "" "Use the following terminal command to start a sever (SuperLink) that uses" " the previously generated certificates:" msgstr "다음 터미널 명령을 사용하여 이전에 생성한 인증서를 사용하는 서버(SuperLink)를 시작합니다:" -#: ../../source/how-to-enable-ssl-connections.rst:50 +#: ../../source/how-to-enable-ssl-connections.rst:52 msgid "" "When providing certificates, the server expects a tuple of three " "certificates paths: CA certificate, server certificate and server private" " key." msgstr "인증서를 제공할 때 서버는 세 가지 인증서 경로의 튜플을 기대합니다: CA 인증서, 서버 인증서 및 서버 개인 키입니다." -#: ../../source/how-to-enable-ssl-connections.rst:54 +#: ../../source/how-to-enable-ssl-connections.rst:56 msgid "Client (SuperNode)" msgstr "클라이언트(SuperNode)" -#: ../../source/how-to-enable-ssl-connections.rst:56 +#: ../../source/how-to-enable-ssl-connections.rst:58 msgid "" "Use the following terminal command to start a client (SuperNode) that " "uses the previously generated certificates:" msgstr "다음 터미널 명령을 사용하여 이전에 생성한 인증서를 사용하는 클라이언트(SuperNode)를 시작합니다:" -#: ../../source/how-to-enable-ssl-connections.rst:64 +#: ../../source/how-to-enable-ssl-connections.rst:67 +#, fuzzy msgid "" -"When setting :code:`root_certificates`, the client expects a file path to" -" PEM-encoded root certificates." 
+"When setting ``root_certificates``, the client expects a file path to " +"PEM-encoded root certificates." msgstr "코드:`root_certificates`를 설정하면 클라이언트는 PEM 인코딩된 루트 인증서의 파일 경로를 예상합니다." -#: ../../source/how-to-enable-ssl-connections.rst:70 +#: ../../source/how-to-enable-ssl-connections.rst:73 msgid "" "You should now have learned how to generate self-signed certificates " "using the given script, start an SSL-enabled server and have a client " @@ -6140,21 +6612,21 @@ msgstr "" "이제 주어진 스크립트를 사용하여 자체 서명 인증서를 생성하고, SSL 사용 서버를 시작하고, 클라이언트가 보안 연결을 설정하는 " "방법을 배웠을 것입니다." -#: ../../source/how-to-enable-ssl-connections.rst:75 +#: ../../source/how-to-enable-ssl-connections.rst:78 msgid "Additional resources" msgstr "추가 리소스" -#: ../../source/how-to-enable-ssl-connections.rst:77 +#: ../../source/how-to-enable-ssl-connections.rst:80 msgid "" "These additional sources might be relevant if you would like to dive " "deeper into the topic of certificates:" msgstr "인증서에 대해 더 자세히 알아보고 싶다면 이러한 추가 자료를 참고하세요:" -#: ../../source/how-to-enable-ssl-connections.rst:79 +#: ../../source/how-to-enable-ssl-connections.rst:83 msgid "`Let's Encrypt `_" msgstr "'암호화하세요 `_'" -#: ../../source/how-to-enable-ssl-connections.rst:80 +#: ../../source/how-to-enable-ssl-connections.rst:84 msgid "`certbot `_" msgstr "`인증봇 `_" @@ -6176,13 +6648,15 @@ msgstr "" "결정합니다. Flower는 아래에 설명된 것과 동일한 API를 기반으로 하는 몇 가지 기본 제공 전략을 제공합니다." #: ../../source/how-to-implement-strategies.rst:11 -msgid "The :code:`Strategy` abstraction" +#, fuzzy +msgid "The ``Strategy`` abstraction" msgstr ":code:`Strategy` 추상화" #: ../../source/how-to-implement-strategies.rst:13 +#, fuzzy msgid "" "All strategy implementation are derived from the abstract base class " -":code:`flwr.server.strategy.Strategy`, both built-in implementations and " +"``flwr.server.strategy.Strategy``, both built-in implementations and " "third party implementations. This means that custom strategy " "implementations have the exact same capabilities at their disposal as " "built-in ones." @@ -6197,55 +6671,60 @@ msgid "" "implemented:" msgstr "전략 추상화에서는 구현해야 하는 몇 가지 추상적인 메서드를 정의합니다:" -#: ../../source/how-to-implement-strategies.rst:75 +#: ../../source/how-to-implement-strategies.rst:67 +#, fuzzy msgid "" -"Creating a new strategy means implementing a new :code:`class` (derived " -"from the abstract base class :code:`Strategy`) that implements for the " -"previously shown abstract methods:" +"Creating a new strategy means implementing a new ``class`` (derived from " +"the abstract base class ``Strategy``) that implements for the previously " +"shown abstract methods:" msgstr "" "새 전략을 생성한다는 것은 이전에 표시된 추상 메서드에 대해 구현하는 새로운 :code:`class`(추상 기본 클래스 " ":code:`Strategy`에서 파생됨)를 구현하는 것을 의미합니다:" -#: ../../source/how-to-implement-strategies.rst:100 +#: ../../source/how-to-implement-strategies.rst:97 msgid "The Flower server calls these methods in the following order:" msgstr "Flower 서버는 다음 순서로 이러한 메서드를 호출합니다:" -#: ../../source/how-to-implement-strategies.rst:177 +#: ../../source/how-to-implement-strategies.rst:174 msgid "The following sections describe each of those methods in more detail." msgstr "다음 섹션에서는 이러한 각 방법에 대해 자세히 설명합니다." 
-#: ../../source/how-to-implement-strategies.rst:180 -msgid "The :code:`initialize_parameters` method" +#: ../../source/how-to-implement-strategies.rst:177 +#, fuzzy +msgid "The ``initialize_parameters`` method" msgstr ":code:`initialize_parameters` 메서드" -#: ../../source/how-to-implement-strategies.rst:182 +#: ../../source/how-to-implement-strategies.rst:179 +#, fuzzy msgid "" -":code:`initialize_parameters` is called only once, at the very beginning " -"of an execution. It is responsible for providing the initial global model" -" parameters in a serialized form (i.e., as a :code:`Parameters` object)." +"``initialize_parameters`` is called only once, at the very beginning of " +"an execution. It is responsible for providing the initial global model " +"parameters in a serialized form (i.e., as a ``Parameters`` object)." msgstr "" "code:`initialize_parameters`는 실행을 처음 시작할 때 한 번만 호출됩니다. 이 함수는 초기 전역 모델 " "파라미터를 직렬화된 형식(즉, :code:`Parameters` 객체)으로 제공하는 역할을 합니다." -#: ../../source/how-to-implement-strategies.rst:184 +#: ../../source/how-to-implement-strategies.rst:183 +#, fuzzy msgid "" "Built-in strategies return user-provided initial parameters. The " "following example shows how initial parameters can be passed to " -":code:`FedAvg`:" +"``FedAvg``:" msgstr "" "기본 제공 전략은 사용자가 제공한 초기 매개 변수를 반환합니다. 다음 예는 초기 매개 변수를 :code:`FedAvg`에 전달하는 " "방법을 보여줍니다:" #: ../../source/how-to-implement-strategies.rst:209 +#, fuzzy msgid "" -"The Flower server will call :code:`initialize_parameters`, which either " -"returns the parameters that were passed to :code:`initial_parameters`, or" -" :code:`None`. If no parameters are returned from " -":code:`initialize_parameters` (i.e., :code:`None`), the server will " -"randomly select one client and ask it to provide its parameters. This is " -"a convenience feature and not recommended in practice, but it can be " -"useful for prototyping. In practice, it is recommended to always use " -"server-side parameter initialization." +"The Flower server will call ``initialize_parameters``, which either " +"returns the parameters that were passed to ``initial_parameters``, or " +"``None``. If no parameters are returned from ``initialize_parameters`` " +"(i.e., ``None``), the server will randomly select one client and ask it " +"to provide its parameters. This is a convenience feature and not " +"recommended in practice, but it can be useful for prototyping. In " +"practice, it is recommended to always use server-side parameter " +"initialization." msgstr "" "Flower 서버는 :code:`initialize_parameters`를 호출하여 " ":code:`initial_parameters`에 전달된 파라미터를 반환하거나 :code:`None`을 반환합니다. " @@ -6253,7 +6732,7 @@ msgstr "" "클라이언트 하나를 선택하여 해당 클라이언트에 매개변수를 제공하도록 요청합니다. 이는 편의 기능이며 실제로는 권장하지 않지만 " "프로토타이핑에는 유용할 수 있습니다. 실제로는 항상 서버 측 매개변수 초기화를 사용하는 것이 좋습니다." -#: ../../source/how-to-implement-strategies.rst:213 +#: ../../source/how-to-implement-strategies.rst:218 msgid "" "Server-side parameter initialization is a powerful mechanism. It can be " "used, for example, to resume training from a previously saved checkpoint." @@ -6264,220 +6743,240 @@ msgstr "" "서버 측 파라미터 초기화는 강력한 메커니즘입니다. 예를 들어 이전에 저장한 체크포인트에서 학습을 재개하는 데 사용할 수 있습니다. " "또한 연합 학습을 사용하여 사전 학습된 모델을 미세 조정하는 등 하이브리드 접근 방식을 구현하는 데 필요한 기본 기능입니다." 
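The ``initialize_parameters`` entries above mention that user-provided initial parameters can be passed to ``FedAvg``; the guide's own listing is elided from this catalogue. A minimal, hedged sketch follows: the zero-filled arrays are stand-ins for real model weights, while ``ndarrays_to_parameters`` and the ``initial_parameters`` argument are part of the public Flower API::

    import flwr as fl
    import numpy as np

    # Stand-in for real model weights (e.g. what model.get_weights() returns in Keras).
    initial_ndarrays = [
        np.zeros((10, 5), dtype=np.float32),
        np.zeros(5, dtype=np.float32),
    ]

    strategy = fl.server.strategy.FedAvg(
        # Serialize the ndarrays so the server can hand them out in round one.
        initial_parameters=fl.common.ndarrays_to_parameters(initial_ndarrays),
    )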
-#: ../../source/how-to-implement-strategies.rst:216 -msgid "The :code:`configure_fit` method" +#: ../../source/how-to-implement-strategies.rst:224 +#, fuzzy +msgid "The ``configure_fit`` method" msgstr ":code:`configure_fit` 메서드" -#: ../../source/how-to-implement-strategies.rst:218 +#: ../../source/how-to-implement-strategies.rst:226 +#, fuzzy msgid "" -":code:`configure_fit` is responsible for configuring the upcoming round " -"of training. What does *configure* mean in this context? Configuring a " -"round means selecting clients and deciding what instructions to send to " -"these clients. The signature of :code:`configure_fit` makes this clear:" +"``configure_fit`` is responsible for configuring the upcoming round of " +"training. What does *configure* mean in this context? Configuring a round" +" means selecting clients and deciding what instructions to send to these " +"clients. The signature of ``configure_fit`` makes this clear:" msgstr "" ":code:`configure_fit`은 다가오는 학 라운드를 구성하는 역할을 합니다. 이 문맥에서 *구성*은 무엇을 의미하나요? " "라운드를 구성한다는 것은 클라이언트를 선택하고 이 클라이언트에게 어떤 지침을 보낼지 결정하는 것을 의미합니다. " "code:`configure_fit`의 시그니처를 보면 이를 명확히 알 수 있습니다:" -#: ../../source/how-to-implement-strategies.rst:231 +#: ../../source/how-to-implement-strategies.rst:239 +#, fuzzy msgid "" "The return value is a list of tuples, each representing the instructions " "that will be sent to a particular client. Strategy implementations " -"usually perform the following steps in :code:`configure_fit`:" +"usually perform the following steps in ``configure_fit``:" msgstr "" "반환 값은 튜플 목록으로, 각 튜플은 특정 클라이언트로 전송될 명령어를 나타냅니다. 전략 구현은 일반적으로 " ":code:`configure_fit`에서 다음 단계를 수행합니다:" -#: ../../source/how-to-implement-strategies.rst:233 -#: ../../source/how-to-implement-strategies.rst:280 +#: ../../source/how-to-implement-strategies.rst:243 +#: ../../source/how-to-implement-strategies.rst:307 +#, fuzzy msgid "" -"Use the :code:`client_manager` to randomly sample all (or a subset of) " -"available clients (each represented as a :code:`ClientProxy` object)" +"Use the ``client_manager`` to randomly sample all (or a subset of) " +"available clients (each represented as a ``ClientProxy`` object)" msgstr "" ":code:`client_manager`를 사용하여 사용 가능한 모든 클라이언트(또는 그 하위 집합)를 무작위로 샘플링합니다(각각 " ":code:`ClientProxy` 개체로 표시됨)" -#: ../../source/how-to-implement-strategies.rst:234 +#: ../../source/how-to-implement-strategies.rst:245 +#, fuzzy msgid "" -"Pair each :code:`ClientProxy` with the same :code:`FitIns` holding the " -"current global model :code:`parameters` and :code:`config` dict" +"Pair each ``ClientProxy`` with the same ``FitIns`` holding the current " +"global model ``parameters`` and ``config`` dict" msgstr "" "각 :code:`ClientProxy`를 현재 글로벌 모델 :code:`parameters` 및 :code:`config` " "dict를 보유한 동일한 :code:`FitIns`와 쌍을 이룹니다" -#: ../../source/how-to-implement-strategies.rst:236 +#: ../../source/how-to-implement-strategies.rst:248 +#, fuzzy msgid "" -"More sophisticated implementations can use :code:`configure_fit` to " -"implement custom client selection logic. A client will only participate " -"in a round if the corresponding :code:`ClientProxy` is included in the " -"list returned from :code:`configure_fit`." +"More sophisticated implementations can use ``configure_fit`` to implement" +" custom client selection logic. A client will only participate in a round" +" if the corresponding ``ClientProxy`` is included in the list returned " +"from ``configure_fit``." msgstr "" "보다 정교한 구현은 :code:`configure_fit`을 사용하여 사용자 지정 클라이언트 선택 로직을 구현할 수 있습니다. 
" "클라이언트는 :code:`configure_fit`에서 반환된 목록에 해당 :code:`ClientProxy`가 포함된 경우에만 " "라운드에 참여합니다." -#: ../../source/how-to-implement-strategies.rst:240 +#: ../../source/how-to-implement-strategies.rst:254 +#, fuzzy msgid "" "The structure of this return value provides a lot of flexibility to the " "user. Since instructions are defined on a per-client basis, different " "instructions can be sent to each client. This enables custom strategies " "to train, for example, different models on different clients, or use " -"different hyperparameters on different clients (via the :code:`config` " -"dict)." +"different hyperparameters on different clients (via the ``config`` dict)." msgstr "" "이 반환 값의 구조는 사용자에게 많은 유연성을 제공합니다. instructions은 클라이언트별로 정의되므로 각 클라이언트에 서로 " "다른 명령어를 전송할 수 있습니다. 이를 통해 예를 들어 클라이언트마다 다른 모델을 학습시키거나 클라이언트마다 다른 하이퍼파라미터를" " 사용하는 사용자 지정 전략을 사용할 수 있습니다(:code:`config` dict를 통해)." -#: ../../source/how-to-implement-strategies.rst:243 -msgid "The :code:`aggregate_fit` method" +#: ../../source/how-to-implement-strategies.rst:261 +#, fuzzy +msgid "The ``aggregate_fit`` method" msgstr ":code:`aggregate_fit` 메서드" -#: ../../source/how-to-implement-strategies.rst:245 +#: ../../source/how-to-implement-strategies.rst:263 +#, fuzzy msgid "" -":code:`aggregate_fit` is responsible for aggregating the results returned" -" by the clients that were selected and asked to train in " -":code:`configure_fit`." +"``aggregate_fit`` is responsible for aggregating the results returned by " +"the clients that were selected and asked to train in ``configure_fit``." msgstr "" "code:`aggregate_fit`은 :code:`configure_fit`에서 훈련하도록 선택되고 요청된 클라이언트가 반환한 " "결과를 집계하는 역할을 담당합니다." -#: ../../source/how-to-implement-strategies.rst:258 +#: ../../source/how-to-implement-strategies.rst:277 +#, fuzzy msgid "" "Of course, failures can happen, so there is no guarantee that the server " "will get results from all the clients it sent instructions to (via " -":code:`configure_fit`). :code:`aggregate_fit` therefore receives a list " -"of :code:`results`, but also a list of :code:`failures`." +"``configure_fit``). ``aggregate_fit`` therefore receives a list of " +"``results``, but also a list of ``failures``." msgstr "" "물론 실패가 발생할 수 있으므로 서버가 명령을 보낸 모든 클라이언트로부터 결과를 얻을 수 있다는 보장은 " "없습니다(:code:`configure_fit`을 통해). 따라서 :code:`aggregate_fit`은 " ":code:`results` 목록뿐만 아니라 :code:`failures` 목록도 받습니다." -#: ../../source/how-to-implement-strategies.rst:260 +#: ../../source/how-to-implement-strategies.rst:282 +#, fuzzy msgid "" -":code:`aggregate_fit` returns an optional :code:`Parameters` object and a" -" dictionary of aggregated metrics. The :code:`Parameters` return value is" -" optional because :code:`aggregate_fit` might decide that the results " -"provided are not sufficient for aggregation (e.g., too many failures)." +"``aggregate_fit`` returns an optional ``Parameters`` object and a " +"dictionary of aggregated metrics. The ``Parameters`` return value is " +"optional because ``aggregate_fit`` might decide that the results provided" +" are not sufficient for aggregation (e.g., too many failures)." msgstr "" "code:`aggregate_fit`은 선택적 :code:`Parameters` 개체와 집계된 메트릭의 dictionary를 " "반환합니다. :code:`Parameters` 반환 값은 :code:`aggregate_fit`이 제공된 결과가 집계에 충분하지 " "않다고 판단할 수 있으므로(예: 실패 수가 너무 많음) 선택 사항입니다." 
-#: ../../source/how-to-implement-strategies.rst:263 -msgid "The :code:`configure_evaluate` method" +#: ../../source/how-to-implement-strategies.rst:288 +#, fuzzy +msgid "The ``configure_evaluate`` method" msgstr ":code:`configure_evaluate` 메서드" -#: ../../source/how-to-implement-strategies.rst:265 +#: ../../source/how-to-implement-strategies.rst:290 +#, fuzzy msgid "" -":code:`configure_evaluate` is responsible for configuring the upcoming " -"round of evaluation. What does *configure* mean in this context? " -"Configuring a round means selecting clients and deciding what " -"instructions to send to these clients. The signature of " -":code:`configure_evaluate` makes this clear:" +"``configure_evaluate`` is responsible for configuring the upcoming round " +"of evaluation. What does *configure* mean in this context? Configuring a " +"round means selecting clients and deciding what instructions to send to " +"these clients. The signature of ``configure_evaluate`` makes this clear:" msgstr "" ":code:`configure_evaluate`는 다가오는 평가 라운드를 구성하는 역할을 합니다. 이 문맥에서 *구성*은 무엇을 " "의미하나요? 라운드를 구성한다는 것은 클라이언트를 선택하고 이러한 클라이언트에 전송할 지침을 결정하는 것을 의미합니다. " ":code:`configure_evaluate`의 시그니처를 보면 이를 명확히 알 수 있습니다:" -#: ../../source/how-to-implement-strategies.rst:278 +#: ../../source/how-to-implement-strategies.rst:303 +#, fuzzy msgid "" "The return value is a list of tuples, each representing the instructions " "that will be sent to a particular client. Strategy implementations " -"usually perform the following steps in :code:`configure_evaluate`:" +"usually perform the following steps in ``configure_evaluate``:" msgstr "" "반환 값은 튜플 목록으로, 각 튜플은 특정 클라이언트로 전송될 명령어를 나타냅니다. 전략 구현은 일반적으로 " ":code:`configure_evaluate`에서 다음 단계를 수행합니다:" -#: ../../source/how-to-implement-strategies.rst:281 +#: ../../source/how-to-implement-strategies.rst:309 +#, fuzzy msgid "" -"Pair each :code:`ClientProxy` with the same :code:`EvaluateIns` holding " -"the current global model :code:`parameters` and :code:`config` dict" +"Pair each ``ClientProxy`` with the same ``EvaluateIns`` holding the " +"current global model ``parameters`` and ``config`` dict" msgstr "" "각 :code:`ClientProxy`를 현재 글로벌 모델 :code:`parameters` 및 :code:`config` " "dict를 보유한 동일한 :code:`EvaluateIns`와 쌍을 이룹니다" -#: ../../source/how-to-implement-strategies.rst:283 +#: ../../source/how-to-implement-strategies.rst:312 +#, fuzzy msgid "" -"More sophisticated implementations can use :code:`configure_evaluate` to " +"More sophisticated implementations can use ``configure_evaluate`` to " "implement custom client selection logic. A client will only participate " -"in a round if the corresponding :code:`ClientProxy` is included in the " -"list returned from :code:`configure_evaluate`." +"in a round if the corresponding ``ClientProxy`` is included in the list " +"returned from ``configure_evaluate``." msgstr "" "보다 정교한 구현은 :code:`configure_evaluate`를 사용하여 사용자 지정 클라이언트 선택 로직을 구현할 수 " "있습니다. 클라이언트는 :code:`configure_evaluate`에서 반환된 목록에 해당 :code:`ClientProxy`가" " 포함된 경우에만 라운드에 참여합니다." -#: ../../source/how-to-implement-strategies.rst:287 +#: ../../source/how-to-implement-strategies.rst:318 +#, fuzzy msgid "" "The structure of this return value provides a lot of flexibility to the " "user. Since instructions are defined on a per-client basis, different " "instructions can be sent to each client. This enables custom strategies " "to evaluate, for example, different models on different clients, or use " -"different hyperparameters on different clients (via the :code:`config` " -"dict)." 
+"different hyperparameters on different clients (via the ``config`` dict)." msgstr "" "이 반환 값의 구조는 사용자에게 많은 유연성을 제공합니다. 명령어는 클라이언트별로 정의되므로 각 클라이언트에 서로 다른 명령어를 " "전송할 수 있습니다. 이를 통해 사용자 지정 전략을 통해 예를 들어 클라이언트마다 다른 모델을 평가하거나 클라이언트마다 다른 " "하이퍼파라미터를 사용할 수 있습니다(:code:`config` dict를 통해)." -#: ../../source/how-to-implement-strategies.rst:291 -msgid "The :code:`aggregate_evaluate` method" +#: ../../source/how-to-implement-strategies.rst:325 +#, fuzzy +msgid "The ``aggregate_evaluate`` method" msgstr ":code:`aggregate_evaluate` 메서드" -#: ../../source/how-to-implement-strategies.rst:293 +#: ../../source/how-to-implement-strategies.rst:327 +#, fuzzy msgid "" -":code:`aggregate_evaluate` is responsible for aggregating the results " +"``aggregate_evaluate`` is responsible for aggregating the results " "returned by the clients that were selected and asked to evaluate in " -":code:`configure_evaluate`." +"``configure_evaluate``." msgstr "" "code:`aggregate_evaluate`는 :code:`configure_evaluate`에서 선택되어 평가를 요청한 " "클라이언트가 반환한 결과를 집계하는 역할을 담당합니다." -#: ../../source/how-to-implement-strategies.rst:306 +#: ../../source/how-to-implement-strategies.rst:341 +#, fuzzy msgid "" "Of course, failures can happen, so there is no guarantee that the server " "will get results from all the clients it sent instructions to (via " -":code:`configure_evaluate`). :code:`aggregate_evaluate` therefore " -"receives a list of :code:`results`, but also a list of :code:`failures`." +"``configure_evaluate``). ``aggregate_evaluate`` therefore receives a list" +" of ``results``, but also a list of ``failures``." msgstr "" "물론 실패가 발생할 수 있으므로 서버가 명령을 보낸 모든 클라이언트로부터 결과를 얻을 수 있다는 보장은 " "없습니다(:code:`configure_evaluate`를 통해). 따라서 :code:`aggregate_evaluate`는 " ":code:`results` 목록뿐만 아니라 :code:`failures` 목록도 받습니다." -#: ../../source/how-to-implement-strategies.rst:308 +#: ../../source/how-to-implement-strategies.rst:346 +#, fuzzy msgid "" -":code:`aggregate_evaluate` returns an optional :code:`float` (loss) and a" -" dictionary of aggregated metrics. The :code:`float` return value is " -"optional because :code:`aggregate_evaluate` might decide that the results" -" provided are not sufficient for aggregation (e.g., too many failures)." +"``aggregate_evaluate`` returns an optional ``float`` (loss) and a " +"dictionary of aggregated metrics. The ``float`` return value is optional " +"because ``aggregate_evaluate`` might decide that the results provided are" +" not sufficient for aggregation (e.g., too many failures)." msgstr "" "code:`aggregate_evaluate`는 선택적 :code:`float`(손실)와 집계된 메트릭의 dictionary를 " "반환합니다. code:`float` 반환 값은 :code:`aggregate_evaluate`가 제공된 결과가 집계에 충분하지 " "않다고 판단할 수 있으므로(예: 실패 수가 너무 많음) 선택 사항입니다." -#: ../../source/how-to-implement-strategies.rst:311 -msgid "The :code:`evaluate` method" +#: ../../source/how-to-implement-strategies.rst:352 +#, fuzzy +msgid "The ``evaluate`` method" msgstr ":code:`evaluate` 메서드" -#: ../../source/how-to-implement-strategies.rst:313 +#: ../../source/how-to-implement-strategies.rst:354 +#, fuzzy msgid "" -":code:`evaluate` is responsible for evaluating model parameters on the " -"server-side. Having :code:`evaluate` in addition to " -":code:`configure_evaluate`/:code:`aggregate_evaluate` enables strategies " -"to perform both servers-side and client-side (federated) evaluation." +"``evaluate`` is responsible for evaluating model parameters on the " +"server-side. 
Having ``evaluate`` in addition to " +"``configure_evaluate``/``aggregate_evaluate`` enables strategies to " +"perform both servers-side and client-side (federated) evaluation." msgstr "" ":code:`evaluate`는 서버 측에서 모델 매개변수를 평가하는 역할을 담당합니다. " "code:`configure_evaluate`/:code:`aggregate_evaluate`와 함께 " ":code:`evaluate`를 사용하면 서버 측과 클라이언트 측(federated) 평가를 모두 수행할 수 있는 전략을 사용할 수" " 있습니다." -#: ../../source/how-to-implement-strategies.rst:323 +#: ../../source/how-to-implement-strategies.rst:364 +#, fuzzy msgid "" "The return value is again optional because the strategy might not need to" " implement server-side evaluation or because the user-defined " -":code:`evaluate` method might not complete successfully (e.g., it might " -"fail to load the server-side evaluation data)." +"``evaluate`` method might not complete successfully (e.g., it might fail " +"to load the server-side evaluation data)." msgstr "" "반환 값은 전략에서 서버 측 평가를 구현할 필요가 없거나 사용자 정의 :code:`evaluate` 메서드가 성공적으로 완료되지 " "않을 수 있기 때문에(예: 서버 측 평가 데이터를 로드하지 못할 수 있음) 다시 선택 사항으로 설정할 수 있습니다." @@ -6486,60 +6985,63 @@ msgstr "" msgid "Install Flower" msgstr "Flower 설치" -#: ../../source/how-to-install-flower.rst:6 +#: ../../source/how-to-install-flower.rst:5 msgid "Python version" msgstr "Python 버전" -#: ../../source/how-to-install-flower.rst:12 +#: ../../source/how-to-install-flower.rst:11 msgid "Install stable release" msgstr "안정적인 릴리즈 설치" -#: ../../source/how-to-install-flower.rst:15 -#: ../../source/how-to-upgrade-to-flower-next.rst:46 +#: ../../source/how-to-install-flower.rst:14 +#: ../../source/how-to-upgrade-to-flower-next.rst:66 msgid "Using pip" msgstr "pip 사용" -#: ../../source/how-to-install-flower.rst:17 -msgid "" -"Stable releases are available on `PyPI " -"`_::" +#: ../../source/how-to-install-flower.rst:16 +#, fuzzy +msgid "Stable releases are available on `PyPI `_:" msgstr "안정적인 릴리즈는 `PyPI `_:: 에서 확인할 수 있습니다::" -#: ../../source/how-to-install-flower.rst:21 +#: ../../source/how-to-install-flower.rst:22 +#, fuzzy msgid "" "For simulations that use the Virtual Client Engine, ``flwr`` should be " -"installed with the ``simulation`` extra::" +"installed with the ``simulation`` extra:" msgstr "가상 클라이언트 엔진을 사용하는 시뮬레이션의 경우 ``flwr``을 ``simulation``extra와 함께 설치해야 합니다:" -#: ../../source/how-to-install-flower.rst:27 +#: ../../source/how-to-install-flower.rst:30 msgid "Using conda (or mamba)" msgstr "conda(또는 mamba) 사용" -#: ../../source/how-to-install-flower.rst:29 +#: ../../source/how-to-install-flower.rst:32 msgid "Flower can also be installed from the ``conda-forge`` channel." msgstr "Flower은 'conda-forge' 채널에서도 설치할 수 있습니다." 
-#: ../../source/how-to-install-flower.rst:31 +#: ../../source/how-to-install-flower.rst:34 +#, fuzzy msgid "" "If you have not added ``conda-forge`` to your channels, you will first " -"need to run the following::" +"need to run the following:" msgstr "채널에 'conda-forge'를 추가하지 않은 경우 먼저 다음을 실행해야 합니다:" -#: ../../source/how-to-install-flower.rst:36 +#: ../../source/how-to-install-flower.rst:42 +#, fuzzy msgid "" "Once the ``conda-forge`` channel has been enabled, ``flwr`` can be " -"installed with ``conda``::" +"installed with ``conda``:" msgstr "conda-forge`` 채널이 활성화되면 ``flwr``을 ``conda``로 설치할 수 있습니다::" -#: ../../source/how-to-install-flower.rst:40 -msgid "or with ``mamba``::" +#: ../../source/how-to-install-flower.rst:49 +#, fuzzy +msgid "or with ``mamba``:" msgstr "또는 ``mamba``::" -#: ../../source/how-to-install-flower.rst:46 +#: ../../source/how-to-install-flower.rst:56 msgid "Verify installation" msgstr "설치 확인" -#: ../../source/how-to-install-flower.rst:48 +#: ../../source/how-to-install-flower.rst:58 #, fuzzy msgid "" "The following command can be used to verify if Flower was successfully " @@ -6549,54 +7051,58 @@ msgstr "" "다음 명령을 사용하여 Flower가 성공적으로 설치되었는지 확인할 수 있습니다. 모든 것이 정상적으로 작동하면 명령줄에 " "Flower의 버전이 출력됩니다:" -#: ../../source/how-to-install-flower.rst:58 +#: ../../source/how-to-install-flower.rst:68 msgid "Advanced installation options" msgstr "고급 설치 옵션" -#: ../../source/how-to-install-flower.rst:61 +#: ../../source/how-to-install-flower.rst:71 msgid "Install via Docker" msgstr "Docker를 통해 설치" -#: ../../source/how-to-install-flower.rst:63 +#: ../../source/how-to-install-flower.rst:73 #, fuzzy msgid ":doc:`Run Flower using Docker `" msgstr ":doc:`Docker를 사용하여 Flower를 실행하는 방법 `" -#: ../../source/how-to-install-flower.rst:66 +#: ../../source/how-to-install-flower.rst:76 msgid "Install pre-release" msgstr "사전 릴리즈 설치" -#: ../../source/how-to-install-flower.rst:68 +#: ../../source/how-to-install-flower.rst:78 +#, fuzzy msgid "" "New (possibly unstable) versions of Flower are sometimes available as " "pre-release versions (alpha, beta, release candidate) before the stable " -"release happens::" +"release happens:" msgstr "" "새(불안정할 수 있는) 버전의 Flower는 안정 버전이 출시되기 전에 사전 릴리즈 버전(알파, 베타, 릴리즈 후보)으로 제공되는 " "경우가 있습니다:" -#: ../../source/how-to-install-flower.rst:72 +#: ../../source/how-to-install-flower.rst:85 +#, fuzzy msgid "" "For simulations that use the Virtual Client Engine, ``flwr`` pre-releases" -" should be installed with the ``simulation`` extra::" +" should be installed with the ``simulation`` extra:" msgstr "" "가상 클라이언트 엔진을 사용하는 시뮬레이션의 경우 ``flwr`` 사전 릴리즈를 ``simulation`` extra와 함께 " "설치해야 합니다:" -#: ../../source/how-to-install-flower.rst:77 +#: ../../source/how-to-install-flower.rst:93 msgid "Install nightly release" msgstr "야간 릴리즈 설치" -#: ../../source/how-to-install-flower.rst:79 +#: ../../source/how-to-install-flower.rst:95 +#, fuzzy msgid "" "The latest (potentially unstable) changes in Flower are available as " -"nightly releases::" +"nightly releases:" msgstr "Flower의 최신 (불안정할 수 있는) 변경 사항은 다음과 같이 야간 릴리즈로 제공됩니다:" -#: ../../source/how-to-install-flower.rst:83 +#: ../../source/how-to-install-flower.rst:101 +#, fuzzy msgid "" "For simulations that use the Virtual Client Engine, ``flwr-nightly`` " -"should be installed with the ``simulation`` extra::" +"should be installed with the ``simulation`` extra:" msgstr "" "가상 클라이언트 엔진을 사용하는 시뮬레이션의 경우, ``flwr-nightly``를 ``simulation`` extr와 함께 " "설치해야 합니다::" @@ -6617,7 +7123,7 @@ msgstr "" "강력하며 클라이언트별 리소스 할당 방법을 결정하고 총 사용량을 제한할 수 있습니다. 
리소스 소비에 대한 인사이트를 통해 더 현명한 " "결정을 내리고 실행 시간을 단축할 수 있습니다." -#: ../../source/how-to-monitor-simulation.rst:6 +#: ../../source/how-to-monitor-simulation.rst:9 msgid "" "The specific instructions assume you are using macOS and have the " "`Homebrew `_ package manager installed." @@ -6625,11 +7131,11 @@ msgstr "" "구체적인 지침은 macOS를 사용 중이고 'Homebrew `_ 패키지 관리자가 설치되어 있다고 " "가정합니다." -#: ../../source/how-to-monitor-simulation.rst:10 +#: ../../source/how-to-monitor-simulation.rst:13 msgid "Downloads" msgstr "다운로드" -#: ../../source/how-to-monitor-simulation.rst:16 +#: ../../source/how-to-monitor-simulation.rst:19 msgid "" "`Prometheus `_ is used for data collection, while" " `Grafana `_ will enable you to visualize the " @@ -6640,27 +7146,27 @@ msgstr "" "`_는 수집된 데이터를 시각화할 수 있게 해줍니다. 이 두 도구는 모두 Flower가 " "내부적으로 사용하는 `Ray `_와 잘 통합되어 있습니다." -#: ../../source/how-to-monitor-simulation.rst:18 +#: ../../source/how-to-monitor-simulation.rst:23 msgid "" "Overwrite the configuration files (depending on your device, it might be " "installed on a different path)." msgstr "구성 파일을 덮어씁니다(장치에 따라 다른 경로에 설치되어 있을 수 있음)." -#: ../../source/how-to-monitor-simulation.rst:20 +#: ../../source/how-to-monitor-simulation.rst:26 msgid "If you are on an M1 Mac, it should be:" msgstr "M1 Mac을 사용 중이라면:" -#: ../../source/how-to-monitor-simulation.rst:27 +#: ../../source/how-to-monitor-simulation.rst:33 msgid "On the previous generation Intel Mac devices, it should be:" msgstr "이전 세대 Intel Mac 장치에서는:" -#: ../../source/how-to-monitor-simulation.rst:34 +#: ../../source/how-to-monitor-simulation.rst:40 msgid "" "Open the respective configuration files and change them. Depending on " "your device, use one of the two following commands:" msgstr "각 구성 파일을 열고 변경합니다. 장치에 따라 다음 두 명령 중 하나를 사용합니다:" -#: ../../source/how-to-monitor-simulation.rst:44 +#: ../../source/how-to-monitor-simulation.rst:51 msgid "" "and then delete all the text in the file and paste a new Prometheus " "config you see below. You may adjust the time intervals to your " @@ -6669,7 +7175,7 @@ msgstr "" "를 입력한 다음 파일의 모든 텍스트를 삭제하고 아래에 표시된 새 Prometheus 설정을 붙여넣습니다. 요구 사항에 따라 시간 " "간격을 조정할 수 있습니다:" -#: ../../source/how-to-monitor-simulation.rst:59 +#: ../../source/how-to-monitor-simulation.rst:67 msgid "" "Now after you have edited the Prometheus configuration, do the same with " "the Grafana configuration files. Open those using one of the following " @@ -6678,55 +7184,56 @@ msgstr "" "이제 Prometheus 구성을 편집한 후 Grafana 구성 파일에 대해서도 동일한 작업을 수행합니다. 이전과 마찬가지로 다음 " "명령 중 하나를 사용하여 파일을 엽니다:" -#: ../../source/how-to-monitor-simulation.rst:69 +#: ../../source/how-to-monitor-simulation.rst:78 msgid "" "Your terminal editor should open and allow you to apply the following " "configuration as before." msgstr "터미널 편집기가 열리면 이전과 마찬가지로 다음 구성을 적용할 수 있습니다." -#: ../../source/how-to-monitor-simulation.rst:84 +#: ../../source/how-to-monitor-simulation.rst:94 msgid "" "Congratulations, you just downloaded all the necessary software needed " "for metrics tracking. Now, let’s start it." msgstr "축하합니다. 매트릭 트레킹에 필요한 모든 소프트웨어를 다운로드하셨습니다. 이제 시작해 보겠습니다." -#: ../../source/how-to-monitor-simulation.rst:88 +#: ../../source/how-to-monitor-simulation.rst:98 msgid "Tracking metrics" msgstr "매트릭 트래킹" -#: ../../source/how-to-monitor-simulation.rst:90 +#: ../../source/how-to-monitor-simulation.rst:100 msgid "" "Before running your Flower simulation, you have to start the monitoring " "tools you have just installed and configured." msgstr "Flower 시뮬레이션을 실행하기 전에 방금 설치 및 구성한 모니터링 도구를 시작해야 합니다." 
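The monitoring entries above install and start Prometheus and Grafana and then point at an argument that has to be added to the Python call that launches the simulation; that code block is not part of this catalogue. One plausible reading, stated as an assumption rather than as the guide's literal snippet, is Ray's standard ``include_dashboard`` flag passed through ``ray_init_args``::

    # Forwarded by flwr.simulation.start_simulation(...) to ray.init(); asks Ray
    # to launch its dashboard so Prometheus can scrape it and Grafana can plot it.
    ray_init_args = {"include_dashboard": True}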
-#: ../../source/how-to-monitor-simulation.rst:97 +#: ../../source/how-to-monitor-simulation.rst:108 msgid "" "Please include the following argument in your Python code when starting a" " simulation." msgstr "시뮬레이션을 시작할 때 Python 코드에 다음 전달인자를 포함하세요." -#: ../../source/how-to-monitor-simulation.rst:108 +#: ../../source/how-to-monitor-simulation.rst:119 msgid "Now, you are ready to start your workload." msgstr "이제 워크로드를 시작할 준비가 되었습니다." -#: ../../source/how-to-monitor-simulation.rst:110 +#: ../../source/how-to-monitor-simulation.rst:121 msgid "" "Shortly after the simulation starts, you should see the following logs in" " your terminal:" msgstr "시뮬레이션이 시작되고 얼마 지나지 않아 터미널에 다음 로그가 표시됩니다:" -#: ../../source/how-to-monitor-simulation.rst:117 -msgid "You can look at everything at ``_ ." +#: ../../source/how-to-monitor-simulation.rst:127 +#, fuzzy +msgid "You can look at everything at http://127.0.0.1:8265 ." msgstr "``_ 에서 모든 것을 볼 수 있습니다." -#: ../../source/how-to-monitor-simulation.rst:119 +#: ../../source/how-to-monitor-simulation.rst:129 msgid "" "It's a Ray Dashboard. You can navigate to Metrics (on the left panel, the" " lowest option)." msgstr "Ray 대시보드입니다. 메트릭(왼쪽 패널의 가장 아래 옵션)으로 이동할 수 있습니다." -#: ../../source/how-to-monitor-simulation.rst:121 +#: ../../source/how-to-monitor-simulation.rst:132 msgid "" "Or alternatively, you can just see them in Grafana by clicking on the " "right-up corner, “View in Grafana”. Please note that the Ray dashboard is" @@ -6738,26 +7245,27 @@ msgstr "" "시뮬레이션 중에만 액세스할 수 있다는 점에 유의하세요. 시뮬레이션이 종료된 후에는 Grafana를 사용하여 메트릭을 탐색할 수만 " "있습니다. ``http://localhost:3000/``로 이동하여 Grafana를 시작할 수 있습니다." -#: ../../source/how-to-monitor-simulation.rst:123 +#: ../../source/how-to-monitor-simulation.rst:137 +#, fuzzy msgid "" "After you finish the visualization, stop Prometheus and Grafana. This is " -"important as they will otherwise block, for example port :code:`3000` on " -"your machine as long as they are running." +"important as they will otherwise block, for example port ``3000`` on your" +" machine as long as they are running." msgstr "" "시각화를 완료한 후에는 Prometheus와 Grafana를 중지합니다. 그렇지 않으면 실행 중인 동안 컴퓨터에서 포트 " ":code:`3000` 등을 차단하므로 이 작업이 중요합니다." -#: ../../source/how-to-monitor-simulation.rst:132 +#: ../../source/how-to-monitor-simulation.rst:147 msgid "Resource allocation" msgstr "리소스 할당" -#: ../../source/how-to-monitor-simulation.rst:134 +#: ../../source/how-to-monitor-simulation.rst:149 msgid "" "You must understand how the Ray library works to efficiently allocate " "system resources to simulation clients on your own." msgstr "Ray 라이브러리가 어떻게 작동하는지 이해해야 시뮬레이션 클라이언트에 시스템 리소스를 효율적으로 할당할 수 있습니다." -#: ../../source/how-to-monitor-simulation.rst:136 +#: ../../source/how-to-monitor-simulation.rst:152 msgid "" "Initially, the simulation (which Ray handles under the hood) starts by " "default with all the available resources on the system, which it shares " @@ -6770,21 +7278,21 @@ msgstr "" "클라이언트 간에 공유됩니다. 그렇다고 해서 모든 클라이언트에게 균등하게 분배하거나 모든 클라이언트에서 동시에 모델 학습이 이루어지는" " 것은 아닙니다. 이에 대한 자세한 내용은 이 블로그의 뒷부분에서 설명합니다. 다음을 실행하여 시스템 리소스를 확인할 수 있습니다:" -#: ../../source/how-to-monitor-simulation.rst:143 +#: ../../source/how-to-monitor-simulation.rst:164 msgid "In Google Colab, the result you see might be similar to this:" msgstr "Google Colab에서는 이와 유사한 결과가 표시될 수 있습니다:" -#: ../../source/how-to-monitor-simulation.rst:155 +#: ../../source/how-to-monitor-simulation.rst:175 msgid "" "However, you can overwrite the defaults. 
When starting a simulation, do " "the following (you don't need to overwrite all of them):" msgstr "그러나 기본값을 덮어쓸 수 있습니다. 시뮬레이션을 시작할 때 다음을 수행합니다(모두 덮어쓸 필요는 없음):" -#: ../../source/how-to-monitor-simulation.rst:175 +#: ../../source/how-to-monitor-simulation.rst:195 msgid "Let’s also specify the resource for a single client." msgstr "단일 클라이언트에 대한 리소스도 지정해 보겠습니다." -#: ../../source/how-to-monitor-simulation.rst:205 +#: ../../source/how-to-monitor-simulation.rst:225 msgid "" "Now comes the crucial part. Ray will start a new client only when it has " "all the required resources (such that they run in parallel) when the " @@ -6793,29 +7301,30 @@ msgstr "" "이제 중요한 부분이 나옵니다. Ray는 리소스가 허용하는 경우에만 필요한 모든 리소스가 있을 때(병렬로 실행되는 등) 새 " "클라이언트를 시작합니다." -#: ../../source/how-to-monitor-simulation.rst:207 +#: ../../source/how-to-monitor-simulation.rst:228 +#, fuzzy msgid "" "In the example above, only one client will be run, so your clients won't " -"run concurrently. Setting :code:`client_num_gpus = 0.5` would allow " -"running two clients and therefore enable them to run concurrently. Be " -"careful not to require more resources than available. If you specified " -":code:`client_num_gpus = 2`, the simulation wouldn't start (even if you " -"had 2 GPUs but decided to set 1 in :code:`ray_init_args`)." +"run concurrently. Setting ``client_num_gpus = 0.5`` would allow running " +"two clients and therefore enable them to run concurrently. Be careful not" +" to require more resources than available. If you specified " +"``client_num_gpus = 2``, the simulation wouldn't start (even if you had 2" +" GPUs but decided to set 1 in ``ray_init_args``)." msgstr "" "위의 예에서는 하나의 클라이언트만 실행되므로 클라이언트가 동시에 실행되지 않습니다. :code:`client_num_gpus = " "0.5` 를 설정하면 두 개의 클라이언트를 실행할 수 있으므로 동시에 실행할 수 있습니다. 사용 가능한 리소스보다 더 많은 리소스를" " 요구하지 않도록 주의하세요. :code:`client_num_gpus = 2`를 지정하면 시뮬레이션이 시작되지 않습니다(GPU가 " "2개이지만 :code:`ray_init_args`에서 1개를 설정한 경우에도 마찬가지입니다)." -#: ../../source/how-to-monitor-simulation.rst:212 ../../source/ref-faq.rst:2 +#: ../../source/how-to-monitor-simulation.rst:235 ../../source/ref-faq.rst:2 msgid "FAQ" msgstr "자주 묻는 질문" -#: ../../source/how-to-monitor-simulation.rst:214 +#: ../../source/how-to-monitor-simulation.rst:237 msgid "Q: I don't see any metrics logged." msgstr "질문: 기록된 메트릭이 보이지 않습니다." -#: ../../source/how-to-monitor-simulation.rst:216 +#: ../../source/how-to-monitor-simulation.rst:239 msgid "" "A: The timeframe might not be properly set. The setting is in the top " "right corner (\"Last 30 minutes\" by default). Please change the " @@ -6824,7 +7333,7 @@ msgstr "" "A: 기간이 제대로 설정되지 않았을 수 있습니다. 설정은 오른쪽 상단에 있습니다(기본값은 '지난 30분'). 시뮬레이션이 실행된 " "기간을 반영하도록 기간을 변경해 주세요." -#: ../../source/how-to-monitor-simulation.rst:218 +#: ../../source/how-to-monitor-simulation.rst:243 msgid "" "Q: I see “Grafana server not detected. Please make sure the Grafana " "server is running and refresh this page” after going to the Metrics tab " @@ -6833,38 +7342,41 @@ msgstr "" "질문: \"Grafana 서버가 감지되지 않았습니다. Ray 대시보드의 메트릭 탭으로 이동한 후 Grafana 서버가 실행 중인지 " "확인하고 이 페이지를 새로고침하세요.\"라는 메시지가 표시됩니다." -#: ../../source/how-to-monitor-simulation.rst:220 +#: ../../source/how-to-monitor-simulation.rst:246 msgid "" "A: You probably don't have Grafana running. Please check the running " "services" msgstr "A: Grafana가 실행되고 있지 않을 수 있습니다. 실행 중인 서비스를 확인하세요" -#: ../../source/how-to-monitor-simulation.rst:226 +#: ../../source/how-to-monitor-simulation.rst:252 +#, fuzzy msgid "" "Q: I see \"This site can't be reached\" when going to " -"``_." +"http://127.0.0.1:8265." 
msgstr "Q: ``_로 이동할 때 \"이 사이트에 연결할 수 없습니다.\"라는 메시지가 표시됩니다." -#: ../../source/how-to-monitor-simulation.rst:228 +#: ../../source/how-to-monitor-simulation.rst:254 msgid "" "A: Either the simulation has already finished, or you still need to start" " Prometheus." msgstr "A: 시뮬레이션이 이미 완료되었거나 아직 Prometheus를 시작해야 합니다." -#: ../../source/how-to-monitor-simulation.rst:232 +#: ../../source/how-to-monitor-simulation.rst:257 msgid "Resources" msgstr "리소스" -#: ../../source/how-to-monitor-simulation.rst:234 +#: ../../source/how-to-monitor-simulation.rst:259 +#, fuzzy msgid "" -"Ray Dashboard: ``_" +"Ray Dashboard: https://docs.ray.io/en/latest/ray-observability/getting-" +"started.html" msgstr "" "Ray 대시보드: ``_" -#: ../../source/how-to-monitor-simulation.rst:236 -msgid "Ray Metrics: ``_" +#: ../../source/how-to-monitor-simulation.rst:261 +#, fuzzy +msgid "Ray Metrics: https://docs.ray.io/en/latest/cluster/metrics.html" msgstr "Ray 메트릭: ``_" #: ../../source/how-to-run-simulations.rst:2 @@ -6894,16 +7406,17 @@ msgstr "" "architecture.html#virtual-client-engine>`_ 또는 VCE를 통해 이러한 시나리오를 수용할 수 " "있습니다." -#: ../../source/how-to-run-simulations.rst:10 +#: ../../source/how-to-run-simulations.rst:19 +#, fuzzy msgid "" -"The :code:`VirtualClientEngine` schedules, launches and manages `virtual`" -" clients. These clients are identical to `non-virtual` clients (i.e. the " +"The ``VirtualClientEngine`` schedules, launches and manages `virtual` " +"clients. These clients are identical to `non-virtual` clients (i.e. the " "ones you launch via the command `flwr.client.start_client `_) in the sense that they can be configure by " "creating a class inheriting, for example, from `flwr.client.NumPyClient " "`_ and therefore behave in an " "identical way. In addition to that, clients managed by the " -":code:`VirtualClientEngine` are:" +"``VirtualClientEngine`` are:" msgstr "" ":code:`VirtualClientEngine`은 `virtual` 클라이언트를 예약, 실행 및 관리합니다. 이러한 클라이언트는 " "`non-virtual` 클라이언트(예: `flwr.client.start_client `_에서 상속하는 클래스 생성으로 구성될 수 있으므로 동일한 " "방식으로 동작합니다. 그 외에도 :code:`VirtualClientEngine`에 의해 관리되는 클라이언트는 다음과 같습니다:" -#: ../../source/how-to-run-simulations.rst:12 +#: ../../source/how-to-run-simulations.rst:26 msgid "" "resource-aware: this means that each client gets assigned a portion of " "the compute and memory on your system. You as a user can control this at " @@ -6923,16 +7436,17 @@ msgstr "" "시뮬레이션을 시작할 때 이를 제어할 수 있으며, 이를 통해 Flower FL 시뮬레이션의 병렬 처리 정도를 제어할 수 있습니다. " "클라이언트당 리소스가 적을수록 동일한 하드웨어에서 더 많은 클라이언트를 동시에 실행할 수 있습니다." -#: ../../source/how-to-run-simulations.rst:13 +#: ../../source/how-to-run-simulations.rst:31 +#, fuzzy msgid "" "self-managed: this means that you as a user do not need to launch clients" -" manually, instead this gets delegated to :code:`VirtualClientEngine`'s " +" manually, instead this gets delegated to ``VirtualClientEngine``'s " "internals." msgstr "" "self-managed: 이는 사용자가 클라이언트를 수동으로 실행할 필요가 없으며, 대신 " ":code:`VirtualClientEngine`의 내부에 위임된다는 의미입니다." -#: ../../source/how-to-run-simulations.rst:14 +#: ../../source/how-to-run-simulations.rst:33 msgid "" "ephemeral: this means that a client is only materialized when it is " "required in the FL process (e.g. to do `fit() `_을 수행하기 위해). 객체는 나중에 소멸되어 할당된 리소스를 해제하고" " 다른 클라이언트가 참여할 수 있도록 허용합니다." 
-#: ../../source/how-to-run-simulations.rst:16 +#: ../../source/how-to-run-simulations.rst:38 +#, fuzzy msgid "" -"The :code:`VirtualClientEngine` implements `virtual` clients using `Ray " +"The ``VirtualClientEngine`` implements `virtual` clients using `Ray " "`_, an open-source framework for scalable Python " -"workloads. In particular, Flower's :code:`VirtualClientEngine` makes use " -"of `Actors `_ to " -"spawn `virtual` clients and run their workload." +"workloads. In particular, Flower's ``VirtualClientEngine`` makes use of " +"`Actors `_ to spawn " +"`virtual` clients and run their workload." msgstr "" ":code:`VirtualClientEngine`은 확장 가능한 파이썬 워크로드를 위한 오픈 소스 프레임워크인 `Ray " "`_를 사용하여 `virtual` 클라이언트를 구현합니다. 특히 Flower의 " ":code:`VirtualClientEngine`은 `Actors `_를 사용하여 `virtual` 클라이언트를 생성하고 해당 워크로드를 실행합니다." -#: ../../source/how-to-run-simulations.rst:20 +#: ../../source/how-to-run-simulations.rst:45 msgid "Launch your Flower simulation" msgstr "Flower 시뮬레이션 시작" -#: ../../source/how-to-run-simulations.rst:22 +#: ../../source/how-to-run-simulations.rst:47 msgid "" "Running Flower simulations still require you to define your client class," " a strategy, and utility functions to download and load (and potentially " @@ -6975,22 +7490,23 @@ msgstr "" "flwr.html#flwr.simulation.start_simulation>`_을 사용하면 되며, 최소한의 예시는 다음과 " "같습니다:" -#: ../../source/how-to-run-simulations.rst:44 +#: ../../source/how-to-run-simulations.rst:73 msgid "VirtualClientEngine resources" msgstr "VirtualClientEngine 리소스" -#: ../../source/how-to-run-simulations.rst:45 +#: ../../source/how-to-run-simulations.rst:75 +#, fuzzy msgid "" "By default the VCE has access to all system resources (i.e. all CPUs, all" " GPUs, etc) since that is also the default behavior when starting Ray. " "However, in some settings you might want to limit how many of your system" " resources are used for simulation. You can do this via the " -":code:`ray_init_args` input argument to :code:`start_simulation` which " -"the VCE internally passes to Ray's :code:`ray.init` command. For a " -"complete list of settings you can configure check the `ray.init " +"``ray_init_args`` input argument to ``start_simulation`` which the VCE " +"internally passes to Ray's ``ray.init`` command. For a complete list of " +"settings you can configure check the `ray.init " "`_" -" documentation. Do not set :code:`ray_init_args` if you want the VCE to " -"use all your system's CPUs and GPUs." +" documentation. Do not set ``ray_init_args`` if you want the VCE to use " +"all your system's CPUs and GPUs." msgstr "" "기본적으로 VCE는 모든 시스템 리소스(예: 모든 CPU, 모든 GPU 등)에 액세스할 수 있으며, 이는 Ray를 시작할 때의 기본" " 동작이기도 합니다. 그러나 일부 설정에서는 시뮬레이션에 사용되는 시스템 리소스의 수를 제한하고 싶을 수 있습니다. 이 설정은 " @@ -7000,20 +7516,21 @@ msgstr "" " 설명서를 확인하세요. VCE가 시스템의 모든 CPU와 GPU를 사용하도록 하려면 :code:`ray_init_args`를 설정하지" " 마세요." -#: ../../source/how-to-run-simulations.rst:62 +#: ../../source/how-to-run-simulations.rst:97 msgid "Assigning client resources" msgstr "클라이언트 리소스 할당" -#: ../../source/how-to-run-simulations.rst:63 +#: ../../source/how-to-run-simulations.rst:99 +#, fuzzy msgid "" -"By default the :code:`VirtualClientEngine` assigns a single CPU core (and" -" nothing else) to each virtual client. This means that if your system has" -" 10 cores, that many virtual clients can be concurrently running." +"By default the ``VirtualClientEngine`` assigns a single CPU core (and " +"nothing else) to each virtual client. This means that if your system has " +"10 cores, that many virtual clients can be concurrently running." 
msgstr "" "기본적으로 :code:`VirtualClientEngine`은 각 가상 클라이언트에 단일 CPU 코어를 할당합니다(그 외에는 " "아무것도 할당하지 않음). 즉, 시스템에 코어가 10개인 경우 그만큼 많은 가상 클라이언트를 동시에 실행할 수 있습니다." -#: ../../source/how-to-run-simulations.rst:65 +#: ../../source/how-to-run-simulations.rst:103 msgid "" "More often than not, you would probably like to adjust the resources your" " clients get assigned based on the complexity (i.e. compute and memory " @@ -7028,31 +7545,32 @@ msgstr "" "flwr.html#flwr.simulation.start_simulation>`_로 설정하여 이를 수행할 수 있습니다. Ray는 " "내부적으로 두 개의 키를 사용하여 워크로드(이 경우 Flower 클라이언트)를 스케줄링하고 스폰합니다:" -#: ../../source/how-to-run-simulations.rst:67 -msgid ":code:`num_cpus` indicates the number of CPU cores a client would get." +#: ../../source/how-to-run-simulations.rst:110 +#, fuzzy +msgid "``num_cpus`` indicates the number of CPU cores a client would get." msgstr ":code:`num_cpus`는 클라이언트에서 사용할 수 있는 CPU 코어 수를 나타냅니다." -#: ../../source/how-to-run-simulations.rst:68 -msgid "" -":code:`num_gpus` indicates the **ratio** of GPU memory a client gets " -"assigned." +#: ../../source/how-to-run-simulations.rst:111 +#, fuzzy +msgid "``num_gpus`` indicates the **ratio** of GPU memory a client gets assigned." msgstr ":code:`num_gpus`는 클라이언트에 할당되는 GPU 메모리의 **비율**을 나타냅니다." -#: ../../source/how-to-run-simulations.rst:70 +#: ../../source/how-to-run-simulations.rst:113 msgid "Let's see a few examples:" msgstr "몇 가지 예를 살펴보겠습니다:" -#: ../../source/how-to-run-simulations.rst:89 +#: ../../source/how-to-run-simulations.rst:132 +#, fuzzy msgid "" -"While the :code:`client_resources` can be used to control the degree of " +"While the ``client_resources`` can be used to control the degree of " "concurrency in your FL simulation, this does not stop you from running " "dozens, hundreds or even thousands of clients in the same round and " "having orders of magnitude more `dormant` (i.e. not participating in a " "round) clients. Let's say you want to have 100 clients per round but your" " system can only accommodate 8 clients concurrently. The " -":code:`VirtualClientEngine` will schedule 100 jobs to run (each " -"simulating a client sampled by the strategy) and then will execute them " -"in a resource-aware manner in batches of 8." +"``VirtualClientEngine`` will schedule 100 jobs to run (each simulating a " +"client sampled by the strategy) and then will execute them in a resource-" +"aware manner in batches of 8." msgstr "" "code:`client_resources`를 사용하여 FL 시뮬레이션의 동시성 정도를 제어할 수 있지만, 동일한 라운드에서 수십, " "수백 또는 수천 개의 클라이언트를 실행하고 훨씬 더 많은 '휴면'(즉, 라운드에 참여하지 않는) 클라이언트를 보유하는 것을 막을 " @@ -7060,7 +7578,7 @@ msgstr "" "code:`VirtualClientEngine`은 실행할 100개의 작업(각각 전략에서 샘플링한 클라이언트를 시뮬레이션)을 예약한 " "다음 리소스 인식 방식으로 8개씩 일괄적으로 실행합니다." -#: ../../source/how-to-run-simulations.rst:91 +#: ../../source/how-to-run-simulations.rst:140 msgid "" "To understand all the intricate details on how resources are used to " "schedule FL clients and how to define custom resources, please take a " @@ -7071,11 +7589,11 @@ msgstr "" "이해하려면 'Ray 문서 '를 참조하세요." -#: ../../source/how-to-run-simulations.rst:94 +#: ../../source/how-to-run-simulations.rst:145 msgid "Simulation examples" msgstr "시뮬레이션 예제" -#: ../../source/how-to-run-simulations.rst:96 +#: ../../source/how-to-run-simulations.rst:147 msgid "" "A few ready-to-run complete examples for Flower simulation in " "Tensorflow/Keras and PyTorch are provided in the `Flower repository " @@ -7084,7 +7602,7 @@ msgstr "" "Tensorflow/Keras와 파이토치에서 바로 실행할 수 있는 몇 가지 Flower 시뮬레이션 예제는 `Flower 레포지토리 " "`_에서 제공됩니다. 
Google Colab에서도 실행할 수 있습니다:" -#: ../../source/how-to-run-simulations.rst:98 +#: ../../source/how-to-run-simulations.rst:151 msgid "" "`Tensorflow/Keras Simulation " "`_: 100개의 클라이언트가 공동으로 MNIST에서 MLP 모델을 훈련합니다." -#: ../../source/how-to-run-simulations.rst:99 +#: ../../source/how-to-run-simulations.rst:154 msgid "" "`PyTorch Simulation `_: 100 clients collaboratively train a CNN model on " @@ -7103,28 +7621,29 @@ msgstr "" "파이토치 시뮬레이션 `_: 100개의 클라이언트가 공동으로 MNIST에서 CNN 모델을 훈련합니다." -#: ../../source/how-to-run-simulations.rst:104 +#: ../../source/how-to-run-simulations.rst:159 msgid "Multi-node Flower simulations" msgstr "멀티 노드 Flower 시뮬레이션" -#: ../../source/how-to-run-simulations.rst:106 +#: ../../source/how-to-run-simulations.rst:161 +#, fuzzy msgid "" -"Flower's :code:`VirtualClientEngine` allows you to run FL simulations " -"across multiple compute nodes. Before starting your multi-node simulation" -" ensure that you:" +"Flower's ``VirtualClientEngine`` allows you to run FL simulations across " +"multiple compute nodes. Before starting your multi-node simulation ensure" +" that you:" msgstr "" "Flower의 :code:`VirtualClientEngine`을 사용하면 여러 컴퓨팅 노드에서 FL 시뮬레이션을 실행할 수 " "있습니다. 멀티 노드 시뮬레이션을 시작하기 전에 다음 사항을 확인하세요:" -#: ../../source/how-to-run-simulations.rst:108 +#: ../../source/how-to-run-simulations.rst:164 msgid "Have the same Python environment in all nodes." msgstr "모든 노드에서 동일한 Python 환경을 유지합니다." -#: ../../source/how-to-run-simulations.rst:109 +#: ../../source/how-to-run-simulations.rst:165 msgid "Have a copy of your code (e.g. your entire repo) in all nodes." msgstr "모든 노드에 코드 사본(예: 전체 레포지토리)을 보관하세요." -#: ../../source/how-to-run-simulations.rst:110 +#: ../../source/how-to-run-simulations.rst:166 msgid "" "Have a copy of your dataset in all nodes (more about this in " ":ref:`simulation considerations `)" @@ -7132,78 +7651,82 @@ msgstr "" "모든 노드에 데이터 세트의 사본을 보유하세요(자세한 내용은 :ref:`simulation considerations " "`에서 확인하세요)" -#: ../../source/how-to-run-simulations.rst:111 +#: ../../source/how-to-run-simulations.rst:168 +#, fuzzy msgid "" -"Pass :code:`ray_init_args={\"address\"=\"auto\"}` to `start_simulation " -"`_ so the " -":code:`VirtualClientEngine` attaches to a running Ray instance." +"Pass ``ray_init_args={\"address\"=\"auto\"}`` to `start_simulation `_ so the " +"``VirtualClientEngine`` attaches to a running Ray instance." msgstr "" ":code:`ray_init_args={\"address\"=\"auto\"}`를 `start_simulation `_에 전달하여 " ":code:`VirtualClientEngine`이 실행 중인 Ray 인스턴스에 연결되도록 합니다." -#: ../../source/how-to-run-simulations.rst:112 +#: ../../source/how-to-run-simulations.rst:171 +#, fuzzy msgid "" -"Start Ray on you head node: on the terminal type :code:`ray start " -"--head`. This command will print a few lines, one of which indicates how " -"to attach other nodes to the head node." +"Start Ray on you head node: on the terminal type ``ray start --head``. " +"This command will print a few lines, one of which indicates how to attach" +" other nodes to the head node." msgstr "" "헤드 노드에서 Ray 시작: 터미널에서 :code:`ray start --head`를 입력합니다. 이 명령은 몇 줄을 출력하며, 그" " 중 하나는 다른 노드를 헤드 노드에 연결하는 방법을 나타냅니다." 
-#: ../../source/how-to-run-simulations.rst:113 +#: ../../source/how-to-run-simulations.rst:174 +#, fuzzy msgid "" "Attach other nodes to the head node: copy the command shown after " "starting the head and execute it on terminal of a new node: for example " -":code:`ray start --address='192.168.1.132:6379'`" +"``ray start --address='192.168.1.132:6379'``" msgstr "" "헤드 노드에 다른 노드 연결: 헤드를 시작한 후 표시된 명령어을 복사하여 새 노드의 터미널에서 실행합니다: 예: :code:`ray" " start --address='192.168.1.132:6379'`" -#: ../../source/how-to-run-simulations.rst:115 +#: ../../source/how-to-run-simulations.rst:178 msgid "" "With all the above done, you can run your code from the head node as you " "would if the simulation was running on a single node." msgstr "위의 모든 작업이 완료되면 단일 노드에서 시뮬레이션을 실행할 때와 마찬가지로 헤드 노드에서 코드를 실행할 수 있습니다." -#: ../../source/how-to-run-simulations.rst:117 +#: ../../source/how-to-run-simulations.rst:181 +#, fuzzy msgid "" "Once your simulation is finished, if you'd like to dismantle your cluster" -" you simply need to run the command :code:`ray stop` in each node's " -"terminal (including the head node)." +" you simply need to run the command ``ray stop`` in each node's terminal " +"(including the head node)." msgstr "" "시뮬레이션이 완료되면 클러스터를 해체하려면 각 노드(헤드 노드 포함)의 터미널에서 :code:`ray stop` 명령을 실행하기만 " "하면 됩니다." -#: ../../source/how-to-run-simulations.rst:120 +#: ../../source/how-to-run-simulations.rst:185 msgid "Multi-node simulation good-to-know" msgstr "멀티 노드 시뮬레이션에 대해 알아두면 좋은 사항" -#: ../../source/how-to-run-simulations.rst:122 +#: ../../source/how-to-run-simulations.rst:187 msgid "" "Here we list a few interesting functionality when running multi-node FL " "simulations:" msgstr "여기에서는 멀티 노드 FL 시뮬레이션을 실행할 때 흥미로운 몇 가지 기능을 나열합니다:" -#: ../../source/how-to-run-simulations.rst:124 +#: ../../source/how-to-run-simulations.rst:189 +#, fuzzy msgid "" -"User :code:`ray status` to check all nodes connected to your head node as" -" well as the total resources available to the " -":code:`VirtualClientEngine`." +"User ``ray status`` to check all nodes connected to your head node as " +"well as the total resources available to the ``VirtualClientEngine``." msgstr "" "사용자는 :code:`ray status`를 통해 헤드 노드에 연결된 모든 노드와 " ":code:`VirtualClientEngine`에 사용 가능한 총 리소스를 확인할 수 있습니다." -#: ../../source/how-to-run-simulations.rst:126 +#: ../../source/how-to-run-simulations.rst:192 +#, fuzzy msgid "" "When attaching a new node to the head, all its resources (i.e. all CPUs, " "all GPUs) will be visible by the head node. This means that the " -":code:`VirtualClientEngine` can schedule as many `virtual` clients as " -"that node can possible run. In some settings you might want to exclude " -"certain resources from the simulation. You can do this by appending " -"`--num-cpus=` and/or `--num-" -"gpus=` in any :code:`ray start` command (including " -"when starting the head)" +"``VirtualClientEngine`` can schedule as many `virtual` clients as that " +"node can possible run. In some settings you might want to exclude certain" +" resources from the simulation. You can do this by appending `--num-" +"cpus=` and/or `--num-gpus=` in " +"any ``ray start`` command (including when starting the head)" msgstr "" "새 노드를 헤드에 연결하면 해당 노드의 모든 리소스(즉, 모든 CPU, 모든 GPU)가 헤드 노드에 표시됩니다. 즉, " ":code:`VirtualClientEngine`은 해당 노드가 실행할 수 있는 만큼의 `가상` 클라이언트를 예약할 수 있습니다. 
" @@ -7211,17 +7734,17 @@ msgstr "" "포함)에 `--num-cpus=` 및/또는 `--num-" "gpus=`를 추가하여 이 작업을 수행하면 됩니다" -#: ../../source/how-to-run-simulations.rst:132 +#: ../../source/how-to-run-simulations.rst:202 msgid "Considerations for simulations" msgstr "시뮬레이션 시 고려 사항" -#: ../../source/how-to-run-simulations.rst:135 +#: ../../source/how-to-run-simulations.rst:206 msgid "" "We are actively working on these fronts so to make it trivial to run any " "FL workload with Flower simulation." msgstr "Flower 시뮬레이션으로 모든 FL 워크로드를 간편하게 실행할 수 있도록 이러한 측면에서 적극적으로 노력하고 있습니다." -#: ../../source/how-to-run-simulations.rst:138 +#: ../../source/how-to-run-simulations.rst:209 msgid "" "The current VCE allows you to run Federated Learning workloads in " "simulation mode whether you are prototyping simple scenarios on your " @@ -7236,30 +7759,32 @@ msgstr "" "동안, 아래에서는 Flower로 FL 파이프라인을 설계할 때 염두에 두어야 할 몇 가지 사항을 강조합니다. 또한 현재 구현에서 몇 " "가지 제한 사항을 강조합니다." -#: ../../source/how-to-run-simulations.rst:141 +#: ../../source/how-to-run-simulations.rst:217 msgid "GPU resources" msgstr "GPU 리소스" -#: ../../source/how-to-run-simulations.rst:143 +#: ../../source/how-to-run-simulations.rst:219 +#, fuzzy msgid "" "The VCE assigns a share of GPU memory to a client that specifies the key " -":code:`num_gpus` in :code:`client_resources`. This being said, Ray (used " +"``num_gpus`` in ``client_resources``. This being said, Ray (used " "internally by the VCE) is by default:" msgstr "" "VCE는 :code:`client_resources`에서 :code:`num_gpus` 키를 지정하는 클라이언트에 GPU 메모리 " "공유를 할당합니다. 즉, (VCE에서 내부적으로 사용하는) Ray가 기본적으로 사용됩니다:" -#: ../../source/how-to-run-simulations.rst:146 +#: ../../source/how-to-run-simulations.rst:222 +#, fuzzy msgid "" "not aware of the total VRAM available on the GPUs. This means that if you" -" set :code:`num_gpus=0.5` and you have two GPUs in your system with " -"different (e.g. 32GB and 8GB) VRAM amounts, they both would run 2 clients" -" concurrently." +" set ``num_gpus=0.5`` and you have two GPUs in your system with different" +" (e.g. 32GB and 8GB) VRAM amounts, they both would run 2 clients " +"concurrently." msgstr "" "GPU에서 사용 가능한 총 VRAM을 인식하지 못합니다. 즉, 시스템에 서로 다른(예: 32GB와 8GB) VRAM 용량을 가진 두" " 개의 GPU가 있고 :code:`num_gpus=0.5`를 설정하면 둘 다 동시에 2개의 클라이언트를 실행하게 됩니다." -#: ../../source/how-to-run-simulations.rst:147 +#: ../../source/how-to-run-simulations.rst:225 msgid "" "not aware of other unrelated (i.e. not created by the VCE) workloads are " "running on the GPU. Two takeaways from this are:" @@ -7267,7 +7792,7 @@ msgstr "" "관련 없는(즉, VCE에 의해 생성되지 않은) 다른 워크로드가 GPU에서 실행되고 있는지 알지 못합니다. 여기서 두 가지 시사점을 " "얻을 수 있습니다:" -#: ../../source/how-to-run-simulations.rst:149 +#: ../../source/how-to-run-simulations.rst:228 msgid "" "Your Flower server might need a GPU to evaluate the `global model` after " "aggregation (by instance when making use of the `evaluate method `_를 사용할 때)" -#: ../../source/how-to-run-simulations.rst:150 +#: ../../source/how-to-run-simulations.rst:231 +#, fuzzy msgid "" "If you want to run several independent Flower simulations on the same " "machine you need to mask-out your GPUs with " -":code:`CUDA_VISIBLE_DEVICES=\"\"` when launching your " -"experiment." +"``CUDA_VISIBLE_DEVICES=\"\"`` when launching your experiment." msgstr "" "동일한 머신에서 여러 개의 독립적인 Flower 시뮬레이션을 실행하려면, 실험을 시작할 때 " ":code:`CUDA_VISIBLE_DEVICES=\"\"`로 GPU를 마스킹해야 합니다." 
-#: ../../source/how-to-run-simulations.rst:153 +#: ../../source/how-to-run-simulations.rst:235 +#, fuzzy msgid "" -"In addition, the GPU resource limits passed to :code:`client_resources` " -"are not `enforced` (i.e. they can be exceeded) which can result in the " +"In addition, the GPU resource limits passed to ``client_resources`` are " +"not `enforced` (i.e. they can be exceeded) which can result in the " "situation of client using more VRAM than the ratio specified when " "starting the simulation." msgstr "" "또한 :code:`client_resources`에 전달된 GPU 리소스 제한이 '강제'되지 않아(즉, 초과할 수 있음) " "클라이언트가 시뮬레이션을 시작할 때 지정된 비율보다 더 많은 VRAM을 사용하는 상황이 발생할 수 있습니다." -#: ../../source/how-to-run-simulations.rst:156 +#: ../../source/how-to-run-simulations.rst:240 msgid "TensorFlow with GPUs" msgstr "GPU를 사용한 TensorFlow" -#: ../../source/how-to-run-simulations.rst:158 +#: ../../source/how-to-run-simulations.rst:242 msgid "" "When `using a GPU with TensorFlow " "`_ nearly your entire GPU memory of" @@ -7318,20 +7844,21 @@ msgstr "" "`_'를 통해 " "이 기본 동작을 비활성화할 수 있습니다." -#: ../../source/how-to-run-simulations.rst:160 +#: ../../source/how-to-run-simulations.rst:249 +#, fuzzy msgid "" "This would need to be done in the main process (which is where the server" " would run) and in each Actor created by the VCE. By means of " -":code:`actor_kwargs` we can pass the reserved key `\"on_actor_init_fn\"` " -"in order to specify a function to be executed upon actor initialization. " -"In this case, to enable GPU growth for TF workloads. It would look as " +"``actor_kwargs`` we can pass the reserved key `\"on_actor_init_fn\"` in " +"order to specify a function to be executed upon actor initialization. In " +"this case, to enable GPU growth for TF workloads. It would look as " "follows:" msgstr "" "이 작업은 메인 프로세스(서버가 실행되는 곳)와 VCE에서 생성한 각 액터에서 수행해야 합니다. " ":code:`actor_kwargs`를 통해 예약 키 `\"on_actor_init_fn\"`을 전달하여 액터 초기화 시 실행할 " "함수를 지정할 수 있습니다. 이 경우 TF 워크로드에 대한 GPU 증가를 활성화합니다. 다음과 같이 보입니다:" -#: ../../source/how-to-run-simulations.rst:179 +#: ../../source/how-to-run-simulations.rst:272 msgid "" "This is precisely the mechanism used in `Tensorflow/Keras Simulation " "`_ 예제에서 사용된 메커니즘입니다." -#: ../../source/how-to-run-simulations.rst:183 +#: ../../source/how-to-run-simulations.rst:276 msgid "Multi-node setups" msgstr "멀티 노드 설정" -#: ../../source/how-to-run-simulations.rst:185 +#: ../../source/how-to-run-simulations.rst:278 msgid "" "The VCE does not currently offer a way to control on which node a " "particular `virtual` client is executed. In other words, if more than a " @@ -7363,7 +7890,7 @@ msgstr "" "방식에 따라 모든 노드에 모든 데이터 세트 파티션의 복사본을 보유하거나 데이터 중복을 피하기 위해 데이터 세트 제공 메커니즘(예: " "nfs, 데이터베이스 사용)을 사용해야 할 수 있습니다." -#: ../../source/how-to-run-simulations.rst:187 +#: ../../source/how-to-run-simulations.rst:286 msgid "" "By definition virtual clients are `stateless` due to their ephemeral " "nature. A client state can be implemented as part of the Flower client " @@ -7396,17 +7923,17 @@ msgid "Model checkpointing" msgstr "모델 체크포인트" #: ../../source/how-to-save-and-load-model-checkpoints.rst:10 +#, fuzzy msgid "" "Model updates can be persisted on the server-side by customizing " -":code:`Strategy` methods. Implementing custom strategies is always an " -"option, but for many cases it may be more convenient to simply customize " -"an existing strategy. The following code example defines a new " -":code:`SaveModelStrategy` which customized the existing built-in " -":code:`FedAvg` strategy. 
In particular, it customizes " -":code:`aggregate_fit` by calling :code:`aggregate_fit` in the base class " -"(:code:`FedAvg`). It then continues to save returned (aggregated) weights" -" before it returns those aggregated weights to the caller (i.e., the " -"server):" +"``Strategy`` methods. Implementing custom strategies is always an option," +" but for many cases it may be more convenient to simply customize an " +"existing strategy. The following code example defines a new " +"``SaveModelStrategy`` which customized the existing built-in ``FedAvg`` " +"strategy. In particular, it customizes ``aggregate_fit`` by calling " +"``aggregate_fit`` in the base class (``FedAvg``). It then continues to " +"save returned (aggregated) weights before it returns those aggregated " +"weights to the caller (i.e., the server):" msgstr "" ":code:`Strategy` 메소드를 사용자 지정하여 서버 측에서 모델 업데이트를 지속할 수 있습니다. 사용자 지정 전략을 " "구현하는 것은 항상 옵션이지만 대부분의 경우 기존 전략을 간단히 사용자 지정하는 것이 더 편리할 수 있습니다. 다음 코드 예시는 " @@ -7415,11 +7942,11 @@ msgstr "" ":code:`aggregate_fit`을 사용자 지정합니다. 그런 다음 호출자(즉, 서버)에게 집계된 가중치를 반환하기 전에 " "반환된(집계된) 가중치를 계속 저장합니다:" -#: ../../source/how-to-save-and-load-model-checkpoints.rst:47 +#: ../../source/how-to-save-and-load-model-checkpoints.rst:53 msgid "Save and load PyTorch checkpoints" msgstr "파이토치 체크포인트 저장 및 로드" -#: ../../source/how-to-save-and-load-model-checkpoints.rst:49 +#: ../../source/how-to-save-and-load-model-checkpoints.rst:55 msgid "" "Similar to the previous example but with a few extra steps, we'll show " "how to store a PyTorch checkpoint we'll use the ``torch.save`` function. " @@ -7433,7 +7960,7 @@ msgstr "" "``ndarray``의 목록으로 변환되어야 하며, ``OrderedDict`` 클래스 구조에 따라 파이토치 " "``state_dict``로 변환됩니다." -#: ../../source/how-to-save-and-load-model-checkpoints.rst:85 +#: ../../source/how-to-save-and-load-model-checkpoints.rst:98 msgid "" "To load your progress, you simply append the following lines to your " "code. Note that this will iterate over all saved checkpoints and load the" @@ -7442,7 +7969,7 @@ msgstr "" "진행 상황을 로드하려면 코드에 다음 줄을 추가하기만 하면 됩니다. 이렇게 하면 저장된 모든 체크포인트를 반복하고 최신 체크포인트를 " "로드합니다:" -#: ../../source/how-to-save-and-load-model-checkpoints.rst:97 +#: ../../source/how-to-save-and-load-model-checkpoints.rst:111 msgid "" "Return/use this object of type ``Parameters`` wherever necessary, such as" " in the ``initial_parameters`` when defining a ``Strategy``." @@ -7463,22 +7990,22 @@ msgstr "" "Flower 0.19(및 다른 0.x 시리즈 릴리스)와 비교했을 때 기존 0.x 시리즈 프로젝트의 코드를 변경해야 하는 몇 가지 " "획기적인 변경 사항이 있습니다." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:8 -#: ../../source/how-to-upgrade-to-flower-next.rst:43 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:10 +#: ../../source/how-to-upgrade-to-flower-next.rst:63 msgid "Install update" msgstr "업데이트 설치" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:10 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:12 msgid "" "Here's how to update an existing installation to Flower 1.0 using either " "pip or Poetry:" msgstr "다음은 pip 또는 Poetry를 사용하여 기존 설치를 Flower 1.0으로 업데이트하는 방법입니다:" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:12 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:14 msgid "pip: add ``-U`` when installing." msgstr "pip: 설치할 때 ``-U``를 추가합니다." 
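For readers checking the translation, here is a hedged sketch of the kind of strategy the ``SaveModelStrategy`` entry above describes: it calls ``aggregate_fit`` on the base class (``FedAvg``) and saves the aggregated weights before returning them. The output file name is an illustrative choice, not part of the original docs:

    import numpy as np
    import flwr as fl

    class SaveModelStrategy(fl.server.strategy.FedAvg):
        def aggregate_fit(self, server_round, results, failures):
            # Aggregate as usual via the built-in FedAvg behaviour
            aggregated_parameters, aggregated_metrics = super().aggregate_fit(
                server_round, results, failures
            )
            if aggregated_parameters is not None:
                # Convert Parameters to a list of NumPy ndarrays and persist them
                ndarrays = fl.common.parameters_to_ndarrays(aggregated_parameters)
                np.savez(f"round-{server_round}-weights.npz", *ndarrays)
            return aggregated_parameters, aggregated_metrics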
-#: ../../source/how-to-upgrade-to-flower-1.0.rst:14 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:16 msgid "" "``python -m pip install -U flwr`` (when using ``start_server`` and " "``start_client``)" @@ -7486,13 +8013,13 @@ msgstr "" "``python -m pip install -U flwr``(``start_server`` 및 ``start_client``를 " "사용하는 경우)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:15 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:17 msgid "" "``python -m pip install -U 'flwr[simulation]'`` (when using " "``start_simulation``)" msgstr "``python -m pip install -U 'flwr[simulation]'``(``start_simulation`` 사용 시)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:17 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:19 msgid "" "Poetry: update the ``flwr`` dependency in ``pyproject.toml`` and then " "reinstall (don't forget to delete ``poetry.lock`` via ``rm poetry.lock`` " @@ -7502,11 +8029,11 @@ msgstr "" "설치하세요(``poetry 설치``를 실행하기 전에 ``rm poetry.lock``을 통해 ``poetry.lock``을 삭제하는" " 것을 잊지 마세요)." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:19 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:23 msgid "``flwr = \"^1.0.0\"`` (when using ``start_server`` and ``start_client``)" msgstr "``flwr = \"^1.0.0\"``(``start_server`` 및 ``start_client`` 사용 시)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:20 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:24 msgid "" "``flwr = { version = \"^1.0.0\", extras = [\"simulation\"] }`` (when " "using ``start_simulation``)" @@ -7514,26 +8041,26 @@ msgstr "" "``flwr = { version = \"^1.0.0\", extras = [\"simulation\"] }`` " "(``start_simulation`` 사용 시)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:24 -#: ../../source/how-to-upgrade-to-flower-next.rst:100 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:28 +#: ../../source/how-to-upgrade-to-flower-next.rst:120 msgid "Required changes" msgstr "필수 변경 사항" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:26 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:30 msgid "The following breaking changes require manual updates." msgstr "다음과 같은 주요 변경 사항에는 수동 업데이트가 필요합니다." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:29 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:33 msgid "General" msgstr "일반" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:31 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:35 msgid "" "Pass all arguments as keyword arguments (not as positional arguments). " "Here's an example:" msgstr "모든 전달인자를 위치 전달인자가 아닌 키워드 전달인자로 전달합니다. 
다음은 예시입니다:" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:33 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:38 msgid "" "Flower 0.19 (positional arguments): ``start_client(\"127.0.0.1:8080\", " "FlowerClient())``" @@ -7541,7 +8068,7 @@ msgstr "" "Flower 0.19 (위치 전달인자): ``start_client(\"127.0.0.1:8080\", " "FlowerClient())``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:34 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:39 msgid "" "Flower 1.0 (keyword arguments): " "``start_client(server_address=\"127.0.0.1:8080\", " @@ -7550,12 +8077,12 @@ msgstr "" "Flower 1.0 (키워드 전달인자): ``start_client(server_address=\"127.0.0.1:8080\", " "client=FlowerClient())``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:37 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:43 #: ../../source/ref-api/flwr.client.Client.rst:2 msgid "Client" msgstr "클라이언트" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:39 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:45 msgid "" "Subclasses of ``NumPyClient``: change ``def get_parameters(self):``` to " "``def get_parameters(self, config):``" @@ -7563,7 +8090,7 @@ msgstr "" "``NumPyClient``의 서브클래스: ``def get_parameters(self):``를 ``def " "get_parameters(self, config):``로 변경합니다" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:40 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:47 msgid "" "Subclasses of ``Client``: change ``def get_parameters(self):``` to ``def " "get_parameters(self, ins: GetParametersIns):``" @@ -7571,11 +8098,11 @@ msgstr "" "``클라이언트``의 서브클래스: ``def get_parameters(self):``를 ``def " "get_parameters(self, ins: GetParametersIns):``로 변경합니다" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:43 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:51 msgid "Strategies / ``start_server`` / ``start_simulation``" msgstr "전략 / ``start_server`` / ``start_simulation``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:45 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:53 msgid "" "Pass ``ServerConfig`` (instead of a dictionary) to ``start_server`` and " "``start_simulation``. Here's an example:" @@ -7583,7 +8110,7 @@ msgstr "" "Dictionary 대신 ``ServerConfig``를 ``start_server`` 및 ``start_simulation``에 " "전달합니다. 다음은 예제입니다:" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:47 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:56 msgid "" "Flower 0.19: ``start_server(..., config={\"num_rounds\": 3, " "\"round_timeout\": 600.0}, ...)``" @@ -7591,7 +8118,7 @@ msgstr "" "Flower 0.19: ``start_server(..., config={\"num_rounds\": 3, " "\"round_timeout\": 600.0}, ...)``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:48 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:58 msgid "" "Flower 1.0: ``start_server(..., " "config=flwr.server.ServerConfig(num_rounds=3, round_timeout=600.0), " @@ -7601,7 +8128,7 @@ msgstr "" "config=flwr.server.ServerConfig(num_rounds=3, round_timeout=600.0), " "...)``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:50 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:61 msgid "" "Replace ``num_rounds=1`` in ``start_simulation`` with the new " "``config=ServerConfig(...)`` (see previous item)" @@ -7609,7 +8136,7 @@ msgstr "" "``start_simulation``의 ``num_rounds=1``을 새로운 ``config=ServerConfig(...)``로" " 바꿉니다(이전 항목 참조)" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:51 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:63 msgid "" "Remove ``force_final_distributed_eval`` parameter from calls to " "``start_server``. 
Distributed evaluation on all clients can be enabled by" @@ -7620,19 +8147,19 @@ msgstr "" "클라이언트에 대한 분산 평가는 마지막 훈련 라운드 후 평가를 위해 모든 클라이언트를 샘플링하도록 전략을 구성하여 활성화할 수 " "있습니다." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:52 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:66 msgid "Rename parameter/ndarray conversion functions:" msgstr "매개변수/ndarray 변환 함수의 이름을 바꿉니다:" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:54 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:68 msgid "``parameters_to_weights`` --> ``parameters_to_ndarrays``" msgstr "``parameters_to_weights`` --> ``parameters_to_ndarrays``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:55 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:69 msgid "``weights_to_parameters`` --> ``ndarrays_to_parameters``" msgstr "``weights_to_parameters`` --> ``ndarrays_to_parameters``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:57 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:71 msgid "" "Strategy initialization: if the strategy relies on the default values for" " ``fraction_fit`` and ``fraction_evaluate``, set ``fraction_fit`` and " @@ -7647,23 +8174,23 @@ msgstr "" "호출하여)는 이제 ``fraction_fit`` 및 ``fraction_evaluate``를 ``0.1``로 설정하여 FedAvg를" " 수동으로 초기화해야 합니다." -#: ../../source/how-to-upgrade-to-flower-1.0.rst:58 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:77 msgid "Rename built-in strategy parameters (e.g., ``FedAvg``):" msgstr "기본 제공 전략 매개변수의 이름을 바꿉니다(예: ``FedAvg``):" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:60 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:79 msgid "``fraction_eval`` --> ``fraction_evaluate``" msgstr "``fraction_eval`` --> ``fraction_evaluate``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:61 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:80 msgid "``min_eval_clients`` --> ``min_evaluate_clients``" msgstr "``min_eval_clients`` --> ``min_evaluate_clients``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:62 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:81 msgid "``eval_fn`` --> ``evaluate_fn``" msgstr "``eval_fn`` --> ``evaluate_fn``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:64 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:83 msgid "" "Rename ``rnd`` to ``server_round``. This impacts multiple methods and " "functions, for example, ``configure_fit``, ``aggregate_fit``, " @@ -7673,11 +8200,11 @@ msgstr "" " ``aggregate_fit``, ``configure_evaluate``, ``aggregate_evaluate`` 및 " "``evaluate_fn``)에 영향을 미칩니다." 
-#: ../../source/how-to-upgrade-to-flower-1.0.rst:65 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:86 msgid "Add ``server_round`` and ``config`` to ``evaluate_fn``:" msgstr "``server_round`` 및 ``config``를 ``evaluate_fn``에 추가합니다:" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:67 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:88 msgid "" "Flower 0.19: ``def evaluate(parameters: NDArrays) -> " "Optional[Tuple[float, Dict[str, Scalar]]]:``" @@ -7685,7 +8212,7 @@ msgstr "" "Flower 0.19: ``def evaluate(parameters: NDArrays) -> " "Optional[Tuple[float, Dict[str, Scalar]]]:``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:68 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:90 msgid "" "Flower 1.0: ``def evaluate(server_round: int, parameters: NDArrays, " "config: Dict[str, Scalar]) -> Optional[Tuple[float, Dict[str, " @@ -7695,11 +8222,11 @@ msgstr "" "config: Dict[str, Scalar]) -> Optional[Tuple[float, Dict[str, " "Scalar]]]:``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:71 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:94 msgid "Custom strategies" msgstr "사용자 정의 전략" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:73 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:96 msgid "" "The type of parameter ``failures`` has changed from " "``List[BaseException]`` to ``List[Union[Tuple[ClientProxy, FitRes], " @@ -7712,13 +8239,13 @@ msgstr "" "BaseException]]``(``aggregate_fit``에서) 및 ``List[Union[Tuple[ClientProxy]," " EvaluateRes], BaseException]]``(``aggregate_evaluate``)로 변경되었습니다" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:74 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:100 msgid "" "The ``Strategy`` method ``evaluate`` now receives the current round of " "federated learning/evaluation as the first parameter:" msgstr "이제 ``Strategy`` 메서드 ``evaluate``는 현재 federated 학습/평가 라운드를 첫 번째 파라미터로 받습니다:" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:76 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:103 msgid "" "Flower 0.19: ``def evaluate(self, parameters: Parameters) -> " "Optional[Tuple[float, Dict[str, Scalar]]]:``" @@ -7726,7 +8253,7 @@ msgstr "" "Flower 0.19: ``def evaluate(self, parameters: Parameters) -> " "Optional[Tuple[float, Dict[str, Scalar]]]:``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:77 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:105 msgid "" "Flower 1.0: ``def evaluate(self, server_round: int, parameters: " "Parameters) -> Optional[Tuple[float, Dict[str, Scalar]]]:``" @@ -7734,17 +8261,17 @@ msgstr "" "Flower 1.0: ``def evaluate(self, server_round: int, parameters: " "Parameters) -> Optional[Tuple[float, Dict[str, Scalar]]]:``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:80 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:109 msgid "Optional improvements" msgstr "선택적 개선 사항" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:82 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:111 msgid "" "Along with the necessary changes above, there are a number of potential " "improvements that just became possible:" msgstr "위의 필수 변경 사항과 함께 방금 가능한 여러 가지 잠재적 개선 사항이 있습니다:" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:84 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:114 msgid "" "Remove \"placeholder\" methods from subclasses of ``Client`` or " "``NumPyClient``. If you, for example, use server-side evaluation, then " @@ -7754,7 +8281,7 @@ msgstr "" "``Client`` 또는 ``NumPyClient``의 서브 클래스에서 \"placeholder\" 메서드를 제거합니다. 예를 들어" " 서버 측 평가를 사용하는 경우 ``evaluate``의 빈 자리 표시자 구현은 더 이상 필요하지 않습니다." 
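For orientation only, a minimal sketch of a custom strategy using the updated ``evaluate`` signature discussed above:

    import flwr as fl

    class MyFedAvg(fl.server.strategy.FedAvg):
        def evaluate(self, server_round, parameters):
            # Flower 1.0 passes the current round as the first argument
            return super().evaluate(server_round, parameters)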
-#: ../../source/how-to-upgrade-to-flower-1.0.rst:85 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:117 msgid "" "Configure the round timeout via ``start_simulation``: " "``start_simulation(..., config=flwr.server.ServerConfig(num_rounds=3, " @@ -7764,12 +8291,12 @@ msgstr "" "config=flwr.server.ServerConfig(num_rounds=3, round_timeout=600.0), " "...)``" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:89 -#: ../../source/how-to-upgrade-to-flower-next.rst:317 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:121 +#: ../../source/how-to-upgrade-to-flower-next.rst:348 msgid "Further help" msgstr "추가 도움말" -#: ../../source/how-to-upgrade-to-flower-1.0.rst:91 +#: ../../source/how-to-upgrade-to-flower-1.0.rst:123 msgid "" "Most official `Flower code examples " "`_ are already updated" @@ -7798,7 +8325,7 @@ msgstr "" " 시작한 사용자든 상관없이 기존 설정을 원활하게 전환하여 버전 1.8부터 Flower Next의 최신 기능 및 개선 사항을 활용할 " "수 있도록 도와드립니다." -#: ../../source/how-to-upgrade-to-flower-next.rst:9 +#: ../../source/how-to-upgrade-to-flower-next.rst:11 msgid "" "This guide shows how to reuse pre-``1.8`` Flower code with minimum code " "changes by using the *compatibility layer* in Flower Next. In another " @@ -7809,35 +8336,35 @@ msgstr "" " 재사용하는 방법을 보여줍니다. 다른 가이드에서는 순수한 Flower Next API로 Flower Next를 end-to-end로" " 실행하는 방법을 보여드리겠습니다." -#: ../../source/how-to-upgrade-to-flower-next.rst:13 +#: ../../source/how-to-upgrade-to-flower-next.rst:15 msgid "Let's dive in!" msgstr "자세히 알아봅시다!" -#: ../../source/how-to-upgrade-to-flower-next.rst:48 +#: ../../source/how-to-upgrade-to-flower-next.rst:68 msgid "" "Here's how to update an existing installation of Flower to Flower Next " "with ``pip``:" msgstr "기존에 설치된 Flower to Flower Next를 ``pip``으로 업데이트하는 방법은 다음과 같습니다:" -#: ../../source/how-to-upgrade-to-flower-next.rst:54 +#: ../../source/how-to-upgrade-to-flower-next.rst:74 msgid "or if you need Flower Next with simulation:" msgstr "또는 시뮬레이션이 포함된 Flower Next가 필요한 경우:" -#: ../../source/how-to-upgrade-to-flower-next.rst:61 +#: ../../source/how-to-upgrade-to-flower-next.rst:80 msgid "" "Ensure you set the following version constraint in your " "``requirements.txt``" msgstr "``requirements.txt``에서 다음 버전 제약 조건을 설정했는지 확인하세요" -#: ../../source/how-to-upgrade-to-flower-next.rst:71 +#: ../../source/how-to-upgrade-to-flower-next.rst:90 msgid "or ``pyproject.toml``:" msgstr "또는 ``pyproject.toml``:" -#: ../../source/how-to-upgrade-to-flower-next.rst:82 +#: ../../source/how-to-upgrade-to-flower-next.rst:101 msgid "Using Poetry" msgstr "Poetry 사용" -#: ../../source/how-to-upgrade-to-flower-next.rst:84 +#: ../../source/how-to-upgrade-to-flower-next.rst:103 msgid "" "Update the ``flwr`` dependency in ``pyproject.toml`` and then reinstall " "(don't forget to delete ``poetry.lock`` via ``rm poetry.lock`` before " @@ -7846,13 +8373,13 @@ msgstr "" "``pyproject.toml``에서 ``flwr`` 의존성를 업데이트한 다음 다시 설치하세요(``poetry install``을 " "실행하기 전에 ``rm poetry.lock``을 통해 ``poetry.lock``을 삭제하는 것을 잊지 마세요)." -#: ../../source/how-to-upgrade-to-flower-next.rst:86 +#: ../../source/how-to-upgrade-to-flower-next.rst:106 msgid "" "Ensure you set the following version constraint in your " "``pyproject.toml``:" msgstr "``pyproject.toml``에 다음 버전 제약 조건을 설정했는지 확인하세요:" -#: ../../source/how-to-upgrade-to-flower-next.rst:102 +#: ../../source/how-to-upgrade-to-flower-next.rst:122 msgid "" "In Flower Next, the *infrastructure* and *application layers* have been " "decoupled. 
Instead of starting a client in code via ``start_client()``, " @@ -7871,11 +8398,11 @@ msgstr "" "업데이트가 필요하지 않고 기존 방식과 Flower Next 방식 모두에서 프로젝트를 실행할 수 있는 non-breaking 변경 " "사항은 다음과 같습니다:" -#: ../../source/how-to-upgrade-to-flower-next.rst:109 +#: ../../source/how-to-upgrade-to-flower-next.rst:131 msgid "|clientapp_link|_" msgstr "|clientapp_link|_" -#: ../../source/how-to-upgrade-to-flower-next.rst:110 +#: ../../source/how-to-upgrade-to-flower-next.rst:133 msgid "" "Wrap your existing client with |clientapp_link|_ instead of launching it " "via |startclient_link|_. Here's an example:" @@ -7883,11 +8410,11 @@ msgstr "" "|clientapp_link|_를 통해 실행하는 대신 기존 클라이언트를 |clientapp_link|_로 래핑하세요. 다음은 " "예시입니다:" -#: ../../source/how-to-upgrade-to-flower-next.rst:132 +#: ../../source/how-to-upgrade-to-flower-next.rst:156 msgid "|serverapp_link|_" msgstr "|serverapp_link|_" -#: ../../source/how-to-upgrade-to-flower-next.rst:133 +#: ../../source/how-to-upgrade-to-flower-next.rst:158 msgid "" "Wrap your existing strategy with |serverapp_link|_ instead of starting " "the server via |startserver_link|_. Here's an example:" @@ -7895,11 +8422,11 @@ msgstr "" "서버를 시작하려면 |startserver_link|_를 통해 서버를 시작하는 대신 기존 전략을 |serverapp_link|_로 " "래핑하세요. 다음은 예시입니다:" -#: ../../source/how-to-upgrade-to-flower-next.rst:154 +#: ../../source/how-to-upgrade-to-flower-next.rst:179 msgid "Deployment" msgstr "배포" -#: ../../source/how-to-upgrade-to-flower-next.rst:155 +#: ../../source/how-to-upgrade-to-flower-next.rst:181 msgid "" "Run the ``SuperLink`` using |flowernext_superlink_link|_ before running, " "in sequence, |flowernext_clientapp_link|_ (2x) and " @@ -7910,13 +8437,13 @@ msgstr "" "|flowernext_clientapp_link|_(2회) 및 |flowernext_serverapp_link|_를 순서대로 " "실행합니다. 'client.py'와 'server.py'를 Python 스크립트로 실행할 필요는 없습니다." -#: ../../source/how-to-upgrade-to-flower-next.rst:158 +#: ../../source/how-to-upgrade-to-flower-next.rst:184 msgid "" "Here's an example to start the server without HTTPS (only for " "prototyping):" msgstr "다음은 HTTPS 없이 서버를 시작하는 예제입니다(프로토타이핑용으로만 사용):" -#: ../../source/how-to-upgrade-to-flower-next.rst:174 +#: ../../source/how-to-upgrade-to-flower-next.rst:200 msgid "" "Here's another example to start with HTTPS. Use the ``--ssl-ca-" "certfile``, ``--ssl-certfile``, and ``--ssl-keyfile`` command line " @@ -7926,11 +8453,11 @@ msgstr "" "다음은 HTTPS로 시작하는 또 다른 예제입니다. '`--ssl-ca-certfile``, '`--ssl-certfile``, " "'`--ssl-keyfile`` 명령줄 옵션을 사용하여 (CA 인증서, 서버 인증서 및 서버 개인 키)의 경로를 전달합니다." -#: ../../source/how-to-upgrade-to-flower-next.rst:201 +#: ../../source/how-to-upgrade-to-flower-next.rst:228 msgid "Simulation in CLI" msgstr "CLI 시뮬레이션" -#: ../../source/how-to-upgrade-to-flower-next.rst:202 +#: ../../source/how-to-upgrade-to-flower-next.rst:230 msgid "" "Wrap your existing client and strategy with |clientapp_link|_ and " "|serverapp_link|_, respectively. There is no need to use |startsim_link|_" @@ -7939,7 +8466,7 @@ msgstr "" "기존 클라이언트와 전략을 각각 |clientapp_link|_와 |serverapp_link|_로 래핑하세요. 더 이상 " "|startsim_link|_를 사용할 필요가 없습니다. 다음은 예시입니다:" -#: ../../source/how-to-upgrade-to-flower-next.rst:232 +#: ../../source/how-to-upgrade-to-flower-next.rst:263 msgid "" "Run |flower_simulation_link|_ in CLI and point to the ``server_app`` / " "``client_app`` object in the code instead of executing the Python script." @@ -7950,7 +8477,7 @@ msgstr "" "``server_app`` / ``client_app`` 개체를 가리키세요. 
다음은 예제입니다(``server_app`` 및 " "``client_app`` 객체가 ``sim.py`` 모듈에 있다고 가정):" -#: ../../source/how-to-upgrade-to-flower-next.rst:249 +#: ../../source/how-to-upgrade-to-flower-next.rst:280 msgid "" "Set default resources for each |clientapp_link|_ using the ``--backend-" "config`` command line argument instead of setting the " @@ -7959,17 +8486,17 @@ msgstr "" "|startsim_link|_에서 ``client_resources`` 인수를 설정하는 대신 ``--backend-config`` " "명령줄 인수를 사용하여 각 |clientapp_link|_에 대한 기본 리소스를 설정하세요. 다음은 예시입니다:" -#: ../../source/how-to-upgrade-to-flower-next.rst:275 +#: ../../source/how-to-upgrade-to-flower-next.rst:304 msgid "Simulation in a Notebook" msgstr "Notebook에서 시뮬레이션" -#: ../../source/how-to-upgrade-to-flower-next.rst:276 +#: ../../source/how-to-upgrade-to-flower-next.rst:306 msgid "" "Run |runsim_link|_ in your notebook instead of |startsim_link|_. Here's " "an example:" msgstr "notebook에서 |startsim_link|_ 대신 |runsim_link|_를 실행하세요. 다음은 예시입니다:" -#: ../../source/how-to-upgrade-to-flower-next.rst:319 +#: ../../source/how-to-upgrade-to-flower-next.rst:350 msgid "" "Some official `Flower code examples `_ " "are already updated to Flower Next so they can serve as a reference for " @@ -7985,11 +8512,11 @@ msgstr "" "``Flower Discuss `_에 참여하여 질문에 대한 답변을 확인하거나 다른" " 사람들과 Flower Next로의 이동에 대해 공유하고 배울 수 있습니다." -#: ../../source/how-to-upgrade-to-flower-next.rst:325 +#: ../../source/how-to-upgrade-to-flower-next.rst:357 msgid "Important" msgstr "중요" -#: ../../source/how-to-upgrade-to-flower-next.rst:328 +#: ../../source/how-to-upgrade-to-flower-next.rst:359 msgid "" "As we continuously enhance Flower Next at a rapid pace, we'll be " "periodically updating this guide. Please feel free to share any feedback " @@ -7998,7 +8525,7 @@ msgstr "" "Flower Next는 빠른 속도로 지속적으로 개선되고 있으므로 이 가이드는 주기적으로 업데이트될 예정입니다. 피드백이 있으면 " "언제든지 공유해 주세요!" -#: ../../source/how-to-upgrade-to-flower-next.rst:334 +#: ../../source/how-to-upgrade-to-flower-next.rst:365 msgid "Happy migrating! 🚀" msgstr "행복한 마이그레이션! 🚀" @@ -8012,7 +8539,7 @@ msgid "" " interfaces may change in future versions.**" msgstr "**참고: 이 튜토리얼은 실험적인 기능을 다룹니다. 기능 및 인터페이스는 향후 버전에서 변경될 수 있습니다.**" -#: ../../source/how-to-use-built-in-mods.rst:6 +#: ../../source/how-to-use-built-in-mods.rst:7 msgid "" "In this tutorial, we will learn how to utilize built-in mods to augment " "the behavior of a ``ClientApp``. Mods (sometimes also called Modifiers) " @@ -8022,11 +8549,11 @@ msgstr "" "이 튜토리얼에서는 내장 모드를 활용하여 ``ClientApp``의 동작을 보강하는 방법을 배우겠습니다. " "Mods(Modifiers라고도 함)를 사용하면 ``ClientApp``에서 작업이 처리되기 전과 후에 작업을 수행할 수 있습니다." -#: ../../source/how-to-use-built-in-mods.rst:9 +#: ../../source/how-to-use-built-in-mods.rst:12 msgid "What are Mods?" msgstr "Mods란 무엇인가요?" -#: ../../source/how-to-use-built-in-mods.rst:11 +#: ../../source/how-to-use-built-in-mods.rst:14 msgid "" "A Mod is a callable that wraps around a ``ClientApp``. It can manipulate " "or inspect the incoming ``Message`` and the resulting outgoing " @@ -8035,41 +8562,41 @@ msgstr "" "Mod는 ``ClientApp``을 감싸는 콜러블입니다. 들어오는 ``Message``와 그 결과로 나가는 ``Message``를 " "조작하거나 검사할 수 있습니다. 
``Mod``의 시그니처는 다음과 같습니다:" -#: ../../source/how-to-use-built-in-mods.rst:18 +#: ../../source/how-to-use-built-in-mods.rst:23 msgid "A typical mod function might look something like this:" msgstr "일반적인 mod 함수는 다음과 같은 모습일 수 있습니다:" -#: ../../source/how-to-use-built-in-mods.rst:31 +#: ../../source/how-to-use-built-in-mods.rst:36 msgid "Using Mods" msgstr "Mods 사용" -#: ../../source/how-to-use-built-in-mods.rst:33 +#: ../../source/how-to-use-built-in-mods.rst:38 msgid "To use mods in your ``ClientApp``, you can follow these steps:" msgstr "``ClientApp``에서 mods를 사용하려면 다음 단계를 따르세요:" -#: ../../source/how-to-use-built-in-mods.rst:36 +#: ../../source/how-to-use-built-in-mods.rst:41 msgid "1. Import the required mods" msgstr "1. 필요한 mods를 가져옵니다" -#: ../../source/how-to-use-built-in-mods.rst:38 +#: ../../source/how-to-use-built-in-mods.rst:43 msgid "First, import the built-in mod you intend to use:" msgstr "먼저 사용하려는 기본 제공 mod를 가져옵니다:" -#: ../../source/how-to-use-built-in-mods.rst:46 +#: ../../source/how-to-use-built-in-mods.rst:51 msgid "2. Define your client function" msgstr "2. 클라이언트 기능 정의" -#: ../../source/how-to-use-built-in-mods.rst:48 +#: ../../source/how-to-use-built-in-mods.rst:53 msgid "" "Define your client function (``client_fn``) that will be wrapped by the " "mod(s):" msgstr "mod(s)로 래핑할 클라이언트 함수('``client_fn``)를 정의합니다:" -#: ../../source/how-to-use-built-in-mods.rst:57 +#: ../../source/how-to-use-built-in-mods.rst:62 msgid "3. Create the ``ClientApp`` with mods" msgstr "3. mods로 ``ClientApp``을 생성합니다" -#: ../../source/how-to-use-built-in-mods.rst:59 +#: ../../source/how-to-use-built-in-mods.rst:64 msgid "" "Create your ``ClientApp`` and pass the mods as a list to the ``mods`` " "argument. The order in which you provide the mods matters:" @@ -8077,39 +8604,39 @@ msgstr "" "``ClientApp``을 생성하고 mods를 ``mods`` argument에 목록으로 전달합니다. 
mods를 제공하는 순서가 " "중요합니다:" -#: ../../source/how-to-use-built-in-mods.rst:72 +#: ../../source/how-to-use-built-in-mods.rst:78 msgid "Order of execution" msgstr "실행 순서" -#: ../../source/how-to-use-built-in-mods.rst:74 +#: ../../source/how-to-use-built-in-mods.rst:80 msgid "" "When the ``ClientApp`` runs, the mods are executed in the order they are " "provided in the list:" msgstr "``ClientApp``이 실행되면 목록에 제공된 순서대로 모드가 실행됩니다:" -#: ../../source/how-to-use-built-in-mods.rst:76 +#: ../../source/how-to-use-built-in-mods.rst:83 msgid "``example_mod_1`` (outermost mod)" msgstr "``example_mod_1``(가장 바깥쪽 mod)" -#: ../../source/how-to-use-built-in-mods.rst:77 +#: ../../source/how-to-use-built-in-mods.rst:84 msgid "``example_mod_2`` (next mod)" msgstr "``example_mod_2`` (다음 mod)" -#: ../../source/how-to-use-built-in-mods.rst:78 +#: ../../source/how-to-use-built-in-mods.rst:85 msgid "" "Message handler (core function that handles the incoming ``Message`` and " "returns the outgoing ``Message``)" msgstr "Message handler(들어오는 ``Message``를 처리하고 나가는 ``Message``를 반환하는 핵심 함수)" -#: ../../source/how-to-use-built-in-mods.rst:79 +#: ../../source/how-to-use-built-in-mods.rst:87 msgid "``example_mod_2`` (on the way back)" msgstr "``example_mod_2``(돌아가는 방법)" -#: ../../source/how-to-use-built-in-mods.rst:80 +#: ../../source/how-to-use-built-in-mods.rst:88 msgid "``example_mod_1`` (outermost mod on the way back)" msgstr "``example_mod_1``(돌아가는 방법에 가장 바깥쪽 모드)" -#: ../../source/how-to-use-built-in-mods.rst:82 +#: ../../source/how-to-use-built-in-mods.rst:90 msgid "" "Each mod has a chance to inspect and modify the incoming ``Message`` " "before passing it to the next mod, and likewise with the outgoing " @@ -8118,7 +8645,7 @@ msgstr "" "각 mod는 다음 mod로 전달하기 전에 들어오는 ``Message``를 검사하고 수정할 기회가 있으며, 스택 위로 반환하기 전에 " "나가는 ``Message``도 마찬가지로 검사하고 수정할 수 있습니다." -#: ../../source/how-to-use-built-in-mods.rst:87 +#: ../../source/how-to-use-built-in-mods.rst:97 msgid "" "By following this guide, you have learned how to effectively use mods to " "enhance your ``ClientApp``'s functionality. Remember that the order of " @@ -8127,7 +8654,7 @@ msgstr "" "이 가이드를 따라 mods를 효과적으로 사용하여 ``ClientApp``의 기능을 향상시키는 방법을 배웠습니다. mods 순서는 " "매우 중요하며 입력과 출력이 처리되는 방식에 영향을 미친다는 점을 기억하세요." -#: ../../source/how-to-use-built-in-mods.rst:89 +#: ../../source/how-to-use-built-in-mods.rst:101 msgid "Enjoy building a more robust and flexible ``ClientApp`` with mods!" msgstr "Mods를 통해 더욱 강력하고 유연한 ``ClientApp``을 구축해 보세요!" @@ -8135,7 +8662,7 @@ msgstr "Mods를 통해 더욱 강력하고 유연한 ``ClientApp``을 구축해 msgid "Use Differential Privacy" msgstr "차분 개인정보 보호 사용" -#: ../../source/how-to-use-differential-privacy.rst:3 +#: ../../source/how-to-use-differential-privacy.rst:4 msgid "" "This guide explains how you can utilize differential privacy in the " "Flower framework. If you are not yet familiar with differential privacy, " @@ -8144,7 +8671,7 @@ msgstr "" "이 가이드에서는 Flower 프레임워크에서 차분 개인정보 보호 기능을 활용하는 방법을 설명합니다. 차분 개인정보 보호에 대해 아직 " "익숙하지 않은 경우 :doc:`explanation-differential-privacy`를 참조하세요." -#: ../../source/how-to-use-differential-privacy.rst:7 +#: ../../source/how-to-use-differential-privacy.rst:10 msgid "" "Differential Privacy in Flower is in a preview phase. If you plan to use " "these features in a production environment with sensitive data, feel free" @@ -8154,7 +8681,7 @@ msgstr "" "Flower의 차분 개인정보 보호는 현재 프리뷰 단계에 있습니다. 민감한 데이터가 있는 프로덕션 환경에서 이러한 기능을 사용할 " "계획이라면 언제든지 문의하여 요구 사항을 논의하고 이러한 기능을 가장 잘 사용하는 방법에 대한 안내를 받으세요." 
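Returning to the ``how-to-use-built-in-mods`` entries above, a hedged sketch of the mod mechanism they describe is shown below; ``example_mod_1``/``example_mod_2`` mirror the placeholder names used in the entries, ``client_fn`` is assumed to be defined as in step 2 of that guide, and the exact typing import may differ between Flower versions:

    from flwr.client import ClientApp
    from flwr.client.typing import ClientAppCallable
    from flwr.common import Context, Message

    def example_mod_1(msg: Message, ctx: Context, call_next: ClientAppCallable) -> Message:
        # Inspect/modify the incoming Message here (outermost mod, runs first)
        reply = call_next(msg, ctx)
        # Inspect/modify the outgoing Message here (runs last on the way back)
        return reply

    def example_mod_2(msg: Message, ctx: Context, call_next: ClientAppCallable) -> Message:
        return call_next(msg, ctx)

    app = ClientApp(
        client_fn=client_fn,                  # assumed to be defined as in the guide
        mods=[example_mod_1, example_mod_2],  # order matters: mod_1 is outermost
    )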
-#: ../../source/how-to-use-differential-privacy.rst:12 +#: ../../source/how-to-use-differential-privacy.rst:17 #, fuzzy msgid "" "This approach consists of two separate phases: clipping of the updates " @@ -8165,7 +8692,7 @@ msgstr "" "이 접근 방식은 업데이트 클리핑과 집계된 모델에 노이즈 추가라는 두 가지 단계로 구성됩니다. 클리핑 단계의 경우, Flower " "프레임워크는 클리핑을 서버 측에서 수행할지 클라이언트 측에서 수행할지 결정할 수 있도록 했습니다." -#: ../../source/how-to-use-differential-privacy.rst:15 +#: ../../source/how-to-use-differential-privacy.rst:21 msgid "" "**Server-side Clipping**: This approach has the advantage of the server " "enforcing uniform clipping across all clients' updates and reducing the " @@ -8177,7 +8704,7 @@ msgstr "" "값에 대한 통신 오버헤드를 줄일 수 있다는 장점이 있습니다. 하지만 모든 클라이언트에 대해 클리핑 작업을 수행해야 하기 때문에 " "서버의 계산 부하가 증가한다는 단점도 있습니다." -#: ../../source/how-to-use-differential-privacy.rst:16 +#: ../../source/how-to-use-differential-privacy.rst:26 msgid "" "**Client-side Clipping**: This approach has the advantage of reducing the" " computational overhead on the server. However, it also has the " @@ -8187,35 +8714,36 @@ msgstr "" "**Client-side Clipping**: 이 방식은 서버의 계산 오버헤드를 줄일 수 있다는 장점이 있습니다. 하지만 서버가 " "클리핑 프로세스에 대한 통제력이 떨어지기 때문에 centralized 제어가 부족하다는 단점도 있습니다." -#: ../../source/how-to-use-differential-privacy.rst:21 +#: ../../source/how-to-use-differential-privacy.rst:31 msgid "Server-side Clipping" msgstr "서버 측 클리핑" -#: ../../source/how-to-use-differential-privacy.rst:22 +#: ../../source/how-to-use-differential-privacy.rst:33 +#, fuzzy msgid "" -"For central DP with server-side clipping, there are two :code:`Strategy` " -"classes that act as wrappers around the actual :code:`Strategy` instance " -"(for example, :code:`FedAvg`). The two wrapper classes are " -":code:`DifferentialPrivacyServerSideFixedClipping` and " -":code:`DifferentialPrivacyServerSideAdaptiveClipping` for fixed and " -"adaptive clipping." +"For central DP with server-side clipping, there are two ``Strategy`` " +"classes that act as wrappers around the actual ``Strategy`` instance (for" +" example, ``FedAvg``). The two wrapper classes are " +"``DifferentialPrivacyServerSideFixedClipping`` and " +"``DifferentialPrivacyServerSideAdaptiveClipping`` for fixed and adaptive " +"clipping." msgstr "" "서버 측 클리핑이 있는 중앙 DP의 경우, 실제 :code:`Strategy` 인스턴스를 감싸는 래퍼 역할을 하는 두 개의 " ":code:`Strategy` 클래스가 있습니다(예: :code:`FedAvg`). 두 개의 래퍼 클래스는 고정 및 적응형 클리핑을" " 위한 :code:`DifferentialPrivacyServerSideFixedClipping`과 " ":code:`DifferentialPrivacyServerSideAdaptiveClipping`입니다." -#: ../../source/how-to-use-differential-privacy.rst:25 +#: ../../source/how-to-use-differential-privacy.rst:-1 msgid "server side clipping" msgstr "서버 측 클리핑" -#: ../../source/how-to-use-differential-privacy.rst:31 +#: ../../source/how-to-use-differential-privacy.rst:43 +#, fuzzy msgid "" -"The code sample below enables the :code:`FedAvg` strategy to use server-" -"side fixed clipping using the " -":code:`DifferentialPrivacyServerSideFixedClipping` wrapper class. The " -"same approach can be used with " -":code:`DifferentialPrivacyServerSideAdaptiveClipping` by adjusting the " +"The code sample below enables the ``FedAvg`` strategy to use server-side " +"fixed clipping using the ``DifferentialPrivacyServerSideFixedClipping`` " +"wrapper class. The same approach can be used with " +"``DifferentialPrivacyServerSideAdaptiveClipping`` by adjusting the " "corresponding input parameters." msgstr "" "아래 코드 샘플은 :code:`FedAvg` 전략이 " @@ -8224,19 +8752,20 @@ msgstr "" ":code:`DifferentialPrivacyServerSideAdaptiveClipping`과 동일한 접근 방식을 사용할 수 " "있습니다." 
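A hedged sketch of the server-side fixed-clipping wrapper described in the entry above; the hyperparameters are passed positionally because keyword names may differ by version, and all values are placeholders to be tuned for the actual workload:

    from flwr.server.strategy import (
        DifferentialPrivacyServerSideFixedClipping,
        FedAvg,
    )

    strategy = FedAvg()
    dp_strategy = DifferentialPrivacyServerSideFixedClipping(
        strategy,  # the strategy being wrapped
        1.0,       # noise multiplier
        10.0,      # clipping norm
        20,        # number of sampled clients per round
    )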
-#: ../../source/how-to-use-differential-privacy.rst:52 +#: ../../source/how-to-use-differential-privacy.rst:64 msgid "Client-side Clipping" msgstr "클라이언트 측 클리핑" -#: ../../source/how-to-use-differential-privacy.rst:53 +#: ../../source/how-to-use-differential-privacy.rst:66 +#, fuzzy msgid "" "For central DP with client-side clipping, the server sends the clipping " "value to selected clients on each round. Clients can use existing Flower " -":code:`Mods` to perform the clipping. Two mods are available for fixed " -"and adaptive client-side clipping: :code:`fixedclipping_mod` and " -":code:`adaptiveclipping_mod` with corresponding server-side wrappers " -":code:`DifferentialPrivacyClientSideFixedClipping` and " -":code:`DifferentialPrivacyClientSideAdaptiveClipping`." +"``Mods`` to perform the clipping. Two mods are available for fixed and " +"adaptive client-side clipping: ``fixedclipping_mod`` and " +"``adaptiveclipping_mod`` with corresponding server-side wrappers " +"``DifferentialPrivacyClientSideFixedClipping`` and " +"``DifferentialPrivacyClientSideAdaptiveClipping``." msgstr "" "클라이언트 측 클리핑이 있는 중앙 DP의 경우 서버는 각 라운드마다 선택한 클라이언트에 클리핑 값을 보냅니다. 클라이언트는 기존 " "Flower :code:`Mods`를 사용하여 클리핑을 수행할 수 있습니다. 고정 및 적응형 클라이언트 측 클리핑에는 두 가지 " @@ -8244,31 +8773,33 @@ msgstr "" " 해당 서버 측 래퍼 :code:`DifferentialPrivacyClientSideFixedClipping` 및 " ":code:`DifferentialPrivacyClientSideAdaptiveClipping`이 있습니다." -#: ../../source/how-to-use-differential-privacy.rst:57 +#: ../../source/how-to-use-differential-privacy.rst:-1 msgid "client side clipping" msgstr "클라이언트 측 클리핑" -#: ../../source/how-to-use-differential-privacy.rst:63 +#: ../../source/how-to-use-differential-privacy.rst:78 +#, fuzzy msgid "" -"The code sample below enables the :code:`FedAvg` strategy to use " -"differential privacy with client-side fixed clipping using both the " -":code:`DifferentialPrivacyClientSideFixedClipping` wrapper class and, on " -"the client, :code:`fixedclipping_mod`:" +"The code sample below enables the ``FedAvg`` strategy to use differential" +" privacy with client-side fixed clipping using both the " +"``DifferentialPrivacyClientSideFixedClipping`` wrapper class and, on the " +"client, ``fixedclipping_mod``:" msgstr "" "아래 코드 샘플은 :code:`FedAvg` 전략이 클라이언트 측 고정 클리핑과 함께 차분 프라이버시를 사용할 수 있도록 " ":code:`DifferentialPrivacyClientSideFixedClipping` 래퍼 클래스와 클라이언트에서 " ":code:`fixedclipping_mod`를 모두 사용하도록 합니다:" -#: ../../source/how-to-use-differential-privacy.rst:80 +#: ../../source/how-to-use-differential-privacy.rst:97 +#, fuzzy msgid "" -"In addition to the server-side strategy wrapper, the :code:`ClientApp` " -"needs to configure the matching :code:`fixedclipping_mod` to perform the " -"client-side clipping:" +"In addition to the server-side strategy wrapper, the ``ClientApp`` needs " +"to configure the matching ``fixedclipping_mod`` to perform the client-" +"side clipping:" msgstr "" "서버 측 전략 래퍼 외에도 클라이언트 측 클리핑을 수행하려면 :code:`ClientApp`이 일치하는 " ":code:`fixedclipping_mod`를 구성해야 합니다:" -#: ../../source/how-to-use-differential-privacy.rst:97 +#: ../../source/how-to-use-differential-privacy.rst:115 msgid "" "To utilize local differential privacy (DP) and add noise to the client " "model parameters before transmitting them to the server in Flower, you " @@ -8278,15 +8809,16 @@ msgstr "" "로컬 차분 프라이버시(DP)를 활용하고 클라이언트 모델 파라미터를 서버로 전송하기 전에 노이즈를 추가하려면 `LocalDpMod`를" " 사용하면 됩니다. 클리핑 노멀 값, 감도, 엡실론, 델타 등의 하이퍼파라미터를 설정해야 합니다." 
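Similarly hedged, the local-DP mod mentioned in the last entry above is attached to the ``ClientApp`` in the same way as ``fixedclipping_mod``; the four hyperparameters are passed positionally here because their exact keyword names may differ by version, ``client_fn`` is assumed to be defined as in the guide, and all values are placeholders:

    from flwr.client import ClientApp
    from flwr.client.mod import LocalDpMod

    # Arguments: clipping norm, sensitivity, epsilon, delta
    local_dp_mod = LocalDpMod(1.0, 1.0, 1.0, 1e-6)

    app = ClientApp(
        client_fn=client_fn,   # assumed to be defined as in the guide
        mods=[local_dp_mod],   # DP mods should operate last on parameters
    )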
-#: ../../source/how-to-use-differential-privacy.rst:99 +#: ../../source/how-to-use-differential-privacy.rst:-1 msgid "local DP mod" msgstr "로컬 DP mod" -#: ../../source/how-to-use-differential-privacy.rst:104 -msgid "Below is a code example that shows how to use :code:`LocalDpMod`:" +#: ../../source/how-to-use-differential-privacy.rst:125 +#, fuzzy +msgid "Below is a code example that shows how to use ``LocalDpMod``:" msgstr "다음은 :code:`LocalDpMod`를 사용하는 방법을 보여주는 코드 예시입니다:" -#: ../../source/how-to-use-differential-privacy.rst:122 +#: ../../source/how-to-use-differential-privacy.rst:140 msgid "" "Please note that the order of mods, especially those that modify " "parameters, is important when using multiple modifiers. Typically, " @@ -8296,11 +8828,11 @@ msgstr "" "여러 개의 수정자를 사용할 때는 수정자, 특히 매개변수를 수정하는 수정자의 순서가 중요하다는 점에 유의하세요. 일반적으로 차분 " "프라이버시(DP) 수정자는 매개변수에서 가장 마지막에 작동해야 합니다." -#: ../../source/how-to-use-differential-privacy.rst:125 +#: ../../source/how-to-use-differential-privacy.rst:145 msgid "Local Training using Privacy Engines" msgstr "Privacy Engines을 사용한 로컬 훈련" -#: ../../source/how-to-use-differential-privacy.rst:126 +#: ../../source/how-to-use-differential-privacy.rst:147 msgid "" "For ensuring data instance-level privacy during local model training on " "the client side, consider leveraging privacy engines such as Opacus and " @@ -8322,54 +8854,57 @@ msgid "Use strategies" msgstr "전략 사용하기" #: ../../source/how-to-use-strategies.rst:4 +#, fuzzy msgid "" "Flower allows full customization of the learning process through the " -":code:`Strategy` abstraction. A number of built-in strategies are " -"provided in the core framework." +"``Strategy`` abstraction. A number of built-in strategies are provided in" +" the core framework." msgstr "" "Flower는 :code:`Strategy` abstraction를 통해 학습 과정을 완전히 사용자 정의할 수 있습니다. 핵심 " "프레임워크에는 여러 가지 기본 제공 전략이 제공됩니다." -#: ../../source/how-to-use-strategies.rst:6 +#: ../../source/how-to-use-strategies.rst:7 msgid "" "There are three ways to customize the way Flower orchestrates the " "learning process on the server side:" msgstr "서버 측에서 Flower가 학습 과정을 조율하는 방식을 사용자 지정하는 방법에는 세 가지가 있습니다:" -#: ../../source/how-to-use-strategies.rst:8 -msgid "Use an existing strategy, for example, :code:`FedAvg`" +#: ../../source/how-to-use-strategies.rst:10 +#, fuzzy +msgid "Use an existing strategy, for example, ``FedAvg``" msgstr "기존 전략(예: :code:`FedAvg`)을 사용합니다" -#: ../../source/how-to-use-strategies.rst:9 -#: ../../source/how-to-use-strategies.rst:40 +#: ../../source/how-to-use-strategies.rst:11 +#: ../../source/how-to-use-strategies.rst:43 msgid "Customize an existing strategy with callback functions" msgstr "콜백 함수로 기존 전략 사용자 지정" -#: ../../source/how-to-use-strategies.rst:10 -#: ../../source/how-to-use-strategies.rst:87 +#: ../../source/how-to-use-strategies.rst:12 +#: ../../source/how-to-use-strategies.rst:99 msgid "Implement a novel strategy" msgstr "새로운 전략 구현" -#: ../../source/how-to-use-strategies.rst:14 +#: ../../source/how-to-use-strategies.rst:15 msgid "Use an existing strategy" msgstr "기존 전략 사용" -#: ../../source/how-to-use-strategies.rst:16 +#: ../../source/how-to-use-strategies.rst:17 msgid "" "Flower comes with a number of popular federated learning strategies " "built-in. A built-in strategy can be instantiated as follows:" msgstr "Flower에는 여러 가지 인기 있는 연합 학습 전략이 기본으로 제공됩니다. 
기본 제공 전략은 다음과 같이 인스턴스화할 수 있습니다:" -#: ../../source/how-to-use-strategies.rst:25 +#: ../../source/how-to-use-strategies.rst:27 +#, fuzzy msgid "" "This creates a strategy with all parameters left at their default values " -"and passes it to the :code:`start_server` function. It is usually " -"recommended to adjust a few parameters during instantiation:" +"and passes it to the ``start_server`` function. It is usually recommended" +" to adjust a few parameters during instantiation:" msgstr "" "이렇게 하면 모든 매개변수가 기본값으로 유지된 전략이 생성되어 :code:`start_server` 함수에 전달됩니다. 일반적으로 " "인스턴스화 중에 몇 가지 매개변수를 조정하는 것이 좋습니다:" -#: ../../source/how-to-use-strategies.rst:42 +#: ../../source/how-to-use-strategies.rst:45 msgid "" "Existing strategies provide several ways to customize their behaviour. " "Callback functions allow strategies to call user-provided code during " @@ -8378,58 +8913,59 @@ msgstr "" "기존 전략은 동작을 사용자 지정하는 여러 가지 방법을 제공합니다. 콜백 함수를 사용하면 전략이 실행 중에 사용자가 제공한 코드를 " "호출할 수 있습니다." -#: ../../source/how-to-use-strategies.rst:45 +#: ../../source/how-to-use-strategies.rst:49 msgid "Configuring client fit and client evaluate" msgstr "클라이언트 적합성 및 클라이언트 평가 구성" -#: ../../source/how-to-use-strategies.rst:47 +#: ../../source/how-to-use-strategies.rst:51 +#, fuzzy msgid "" "The server can pass new configuration values to the client each round by " -"providing a function to :code:`on_fit_config_fn`. The provided function " -"will be called by the strategy and must return a dictionary of " -"configuration key values pairs that will be sent to the client. It must " -"return a dictionary of arbitrary configuration values :code:`client.fit`" -" and :code:`client.evaluate` functions during each round of federated " -"learning." +"providing a function to ``on_fit_config_fn``. The provided function will " +"be called by the strategy and must return a dictionary of configuration " +"key values pairs that will be sent to the client. It must return a " +"dictionary of arbitrary configuration values ``client.fit`` and " +"``client.evaluate`` functions during each round of federated learning." msgstr "" "서버는 매 라운드마다 새로운 설정 값을 클라이언트에 전달하기 위해 :code:`on_fit_config_fn`에 함수를 제공할 수 " "있습니다. 제공된 함수는 전략에 의해 호출되며 클라이언트에 전송될 구성 키 값 쌍의 dictionary를 반환해야 합니다. 연합 " "학습의 각 라운드 동안 임의의 구성 값 dictionary인 :code:`client.fit` 및 " ":code:`client.evaluate` 함수를 반환해야 합니다." -#: ../../source/how-to-use-strategies.rst:75 +#: ../../source/how-to-use-strategies.rst:84 #, fuzzy msgid "" -"The :code:`on_fit_config_fn` can be used to pass arbitrary configuration " +"The ``on_fit_config_fn`` can be used to pass arbitrary configuration " "values from server to client, and potentially change these values each " "round, for example, to adjust the learning rate. The client will receive " -"the dictionary returned by the :code:`on_fit_config_fn` in its own " -":code:`client.fit()` function." +"the dictionary returned by the ``on_fit_config_fn`` in its own " +"``client.fit()`` function." msgstr "" ":code:`on_fit_config_fn`은 서버에서 클라이언트로 임의의 구성 값을 전달하고, 예를 들어 학습 속도를 조정하기 " "위해 매 라운드마다 이 값을 잠재적으로 변경하는 데 사용할 수 있습니다. 클라이언트는 자체 :code:`client.fit()` " "함수에서 :code:`on_fit_config_fn`이 반환한 dictionary를 받습니다." 
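To make the ``on_fit_config_fn`` entry above concrete, a minimal sketch of such a per-round config callback (values are illustrative only):

    import flwr as fl

    def fit_config(server_round: int):
        # This dictionary is sent to each client's fit() for the given round
        return {
            "local_epochs": 1,
            "learning_rate": 0.01 * (0.9 ** server_round),
        }

    strategy = fl.server.strategy.FedAvg(on_fit_config_fn=fit_config)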
-#: ../../source/how-to-use-strategies.rst:78 +#: ../../source/how-to-use-strategies.rst:89 +#, fuzzy msgid "" -"Similar to :code:`on_fit_config_fn`, there is also " -":code:`on_evaluate_config_fn` to customize the configuration sent to " -":code:`client.evaluate()`" +"Similar to ``on_fit_config_fn``, there is also ``on_evaluate_config_fn`` " +"to customize the configuration sent to ``client.evaluate()``" msgstr "" ":code:`on_fit_config_fn`과 유사하게, :code:`client.evaluate()`로 전송되는 구성을 사용자 " "지정하는 :code:`on_evaluate_config_fn`도 있습니다" -#: ../../source/how-to-use-strategies.rst:81 +#: ../../source/how-to-use-strategies.rst:93 msgid "Configuring server-side evaluation" msgstr "서버 측 평가 구성" -#: ../../source/how-to-use-strategies.rst:83 +#: ../../source/how-to-use-strategies.rst:95 +#, fuzzy msgid "" "Server-side evaluation can be enabled by passing an evaluation function " -"to :code:`evaluate_fn`." +"to ``evaluate_fn``." msgstr "서버 측 평가는 :code:`evaluate_fn`에 평가 함수를 전달하여 활성화할 수 있습니다." -#: ../../source/how-to-use-strategies.rst:89 +#: ../../source/how-to-use-strategies.rst:101 msgid "" "Writing a fully custom strategy is a bit more involved, but it provides " "the most flexibility. Read the `Implementing Strategies ` | :doc:`iOS `" -#: ../../source/index.rst:64 +#: ../../source/index.rst:70 msgid "We also made video tutorials for PyTorch:" msgstr "파이토치용 동영상 튜토리얼도 만들었습니다:" -#: ../../source/index.rst:69 +#: ../../source/index.rst:75 msgid "And TensorFlow:" msgstr "그리고 TensorFlow도:" -#: ../../source/index.rst:77 +#: ../../source/index.rst:83 msgid "" "Problem-oriented how-to guides show step-by-step how to achieve a " "specific goal." msgstr "문제 중심의 방법 가이드는 특정 목표를 달성하는 방법을 단계별로 보여줍니다." -#: ../../source/index.rst:110 +#: ../../source/index.rst:116 msgid "" "Understanding-oriented concept guides explain and discuss key topics and " "underlying ideas behind Flower and collaborative AI." msgstr "이해 중심의 개념 가이드에서는 Flower와 협업 AI의 주요 주제와 기본 아이디어를 설명하고 토론합니다." -#: ../../source/index.rst:121 +#: ../../source/index.rst:128 msgid "References" msgstr "참조" -#: ../../source/index.rst:123 +#: ../../source/index.rst:130 msgid "Information-oriented API reference and other reference material." msgstr "정보 지향 API 참조 및 기타 참고 자료." -#: ../../source/index.rst:132::1 +#: ../../source/index.rst:139::1 msgid ":py:obj:`flwr `\\" msgstr ":py:obj:`flwr `\\" -#: ../../source/index.rst:132::1 flwr:1 of +#: ../../source/index.rst:139::1 flwr:1 of msgid "Flower main package." msgstr "Flower 메인 패키지." -#: ../../source/index.rst:149 +#: ../../source/index.rst:155 msgid "Contributor docs" msgstr "기여자 문서" -#: ../../source/index.rst:151 +#: ../../source/index.rst:157 msgid "" "The Flower community welcomes contributions. The following docs are " "intended to help along the way." @@ -8704,11 +9240,50 @@ msgstr "" msgid "Arguments" msgstr "빌드 전달인자" -#: ../../flwr install:1 new:1 run:1 +#: ../../flwr install:1 log:1 new:1 run:1 #, fuzzy msgid "Optional argument" msgstr "선택적 개선 사항" +#: ../../flwr install:1 +msgid "The source FAB file to install." +msgstr "" + +#: ../../flwr log:1 +msgid "Get logs from a Flower project run." 
+msgstr "" + +#: ../../flwr log:1 +msgid "Flag to stream or print logs from the Flower run" +msgstr "" + +#: ../../flwr log run +msgid "default" +msgstr "" + +#: ../../flwr log:1 +#, fuzzy +msgid "``True``" +msgstr "``DISTRO``" + +#: ../../flwr log:1 +#, fuzzy +msgid "Required argument" +msgstr "빌드 전달인자" + +#: ../../flwr log:1 +#, fuzzy +msgid "The Flower run ID to query" +msgstr "Flower 커뮤니티 가입하기" + +#: ../../flwr log:1 +msgid "Path of the Flower project to run" +msgstr "" + +#: ../../flwr log:1 +msgid "Name of the federation to run the app on" +msgstr "" + #: ../../flwr new:1 #, fuzzy msgid "Create new Flower App." @@ -8733,6 +9308,11 @@ msgstr "" msgid "The Flower username of the author" msgstr "" +#: ../../flwr new:1 +#, fuzzy +msgid "The name of the Flower App" +msgstr "Flower 기본 이미지의 태그." + #: ../../flwr run:1 #, fuzzy msgid "Run Flower App." @@ -8754,6 +9334,26 @@ msgid "" " the `pyproject.toml` in order to be properly overriden." msgstr "" +#: ../../flwr run:1 +msgid "" +"Use `--stream` with `flwr run` to display logs; logs are not streamed by " +"default." +msgstr "" + +#: ../../flwr run:1 +#, fuzzy +msgid "``False``" +msgstr "``flwr/base``" + +#: ../../flwr run:1 +#, fuzzy +msgid "Path of the Flower App to run." +msgstr "Flower 기본 이미지의 태그." + +#: ../../flwr run:1 +msgid "Name of the federation to run the app on." +msgstr "" + #: ../../source/ref-api-cli.rst:16 msgid "flower-simulation" msgstr "flower 시뮬레이션" @@ -8771,17 +9371,16 @@ msgstr "Flower SuperNode" msgid "flower-server-app" msgstr "flower 서버 프로그램" -#: ../../source/ref-api-cli.rst:49 +#: ../../source/ref-api-cli.rst:50 msgid "" -"Note that since version :code:`1.11.0`, :code:`flower-server-app` no " -"longer supports passing a reference to a `ServerApp` attribute. Instead, " -"you need to pass the path to Flower app via the argument :code:`--app`. " -"This is the path to a directory containing a `pyproject.toml`. You can " -"create a valid Flower app by executing :code:`flwr new` and following the" -" prompt." +"Note that since version ``1.11.0``, ``flower-server-app`` no longer " +"supports passing a reference to a `ServerApp` attribute. Instead, you " +"need to pass the path to Flower app via the argument ``--app``. This is " +"the path to a directory containing a `pyproject.toml`. You can create a " +"valid Flower app by executing ``flwr new`` and following the prompt." msgstr "" -#: ../../source/ref-api-cli.rst:62 +#: ../../source/ref-api-cli.rst:64 #, fuzzy msgid "flower-superexec" msgstr "flower 초연결" @@ -8797,7 +9396,7 @@ msgstr "Modules" #: ../../source/ref-api/flwr.rst:35::1 #, fuzzy -msgid ":py:obj:`client `\\" +msgid ":py:obj:`flwr.client `\\" msgstr ":py:obj:`flwr.client `\\" #: ../../source/ref-api/flwr.rst:35::1 flwr.client:1 of @@ -8806,7 +9405,7 @@ msgstr "Flower 클라이언트." #: ../../source/ref-api/flwr.rst:35::1 #, fuzzy -msgid ":py:obj:`common `\\" +msgid ":py:obj:`flwr.common `\\" msgstr ":py:obj:`flwr.common `\\" #: ../../source/ref-api/flwr.rst:35::1 flwr.common:1 of @@ -8815,7 +9414,7 @@ msgstr "서버와 클라이언트 간에 공유되는 공통 구성 요소입니 #: ../../source/ref-api/flwr.rst:35::1 #, fuzzy -msgid ":py:obj:`server `\\" +msgid ":py:obj:`flwr.server `\\" msgstr ":py:obj:`flwr.server `\\" #: ../../source/ref-api/flwr.rst:35::1 @@ -8826,7 +9425,7 @@ msgstr "Flower 서버." 
#: ../../source/ref-api/flwr.rst:35::1 #, fuzzy -msgid ":py:obj:`simulation `\\" +msgid ":py:obj:`flwr.simulation `\\" msgstr ":py:obj:`flwr.simulation `\\" #: ../../source/ref-api/flwr.rst:35::1 flwr.simulation:1 of @@ -8913,7 +9512,7 @@ msgstr "NumPy를 사용하는 Flower 클라이언트를 위한 추상 베이스 #: ../../source/ref-api/flwr.client.rst:50::1 #, fuzzy -msgid ":py:obj:`mod `\\" +msgid ":py:obj:`flwr.client.mod `\\" msgstr ":py:obj:`flwr.client.mod `\\" #: ../../source/ref-api/flwr.client.rst:50::1 flwr.client.mod:1 of @@ -9110,48 +9709,57 @@ msgstr ":py:obj:`context `\\" msgid "Getter for `Context` client attribute." msgstr "" -#: ../../source/ref-api/flwr.client.Client.rst -#: ../../source/ref-api/flwr.client.NumPyClient.rst -#: ../../source/ref-api/flwr.client.mod.LocalDpMod.rst -#: ../../source/ref-api/flwr.common.Array.rst -#: ../../source/ref-api/flwr.common.ConfigsRecord.rst -#: ../../source/ref-api/flwr.common.Context.rst -#: ../../source/ref-api/flwr.common.Error.rst -#: ../../source/ref-api/flwr.common.Message.rst -#: ../../source/ref-api/flwr.common.Metadata.rst -#: ../../source/ref-api/flwr.common.MetricsRecord.rst #: ../../source/ref-api/flwr.common.Parameters.rst:2 -#: ../../source/ref-api/flwr.common.ParametersRecord.rst -#: ../../source/ref-api/flwr.common.RecordSet.rst -#: ../../source/ref-api/flwr.server.ClientManager.rst -#: ../../source/ref-api/flwr.server.Driver.rst -#: ../../source/ref-api/flwr.server.ServerAppComponents.rst -#: ../../source/ref-api/flwr.server.SimpleClientManager.rst -#: ../../source/ref-api/flwr.server.strategy.Bulyan.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgAdaptive.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgFixed.rst -#: ../../source/ref-api/flwr.server.strategy.DifferentialPrivacyClientSideAdaptiveClipping.rst -#: ../../source/ref-api/flwr.server.strategy.DifferentialPrivacyClientSideFixedClipping.rst -#: ../../source/ref-api/flwr.server.strategy.DifferentialPrivacyServerSideAdaptiveClipping.rst -#: ../../source/ref-api/flwr.server.strategy.DifferentialPrivacyServerSideFixedClipping.rst -#: ../../source/ref-api/flwr.server.strategy.FedAdagrad.rst -#: ../../source/ref-api/flwr.server.strategy.FedAdam.rst -#: ../../source/ref-api/flwr.server.strategy.FedAvg.rst -#: ../../source/ref-api/flwr.server.strategy.FedAvgAndroid.rst -#: ../../source/ref-api/flwr.server.strategy.FedAvgM.rst -#: ../../source/ref-api/flwr.server.strategy.FedOpt.rst -#: ../../source/ref-api/flwr.server.strategy.FedProx.rst -#: ../../source/ref-api/flwr.server.strategy.FedTrimmedAvg.rst -#: ../../source/ref-api/flwr.server.strategy.FedYogi.rst -#: ../../source/ref-api/flwr.server.strategy.Krum.rst -#: ../../source/ref-api/flwr.server.strategy.Strategy.rst -#: ../../source/ref-api/flwr.server.workflow.SecAggPlusWorkflow.rst -#: ../../source/ref-api/flwr.server.workflow.SecAggWorkflow.rst -#: ../../source/ref-api/flwr.simulation.run_simulation.rst -#: ../../source/ref-api/flwr.simulation.start_simulation.rst #: flwr.client.app.start_client flwr.client.app.start_numpy_client -#: flwr.server.app.start_server -#: flwr.server.driver.driver.Driver.send_and_receive of +#: flwr.client.client.Client.evaluate flwr.client.client.Client.fit +#: flwr.client.client.Client.get_parameters +#: flwr.client.client.Client.get_properties +#: flwr.client.mod.localdp_mod.LocalDpMod +#: flwr.client.numpy_client.NumPyClient.evaluate +#: flwr.client.numpy_client.NumPyClient.fit +#: flwr.client.numpy_client.NumPyClient.get_parameters +#: flwr.client.numpy_client.NumPyClient.get_properties +#: 
flwr.common.context.Context flwr.common.message.Error +#: flwr.common.message.Message flwr.common.message.Message.create_error_reply +#: flwr.common.message.Message.create_reply flwr.common.message.Metadata +#: flwr.common.record.configsrecord.ConfigsRecord +#: flwr.common.record.metricsrecord.MetricsRecord +#: flwr.common.record.parametersrecord.Array +#: flwr.common.record.parametersrecord.ParametersRecord +#: flwr.common.record.recordset.RecordSet flwr.server.app.start_server +#: flwr.server.client_manager.ClientManager.register +#: flwr.server.client_manager.ClientManager.unregister +#: flwr.server.client_manager.SimpleClientManager.register +#: flwr.server.client_manager.SimpleClientManager.unregister +#: flwr.server.client_manager.SimpleClientManager.wait_for +#: flwr.server.driver.driver.Driver.create_message +#: flwr.server.driver.driver.Driver.pull_messages +#: flwr.server.driver.driver.Driver.push_messages +#: flwr.server.driver.driver.Driver.send_and_receive +#: flwr.server.serverapp_components.ServerAppComponents +#: flwr.server.strategy.bulyan.Bulyan +#: flwr.server.strategy.dp_adaptive_clipping.DifferentialPrivacyClientSideAdaptiveClipping +#: flwr.server.strategy.dp_adaptive_clipping.DifferentialPrivacyServerSideAdaptiveClipping +#: flwr.server.strategy.dp_fixed_clipping.DifferentialPrivacyClientSideFixedClipping +#: flwr.server.strategy.dp_fixed_clipping.DifferentialPrivacyServerSideFixedClipping +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_evaluate +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_fit +#: flwr.server.strategy.fedadagrad.FedAdagrad +#: flwr.server.strategy.fedadam.FedAdam flwr.server.strategy.fedavg.FedAvg +#: flwr.server.strategy.fedavg_android.FedAvgAndroid +#: flwr.server.strategy.fedavgm.FedAvgM flwr.server.strategy.fedopt.FedOpt +#: flwr.server.strategy.fedprox.FedProx +#: flwr.server.strategy.fedtrimmedavg.FedTrimmedAvg +#: flwr.server.strategy.fedyogi.FedYogi flwr.server.strategy.krum.Krum +#: flwr.server.strategy.strategy.Strategy.aggregate_evaluate +#: flwr.server.strategy.strategy.Strategy.aggregate_fit +#: flwr.server.strategy.strategy.Strategy.configure_evaluate +#: flwr.server.strategy.strategy.Strategy.configure_fit +#: flwr.server.strategy.strategy.Strategy.evaluate +#: flwr.server.strategy.strategy.Strategy.initialize_parameters +#: flwr.server.workflow.secure_aggregation.secagg_workflow.SecAggWorkflow +#: flwr.server.workflow.secure_aggregation.secaggplus_workflow.SecAggPlusWorkflow +#: flwr.simulation.run_simulation.run_simulation of msgid "Parameters" msgstr "파라미터" @@ -9162,21 +9770,31 @@ msgid "" "customize the local evaluation process." msgstr "서버에서 받은 (전역) 모델 파라미터와 로컬 평가 프로세스를 사용자 지정하는 데 사용되는 구성 값 사전이 포함된 평가 지침입니다." 
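The strings above document the evaluation instructions a client receives: the (global) model parameters sent by the server plus a config dictionary used to customize local evaluation. A minimal, illustrative sketch of how a raw ``Client`` might unpack an ``EvaluateIns``; the loss value, example count, and config key are placeholders rather than values taken from the Flower documentation, and the remaining ``Client`` methods are omitted:

```python
from flwr.client import Client
from flwr.common import (
    Code,
    EvaluateIns,
    EvaluateRes,
    Status,
    parameters_to_ndarrays,
)


class EvalClient(Client):
    """Sketch of a client that only implements the evaluate() path."""

    def evaluate(self, ins: EvaluateIns) -> EvaluateRes:
        # ins.parameters holds the (global) model parameters from the server
        ndarrays = parameters_to_ndarrays(ins.parameters)

        # ins.config is the dictionary used to customize local evaluation
        batch_size = int(ins.config.get("batch-size", 32))  # placeholder key

        # Placeholder evaluation result; a real client would run its model here
        loss, num_examples = 0.42, 1_000

        return EvaluateRes(
            status=Status(code=Code.OK, message="Success"),
            loss=float(loss),
            num_examples=num_examples,
            metrics={"batch-size": batch_size},
        )
```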
-#: ../../source/ref-api/flwr.client.Client.rst -#: ../../source/ref-api/flwr.client.NumPyClient.rst -#: ../../source/ref-api/flwr.common.ConfigsRecord.rst -#: ../../source/ref-api/flwr.common.Message.rst -#: ../../source/ref-api/flwr.common.MetricsRecord.rst -#: ../../source/ref-api/flwr.common.ParametersRecord.rst -#: ../../source/ref-api/flwr.server.ClientManager.rst -#: ../../source/ref-api/flwr.server.Driver.rst -#: ../../source/ref-api/flwr.server.SimpleClientManager.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgAdaptive.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgFixed.rst -#: ../../source/ref-api/flwr.server.strategy.Strategy.rst -#: ../../source/ref-api/flwr.simulation.start_simulation.rst -#: flwr.server.app.start_server -#: flwr.server.driver.driver.Driver.send_and_receive of +#: flwr.client.client.Client.evaluate flwr.client.client.Client.fit +#: flwr.client.client.Client.get_parameters +#: flwr.client.client.Client.get_properties +#: flwr.client.numpy_client.NumPyClient.evaluate +#: flwr.client.numpy_client.NumPyClient.fit +#: flwr.client.numpy_client.NumPyClient.get_parameters +#: flwr.client.numpy_client.NumPyClient.get_properties +#: flwr.common.message.Message.create_reply flwr.server.app.start_server +#: flwr.server.client_manager.ClientManager.num_available +#: flwr.server.client_manager.ClientManager.register +#: flwr.server.client_manager.SimpleClientManager.num_available +#: flwr.server.client_manager.SimpleClientManager.register +#: flwr.server.client_manager.SimpleClientManager.wait_for +#: flwr.server.driver.driver.Driver.create_message +#: flwr.server.driver.driver.Driver.pull_messages +#: flwr.server.driver.driver.Driver.push_messages +#: flwr.server.driver.driver.Driver.send_and_receive +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_evaluate +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_fit +#: flwr.server.strategy.strategy.Strategy.aggregate_evaluate +#: flwr.server.strategy.strategy.Strategy.aggregate_fit +#: flwr.server.strategy.strategy.Strategy.configure_evaluate +#: flwr.server.strategy.strategy.Strategy.configure_fit +#: flwr.server.strategy.strategy.Strategy.evaluate +#: flwr.server.strategy.strategy.Strategy.initialize_parameters of msgid "Returns" msgstr "반환" @@ -9186,18 +9804,29 @@ msgid "" "details such as the number of local data examples used for evaluation." msgstr "로컬 데이터 세트의 손실 및 평가에 사용된 로컬 데이터 예제 수와 같은 기타 세부 정보가 포함된 평가 결과입니다." 
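For ``NumPyClient``, the same "Returns" contract is expressed as a plain ``(loss, num_examples, metrics)`` tuple. A minimal sketch, with placeholder numbers standing in for a real model and dataset:

```python
from typing import Dict, List, Tuple

import numpy as np
from flwr.client import NumPyClient


class EvalOnlyClient(NumPyClient):
    """Illustrative client showing the shape of the evaluate() return value."""

    def evaluate(
        self, parameters: List[np.ndarray], config: Dict
    ) -> Tuple[float, int, Dict]:
        # parameters: the (global) model weights received from the server
        # config: values the strategy uses to customize local evaluation
        loss = 0.31          # loss on the local dataset (placeholder)
        num_examples = 500   # number of local examples used for evaluation
        metrics = {"accuracy": 0.87}  # any additional details as scalars
        return loss, num_examples, metrics
```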
-#: ../../source/ref-api/flwr.client.Client.rst -#: ../../source/ref-api/flwr.client.NumPyClient.rst -#: ../../source/ref-api/flwr.common.Message.rst -#: ../../source/ref-api/flwr.server.ClientManager.rst -#: ../../source/ref-api/flwr.server.Driver.rst -#: ../../source/ref-api/flwr.server.SimpleClientManager.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgAdaptive.rst -#: ../../source/ref-api/flwr.server.strategy.DPFedAvgFixed.rst -#: ../../source/ref-api/flwr.server.strategy.Strategy.rst -#: ../../source/ref-api/flwr.simulation.start_simulation.rst -#: flwr.server.app.start_server -#: flwr.server.driver.driver.Driver.send_and_receive of +#: flwr.client.client.Client.evaluate flwr.client.client.Client.fit +#: flwr.client.client.Client.get_parameters +#: flwr.client.client.Client.get_properties +#: flwr.client.numpy_client.NumPyClient.get_parameters +#: flwr.client.numpy_client.NumPyClient.get_properties +#: flwr.common.message.Message.create_reply flwr.server.app.start_server +#: flwr.server.client_manager.ClientManager.num_available +#: flwr.server.client_manager.ClientManager.register +#: flwr.server.client_manager.SimpleClientManager.num_available +#: flwr.server.client_manager.SimpleClientManager.register +#: flwr.server.client_manager.SimpleClientManager.wait_for +#: flwr.server.driver.driver.Driver.create_message +#: flwr.server.driver.driver.Driver.pull_messages +#: flwr.server.driver.driver.Driver.push_messages +#: flwr.server.driver.driver.Driver.send_and_receive +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_evaluate +#: flwr.server.strategy.dpfedavg_fixed.DPFedAvgFixed.configure_fit +#: flwr.server.strategy.strategy.Strategy.aggregate_evaluate +#: flwr.server.strategy.strategy.Strategy.aggregate_fit +#: flwr.server.strategy.strategy.Strategy.configure_evaluate +#: flwr.server.strategy.strategy.Strategy.configure_fit +#: flwr.server.strategy.strategy.Strategy.evaluate +#: flwr.server.strategy.strategy.Strategy.initialize_parameters of msgid "Return type" msgstr "반환 타입" @@ -9554,6 +10183,11 @@ msgstr "클라이언트 측 고정 클리핑 수정자." msgid ":py:obj:`make_ffn `\\ \\(ffn\\, mods\\)" msgstr ":py:obj:`make_ffn `\\ \\(ffn\\, mods\\)" +#: ../../source/ref-api/flwr.client.mod.rst:28::1 +#: flwr.client.mod.utils.make_ffn:1 of +msgid "." +msgstr "." + #: ../../source/ref-api/flwr.client.mod.rst:28::1 msgid "" ":py:obj:`message_size_mod `\\ \\(msg\\," @@ -9730,10 +10364,6 @@ msgstr "일반적으로 fixedclipping_mod는 매개변수에서 가장 마지막 msgid "make\\_ffn" msgstr "make\\_ffn" -#: flwr.client.mod.utils.make_ffn:1 of -msgid "." -msgstr "." - #: ../../source/ref-api/flwr.client.mod.message_size_mod.rst:2 msgid "message\\_size\\_mod" msgstr "message\\_size\\_mod" @@ -9760,14 +10390,6 @@ msgstr "secagg\\_mod" msgid "secaggplus\\_mod" msgstr "secaggplus\\_mod" -#: ../../source/ref-api/flwr.client.run_client_app.rst:2 -msgid "run\\_client\\_app" -msgstr "run\\_client\\_app" - -#: ../../source/ref-api/flwr.client.run_supernode.rst:2 -msgid "run\\_supernode" -msgstr "run\\_supernode" - #: ../../source/ref-api/flwr.client.start_client.rst:2 msgid "start\\_client" msgstr "start\\_client" @@ -10569,14 +11191,9 @@ msgstr "이 객체에 저장된 바이트 수를 반환합니다." #: collections.abc.MutableMapping.clear:1::1 of #, fuzzy -msgid ":py:obj:`get `\\ \\(key\\[\\, default\\]\\)" +msgid ":py:obj:`get `\\ \\(k\\[\\,d\\]\\)" msgstr ":py:obj:`get `\\ \\(k\\[\\,d\\]\\)" -#: collections.abc.Mapping.get:1 -#: collections.abc.MutableMapping.clear:1::1 of -msgid "Retrieve the corresponding layout by the string key." 
-msgstr "" - #: collections.abc.MutableMapping.clear:1::1 of msgid ":py:obj:`items `\\ \\(\\)" msgstr ":py:obj:`items `\\ \\(\\)" @@ -10635,22 +11252,6 @@ msgstr ":py:obj:`values `\\ \\(\\)" msgid "This function counts booleans as occupying 1 Byte." msgstr "이 함수는 booleans을 1바이트를 차지하는 것으로 계산합니다." -#: collections.abc.Mapping.get:3 of -msgid "" -"When there isn't an exact match, all the existing keys in the layout map " -"will be treated as a regex and map against the input key again. The first" -" match will be returned, based on the key insertion order. Return None if" -" there isn't any match found." -msgstr "" - -#: collections.abc.Mapping.get:8 of -msgid "the string key as the query for the layout." -msgstr "" - -#: collections.abc.Mapping.get:10 of -msgid "Corresponding layout based on the query." -msgstr "" - #: ../../source/ref-api/flwr.common.Context.rst:2 msgid "Context" msgstr "컨텍스트" @@ -11446,7 +12047,7 @@ msgstr "인코딩" msgid "The encoding in which to encode the string." msgstr "문자열을 인코딩합니다." -#: flwr.common.EventType.encode:5 of +#: flwr.common.EventType.encode:9 of msgid "errors" msgstr "오류" @@ -11640,7 +12241,7 @@ msgstr "" "문자열이 접미사 문자열로 끝나고 해당 접미사가 비어 있지 않으면 문자열[:-len(suffix)]을 반환합니다. 그렇지 않으면 원본" " 문자열의 복사본을 반환합니다." -#: flwr.common.EventType.replace:3 of +#: flwr.common.EventType.replace:5 of msgid "count" msgstr "카운트" @@ -11680,7 +12281,7 @@ msgid "" "strings and the original string." msgstr "구분 기호를 찾을 수 없는 경우 빈 문자열 2개와 원래 문자열을 포함하는 3-tuple을 반환합니다." -#: flwr.common.EventType.rsplit:3 flwr.common.EventType.split:3 of +#: flwr.common.EventType.rsplit:7 flwr.common.EventType.split:7 of msgid "sep" msgstr "sep" @@ -11697,7 +12298,7 @@ msgstr "" "None(기본값)으로 설정하면 모든 공백 문자(\\\\n \\\\r \\\\t \\\\f 및 공백 포함)를 분할하고 결과에서 빈 " "문자열을 삭제합니다." -#: flwr.common.EventType.rsplit:9 flwr.common.EventType.split:9 of +#: flwr.common.EventType.rsplit:11 flwr.common.EventType.split:11 of msgid "maxsplit" msgstr "maxsplit" @@ -11743,7 +12344,7 @@ msgid "" "remaining cased characters have lower case." msgstr "보다 구체적으로, 단어는 대문자로 시작하고 나머지 모든 대소문자는 소문자로 표기합니다." -#: flwr.common.EventType.translate:3 of +#: flwr.common.EventType.translate:5 of msgid "table" msgstr "table" @@ -12178,7 +12779,7 @@ msgstr ":py:obj:`count_bytes `\\ \\(\\)" #: collections.abc.MutableMapping.clear:1::1 of #, fuzzy -msgid ":py:obj:`get `\\ \\(key\\[\\, default\\]\\)" +msgid ":py:obj:`get `\\ \\(k\\[\\,d\\]\\)" msgstr ":py:obj:`get `\\ \\(k\\[\\,d\\]\\)" #: collections.abc.MutableMapping.clear:1::1 of @@ -12325,9 +12926,7 @@ msgstr ":py:obj:`count_bytes `\\ \\(\\ #: collections.abc.MutableMapping.clear:1::1 of #, fuzzy -msgid "" -":py:obj:`get `\\ \\(key\\[\\, " -"default\\]\\)" +msgid ":py:obj:`get `\\ \\(k\\[\\,d\\]\\)" msgstr ":py:obj:`get `\\ \\(k\\[\\,d\\]\\)" #: collections.abc.MutableMapping.clear:1::1 of @@ -12669,7 +13268,7 @@ msgstr "사용 가능한 클라이언트 그룹 제공." 
#: ../../source/ref-api/flwr.server.rst:56::1 #, fuzzy -msgid ":py:obj:`strategy `\\" +msgid ":py:obj:`flwr.server.strategy `\\" msgstr ":py:obj:`state `\\" #: ../../source/ref-api/flwr.server.rst:56::1 @@ -12679,7 +13278,7 @@ msgstr "" #: ../../source/ref-api/flwr.server.rst:56::1 #, fuzzy -msgid ":py:obj:`workflow `\\" +msgid ":py:obj:`flwr.server.workflow `\\" msgstr ":py:obj:`flwr.server `\\" #: ../../source/ref-api/flwr.server.rst:56::1 @@ -13161,8 +13760,7 @@ msgid "" msgstr "" #: flwr.server.app.start_server:9 -#: flwr.server.serverapp_components.ServerAppComponents:6 -#: flwr.simulation.app.start_simulation:29 of +#: flwr.server.serverapp_components.ServerAppComponents:6 of msgid "" "Currently supported values are `num_rounds` (int, default: 1) and " "`round_timeout` in seconds (float, default: None)." @@ -13284,14 +13882,6 @@ msgstr "" msgid "**success**" msgstr "" -#: ../../source/ref-api/flwr.server.run_server_app.rst:2 -msgid "run\\_server\\_app" -msgstr "" - -#: ../../source/ref-api/flwr.server.run_superlink.rst:2 -msgid "run\\_superlink" -msgstr "" - #: ../../source/ref-api/flwr.server.start_server.rst:2 msgid "start\\_server" msgstr "" @@ -16319,15 +16909,15 @@ msgstr "" #: ../../source/ref-api/flwr.simulation.rst:18::1 #, fuzzy msgid "" -":py:obj:`start_simulation `\\ \\(\\*\\," -" client\\_fn\\, num\\_clients\\)" +":py:obj:`start_simulation `\\ " +"\\(\\*args\\, \\*\\*kwargs\\)" msgstr "" ":py:obj:`start_client `\\ \\(\\*\\, " "server\\_address\\[\\, client\\_fn\\, ...\\]\\)" #: ../../source/ref-api/flwr.simulation.rst:18::1 -#: flwr.simulation.app.start_simulation:1 of -msgid "Start a Ray-based Flower simulation server." +#: flwr.simulation.start_simulation:1 of +msgid "Log error stating that module `ray` could not be imported." msgstr "" #: ../../source/ref-api/flwr.simulation.run_simulation.rst:2 @@ -16388,120 +16978,6 @@ msgstr "" msgid "start\\_simulation" msgstr "" -#: flwr.simulation.app.start_simulation:3 of -msgid "" -"A function creating `Client` instances. The function must have the " -"signature `client_fn(context: Context). It should return a single client " -"instance of type `Client`. Note that the created client instances are " -"ephemeral and will often be destroyed after a single method invocation. " -"Since client instances are not long-lived, they should not attempt to " -"carry state over method invocations. Any state required by the instance " -"(model, dataset, hyperparameters, ...) should be (re-)created in either " -"the call to `client_fn` or the call to any of the client methods (e.g., " -"load evaluation data in the `evaluate` method itself)." -msgstr "" - -#: flwr.simulation.app.start_simulation:13 of -msgid "The total number of clients in this simulation." -msgstr "" - -#: flwr.simulation.app.start_simulation:15 of -msgid "" -"UNSUPPORTED, WILL BE REMOVED. USE `num_clients` INSTEAD. List " -"`client_id`s for each client. This is only required if `num_clients` is " -"not set. Setting both `num_clients` and `clients_ids` with " -"`len(clients_ids)` not equal to `num_clients` generates an error. Using " -"this argument will raise an error." -msgstr "" - -#: flwr.simulation.app.start_simulation:21 of -msgid "" -"CPU and GPU resources for a single client. Supported keys are `num_cpus` " -"and `num_gpus`. To understand the GPU utilization caused by `num_gpus`, " -"as well as using custom resources, please consult the Ray documentation." 
-msgstr "" - -#: flwr.simulation.app.start_simulation:26 of -msgid "" -"An implementation of the abstract base class `flwr.server.Server`. If no " -"instance is provided, then `start_server` will create one." -msgstr "" - -#: flwr.simulation.app.start_simulation:32 of -msgid "" -"An implementation of the abstract base class `flwr.server.Strategy`. If " -"no strategy is provided, then `start_server` will use " -"`flwr.server.strategy.FedAvg`." -msgstr "" - -#: flwr.simulation.app.start_simulation:36 of -msgid "" -"An implementation of the abstract base class `flwr.server.ClientManager`." -" If no implementation is provided, then `start_simulation` will use " -"`flwr.server.client_manager.SimpleClientManager`." -msgstr "" - -#: flwr.simulation.app.start_simulation:40 of -msgid "" -"Optional dictionary containing arguments for the call to `ray.init`. If " -"ray_init_args is None (the default), Ray will be initialized with the " -"following default args: { \"ignore_reinit_error\": True, " -"\"include_dashboard\": False } An empty dictionary can be used " -"(ray_init_args={}) to prevent any arguments from being passed to " -"ray.init." -msgstr "" - -#: flwr.simulation.app.start_simulation:40 of -msgid "" -"Optional dictionary containing arguments for the call to `ray.init`. If " -"ray_init_args is None (the default), Ray will be initialized with the " -"following default args:" -msgstr "" - -#: flwr.simulation.app.start_simulation:44 of -msgid "{ \"ignore_reinit_error\": True, \"include_dashboard\": False }" -msgstr "" - -#: flwr.simulation.app.start_simulation:46 of -msgid "" -"An empty dictionary can be used (ray_init_args={}) to prevent any " -"arguments from being passed to ray.init." -msgstr "" - -#: flwr.simulation.app.start_simulation:49 of -msgid "" -"Set to True to prevent `ray.shutdown()` in case " -"`ray.is_initialized()=True`." -msgstr "" - -#: flwr.simulation.app.start_simulation:51 of -msgid "" -"Optionally specify the type of actor to use. The actor object, which " -"persists throughout the simulation, will be the process in charge of " -"executing a ClientApp wrapping input argument `client_fn`." -msgstr "" - -#: flwr.simulation.app.start_simulation:55 of -msgid "" -"If you want to create your own Actor classes, you might need to pass some" -" input argument. You can use this dictionary for such purpose." -msgstr "" - -#: flwr.simulation.app.start_simulation:58 of -msgid "" -"(default: \"DEFAULT\") Optional string (\"DEFAULT\" or \"SPREAD\") for " -"the VCE to choose in which node the actor is placed. If you are an " -"advanced user needed more control you can use lower-level scheduling " -"strategies to pin actors to specific compute nodes (e.g. via " -"NodeAffinitySchedulingStrategy). Please note this is an advanced feature." -" For all details, please refer to the Ray documentation: " -"https://docs.ray.io/en/latest/ray-core/scheduling/index.html" -msgstr "" - -#: flwr.simulation.app.start_simulation:67 of -msgid "**hist** -- Object containing metrics from training." 
-msgstr "" - #: ../../source/ref-changelog.md:1 msgid "Changelog" msgstr "" @@ -16608,13 +17084,6 @@ msgstr "" msgid "Incompatible changes" msgstr "" -#: ../../source/ref-changelog.md:33 ../../source/ref-changelog.md:399 -#: ../../source/ref-changelog.md:676 ../../source/ref-changelog.md:740 -#: ../../source/ref-changelog.md:798 ../../source/ref-changelog.md:867 -#: ../../source/ref-changelog.md:929 -msgid "None" -msgstr "" - #: ../../source/ref-changelog.md:35 msgid "v1.11.0 (2024-08-30)" msgstr "" @@ -21574,12 +22043,15 @@ msgid "" "`_." msgstr "" -#: ../../source/ref-example-projects.rst:10 -msgid "" -"The following examples are available as standalone projects. Quickstart " -"TensorFlow/Keras ---------------------------" +#: ../../source/ref-example-projects.rst:9 +msgid "The following examples are available as standalone projects." msgstr "" +#: ../../source/ref-example-projects.rst:12 +#, fuzzy +msgid "Quickstart TensorFlow/Keras" +msgstr "빠른 시작 튜토리얼" + #: ../../source/ref-example-projects.rst:14 msgid "" "The TensorFlow/Keras quickstart example shows CIFAR-10 image " @@ -21593,77 +22065,77 @@ msgid "" "tensorflow>`_" msgstr "" -#: ../../source/ref-example-projects.rst:18 +#: ../../source/ref-example-projects.rst:19 msgid ":doc:`Quickstart TensorFlow (Tutorial) `" msgstr "" -#: ../../source/ref-example-projects.rst:19 +#: ../../source/ref-example-projects.rst:20 msgid "" "`Quickstart TensorFlow (Blog Post) `_" msgstr "" -#: ../../source/ref-example-projects.rst:23 -#: ../../source/tutorial-quickstart-pytorch.rst:5 +#: ../../source/ref-example-projects.rst:24 +#: ../../source/tutorial-quickstart-pytorch.rst:4 msgid "Quickstart PyTorch" msgstr "" -#: ../../source/ref-example-projects.rst:25 +#: ../../source/ref-example-projects.rst:26 msgid "" "The PyTorch quickstart example shows CIFAR-10 image classification with a" " simple Convolutional Neural Network:" msgstr "" -#: ../../source/ref-example-projects.rst:28 +#: ../../source/ref-example-projects.rst:29 msgid "" "`Quickstart PyTorch (Code) " "`_" msgstr "" -#: ../../source/ref-example-projects.rst:29 +#: ../../source/ref-example-projects.rst:31 msgid ":doc:`Quickstart PyTorch (Tutorial) `" msgstr "" -#: ../../source/ref-example-projects.rst:33 +#: ../../source/ref-example-projects.rst:34 msgid "PyTorch: From Centralized To Federated" msgstr "" -#: ../../source/ref-example-projects.rst:35 +#: ../../source/ref-example-projects.rst:36 msgid "" "This example shows how a regular PyTorch project can be federated using " "Flower:" msgstr "" -#: ../../source/ref-example-projects.rst:37 +#: ../../source/ref-example-projects.rst:38 msgid "" "`PyTorch: From Centralized To Federated (Code) " "`_" msgstr "" -#: ../../source/ref-example-projects.rst:38 +#: ../../source/ref-example-projects.rst:40 msgid "" ":doc:`PyTorch: From Centralized To Federated (Tutorial) `" msgstr "" -#: ../../source/ref-example-projects.rst:42 +#: ../../source/ref-example-projects.rst:44 msgid "Federated Learning on Raspberry Pi and Nvidia Jetson" msgstr "" -#: ../../source/ref-example-projects.rst:44 +#: ../../source/ref-example-projects.rst:46 msgid "" "This example shows how Flower can be used to build a federated learning " "system that run across Raspberry Pi and Nvidia Jetson:" msgstr "" -#: ../../source/ref-example-projects.rst:46 +#: ../../source/ref-example-projects.rst:49 msgid "" "`Federated Learning on Raspberry Pi and Nvidia Jetson (Code) " "`_" msgstr "" -#: ../../source/ref-example-projects.rst:47 +#: ../../source/ref-example-projects.rst:51 msgid "" 
"`Federated Learning on Raspberry Pi and Nvidia Jetson (Blog Post) " "`_" @@ -21679,20 +22151,20 @@ msgstr "" msgid ":fa:`eye,mr-1` Can Flower run on Jupyter Notebooks / Google Colab?" msgstr "" -#: ../../source/ref-faq.rst:8 +#: ../../source/ref-faq.rst:9 msgid "" "Yes, it can! Flower even comes with a few under-the-hood optimizations to" " make it work even better on Colab. Here's a quickstart example:" msgstr "" -#: ../../source/ref-faq.rst:10 +#: ../../source/ref-faq.rst:11 msgid "" "`Flower simulation PyTorch " "`_" msgstr "" -#: ../../source/ref-faq.rst:11 +#: ../../source/ref-faq.rst:12 msgid "" "`Flower simulation TensorFlow/Keras " "`_" @@ -21715,20 +22187,20 @@ msgstr "" msgid ":fa:`eye,mr-1` Does Flower support federated learning on Android devices?" msgstr "" -#: ../../source/ref-faq.rst:19 +#: ../../source/ref-faq.rst:20 msgid "" "Yes, it does. Please take a look at our `blog post " "`_ or check out the code examples:" msgstr "" -#: ../../source/ref-faq.rst:21 +#: ../../source/ref-faq.rst:22 msgid "" "`Android Kotlin example `_" msgstr "" -#: ../../source/ref-faq.rst:22 +#: ../../source/ref-faq.rst:23 msgid "`Android Java example `_" msgstr "" @@ -21736,38 +22208,50 @@ msgstr "" msgid ":fa:`eye,mr-1` Can I combine federated learning with blockchain?" msgstr "" -#: ../../source/ref-faq.rst:26 +#: ../../source/ref-faq.rst:27 msgid "" "Yes, of course. A list of available examples using Flower within a " "blockchain environment is available here:" msgstr "" -#: ../../source/ref-faq.rst:28 +#: ../../source/ref-faq.rst:30 +msgid "`FLock: A Decentralised AI Training Platform `_." +msgstr "" + +#: ../../source/ref-faq.rst:30 +msgid "Contribute to on-chain training the model and earn rewards." +msgstr "" + +#: ../../source/ref-faq.rst:31 +msgid "Local blockchain with federated learning simulation." +msgstr "" + +#: ../../source/ref-faq.rst:32 msgid "" "`Flower meets Nevermined GitHub Repository `_." msgstr "" -#: ../../source/ref-faq.rst:29 +#: ../../source/ref-faq.rst:33 msgid "" "`Flower meets Nevermined YouTube video " "`_." msgstr "" -#: ../../source/ref-faq.rst:30 +#: ../../source/ref-faq.rst:34 msgid "" "`Flower meets KOSMoS `_." msgstr "" -#: ../../source/ref-faq.rst:31 +#: ../../source/ref-faq.rst:35 msgid "" "`Flower meets Talan blog post `_ ." msgstr "" -#: ../../source/ref-faq.rst:32 +#: ../../source/ref-faq.rst:36 msgid "" "`Flower meets Talan GitHub Repository " "`_ ." @@ -21973,195 +22457,315 @@ msgid "" "app using Flower." msgstr "" -#: ../../source/tutorial-quickstart-android.rst:5 +#: ../../source/tutorial-quickstart-android.rst:4 msgid "Quickstart Android" msgstr "" -#: ../../source/tutorial-quickstart-android.rst:10 +#: ../../source/tutorial-quickstart-android.rst:9 msgid "" "Let's build a federated learning system using TFLite and Flower on " "Android!" msgstr "" -#: ../../source/tutorial-quickstart-android.rst:12 +#: ../../source/tutorial-quickstart-android.rst:11 msgid "" "Please refer to the `full code example " "`_ to learn " "more." msgstr "" -#: ../../source/tutorial-quickstart-fastai.rst:-1 -msgid "" -"Check out this Federated Learning quickstart tutorial for using Flower " -"with FastAI to train a vision model on CIFAR-10." 
+#: ../../source/tutorial-quickstart-fastai.rst:4 +msgid "Quickstart fastai" msgstr "" -#: ../../source/tutorial-quickstart-fastai.rst:5 -msgid "Quickstart fastai" +#: ../../source/tutorial-quickstart-fastai.rst:6 +msgid "" +"In this federated learning tutorial we will learn how to train a " +"SqueezeNet model on MNIST using Flower and fastai. It is recommended to " +"create a virtual environment and run everything within a :doc:`virtualenv" +" `." msgstr "" #: ../../source/tutorial-quickstart-fastai.rst:10 -msgid "Let's build a federated learning system using fastai and Flower!" +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:11 +msgid "Then, clone the code example directly from GitHub:" msgstr "" -#: ../../source/tutorial-quickstart-fastai.rst:12 +#: ../../source/tutorial-quickstart-fastai.rst:18 msgid "" -"Please refer to the `full code example " -"`_ " -"to learn more." +"This will create a new directory called `quickstart-fastai` containing " +"the following files:" +msgstr "" + +#: ../../source/tutorial-quickstart-fastai.rst:31 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:32 +#, fuzzy +msgid "Next, activate your environment, then run:" +msgstr "그 후 가상 환경을 활성화합니다:" + +#: ../../source/tutorial-quickstart-fastai.rst:41 +msgid "" +"This example by default runs the Flower Simulation Engine, creating a " +"federation of 10 nodes using `FedAvg `_ " +"as the aggregation strategy. The dataset will be partitioned using Flower" +" Dataset's `IidPartitioner `_." +" Let's run the project:" +msgstr "" + +#: ../../source/tutorial-quickstart-fastai.rst:54 +#: ../../source/tutorial-quickstart-huggingface.rst:61 +#: ../../source/tutorial-quickstart-mlx.rst:60 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:55 +#: ../../source/tutorial-quickstart-pytorch.rst:62 +#: ../../source/tutorial-quickstart-tensorflow.rst:62 +msgid "With default arguments you will see an output like this one:" +msgstr "" + +#: ../../source/tutorial-quickstart-fastai.rst:98 +#: ../../source/tutorial-quickstart-huggingface.rst:112 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:105 +#: ../../source/tutorial-quickstart-pytorch.rst:103 +#: ../../source/tutorial-quickstart-tensorflow.rst:103 +msgid "" +"You can also override the parameters defined in the " +"``[tool.flwr.app.config]`` section in ``pyproject.toml`` like this:" +msgstr "" + +#: ../../source/tutorial-quickstart-fastai.rst:108 +msgid "" +"Check the `source code `_ of this tutorial in ``examples/quickstart-fasai`` " +"in the Flower GitHub repository." msgstr "" #: ../../source/tutorial-quickstart-huggingface.rst:-1 msgid "" "Check out this Federating Learning quickstart tutorial for using Flower " -"with HuggingFace Transformers in order to fine-tune an LLM." +"with 🤗 HuggingFace Transformers in order to fine-tune an LLM." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:5 +#: ../../source/tutorial-quickstart-huggingface.rst:4 msgid "Quickstart 🤗 Transformers" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:10 +#: ../../source/tutorial-quickstart-huggingface.rst:6 msgid "" -"Let's build a federated learning system using Hugging Face Transformers " -"and Flower!" +"In this federated learning tutorial we will learn how to train a large " +"language model (LLM) on the `IMDB " +"`_ dataset using Flower" +" and the 🤗 Hugging Face Transformers library. It is recommended to create" +" a virtual environment and run everything within a :doc:`virtualenv " +"`." 
msgstr "" #: ../../source/tutorial-quickstart-huggingface.rst:12 msgid "" -"We will leverage Hugging Face to federate the training of language models" -" over multiple clients using Flower. More specifically, we will fine-tune" -" a pre-trained Transformer model (distilBERT) for sequence classification" -" over a dataset of IMDB ratings. The end goal is to detect if a movie " -"rating is positive or negative." -msgstr "" - -#: ../../source/tutorial-quickstart-huggingface.rst:18 -msgid "Dependencies" +"Let's use ``flwr new`` to create a complete Flower+🤗 Hugging Face " +"project. It will generate all the files needed to run, by default with " +"the Flower Simulation Engine, a federation of 10 nodes using |fedavg|_ " +"The dataset will be partitioned using |flowerdatasets|_'s " +"|iidpartitioner|_." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:20 +#: ../../source/tutorial-quickstart-huggingface.rst:17 +#: ../../source/tutorial-quickstart-mlx.rst:17 +#: ../../source/tutorial-quickstart-pytorch.rst:18 +#: ../../source/tutorial-quickstart-tensorflow.rst:18 msgid "" -"To follow along this tutorial you will need to install the following " -"packages: :code:`datasets`, :code:`evaluate`, :code:`flwr`, " -":code:`torch`, and :code:`transformers`. This can be done using " -":code:`pip`:" +"Now that we have a rough idea of what this example is about, let's get " +"started. First, install Flower in your new environment:" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:30 -msgid "Standard Hugging Face workflow" +#: ../../source/tutorial-quickstart-huggingface.rst:25 +msgid "" +"Then, run the command below. You will be prompted to select one of the " +"available templates (choose ``HuggingFace``), give a name to your " +"project, and type in your developer name:" msgstr "" #: ../../source/tutorial-quickstart-huggingface.rst:33 -msgid "Handling the data" +#: ../../source/tutorial-quickstart-mlx.rst:32 +#: ../../source/tutorial-quickstart-pytorch.rst:34 +#: ../../source/tutorial-quickstart-tensorflow.rst:34 +msgid "" +"After running it you'll notice a new directory with your project name has" +" been created. It should have the following structure:" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:35 +#: ../../source/tutorial-quickstart-huggingface.rst:47 +#: ../../source/tutorial-quickstart-mlx.rst:46 +#: ../../source/tutorial-quickstart-pytorch.rst:48 +#: ../../source/tutorial-quickstart-tensorflow.rst:48 msgid "" -"To fetch the IMDB dataset, we will use Hugging Face's :code:`datasets` " -"library. We then need to tokenize the data and create :code:`PyTorch` " -"dataloaders, this is all done in the :code:`load_data` function:" +"If you haven't yet installed the project and its dependencies, you can do" +" so by:" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:81 -msgid "Training and testing the model" +#: ../../source/tutorial-quickstart-huggingface.rst:54 +#: ../../source/tutorial-quickstart-pytorch.rst:55 +#: ../../source/tutorial-quickstart-tensorflow.rst:55 +msgid "To run the project, do:" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:83 -msgid "" -"Once we have a way of creating our trainloader and testloader, we can " -"take care of the training and testing. 
This is very similar to any " -":code:`PyTorch` training or testing loop:" +#: ../../source/tutorial-quickstart-huggingface.rst:102 +msgid "You can also run the project with GPU as follows:" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:121 -msgid "Creating the model itself" +#: ../../source/tutorial-quickstart-huggingface.rst:109 +msgid "" +"This will use the default arguments where each ``ClientApp`` will use 2 " +"CPUs and at most 4 ``ClientApp``\\s will run in a given GPU." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:123 +#: ../../source/tutorial-quickstart-huggingface.rst:120 +#: ../../source/tutorial-quickstart-mlx.rst:110 +#: ../../source/tutorial-quickstart-pytorch.rst:111 msgid "" -"To create the model itself, we will just load the pre-trained distillBERT" -" model using Hugging Face’s :code:`AutoModelForSequenceClassification` :" +"What follows is an explanation of each component in the project you just " +"created: dataset partition, the model, defining the ``ClientApp`` and " +"defining the ``ServerApp``." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:136 -msgid "Federating the example" -msgstr "" +#: ../../source/tutorial-quickstart-huggingface.rst:124 +#: ../../source/tutorial-quickstart-mlx.rst:114 +#: ../../source/tutorial-quickstart-pytorch.rst:115 +#: ../../source/tutorial-quickstart-tensorflow.rst:112 +#, fuzzy +msgid "The Data" +msgstr "Metadata" -#: ../../source/tutorial-quickstart-huggingface.rst:139 -msgid "Creating the IMDBClient" +#: ../../source/tutorial-quickstart-huggingface.rst:126 +msgid "" +"This tutorial uses |flowerdatasets|_ to easily download and partition the" +" `IMDB `_ dataset. In " +"this example you'll make use of the |iidpartitioner|_ to generate " +"``num_partitions`` partitions. You can choose |otherpartitioners|_ " +"available in Flower Datasets. To tokenize the text, we will also load the" +" tokenizer from the pre-trained Transformer model that we'll use during " +"training - more on that in the next section. Each ``ClientApp`` will call" +" this function to create dataloaders with the data that correspond to " +"their data partition." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:141 -msgid "" -"To federate our example to multiple clients, we first need to write our " -"Flower client class (inheriting from :code:`flwr.client.NumPyClient`). " -"This is very easy, as our model is a standard :code:`PyTorch` model:" +#: ../../source/tutorial-quickstart-huggingface.rst:171 +#: ../../source/tutorial-quickstart-mlx.rst:155 +#: ../../source/tutorial-quickstart-pytorch.rst:150 +#: ../../source/tutorial-quickstart-tensorflow.rst:139 +msgid "The Model" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:169 +#: ../../source/tutorial-quickstart-huggingface.rst:173 msgid "" -"The :code:`get_parameters` function lets the server get the client's " -"parameters. Inversely, the :code:`set_parameters` function allows the " -"server to send its parameters to the client. Finally, the :code:`fit` " -"function trains the model locally for the client, and the " -":code:`evaluate` function tests the model locally and returns the " -"relevant metrics." +"We will leverage 🤗 Hugging Face to federate the training of language " +"models over multiple clients using Flower. More specifically, we will " +"fine-tune a pre-trained Transformer model (|berttiny|_) for sequence " +"classification over the dataset of IMDB ratings. 
The end goal is to " +"detect if a movie rating is positive or negative. If you have access to " +"larger GPUs, feel free to use larger models!" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:175 -msgid "Starting the server" +#: ../../source/tutorial-quickstart-huggingface.rst:185 +msgid "" +"Note that here, ``model_name`` is a string that will be loaded from the " +"``Context`` in the ClientApp and ServerApp." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:177 +#: ../../source/tutorial-quickstart-huggingface.rst:188 msgid "" -"Now that we have a way to instantiate clients, we need to create our " -"server in order to aggregate the results. Using Flower, this can be done " -"very easily by first choosing a strategy (here, we are using " -":code:`FedAvg`, which will define the global weights as the average of " -"all the clients' weights at each round) and then using the " -":code:`flwr.server.start_server` function:" +"In addition to loading the pretrained model weights and architecture, we " +"also include two utility functions to perform both training (i.e. " +"``train()``) and evaluation (i.e. ``test()``) using the above model. " +"These functions should look fairly familiar if you have some prior " +"experience with PyTorch. Note these functions do not have anything " +"specific to Flower. That being said, the training function will normally " +"be called, as we'll see later, from a Flower client passing its own data." +" In summary, your clients can use standard training/testing functions to " +"perform local training or evaluation:" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:205 +#: ../../source/tutorial-quickstart-huggingface.rst:228 +#: ../../source/tutorial-quickstart-mlx.rst:199 +#: ../../source/tutorial-quickstart-pytorch.rst:224 +#: ../../source/tutorial-quickstart-tensorflow.rst:168 +#, fuzzy +msgid "The ClientApp" +msgstr "클라이언트앱" + +#: ../../source/tutorial-quickstart-huggingface.rst:230 msgid "" -"The :code:`weighted_average` function is there to provide a way to " -"aggregate the metrics distributed amongst the clients (basically this " -"allows us to display a nice average accuracy and loss for every round)." +"The main changes we have to make to use 🤗 Hugging Face with Flower will " +"be found in the ``get_weights()`` and ``set_weights()`` functions. Under " +"the hood, the ``transformers`` library uses PyTorch, which means we can " +"reuse the ``get_weights()`` and ``set_weights()`` code that we defined in" +" the :doc:`Quickstart PyTorch ` tutorial. As" +" a reminder, in ``get_weights()``, PyTorch model parameters are extracted" +" and represented as a list of NumPy arrays. The ``set_weights()`` " +"function that's the opposite: given a list of NumPy arrays it applies " +"them to an existing PyTorch model. Doing this in fairly easy in PyTorch." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:209 -msgid "Putting everything together" +#: ../../source/tutorial-quickstart-huggingface.rst:241 +#: ../../source/tutorial-quickstart-pytorch.rst:234 +msgid "" +"The specific implementation of ``get_weights()`` and ``set_weights()`` " +"depends on the type of models you use. The ones shown below work for a " +"wide range of PyTorch models but you might need to adjust them if you " +"have more exotic model architectures." 
msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:211 -msgid "We can now start client instances using:" +#: ../../source/tutorial-quickstart-huggingface.rst:257 +#: ../../source/tutorial-quickstart-pytorch.rst:250 +msgid "" +"The rest of the functionality is directly inspired by the centralized " +"case. The ``fit()`` method in the client trains the model using the local" +" dataset. Similarly, the ``evaluate()`` method is used to evaluate the " +"model received on a held-out validation set that the client might have:" msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:221 +#: ../../source/tutorial-quickstart-huggingface.rst:283 msgid "" -"And they will be able to connect to the server and start the federated " -"training." +"Finally, we can construct a ``ClientApp`` using the ``FlowerClient`` " +"defined above by means of a ``client_fn()`` callback. Note that the " +"`context` enables you to get access to hyperparemeters defined in your " +"``pyproject.toml`` to configure the run. In this tutorial we access the " +"``local-epochs`` setting to control the number of epochs a ``ClientApp`` " +"will perform when running the ``fit()`` method. You could define " +"additional hyperparameters in ``pyproject.toml`` and access them here." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:223 +#: ../../source/tutorial-quickstart-huggingface.rst:316 +#: ../../source/tutorial-quickstart-mlx.rst:361 +#: ../../source/tutorial-quickstart-pytorch.rst:307 +#: ../../source/tutorial-quickstart-tensorflow.rst:232 +#, fuzzy +msgid "The ServerApp" +msgstr "Flower 서버앱" + +#: ../../source/tutorial-quickstart-huggingface.rst:318 msgid "" -"If you want to check out everything put together, you should check out " -"the `full code example `_ ." +"To construct a ``ServerApp`` we define a ``server_fn()`` callback with an" +" identical signature to that of ``client_fn()`` but the return type is " +"|serverappcomponents|_ as opposed to a |client|_ In this example we use " +"the `FedAvg` strategy. To it we pass a randomly initialized model that " +"will server as the global model to federated. Note that the value of " +"``fraction_fit`` is read from the run config. You can find the default " +"value defined in the ``pyproject.toml``." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:226 +#: ../../source/tutorial-quickstart-huggingface.rst:356 msgid "" -"Of course, this is a very basic example, and a lot can be added or " -"modified, it was just to showcase how simply we could federate a Hugging " -"Face workflow using Flower." +"Congratulations! You've successfully built and run your first federated " +"learning system for an LLM." msgstr "" -#: ../../source/tutorial-quickstart-huggingface.rst:229 +#: ../../source/tutorial-quickstart-huggingface.rst:361 msgid "" -"Note that in this example we used :code:`PyTorch`, but we could have very" -" well used :code:`TensorFlow`." +"Check the source code of the extended version of this tutorial in " +"|quickstart_hf_link|_ in the Flower GitHub repository. For a " +"comprehensive example of a federated fine-tuning of an LLM with Flower, " +"refer to the |flowertune|_ example in the Flower GitHub repository." msgstr "" #: ../../source/tutorial-quickstart-ios.rst:-1 @@ -22170,11 +22774,11 @@ msgid "" "using Flower to train a neural network on MNIST." 
msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:5 +#: ../../source/tutorial-quickstart-ios.rst:4 msgid "Quickstart iOS" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:10 +#: ../../source/tutorial-quickstart-ios.rst:9 msgid "" "In this tutorial we will learn how to train a Neural Network on MNIST " "using Flower and CoreML on iOS devices." @@ -22188,13 +22792,13 @@ msgid "" "implementation in iOS, it is recommended to use Xcode as our IDE." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:15 +#: ../../source/tutorial-quickstart-ios.rst:17 msgid "" "Our example consists of one Python *server* and two iPhone *clients* that" " all have the same model." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:17 +#: ../../source/tutorial-quickstart-ios.rst:20 msgid "" "*Clients* are responsible for generating individual weight updates for " "the model based on their local datasets. These updates are then sent to " @@ -22203,25 +22807,24 @@ msgid "" "each *client*. A complete cycle of weight updates is called a *round*." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:21 +#: ../../source/tutorial-quickstart-ios.rst:26 msgid "" "Now that we have a rough idea of what is going on, let's get started to " "setup our Flower server environment. We first need to install Flower. You" " can do this by using pip:" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:27 +#: ../../source/tutorial-quickstart-ios.rst:33 msgid "Or Poetry:" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:34 -#: ../../source/tutorial-quickstart-scikitlearn.rst:40 -#: ../../source/tutorial-quickstart-tensorflow.rst:29 -#: ../../source/tutorial-quickstart-xgboost.rst:55 +#: ../../source/tutorial-quickstart-ios.rst:40 +#: ../../source/tutorial-quickstart-scikitlearn.rst:43 +#: ../../source/tutorial-quickstart-xgboost.rst:65 msgid "Flower Client" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:36 +#: ../../source/tutorial-quickstart-ios.rst:42 msgid "" "Now that we have all our dependencies installed, let's run a simple " "distributed training using CoreML as our local training pipeline and " @@ -22230,95 +22833,90 @@ msgid "" "the Swift SDK. The client implementation can be seen below:" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:72 +#: ../../source/tutorial-quickstart-ios.rst:80 msgid "" -"Let's create a new application project in Xcode and add :code:`flwr` as a" -" dependency in your project. For our application, we will store the logic" -" of our app in :code:`FLiOSModel.swift` and the UI elements in " -":code:`ContentView.swift`. We will focus more on :code:`FLiOSModel.swift`" -" in this quickstart. Please refer to the `full code example " +"Let's create a new application project in Xcode and add ``flwr`` as a " +"dependency in your project. For our application, we will store the logic " +"of our app in ``FLiOSModel.swift`` and the UI elements in " +"``ContentView.swift``. We will focus more on ``FLiOSModel.swift`` in this" +" quickstart. Please refer to the `full code example " "`_ to learn more " "about the app." 
msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:75 -msgid "Import Flower and CoreML related packages in :code:`FLiOSModel.swift`:" +#: ../../source/tutorial-quickstart-ios.rst:86 +msgid "Import Flower and CoreML related packages in ``FLiOSModel.swift``:" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:83 +#: ../../source/tutorial-quickstart-ios.rst:94 msgid "" "Then add the mlmodel to the project simply by drag-and-drop, the mlmodel " "will be bundled inside the application during deployment to your iOS " "device. We need to pass the url to access mlmodel and run CoreML machine " "learning processes, it can be retrieved by calling the function " -":code:`Bundle.main.url`. For the MNIST dataset, we need to preprocess it " -"into :code:`MLBatchProvider` object. The preprocessing is done inside " -":code:`DataLoader.swift`." +"``Bundle.main.url``. For the MNIST dataset, we need to preprocess it into" +" ``MLBatchProvider`` object. The preprocessing is done inside " +"``DataLoader.swift``." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:99 +#: ../../source/tutorial-quickstart-ios.rst:112 msgid "" "Since CoreML does not allow the model parameters to be seen before " "training, and accessing the model parameters during or after the training" " can only be done by specifying the layer name, we need to know this " "information beforehand, through looking at the model specification, which" " are written as proto files. The implementation can be seen in " -":code:`MLModelInspect`." +"``MLModelInspect``." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:102 +#: ../../source/tutorial-quickstart-ios.rst:118 msgid "" "After we have all of the necessary information, let's create our Flower " "client." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:117 +#: ../../source/tutorial-quickstart-ios.rst:133 msgid "" "Then start the Flower gRPC client and start communicating to the server " -"by passing our Flower client to the function :code:`startFlwrGRPC`." +"by passing our Flower client to the function ``startFlwrGRPC``." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:124 +#: ../../source/tutorial-quickstart-ios.rst:141 msgid "" -"That's it for the client. We only have to implement :code:`Client` or " -"call the provided :code:`MLFlwrClient` and call :code:`startFlwrGRPC()`. " -"The attribute :code:`hostname` and :code:`port` tells the client which " -"server to connect to. This can be done by entering the hostname and port " -"in the application before clicking the start button to start the " -"federated learning process." +"That's it for the client. We only have to implement ``Client`` or call " +"the provided ``MLFlwrClient`` and call ``startFlwrGRPC()``. The attribute" +" ``hostname`` and ``port`` tells the client which server to connect to. " +"This can be done by entering the hostname and port in the application " +"before clicking the start button to start the federated learning process." 
msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:129 -#: ../../source/tutorial-quickstart-scikitlearn.rst:167 -#: ../../source/tutorial-quickstart-tensorflow.rst:98 -#: ../../source/tutorial-quickstart-xgboost.rst:341 +#: ../../source/tutorial-quickstart-ios.rst:148 +#: ../../source/tutorial-quickstart-scikitlearn.rst:179 +#: ../../source/tutorial-quickstart-xgboost.rst:358 msgid "Flower Server" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:131 -#: ../../source/tutorial-quickstart-tensorflow.rst:100 +#: ../../source/tutorial-quickstart-ios.rst:150 msgid "" "For simple workloads we can start a Flower server and leave all the " "configuration possibilities at their default values. In a file named " -":code:`server.py`, import Flower and start the server:" +"``server.py``, import Flower and start the server:" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:142 -#: ../../source/tutorial-quickstart-scikitlearn.rst:230 -#: ../../source/tutorial-quickstart-tensorflow.rst:112 +#: ../../source/tutorial-quickstart-ios.rst:161 +#: ../../source/tutorial-quickstart-scikitlearn.rst:254 msgid "Train the model, federated!" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:144 -#: ../../source/tutorial-quickstart-tensorflow.rst:114 -#: ../../source/tutorial-quickstart-xgboost.rst:567 +#: ../../source/tutorial-quickstart-ios.rst:163 +#: ../../source/tutorial-quickstart-xgboost.rst:590 msgid "" "With both client and server ready, we can now run everything and see " "federated learning in action. FL systems usually have a server and " "multiple clients. We therefore have to start the server first:" msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:152 +#: ../../source/tutorial-quickstart-ios.rst:171 msgid "" "Once the server is running we can start the clients in different " "terminals. Build and run the client through your Xcode, one through Xcode" @@ -22328,12 +22926,12 @@ msgid "" "simulator-or-on-a-device>`_." msgstr "" -#: ../../source/tutorial-quickstart-ios.rst:156 +#: ../../source/tutorial-quickstart-ios.rst:177 msgid "" "Congratulations! You've successfully built and run your first federated " "learning system in your ios device. The full `source code " "`_ for this " -"example can be found in :code:`examples/ios`." +"example can be found in ``examples/ios``." msgstr "" #: ../../source/tutorial-quickstart-jax.rst:-1 @@ -22342,11 +22940,11 @@ msgid "" "with Jax to train a linear regression model on a scikit-learn dataset." msgstr "" -#: ../../source/tutorial-quickstart-jax.rst:5 +#: ../../source/tutorial-quickstart-jax.rst:4 msgid "Quickstart JAX" msgstr "" -#: ../../source/tutorial-quickstart-jax.rst:10 +#: ../../source/tutorial-quickstart-jax.rst:9 msgid "" "This tutorial will show you how to use Flower to build a federated " "version of an existing JAX workload. We are using JAX to train a linear " @@ -22367,38 +22965,41 @@ msgstr "" " 튜토리얼`을 기반으로 centralized 학습 접근 방식을 구축합니다. 그런 다음 centralized 트레이닝 코드를 기반으로" " federated 방식으로 트레이닝을 실행합니다." 
-#: ../../source/tutorial-quickstart-jax.rst:16 +#: ../../source/tutorial-quickstart-jax.rst:20 +#, fuzzy msgid "" "Before we start building our JAX example, we need install the packages " -":code:`jax`, :code:`jaxlib`, :code:`scikit-learn`, and :code:`flwr`:" +"``jax``, ``jaxlib``, ``scikit-learn``, and ``flwr``:" msgstr "" "JAX 예제 빌드를 시작하기 전에 :code:`jax`, :code:`jaxlib`, :code:`scikit-learn`, " ":code:`flwr` 패키지를 설치해야 합니다:" -#: ../../source/tutorial-quickstart-jax.rst:24 +#: ../../source/tutorial-quickstart-jax.rst:28 msgid "Linear Regression with JAX" msgstr "JAX를 사용한 선형 회귀" -#: ../../source/tutorial-quickstart-jax.rst:26 +#: ../../source/tutorial-quickstart-jax.rst:30 +#, fuzzy msgid "" "We begin with a brief description of the centralized training code based " -"on a :code:`Linear Regression` model. If you want a more in-depth " -"explanation of what's going on then have a look at the official `JAX " -"documentation `_." +"on a ``Linear Regression`` model. If you want a more in-depth explanation" +" of what's going on then have a look at the official `JAX documentation " +"`_." msgstr "" "먼저 :code:`선형 회귀` 모델을 기반으로 하는 중앙 집중식 훈련 코드에 대한 간략한 설명부터 시작하겠습니다. 더 자세한 설명을" " 원하시면 공식 `JAX 문서 `_를 참조하세요." -#: ../../source/tutorial-quickstart-jax.rst:29 +#: ../../source/tutorial-quickstart-jax.rst:34 +#, fuzzy msgid "" -"Let's create a new file called :code:`jax_training.py` with all the " +"Let's create a new file called ``jax_training.py`` with all the " "components required for a traditional (centralized) linear regression " -"training. First, the JAX packages :code:`jax` and :code:`jaxlib` need to " -"be imported. In addition, we need to import :code:`sklearn` since we use " -":code:`make_regression` for the dataset and :code:`train_test_split` to " -"split the dataset into a training and test set. You can see that we do " -"not yet import the :code:`flwr` package for federated learning. This will" -" be done later." +"training. First, the JAX packages ``jax`` and ``jaxlib`` need to be " +"imported. In addition, we need to import ``sklearn`` since we use " +"``make_regression`` for the dataset and ``train_test_split`` to split the" +" dataset into a training and test set. You can see that we do not yet " +"import the ``flwr`` package for federated learning. This will be done " +"later." msgstr "" "전통적인(중앙 집중식) 선형 회귀 훈련에 필요한 모든 구성 요소가 포함된 :code:`jax_training.py`라는 새 파일을 " "생성해 보겠습니다. 먼저, JAX 패키지인 :code:`jax`와 :code:`jaxlib`를 가져와야 합니다. 또한 데이터 세트에" @@ -22406,55 +23007,58 @@ msgstr "" ":code:`train_test_split`을 사용하므로 :code:`sklearn`을 가져와야 합니다. 연합 학습을 위해 아직 " ":code:`flwr` 패키지를 가져오지 않은 것을 볼 수 있습니다. 이 작업은 나중에 수행됩니다." -#: ../../source/tutorial-quickstart-jax.rst:43 -msgid "" -"The :code:`load_data()` function loads the mentioned training and test " -"sets." +#: ../../source/tutorial-quickstart-jax.rst:51 +#, fuzzy +msgid "The ``load_data()`` function loads the mentioned training and test sets." msgstr "code:`load_data()` 함수는 앞서 언급한 트레이닝 및 테스트 세트를 로드합니다." -#: ../../source/tutorial-quickstart-jax.rst:53 +#: ../../source/tutorial-quickstart-jax.rst:63 +#, fuzzy msgid "" -"The model architecture (a very simple :code:`Linear Regression` model) is" -" defined in :code:`load_model()`." +"The model architecture (a very simple ``Linear Regression`` model) is " +"defined in ``load_model()``." msgstr "모델 아키텍처(매우 간단한 :code:`선형 회귀` 모델)는 :code:`load_model()`에 정의되어 있습니다." 
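The ``load_data()`` and ``load_model()`` helpers described above are not reproduced in this catalog; a minimal sketch consistent with those descriptions might look as follows. The sample counts, feature dimension, and the dictionary layout of the model parameters are assumptions, not taken from the tutorial itself:

```python
# Hypothetical sketch of the jax_training.py pieces described above;
# the real tutorial code may differ in names and details.
import jax.numpy as jnp
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split


def load_data():
    # Synthetic regression dataset, split into training and test sets
    X, y = make_regression(n_samples=300, n_features=3, noise=1.0, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
    return X_train, y_train, X_test, y_test


def load_model(model_shape):
    # A very simple linear regression model: one weight vector and one bias
    return {
        "w": jnp.zeros(model_shape),
        "b": jnp.zeros(()),
    }
```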
-#: ../../source/tutorial-quickstart-jax.rst:65 +#: ../../source/tutorial-quickstart-jax.rst:73 +#, fuzzy msgid "" -"We now need to define the training (function :code:`train()`), which " -"loops over the training set and measures the loss (function " -":code:`loss_fn()`) for each batch of training examples. The loss function" -" is separate since JAX takes derivatives with a :code:`grad()` function " -"(defined in the :code:`main()` function and called in :code:`train()`)." +"We now need to define the training (function ``train()``), which loops " +"over the training set and measures the loss (function ``loss_fn()``) for " +"each batch of training examples. The loss function is separate since JAX " +"takes derivatives with a ``grad()`` function (defined in the ``main()`` " +"function and called in ``train()``)." msgstr "" "이제 훈련 집합을 반복하고 각 훈련 예제 배치에 대해 손실을 측정하는(함수 :code:`loss_fn()`) 훈련(함수 " ":code:`train()`)을 정의해야 합니다. JAX는 :code:`grad()` 함수(:code:`main()` 함수에 " "정의되고 :code:`train()`에서 호출됨)로 파생물을 취하므로 손실 함수는 분리되어 있습니다." -#: ../../source/tutorial-quickstart-jax.rst:83 +#: ../../source/tutorial-quickstart-jax.rst:95 +#, fuzzy msgid "" -"The evaluation of the model is defined in the function " -":code:`evaluation()`. The function takes all test examples and measures " -"the loss of the linear regression model." +"The evaluation of the model is defined in the function ``evaluation()``. " +"The function takes all test examples and measures the loss of the linear " +"regression model." msgstr "" "모델의 평가는 :code:`evaluation()` 함수에 정의되어 있습니다. 이 함수는 모든 테스트 예제를 가져와 선형 회귀 " "모델의 손실을 측정합니다." -#: ../../source/tutorial-quickstart-jax.rst:94 +#: ../../source/tutorial-quickstart-jax.rst:107 +#, fuzzy msgid "" "Having defined the data loading, model architecture, training, and " "evaluation we can put everything together and train our model using JAX. " -"As already mentioned, the :code:`jax.grad()` function is defined in " -":code:`main()` and passed to :code:`train()`." +"As already mentioned, the ``jax.grad()`` function is defined in " +"``main()`` and passed to ``train()``." msgstr "" "데이터 로딩, 모델 아키텍처, 훈련 및 평가를 정의했으므로 이제 모든 것을 종합하여 JAX를 사용 모델을 훈련할 수 있습니다. 이미" " 언급했듯이 :code:`jax.grad()` 함수는 :code:`main()`에 정의되어 :code:`train()`에 " "전달됩니다." -#: ../../source/tutorial-quickstart-jax.rst:111 +#: ../../source/tutorial-quickstart-jax.rst:126 msgid "You can now run your (centralized) JAX linear regression workload:" msgstr "이제 (중앙 집중식) JAX 선형 회귀 워크로드를 실행할 수 있습니다:" -#: ../../source/tutorial-quickstart-jax.rst:117 +#: ../../source/tutorial-quickstart-jax.rst:132 msgid "" "So far this should all look fairly familiar if you've used JAX before. " "Let's take the next step and use what we've built to create a simple " @@ -22463,49 +23067,51 @@ msgstr "" "지금까지는 JAX를 사용해 본 적이 있다면 이 모든 것이 상당히 익숙해 보일 것입니다. 다음 단계로 넘어가서 우리가 구축한 것을 " "사용하여 하나의 서버와 두 개의 클라이언트로 구성된 간단한 연합 학습 시스템을 만들어 보겠습니다." -#: ../../source/tutorial-quickstart-jax.rst:121 +#: ../../source/tutorial-quickstart-jax.rst:137 msgid "JAX meets Flower" msgstr "JAX와 Flower의 만남" -#: ../../source/tutorial-quickstart-jax.rst:123 +#: ../../source/tutorial-quickstart-jax.rst:139 +#, fuzzy msgid "" "The concept of federating an existing workload is always the same and " "easy to understand. We have to start a *server* and then use the code in " -":code:`jax_training.py` for the *clients* that are connected to the " -"*server*. The *server* sends model parameters to the clients. The " -"*clients* run the training and update the parameters. 
The updated " -"parameters are sent back to the *server*, which averages all received " -"parameter updates. This describes one round of the federated learning " -"process, and we repeat this for multiple rounds." +"``jax_training.py`` for the *clients* that are connected to the *server*." +" The *server* sends model parameters to the clients. The *clients* run " +"the training and update the parameters. The updated parameters are sent " +"back to the *server*, which averages all received parameter updates. This" +" describes one round of the federated learning process, and we repeat " +"this for multiple rounds." msgstr "" "기존 워크로드를 연합하는 개념은 항상 동일하고 이해하기 쉽습니다. 서버*를 시작한 다음 *서버*에 연결된 *클라이언트*에 대해 " ":code:`jax_training.py`의 코드를 사용해야 합니다. *서버*는 모델 파라미터를 클라이언트로 전송합니다. " "클라이언트는 학습을 실행하고 파라미터를 업데이트합니다. 업데이트된 파라미터는 *서버*로 다시 전송되며, 수신된 모든 파라미터 " "업데이트의 평균을 구합니다. 이는 연합 학습 프로세스의 한 라운드를 설명하며, 이 과정을 여러 라운드에 걸쳐 반복합니다." -#: ../../source/tutorial-quickstart-jax.rst:145 +#: ../../source/tutorial-quickstart-jax.rst:167 +#, fuzzy msgid "" -"Finally, we will define our *client* logic in :code:`client.py` and build" -" upon the previously defined JAX training in :code:`jax_training.py`. Our" -" *client* needs to import :code:`flwr`, but also :code:`jax` and " -":code:`jaxlib` to update the parameters on our JAX model:" +"Finally, we will define our *client* logic in ``client.py`` and build " +"upon the previously defined JAX training in ``jax_training.py``. Our " +"*client* needs to import ``flwr``, but also ``jax`` and ``jaxlib`` to " +"update the parameters on our JAX model:" msgstr "" "마지막으로, :code:`client.py`에서 *client* 로직을 정의하고 :code:`jax_training.py`에서 " "이전에 정의한 JAX 교육을 기반으로 빌드합니다. *클라이언트*는 :code:`flwr`을 가져와야 하며, JAX 모델의 파라미터를" " 업데이트하기 위해 :code:`jax` 및 :code:`jaxlib`도 가져와야 합니다:" -#: ../../source/tutorial-quickstart-jax.rst:160 +#: ../../source/tutorial-quickstart-jax.rst:182 +#, fuzzy msgid "" "Implementing a Flower *client* basically means implementing a subclass of" -" either :code:`flwr.client.Client` or :code:`flwr.client.NumPyClient`. " -"Our implementation will be based on :code:`flwr.client.NumPyClient` and " -"we'll call it :code:`FlowerClient`. :code:`NumPyClient` is slightly " -"easier to implement than :code:`Client` if you use a framework with good " -"NumPy interoperability (like JAX) because it avoids some of the " -"boilerplate that would otherwise be necessary. :code:`FlowerClient` needs" -" to implement four methods, two methods for getting/setting model " -"parameters, one method for training the model, and one method for testing" -" the model:" +" either ``flwr.client.Client`` or ``flwr.client.NumPyClient``. Our " +"implementation will be based on ``flwr.client.NumPyClient`` and we'll " +"call it ``FlowerClient``. ``NumPyClient`` is slightly easier to implement" +" than ``Client`` if you use a framework with good NumPy interoperability " +"(like JAX) because it avoids some of the boilerplate that would otherwise" +" be necessary. ``FlowerClient`` needs to implement four methods, two " +"methods for getting/setting model parameters, one method for training the" +" model, and one method for testing the model:" msgstr "" "Flower *클라이언트*를 구현한다는 것은 기본적으로 :code:`flwr.client.Client` 또는 " ":code:`flwr.client.NumPyClient`의 서브클래스를 구현하는 것을 의미합니다. 
구현은 " @@ -22515,40 +23121,43 @@ msgstr "" "code:`FlowerClient`는 모델 매개변수를 가져오거나 설정하는 메서드 2개, 모델 학습을 위한 메서드 1개, 모델 " "테스트를 위한 메서드 1개 등 총 4개의 메서드를 구현해야 합니다:" -#: ../../source/tutorial-quickstart-jax.rst:165 -msgid ":code:`set_parameters (optional)`" +#: ../../source/tutorial-quickstart-jax.rst:194 +#, fuzzy +msgid "``set_parameters (optional)``" msgstr ":code:`set_parameters (선택사항)`" -#: ../../source/tutorial-quickstart-jax.rst:167 -msgid "transform parameters to NumPy :code:`ndarray`'s" +#: ../../source/tutorial-quickstart-jax.rst:193 +#, fuzzy +msgid "transform parameters to NumPy ``ndarray``'s" msgstr "매개 변수를 NumPy :code:`ndarray`로 변환" -#: ../../source/tutorial-quickstart-jax.rst:174 +#: ../../source/tutorial-quickstart-jax.rst:203 msgid "get the updated local model parameters and return them to the server" msgstr "업데이트된 로컬 모델 파라미터를 가져와 서버로 반환합니다" -#: ../../source/tutorial-quickstart-jax.rst:178 +#: ../../source/tutorial-quickstart-jax.rst:208 msgid "return the local loss to the server" msgstr "로컬 손실을 서버로 반환합니다" -#: ../../source/tutorial-quickstart-jax.rst:180 +#: ../../source/tutorial-quickstart-jax.rst:210 +#, fuzzy msgid "" "The challenging part is to transform the JAX model parameters from " -":code:`DeviceArray` to :code:`NumPy ndarray` to make them compatible with" -" `NumPyClient`." +"``DeviceArray`` to ``NumPy ndarray`` to make them compatible with " +"`NumPyClient`." msgstr "" "어려운 부분은 JAX 모델 매개변수를 :code:`DeviceArray`에서 :code:`NumPy ndarray`로 변환하여 " "`NumPyClient`와 호환되도록 하는 것입니다." -#: ../../source/tutorial-quickstart-jax.rst:182 +#: ../../source/tutorial-quickstart-jax.rst:213 +#, fuzzy msgid "" -"The two :code:`NumPyClient` methods :code:`fit` and :code:`evaluate` make" -" use of the functions :code:`train()` and :code:`evaluate()` previously " -"defined in :code:`jax_training.py`. So what we really do here is we tell " -"Flower through our :code:`NumPyClient` subclass which of our already " -"defined functions to call for training and evaluation. We included type " -"annotations to give you a better understanding of the data types that get" -" passed around." +"The two ``NumPyClient`` methods ``fit`` and ``evaluate`` make use of the " +"functions ``train()`` and ``evaluate()`` previously defined in " +"``jax_training.py``. So what we really do here is we tell Flower through " +"our ``NumPyClient`` subclass which of our already defined functions to " +"call for training and evaluation. We included type annotations to give " +"you a better understanding of the data types that get passed around." msgstr "" "두 개의 :code:`NumPyClient` 메서드인 :code:`fit`과 :code:`evaluate`는 이전에 " ":code:`jax_training.py`에 정의된 함수 :code:`train()`과 :code:`evaluate()`를 " @@ -22556,11 +23165,11 @@ msgstr "" ":code:`NumPyClient` 서브클래스를 통해 Flower에게 알려주는 것입니다. 전달되는 데이터 유형을 더 잘 이해할 수 " "있도록 유형 type annotation을 포함했습니다." -#: ../../source/tutorial-quickstart-jax.rst:251 +#: ../../source/tutorial-quickstart-jax.rst:286 msgid "Having defined the federation process, we can run it." msgstr "연합 프로세스를 정의했으면 이제 실행할 수 있습니다." -#: ../../source/tutorial-quickstart-jax.rst:280 +#: ../../source/tutorial-quickstart-jax.rst:315 msgid "" "in each window (make sure that the server is still running before you do " "so) and see your JAX project run federated learning across two clients. " @@ -22569,7 +23178,7 @@ msgstr "" "를 입력하고(그 전에 서버가 계속 실행 중인지 확인하세요) 두 클라이언트에서 연합 학습을 실행하는 JAX 프로젝트를 확인합니다. " "축하합니다!" 
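For reference, the strings above describe a ``FlowerClient`` subclass of ``flwr.client.NumPyClient`` with four methods (get/set parameters, ``fit``, ``evaluate``) and the conversion of JAX arrays to NumPy ``ndarray``'s. A minimal sketch under those assumptions (the ``jax_training`` helpers and their signatures are assumed from the description above, not taken verbatim from the example):

    # Illustrative sketch only -- a NumPyClient shaped like the one the strings
    # above describe; the jax_training helpers and their signatures are assumed.
    import flwr as fl
    import jax
    import jax.numpy as jnp
    import numpy as np

    import jax_training  # assumed module: load_data, load_model, loss_fn, train, evaluation


    class FlowerClient(fl.client.NumPyClient):
        def __init__(self):
            self.train_x, self.test_x, self.train_y, self.test_y = jax_training.load_data()
            self.params = jax_training.load_model((self.train_x.shape[1],))
            self.grad_fn = jax.grad(jax_training.loss_fn)

        def get_parameters(self, config):
            # Transform the JAX model parameters to NumPy ndarrays for transport.
            return [np.asarray(self.params["w"]), np.asarray(self.params["b"])]

        def set_parameters(self, parameters):
            # Reverse operation: write the received ndarrays back into the JAX model.
            self.params["w"] = jnp.asarray(parameters[0])
            self.params["b"] = jnp.asarray(parameters[1])

        def fit(self, parameters, config):
            self.set_parameters(parameters)
            # Assumed signature: returns updated params, final loss, number of examples.
            self.params, loss, num_examples = jax_training.train(
                self.params, self.grad_fn, self.train_x, self.train_y
            )
            return self.get_parameters(config={}), num_examples, {"loss": float(loss)}

        def evaluate(self, parameters, config):
            self.set_parameters(parameters)
            # Assumed signature: returns loss and number of test examples.
            loss, num_examples = jax_training.evaluation(
                self.params, self.grad_fn, self.test_x, self.test_y
            )
            return float(loss), num_examples, {"loss": float(loss)}


    # Example usage (server address is an example value):
    # fl.client.start_client(
    #     server_address="0.0.0.0:8080", client=FlowerClient().to_client()
    # )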
-#: ../../source/tutorial-quickstart-jax.rst:285 +#: ../../source/tutorial-quickstart-jax.rst:321 msgid "" "The source code of this example was improved over time and can be found " "here: `Quickstart JAX `_. 두 " "클라이언트가 동일한 데이터 세트를 로드하기 때문에 이 예제는 다소 단순화되어 있습니다." -#: ../../source/tutorial-quickstart-jax.rst:288 +#: ../../source/tutorial-quickstart-jax.rst:325 msgid "" "You're now prepared to explore this topic further. How about using a more" " sophisticated model or using a different dataset? How about adding more " @@ -22589,12 +23198,12 @@ msgstr "" "이제 이 주제를 더 자세히 살펴볼 준비가 되었습니다. 더 정교한 모델을 사용하거나 다른 데이터 집합을 사용해 보는 것은 어떨까요? " "클라이언트를 더 추가하는 것은 어떨까요?" -#: ../../source/tutorial-quickstart-mlx.rst:5 +#: ../../source/tutorial-quickstart-mlx.rst:4 #, fuzzy msgid "Quickstart MLX" msgstr "빠른 시작" -#: ../../source/tutorial-quickstart-mlx.rst:7 +#: ../../source/tutorial-quickstart-mlx.rst:6 msgid "" "In this federated learning tutorial we will learn how to train simple MLP" " on MNIST using Flower and MLX. It is recommended to create a virtual " @@ -22602,7 +23211,7 @@ msgid "" "how-to-set-up-a-virtual-env>`." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:12 +#: ../../source/tutorial-quickstart-mlx.rst:10 msgid "" "Let's use `flwr new` to create a complete Flower+MLX project. It will " "generate all the files needed to run, by default with the Simulation " @@ -22614,64 +23223,24 @@ msgid "" "api/flwr_datasets.partitioner.IidPartitioner.html#flwr_datasets.partitioner.IidPartitioner>`_." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:19 -#: ../../source/tutorial-quickstart-pytorch.rst:19 -msgid "" -"Now that we have a rough idea of what this example is about, let's get " -"started. First, install Flower in your new environment:" -msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:27 +#: ../../source/tutorial-quickstart-mlx.rst:25 msgid "" "Then, run the command below. You will be prompted to select of the " "available templates (choose ``MLX``), give a name to your project, and " "type in your developer name:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:35 -#: ../../source/tutorial-quickstart-pytorch.rst:35 -msgid "" -"After running it you'll notice a new directory with your project name has" -" been created. It should have the following structure:" -msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:49 -#: ../../source/tutorial-quickstart-pytorch.rst:49 -msgid "" -"If you haven't yet installed the project and its dependencies, you can do" -" so by:" -msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:57 +#: ../../source/tutorial-quickstart-mlx.rst:53 msgid "To run the project do:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:64 -#: ../../source/tutorial-quickstart-pytorch.rst:64 -msgid "With default arguments you will see an output like this one:" -msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:106 +#: ../../source/tutorial-quickstart-mlx.rst:102 msgid "" "You can also override the parameters defined in " "``[tool.flwr.app.config]`` section in the ``pyproject.toml`` like this:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:114 -#: ../../source/tutorial-quickstart-pytorch.rst:113 -msgid "" -"What follows is an explanation of each component in the project you just " -"created: dataset partition, the model, defining the ``ClientApp`` and " -"defining the ``ServerApp``." 
-msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:120 -#: ../../source/tutorial-quickstart-pytorch.rst:119 -#, fuzzy -msgid "The Data" -msgstr "Metadata" - -#: ../../source/tutorial-quickstart-mlx.rst:122 +#: ../../source/tutorial-quickstart-mlx.rst:116 msgid "" "We will use `Flower Datasets `_ to " "easily download and partition the `MNIST` dataset. In this example you'll" @@ -22682,31 +23251,20 @@ msgid "" "api/flwr_datasets.partitioner.html>`_ available in Flower Datasets:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:164 -#: ../../source/tutorial-quickstart-pytorch.rst:157 -msgid "The Model" -msgstr "" - -#: ../../source/tutorial-quickstart-mlx.rst:166 +#: ../../source/tutorial-quickstart-mlx.rst:157 msgid "" "We define the model as in the `centralized MLX example " "`_, it's a " "simple MLP:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:190 +#: ../../source/tutorial-quickstart-mlx.rst:180 msgid "" "We also define some utility functions to test our model and to iterate " "over batches." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:210 -#: ../../source/tutorial-quickstart-pytorch.rst:234 -#, fuzzy -msgid "The ClientApp" -msgstr "클라이언트앱" - -#: ../../source/tutorial-quickstart-mlx.rst:212 +#: ../../source/tutorial-quickstart-mlx.rst:201 msgid "" "The main changes we have to make to use `MLX` with `Flower` will be found" " in the ``get_params()`` and ``set_params()`` functions. Indeed, MLX " @@ -22715,17 +23273,17 @@ msgid "" "messages to work)." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:218 +#: ../../source/tutorial-quickstart-mlx.rst:206 msgid "The way MLX stores its parameters is as follows:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:231 +#: ../../source/tutorial-quickstart-mlx.rst:219 msgid "" "Therefore, to get our list of ``np.array`` objects, we need to extract " "each array and convert them into a NumPy array:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:240 +#: ../../source/tutorial-quickstart-mlx.rst:228 msgid "" "For the ``set_params()`` function, we perform the reverse operation. We " "receive a list of NumPy arrays and want to convert them into MLX " @@ -22733,24 +23291,24 @@ msgid "" "them to the `weight` and `bias` keys of each layer dict:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:255 +#: ../../source/tutorial-quickstart-mlx.rst:243 msgid "" "The rest of the functionality is directly inspired by the centralized " "case. The ``fit()`` method in the client trains the model using the local" " dataset:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:272 +#: ../../source/tutorial-quickstart-mlx.rst:259 msgid "" "Here, after updating the parameters, we perform the training as in the " "centralized case, and return the new parameters." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:275 +#: ../../source/tutorial-quickstart-mlx.rst:262 msgid "And for the ``evaluate()`` method of the client:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:285 +#: ../../source/tutorial-quickstart-mlx.rst:272 msgid "" "We also begin by updating the parameters with the ones sent by the " "server, and then we compute the loss and accuracy using the functions " @@ -22758,11 +23316,11 @@ msgid "" "the `MLP` model as well as other components such as the optimizer." 
msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:290 +#: ../../source/tutorial-quickstart-mlx.rst:277 msgid "Putting everything together we have:" msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:344 +#: ../../source/tutorial-quickstart-mlx.rst:331 msgid "" "Finally, we can construct a ``ClientApp`` using the ``FlowerClient`` " "defined above by means of a ``client_fn()`` callback. Note that " @@ -22773,13 +23331,7 @@ msgid "" "method." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:376 -#: ../../source/tutorial-quickstart-pytorch.rst:321 -#, fuzzy -msgid "The ServerApp" -msgstr "Flower 서버앱" - -#: ../../source/tutorial-quickstart-mlx.rst:378 +#: ../../source/tutorial-quickstart-mlx.rst:363 msgid "" "To construct a ``ServerApp``, we define a ``server_fn()`` callback with " "an identical signature to that of ``client_fn()``, but the return type is" @@ -22790,14 +23342,15 @@ msgid "" "``FedAvg`` strategy." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:402 -#: ../../source/tutorial-quickstart-pytorch.rst:360 +#: ../../source/tutorial-quickstart-mlx.rst:386 +#: ../../source/tutorial-quickstart-pytorch.rst:344 +#: ../../source/tutorial-quickstart-tensorflow.rst:266 msgid "" "Congratulations! You've successfully built and run your first federated " "learning system." msgstr "" -#: ../../source/tutorial-quickstart-mlx.rst:407 +#: ../../source/tutorial-quickstart-mlx.rst:390 msgid "" "Check the `source code `_ of the extended version of this tutorial in ``examples" @@ -22810,15 +23363,15 @@ msgid "" "with Pandas to perform Federated Analytics." msgstr "" -#: ../../source/tutorial-quickstart-pandas.rst:5 +#: ../../source/tutorial-quickstart-pandas.rst:4 msgid "Quickstart Pandas" msgstr "" -#: ../../source/tutorial-quickstart-pandas.rst:10 +#: ../../source/tutorial-quickstart-pandas.rst:9 msgid "Let's build a federated analytics system using Pandas and Flower!" msgstr "" -#: ../../source/tutorial-quickstart-pandas.rst:12 +#: ../../source/tutorial-quickstart-pandas.rst:11 msgid "" "Please refer to the `full code example " "`_ " @@ -22831,7 +23384,7 @@ msgid "" "with PyTorch to train a CNN model on MNIST." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:7 +#: ../../source/tutorial-quickstart-pytorch.rst:6 msgid "" "In this federated learning tutorial we will learn how to train a " "Convolutional Neural Network on CIFAR-10 using Flower and PyTorch. It is " @@ -22839,7 +23392,7 @@ msgid "" ":doc:`virtualenv `." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:12 +#: ../../source/tutorial-quickstart-pytorch.rst:11 msgid "" "Let's use `flwr new` to create a complete Flower+PyTorch project. It will" " generate all the files needed to run, by default with the Flower " @@ -22851,24 +23404,14 @@ msgid "" "api/flwr_datasets.partitioner.IidPartitioner.html#flwr_datasets.partitioner.IidPartitioner>`_." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:27 +#: ../../source/tutorial-quickstart-pytorch.rst:26 msgid "" "Then, run the command below. 
You will be prompted to select one of the " "available templates (choose ``PyTorch``), give a name to your project, " "and type in your developer name:" msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:57 -msgid "To run the project, do:" -msgstr "" - -#: ../../source/tutorial-quickstart-pytorch.rst:105 -msgid "" -"You can also override the parameters defined in the " -"``[tool.flwr.app.config]`` section in ``pyproject.toml`` like this:" -msgstr "" - -#: ../../source/tutorial-quickstart-pytorch.rst:121 +#: ../../source/tutorial-quickstart-pytorch.rst:117 msgid "" "This tutorial uses `Flower Datasets `_ " "to easily download and partition the `CIFAR-10` dataset. In this example " @@ -22882,13 +23425,13 @@ msgid "" " that correspond to their data partition." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:159 +#: ../../source/tutorial-quickstart-pytorch.rst:152 msgid "" "We defined a simple Convolutional Neural Network (CNN), but feel free to " "replace it with a more sophisticated model if you'd like:" msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:184 +#: ../../source/tutorial-quickstart-pytorch.rst:177 msgid "" "In addition to defining the model architecture, we also include two " "utility functions to perform both training (i.e. ``train()``) and " @@ -22901,7 +23444,7 @@ msgid "" "training or evaluation:" msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:236 +#: ../../source/tutorial-quickstart-pytorch.rst:226 msgid "" "The main changes we have to make to use `PyTorch` with `Flower` will be " "found in the ``get_weights()`` and ``set_weights()`` functions. In " @@ -22911,23 +23454,7 @@ msgid "" "PyTorch model. Doing this in fairly easy in PyTorch." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:245 -msgid "" -"The specific implementation of ``get_weights()`` and ``set_weights()`` " -"depends on the type of models you use. The ones shown below work for a " -"wide range of PyTorch models but you might need to adjust them if you " -"have more exotic model architectures." -msgstr "" - -#: ../../source/tutorial-quickstart-pytorch.rst:261 -msgid "" -"The rest of the functionality is directly inspired by the centralized " -"case. The ``fit()`` method in the client trains the model using the local" -" dataset. Similarly, the ``evaluate()`` method is used to evaluate the " -"model received on a held-out validation set that the client might have:" -msgstr "" - -#: ../../source/tutorial-quickstart-pytorch.rst:294 +#: ../../source/tutorial-quickstart-pytorch.rst:282 msgid "" "Finally, we can construct a ``ClientApp`` using the ``FlowerClient`` " "defined above by means of a ``client_fn()`` callback. Note that the " @@ -22938,7 +23465,7 @@ msgid "" "additioinal hyperparameters in ``pyproject.toml`` and access them here." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:323 +#: ../../source/tutorial-quickstart-pytorch.rst:309 msgid "" "To construct a ``ServerApp`` we define a ``server_fn()`` callback with an" " identical signature to that of ``client_fn()`` but the return type is " @@ -22952,46 +23479,66 @@ msgid "" "``pyproject.toml``." msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:365 +#: ../../source/tutorial-quickstart-pytorch.rst:348 msgid "" "Check the `source code `_ of the extended version of this tutorial in " "``examples/quickstart-pytorch`` in the Flower GitHub repository." 
msgstr "" -#: ../../source/tutorial-quickstart-pytorch.rst:372 +#: ../../source/tutorial-quickstart-pytorch.rst:354 +#: ../../source/tutorial-quickstart-tensorflow.rst:278 #, fuzzy msgid "Video tutorial" msgstr "튜토리얼" -#: ../../source/tutorial-quickstart-pytorch.rst:376 +#: ../../source/tutorial-quickstart-pytorch.rst:358 msgid "" "The video shown below shows how to setup a PyTorch + Flower project using" " our previously recommended APIs. A new video tutorial will be released " "that shows the new APIs (as the content above does)" msgstr "" -#: ../../source/tutorial-quickstart-pytorch-lightning.rst:-1 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:4 +msgid "Quickstart PyTorch Lightning" +msgstr "" + +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:6 msgid "" -"Check out this Federated Learning quickstart tutorial for using Flower " -"with PyTorch Lightning to train an Auto Encoder model on MNIST." +"In this federated learning tutorial we will learn how to train an " +"AutoEncoder model on MNIST using Flower and PyTorch Lightning. It is " +"recommended to create a virtual environment and run everything within a " +":doc:`virtualenv `." msgstr "" -#: ../../source/tutorial-quickstart-pytorch-lightning.rst:5 -msgid "Quickstart PyTorch Lightning" +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:19 +msgid "" +"This will create a new directory called `quickstart-pytorch-lightning` " +"containing the following files:" msgstr "" -#: ../../source/tutorial-quickstart-pytorch-lightning.rst:10 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:42 msgid "" -"Let's build a horizontal federated learning system using PyTorch " -"Lightning and Flower!" +"By default, Flower Simulation Engine will be started and it will create a" +" federation of 4 nodes using `FedAvg `_ " +"as the aggregation strategy. The dataset will be partitioned using Flower" +" Dataset's `IidPartitioner `_." +" To run the project, do:" msgstr "" -#: ../../source/tutorial-quickstart-pytorch-lightning.rst:12 +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:93 msgid "" -"Please refer to the `full code example " -"`_ to learn more." +"Each simulated `ClientApp` (two per round) will also log a summary of " +"their local training process. Expect this output to be similar to:" +msgstr "" + +#: ../../source/tutorial-quickstart-pytorch-lightning.rst:115 +msgid "" +"Check the `source code `_ of this tutorial in ``examples" +"/quickstart-pytorch-lightning`` in the Flower GitHub repository." msgstr "" #: ../../source/tutorial-quickstart-scikitlearn.rst:-1 @@ -23000,14 +23547,14 @@ msgid "" "with scikit-learn to train a linear regression model." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:5 +#: ../../source/tutorial-quickstart-scikitlearn.rst:4 msgid "Quickstart scikit-learn" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:10 +#: ../../source/tutorial-quickstart-scikitlearn.rst:9 msgid "" -"In this tutorial, we will learn how to train a :code:`Logistic " -"Regression` model on MNIST using Flower and scikit-learn." +"In this tutorial, we will learn how to train a ``Logistic Regression`` " +"model on MNIST using Flower and scikit-learn." msgstr "" #: ../../source/tutorial-quickstart-scikitlearn.rst:12 @@ -23016,13 +23563,13 @@ msgid "" "within this :doc:`virtualenv `." 
msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:14 +#: ../../source/tutorial-quickstart-scikitlearn.rst:15 msgid "" "Our example consists of one *server* and two *clients* all having the " "same model." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:16 +#: ../../source/tutorial-quickstart-scikitlearn.rst:17 msgid "" "*Clients* are responsible for generating individual model parameter " "updates for the model based on their local datasets. These updates are " @@ -23032,189 +23579,190 @@ msgid "" "called a *round*." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:20 +#: ../../source/tutorial-quickstart-scikitlearn.rst:23 msgid "" "Now that we have a rough idea of what is going on, let's get started. We " "first need to install Flower. You can do this by running:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:26 +#: ../../source/tutorial-quickstart-scikitlearn.rst:30 msgid "Since we want to use scikit-learn, let's go ahead and install it:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:32 +#: ../../source/tutorial-quickstart-scikitlearn.rst:36 msgid "Or simply install all dependencies using Poetry:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:42 +#: ../../source/tutorial-quickstart-scikitlearn.rst:45 msgid "" "Now that we have all our dependencies installed, let's run a simple " "distributed training with two clients and one server. However, before " "setting up the client and server, we will define all functionalities that" -" we need for our federated learning setup within :code:`utils.py`. The " -":code:`utils.py` contains different functions defining all the machine " +" we need for our federated learning setup within ``utils.py``. The " +"``utils.py`` contains different functions defining all the machine " "learning basics:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:45 -msgid ":code:`get_model_parameters()`" -msgstr "" +#: ../../source/tutorial-quickstart-scikitlearn.rst:51 +#, fuzzy +msgid "``get_model_parameters()``" +msgstr "모델 매개변수." -#: ../../source/tutorial-quickstart-scikitlearn.rst:46 -msgid "Returns the parameters of a :code:`sklearn` LogisticRegression model" +#: ../../source/tutorial-quickstart-scikitlearn.rst:52 +msgid "Returns the parameters of a ``sklearn`` LogisticRegression model" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:47 -msgid ":code:`set_model_params()`" +#: ../../source/tutorial-quickstart-scikitlearn.rst:53 +msgid "``set_model_params()``" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:48 -msgid "Sets the parameters of a :code:`sklearn` LogisticRegression model" +#: ../../source/tutorial-quickstart-scikitlearn.rst:54 +msgid "Sets the parameters of a ``sklearn`` LogisticRegression model" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:49 -msgid ":code:`set_initial_params()`" +#: ../../source/tutorial-quickstart-scikitlearn.rst:56 +msgid "``set_initial_params()``" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:50 +#: ../../source/tutorial-quickstart-scikitlearn.rst:56 msgid "Initializes the model parameters that the Flower server will ask for" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:52 +#: ../../source/tutorial-quickstart-scikitlearn.rst:58 msgid "" -"Please check out :code:`utils.py` `here " +"Please check out ``utils.py`` `here " "`_ for more details. The pre-defined functions are used in" -" the :code:`client.py` and imported. 
The :code:`client.py` also requires " -"to import several packages such as Flower and scikit-learn:" +" the ``client.py`` and imported. The ``client.py`` also requires to " +"import several packages such as Flower and scikit-learn:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:67 +#: ../../source/tutorial-quickstart-scikitlearn.rst:75 msgid "" "Prior to local training, we need to load the MNIST dataset, a popular " "image classification dataset of handwritten digits for machine learning, " "and partition the dataset for FL. This can be conveniently achieved using" " `Flower Datasets `_. The " -":code:`FederatedDataset.load_partition()` method loads the partitioned " -"training set for each partition ID defined in the :code:`--partition-id` " +"``FederatedDataset.load_partition()`` method loads the partitioned " +"training set for each partition ID defined in the ``--partition-id`` " "argument." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:95 +#: ../../source/tutorial-quickstart-scikitlearn.rst:106 msgid "" "Next, the logistic regression model is defined and initialized with " -":code:`utils.set_initial_params()`." +"``utils.set_initial_params()``." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:107 +#: ../../source/tutorial-quickstart-scikitlearn.rst:119 msgid "" "The Flower server interacts with clients through an interface called " -":code:`Client`. When the server selects a particular client for training," -" it sends training instructions over the network. The client receives " -"those instructions and calls one of the :code:`Client` methods to run " -"your code (i.e., to fit the logistic regression we defined earlier)." +"``Client``. When the server selects a particular client for training, it " +"sends training instructions over the network. The client receives those " +"instructions and calls one of the ``Client`` methods to run your code " +"(i.e., to fit the logistic regression we defined earlier)." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:113 +#: ../../source/tutorial-quickstart-scikitlearn.rst:124 msgid "" -"Flower provides a convenience class called :code:`NumPyClient` which " -"makes it easier to implement the :code:`Client` interface when your " -"workload uses scikit-learn. Implementing :code:`NumPyClient` usually " -"means defining the following methods (:code:`set_parameters` is optional " -"though):" +"Flower provides a convenience class called ``NumPyClient`` which makes it" +" easier to implement the ``Client`` interface when your workload uses " +"scikit-learn. 
Implementing ``NumPyClient`` usually means defining the " +"following methods (``set_parameters`` is optional though):" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:119 +#: ../../source/tutorial-quickstart-scikitlearn.rst:130 msgid "return the model weight as a list of NumPy ndarrays" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:120 -msgid ":code:`set_parameters` (optional)" -msgstr "" +#: ../../source/tutorial-quickstart-scikitlearn.rst:132 +#, fuzzy +msgid "``set_parameters`` (optional)" +msgstr ":code:`set_parameters (선택사항)`" -#: ../../source/tutorial-quickstart-scikitlearn.rst:121 +#: ../../source/tutorial-quickstart-scikitlearn.rst:132 msgid "" "update the local model weights with the parameters received from the " "server" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:122 -msgid "is directly imported with :code:`utils.set_model_params()`" +#: ../../source/tutorial-quickstart-scikitlearn.rst:133 +msgid "is directly imported with ``utils.set_model_params()``" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:124 +#: ../../source/tutorial-quickstart-scikitlearn.rst:135 msgid "set the local model weights" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:125 +#: ../../source/tutorial-quickstart-scikitlearn.rst:136 msgid "train the local model" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:126 +#: ../../source/tutorial-quickstart-scikitlearn.rst:137 #, fuzzy msgid "return the updated local model weights" msgstr "현재 로컬 모델 파라미터를 반환합니다." -#: ../../source/tutorial-quickstart-scikitlearn.rst:128 +#: ../../source/tutorial-quickstart-scikitlearn.rst:139 msgid "test the local model" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:130 +#: ../../source/tutorial-quickstart-scikitlearn.rst:141 msgid "The methods can be implemented in the following way:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:153 +#: ../../source/tutorial-quickstart-scikitlearn.rst:163 msgid "" -"We can now create an instance of our class :code:`MnistClient` and add " -"one line to actually run this client:" +"We can now create an instance of our class ``MnistClient`` and add one " +"line to actually run this client:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:160 +#: ../../source/tutorial-quickstart-scikitlearn.rst:170 msgid "" -"That's it for the client. We only have to implement :code:`Client` or " -":code:`NumPyClient` and call :code:`fl.client.start_client()`. If you " -"implement a client of type :code:`NumPyClient` you'll need to first call " -"its :code:`to_client()` method. The string :code:`\"0.0.0.0:8080\"` tells" -" the client which server to connect to. In our case we can run the server" -" and the client on the same machine, therefore we use " -":code:`\"0.0.0.0:8080\"`. If we run a truly federated workload with the " -"server and clients running on different machines, all that needs to " -"change is the :code:`server_address` we pass to the client." +"That's it for the client. We only have to implement ``Client`` or " +"``NumPyClient`` and call ``fl.client.start_client()``. If you implement a" +" client of type ``NumPyClient`` you'll need to first call its " +"``to_client()`` method. The string ``\"0.0.0.0:8080\"`` tells the client " +"which server to connect to. In our case we can run the server and the " +"client on the same machine, therefore we use ``\"0.0.0.0:8080\"``. 
If we " +"run a truly federated workload with the server and clients running on " +"different machines, all that needs to change is the ``server_address`` we" +" pass to the client." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:169 +#: ../../source/tutorial-quickstart-scikitlearn.rst:181 msgid "" "The following Flower server is a little bit more advanced and returns an " "evaluation function for the server-side evaluation. First, we import " "again all required libraries such as Flower and scikit-learn." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:172 -msgid ":code:`server.py`, import Flower and start the server:" +#: ../../source/tutorial-quickstart-scikitlearn.rst:185 +msgid "``server.py``, import Flower and start the server:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:185 +#: ../../source/tutorial-quickstart-scikitlearn.rst:198 msgid "" -"The number of federated learning rounds is set in :code:`fit_round()` and" -" the evaluation is defined in :code:`get_evaluate_fn()`. The evaluation " -"function is called after each federated learning round and gives you " -"information about loss and accuracy. Note that we also make use of Flower" -" Datasets here to load the test split of the MNIST dataset for server-" -"side evaluation." +"The number of federated learning rounds is set in ``fit_round()`` and the" +" evaluation is defined in ``get_evaluate_fn()``. The evaluation function " +"is called after each federated learning round and gives you information " +"about loss and accuracy. Note that we also make use of Flower Datasets " +"here to load the test split of the MNIST dataset for server-side " +"evaluation." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:213 +#: ../../source/tutorial-quickstart-scikitlearn.rst:228 msgid "" -"The :code:`main` contains the server-side parameter initialization " -":code:`utils.set_initial_params()` as well as the aggregation strategy " -":code:`fl.server.strategy:FedAvg()`. The strategy is the default one, " +"The ``main`` contains the server-side parameter initialization " +"``utils.set_initial_params()`` as well as the aggregation strategy " +"``fl.server.strategy:FedAvg()``. The strategy is the default one, " "federated averaging (or FedAvg), with two clients and evaluation after " "each federated learning round. The server can be started with the command" -" :code:`fl.server.start_server(server_address=\"0.0.0.0:8080\", " -"strategy=strategy, config=fl.server.ServerConfig(num_rounds=3))`." +" ``fl.server.start_server(server_address=\"0.0.0.0:8080\", " +"strategy=strategy, config=fl.server.ServerConfig(num_rounds=3))``." msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:232 +#: ../../source/tutorial-quickstart-scikitlearn.rst:256 msgid "" "With both client and server ready, we can now run everything and see " "federated learning in action. Federated learning systems usually have a " @@ -23222,133 +23770,139 @@ msgid "" "first:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:239 -#: ../../source/tutorial-quickstart-tensorflow.rst:122 -#: ../../source/tutorial-quickstart-xgboost.rst:575 +#: ../../source/tutorial-quickstart-scikitlearn.rst:264 +#: ../../source/tutorial-quickstart-xgboost.rst:598 msgid "" "Once the server is running we can start the clients in different " "terminals. 
Open a new terminal and start the first client:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:246 -#: ../../source/tutorial-quickstart-tensorflow.rst:129 -#: ../../source/tutorial-quickstart-xgboost.rst:582 +#: ../../source/tutorial-quickstart-scikitlearn.rst:271 +#: ../../source/tutorial-quickstart-xgboost.rst:605 msgid "Open another terminal and start the second client:" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:252 -#: ../../source/tutorial-quickstart-xgboost.rst:588 +#: ../../source/tutorial-quickstart-scikitlearn.rst:277 +#: ../../source/tutorial-quickstart-xgboost.rst:611 msgid "" "Each client will have its own dataset. You should now see how the " "training does in the very first terminal (the one that started the " "server):" msgstr "" -#: ../../source/tutorial-quickstart-scikitlearn.rst:286 +#: ../../source/tutorial-quickstart-scikitlearn.rst:311 msgid "" "Congratulations! You've successfully built and run your first federated " "learning system. The full `source code " "`_ for this example can be found in :code:`examples/sklearn-logreg-" -"mnist`." +"mnist>`_ for this example can be found in ``examples/sklearn-logreg-" +"mnist``." msgstr "" #: ../../source/tutorial-quickstart-tensorflow.rst:-1 msgid "" "Check out this Federated Learning quickstart tutorial for using Flower " -"with TensorFlow to train a MobilNetV2 model on CIFAR-10." +"with TensorFlow to train a CNN model on CIFAR-10." msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:5 +#: ../../source/tutorial-quickstart-tensorflow.rst:4 msgid "Quickstart TensorFlow" msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:13 -msgid "Let's build a federated learning system in less than 20 lines of code!" -msgstr "" - -#: ../../source/tutorial-quickstart-tensorflow.rst:15 -msgid "Before Flower can be imported we have to install it:" -msgstr "" - -#: ../../source/tutorial-quickstart-tensorflow.rst:21 +#: ../../source/tutorial-quickstart-tensorflow.rst:6 msgid "" -"Since we want to use the Keras API of TensorFlow (TF), we have to install" -" TF as well:" -msgstr "" - -#: ../../source/tutorial-quickstart-tensorflow.rst:31 -msgid "Next, in a file called :code:`client.py`, import Flower and TensorFlow:" +"In this tutorial we will learn how to train a Convolutional Neural " +"Network on CIFAR-10 using the Flower framework and TensorFlow. First of " +"all, it is recommended to create a virtual environment and run everything" +" within a :doc:`virtualenv `." msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:38 +#: ../../source/tutorial-quickstart-tensorflow.rst:11 msgid "" -"We use the Keras utilities of TF to load CIFAR10, a popular colored image" -" classification dataset for machine learning. The call to " -":code:`tf.keras.datasets.cifar10.load_data()` downloads CIFAR10, caches " -"it locally, and then returns the entire training and test set as NumPy " -"ndarrays." +"Let's use `flwr new` to create a complete Flower+TensorFlow project. It " +"will generate all the files needed to run, by default with the Flower " +"Simulation Engine, a federation of 10 nodes using `FedAvg " +"`_. The " +"dataset will be partitioned using Flower Dataset's `IidPartitioner " +"`_." msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:47 +#: ../../source/tutorial-quickstart-tensorflow.rst:26 msgid "" -"Next, we need a model. For the purpose of this tutorial, we use " -"MobilNetV2 with 10 output classes:" +"Then, run the command below. 
You will be prompted to select one of the " +"available templates (choose ``TensorFlow``), give a name to your project," +" and type in your developer name:" msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:54 +#: ../../source/tutorial-quickstart-tensorflow.rst:114 msgid "" -"The Flower server interacts with clients through an interface called " -":code:`Client`. When the server selects a particular client for training," -" it sends training instructions over the network. The client receives " -"those instructions and calls one of the :code:`Client` methods to run " -"your code (i.e., to train the neural network we defined earlier)." +"This tutorial uses `Flower Datasets `_ " +"to easily download and partition the `CIFAR-10` dataset. In this example " +"you'll make use of the `IidPartitioner `_" +" to generate `num_partitions` partitions. You can choose `other " +"partitioners `_ available in Flower Datasets. Each " +"``ClientApp`` will call this function to create the ``NumPy`` arrays that" +" correspond to their data partition." msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:60 +#: ../../source/tutorial-quickstart-tensorflow.rst:141 msgid "" -"Flower provides a convenience class called :code:`NumPyClient` which " -"makes it easier to implement the :code:`Client` interface when your " -"workload uses Keras. The :code:`NumPyClient` interface defines three " -"methods which can be implemented in the following way:" +"Next, we need a model. We defined a simple Convolutional Neural Network " +"(CNN), but feel free to replace it with a more sophisticated model if " +"you'd like:" msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:82 +#: ../../source/tutorial-quickstart-tensorflow.rst:170 msgid "" -"We can now create an instance of our class :code:`CifarClient` and add " -"one line to actually run this client:" +"With `TensorFlow`, we can use the built-in ``get_weights()`` and " +"``set_weights()`` functions, which simplifies the implementation with " +"`Flower`. The rest of the functionality in the ClientApp is directly " +"inspired by the centralized case. The ``fit()`` method in the client " +"trains the model using the local dataset. Similarly, the ``evaluate()`` " +"method is used to evaluate the model received on a held-out validation " +"set that the client might have:" msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:90 +#: ../../source/tutorial-quickstart-tensorflow.rst:203 msgid "" -"That's it for the client. We only have to implement :code:`Client` or " -":code:`NumPyClient` and call :code:`fl.client.start_client()`. If you " -"implement a client of type :code:`NumPyClient` you'll need to first call " -"its :code:`to_client()` method. The string :code:`\"[::]:8080\"` tells " -"the client which server to connect to. In our case we can run the server " -"and the client on the same machine, therefore we use " -":code:`\"[::]:8080\"`. If we run a truly federated workload with the " -"server and clients running on different machines, all that needs to " -"change is the :code:`server_address` we point the client at." +"Finally, we can construct a ``ClientApp`` using the ``FlowerClient`` " +"defined above by means of a ``client_fn()`` callback. Note that the " +"`context` enables you to get access to hyperparameters defined in your " +"``pyproject.toml`` to configure the run. 
For example, in this tutorial we" +" access the `local-epochs` setting to control the number of epochs a " +"``ClientApp`` will perform when running the ``fit()`` method, in addition" +" to `batch-size`. You could define additional hyperparameters in " +"``pyproject.toml`` and access them here." msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:135 -msgid "Each client will have its own dataset." +#: ../../source/tutorial-quickstart-tensorflow.rst:234 +msgid "" +"To construct a ``ServerApp`` we define a ``server_fn()`` callback with an" +" identical signature to that of ``client_fn()`` but the return type is " +"`ServerAppComponents `_ as " +"opposed to a `Client `_. In this example we use the " +"`FedAvg`. To it we pass a randomly initialized model that will serve as " +"the global model to federate." msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:137 +#: ../../source/tutorial-quickstart-tensorflow.rst:270 msgid "" -"You should now see how the training does in the very first terminal (the " -"one that started the server):" +"Check the source code of the extended version of this tutorial in " +"|quickstart_tf_link|_ in the Flower GitHub repository." msgstr "" -#: ../../source/tutorial-quickstart-tensorflow.rst:169 +#: ../../source/tutorial-quickstart-tensorflow.rst:282 msgid "" -"Congratulations! You've successfully built and run your first federated " -"learning system. The full `source code " -"`_ for this can be found in :code:`examples" -"/quickstart-tensorflow/client.py`." +"The video shown below shows how to setup a TensorFlow + Flower project " +"using our previously recommended APIs. A new video tutorial will be " +"released that shows the new APIs (as the content above does)" msgstr "" #: ../../source/tutorial-quickstart-xgboost.rst:-1 @@ -23357,15 +23911,15 @@ msgid "" "with XGBoost to train classification models on trees." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:5 +#: ../../source/tutorial-quickstart-xgboost.rst:4 msgid "Quickstart XGBoost" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:14 +#: ../../source/tutorial-quickstart-xgboost.rst:13 msgid "Federated XGBoost" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:16 +#: ../../source/tutorial-quickstart-xgboost.rst:15 msgid "" "EXtreme Gradient Boosting (**XGBoost**) is a robust and efficient " "implementation of gradient-boosted decision tree (**GBDT**), that " @@ -23375,18 +23929,18 @@ msgid "" "concurrently, unlike the sequential approach taken by GBDT." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:20 +#: ../../source/tutorial-quickstart-xgboost.rst:21 msgid "" "Often, for tabular data on medium-sized datasets with fewer than 10k " "training examples, XGBoost surpasses the results of deep learning " "techniques." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:23 +#: ../../source/tutorial-quickstart-xgboost.rst:25 msgid "Why federated XGBoost?" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:25 +#: ../../source/tutorial-quickstart-xgboost.rst:27 msgid "" "Indeed, as the demand for data privacy and decentralized learning grows, " "there's an increasing requirement to implement federated XGBoost systems " @@ -23394,7 +23948,7 @@ msgid "" "detection." 
msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:27 +#: ../../source/tutorial-quickstart-xgboost.rst:31 msgid "" "Federated learning ensures that raw data remains on the local device, " "making it an attractive approach for sensitive domains where data " @@ -23403,10 +23957,10 @@ msgid "" "solution for these specific challenges." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:30 +#: ../../source/tutorial-quickstart-xgboost.rst:36 msgid "" "In this tutorial we will learn how to train a federated XGBoost model on " -"HIGGS dataset using Flower and :code:`xgboost` package. We use a simple " +"HIGGS dataset using Flower and ``xgboost`` package. We use a simple " "example (`full code xgboost-quickstart " "`_)" " with two *clients* and one *server* to demonstrate how federated XGBoost" @@ -23415,30 +23969,30 @@ msgid "" "comprehensive>`_) to run various experiments." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:37 +#: ../../source/tutorial-quickstart-xgboost.rst:46 msgid "Environment Setup" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:39 +#: ../../source/tutorial-quickstart-xgboost.rst:48 msgid "" "First of all, it is recommended to create a virtual environment and run " "everything within a :doc:`virtualenv `." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:41 +#: ../../source/tutorial-quickstart-xgboost.rst:51 msgid "" "We first need to install Flower and Flower Datasets. You can do this by " "running :" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:47 +#: ../../source/tutorial-quickstart-xgboost.rst:57 msgid "" -"Since we want to use :code:`xgboost` package to build up XGBoost trees, " -"let's go ahead and install :code:`xgboost`:" +"Since we want to use ``xgboost`` package to build up XGBoost trees, let's" +" go ahead and install ``xgboost``:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:57 +#: ../../source/tutorial-quickstart-xgboost.rst:67 msgid "" "*Clients* are responsible for generating individual weight-updates for " "the model based on their local datasets. Now that we have all our " @@ -23446,219 +24000,215 @@ msgid "" "clients and one server." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:60 +#: ../../source/tutorial-quickstart-xgboost.rst:71 msgid "" -"In a file called :code:`client.py`, import xgboost, Flower, Flower " -"Datasets and other related functions:" +"In a file called ``client.py``, import xgboost, Flower, Flower Datasets " +"and other related functions:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:87 +#: ../../source/tutorial-quickstart-xgboost.rst:99 msgid "Dataset partition and hyper-parameter selection" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:89 +#: ../../source/tutorial-quickstart-xgboost.rst:101 msgid "" "Prior to local training, we require loading the HIGGS dataset from Flower" " Datasets and conduct data partitioning for FL:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:102 +#: ../../source/tutorial-quickstart-xgboost.rst:115 msgid "" "In this example, we split the dataset into 30 partitions with uniform " -"distribution (:code:`IidPartitioner(num_partitions=30)`). Then, we load " -"the partition for the given client based on :code:`partition_id`:" +"distribution (``IidPartitioner(num_partitions=30)``). 
Then, we load the " +"partition for the given client based on ``partition_id``:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:121 +#: ../../source/tutorial-quickstart-xgboost.rst:135 msgid "" "After that, we do train/test splitting on the given partition (client's " -"local data), and transform data format for :code:`xgboost` package." +"local data), and transform data format for ``xgboost`` package." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:134 +#: ../../source/tutorial-quickstart-xgboost.rst:149 msgid "" -"The functions of :code:`train_test_split` and " -":code:`transform_dataset_to_dmatrix` are defined as below:" +"The functions of ``train_test_split`` and " +"``transform_dataset_to_dmatrix`` are defined as below:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:158 +#: ../../source/tutorial-quickstart-xgboost.rst:174 msgid "Finally, we define the hyper-parameters used for XGBoost training." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:174 +#: ../../source/tutorial-quickstart-xgboost.rst:190 msgid "" -"The :code:`num_local_round` represents the number of iterations for local" -" tree boost. We use CPU for the training in default. One can shift it to " -"GPU by setting :code:`tree_method` to :code:`gpu_hist`. We use AUC as " -"evaluation metric." +"The ``num_local_round`` represents the number of iterations for local " +"tree boost. We use CPU for the training in default. One can shift it to " +"GPU by setting ``tree_method`` to ``gpu_hist``. We use AUC as evaluation " +"metric." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:181 +#: ../../source/tutorial-quickstart-xgboost.rst:195 msgid "Flower client definition for XGBoost" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:183 +#: ../../source/tutorial-quickstart-xgboost.rst:197 msgid "" "After loading the dataset we define the Flower client. We follow the " -"general rule to define :code:`XgbClient` class inherited from " -":code:`fl.client.Client`." +"general rule to define ``XgbClient`` class inherited from " +"``fl.client.Client``." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:205 +#: ../../source/tutorial-quickstart-xgboost.rst:219 msgid "" -"All required parameters defined above are passed to :code:`XgbClient`'s " +"All required parameters defined above are passed to ``XgbClient``'s " "constructor." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:207 +#: ../../source/tutorial-quickstart-xgboost.rst:221 msgid "" -"Then, we override :code:`get_parameters`, :code:`fit` and " -":code:`evaluate` methods insides :code:`XgbClient` class as follows." +"Then, we override ``get_parameters``, ``fit`` and ``evaluate`` methods " +"insides ``XgbClient`` class as follows." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:221 +#: ../../source/tutorial-quickstart-xgboost.rst:236 msgid "" "Unlike neural network training, XGBoost trees are not started from a " -"specified random weights. In this case, we do not use " -":code:`get_parameters` and :code:`set_parameters` to initialise model " -"parameters for XGBoost. As a result, let's return an empty tensor in " -":code:`get_parameters` when it is called by the server at the first " -"round." +"specified random weights. In this case, we do not use ``get_parameters`` " +"and ``set_parameters`` to initialise model parameters for XGBoost. As a " +"result, let's return an empty tensor in ``get_parameters`` when it is " +"called by the server at the first round." 
msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:262 +#: ../../source/tutorial-quickstart-xgboost.rst:278 msgid "" -"In :code:`fit`, at the first round, we call :code:`xgb.train()` to build " -"up the first set of trees. From the second round, we load the global " -"model sent from server to new build Booster object, and then update model" -" weights on local training data with function :code:`local_boost` as " -"follows:" +"In ``fit``, at the first round, we call ``xgb.train()`` to build up the " +"first set of trees. From the second round, we load the global model sent " +"from server to new build Booster object, and then update model weights on" +" local training data with function ``local_boost`` as follows:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:281 +#: ../../source/tutorial-quickstart-xgboost.rst:298 msgid "" -"Given :code:`num_local_round`, we update trees by calling " -":code:`bst_input.update` method. After training, the last " -":code:`N=num_local_round` trees will be extracted to send to the server." +"Given ``num_local_round``, we update trees by calling " +"``bst_input.update`` method. After training, the last " +"``N=num_local_round`` trees will be extracted to send to the server." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:313 +#: ../../source/tutorial-quickstart-xgboost.rst:330 msgid "" -"In :code:`evaluate`, after loading the global model, we call " -":code:`bst.eval_set` function to conduct evaluation on valid set. The AUC" -" value will be returned." +"In ``evaluate``, after loading the global model, we call ``bst.eval_set``" +" function to conduct evaluation on valid set. The AUC value will be " +"returned." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:316 +#: ../../source/tutorial-quickstart-xgboost.rst:333 msgid "" -"Now, we can create an instance of our class :code:`XgbClient` and add one" -" line to actually run this client:" +"Now, we can create an instance of our class ``XgbClient`` and add one " +"line to actually run this client:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:332 +#: ../../source/tutorial-quickstart-xgboost.rst:350 msgid "" -"That's it for the client. We only have to implement :code:`Client` and " -"call :code:`fl.client.start_client()`. The string :code:`\"[::]:8080\"` " -"tells the client which server to connect to. In our case we can run the " -"server and the client on the same machine, therefore we use " -":code:`\"[::]:8080\"`. If we run a truly federated workload with the " -"server and clients running on different machines, all that needs to " -"change is the :code:`server_address` we point the client at." +"That's it for the client. We only have to implement ``Client`` and call " +"``fl.client.start_client()``. The string ``\"[::]:8080\"`` tells the " +"client which server to connect to. In our case we can run the server and " +"the client on the same machine, therefore we use ``\"[::]:8080\"``. If we" +" run a truly federated workload with the server and clients running on " +"different machines, all that needs to change is the ``server_address`` we" +" point the client at." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:343 +#: ../../source/tutorial-quickstart-xgboost.rst:360 msgid "" "These updates are then sent to the *server* which will aggregate them to " "produce a better model. Finally, the *server* sends this improved version" " of the model back to each *client* to finish a complete FL round." 
msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:346 +#: ../../source/tutorial-quickstart-xgboost.rst:364 msgid "" -"In a file named :code:`server.py`, import Flower and FedXgbBagging from " -":code:`flwr.server.strategy`." +"In a file named ``server.py``, import Flower and FedXgbBagging from " +"``flwr.server.strategy``." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:348 +#: ../../source/tutorial-quickstart-xgboost.rst:367 msgid "We first define a strategy for XGBoost bagging aggregation." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:380 +#: ../../source/tutorial-quickstart-xgboost.rst:401 msgid "" -"We use two clients for this example. An " -":code:`evaluate_metrics_aggregation` function is defined to collect and " -"wighted average the AUC values from clients. The :code:`config_func` " -"function is to return the current FL round number to client's " -":code:`fit()` and :code:`evaluate()` methods." +"We use two clients for this example. An ``evaluate_metrics_aggregation`` " +"function is defined to collect and wighted average the AUC values from " +"clients. The ``config_func`` function is to return the current FL round " +"number to client's ``fit()`` and ``evaluate()`` methods." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:384 +#: ../../source/tutorial-quickstart-xgboost.rst:406 msgid "Then, we start the server:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:396 +#: ../../source/tutorial-quickstart-xgboost.rst:418 msgid "Tree-based bagging aggregation" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:398 +#: ../../source/tutorial-quickstart-xgboost.rst:420 msgid "" "You must be curious about how bagging aggregation works. Let's look into " "the details." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:400 +#: ../../source/tutorial-quickstart-xgboost.rst:422 msgid "" -"In file :code:`flwr.server.strategy.fedxgb_bagging.py`, we define " -":code:`FedXgbBagging` inherited from :code:`flwr.server.strategy.FedAvg`." -" Then, we override the :code:`aggregate_fit`, :code:`aggregate_evaluate` " -"and :code:`evaluate` methods as follows:" +"In file ``flwr.server.strategy.fedxgb_bagging.py``, we define " +"``FedXgbBagging`` inherited from ``flwr.server.strategy.FedAvg``. Then, " +"we override the ``aggregate_fit``, ``aggregate_evaluate`` and " +"``evaluate`` methods as follows:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:496 +#: ../../source/tutorial-quickstart-xgboost.rst:519 msgid "" -"In :code:`aggregate_fit`, we sequentially aggregate the clients' XGBoost " -"trees by calling :code:`aggregate()` function:" +"In ``aggregate_fit``, we sequentially aggregate the clients' XGBoost " +"trees by calling ``aggregate()`` function:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:555 +#: ../../source/tutorial-quickstart-xgboost.rst:579 msgid "" "In this function, we first fetch the number of trees and the number of " "parallel trees for the current and previous model by calling " -":code:`_get_tree_nums`. Then, the fetched information will be aggregated." -" After that, the trees (containing model weights) are aggregated to " +"``_get_tree_nums``. Then, the fetched information will be aggregated. " +"After that, the trees (containing model weights) are aggregated to " "generate a new tree model." 
msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:560 +#: ../../source/tutorial-quickstart-xgboost.rst:584 msgid "" "After traversal of all clients' models, a new global model is generated, " "followed by the serialisation, and sending back to each client." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:565 +#: ../../source/tutorial-quickstart-xgboost.rst:588 msgid "Launch Federated XGBoost!" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:641 +#: ../../source/tutorial-quickstart-xgboost.rst:664 msgid "" "Congratulations! You've successfully built and run your first federated " -"XGBoost system. The AUC values can be checked in " -":code:`metrics_distributed`. One can see that the average AUC increases " -"over FL rounds." +"XGBoost system. The AUC values can be checked in ``metrics_distributed``." +" One can see that the average AUC increases over FL rounds." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:646 +#: ../../source/tutorial-quickstart-xgboost.rst:668 msgid "" "The full `source code `_ for this example can be found in :code:`examples" -"/xgboost-quickstart`." +"/xgboost-quickstart/>`_ for this example can be found in ``examples" +"/xgboost-quickstart``." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:650 +#: ../../source/tutorial-quickstart-xgboost.rst:673 msgid "Comprehensive Federated XGBoost" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:652 +#: ../../source/tutorial-quickstart-xgboost.rst:675 msgid "" "Now that you have known how federated XGBoost work with Flower, it's time" " to run some more comprehensive experiments by customising the " @@ -23671,11 +24221,11 @@ msgid "" "client cohorts in a resource-aware manner. Let's take a look!" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:659 +#: ../../source/tutorial-quickstart-xgboost.rst:685 msgid "Cyclic training" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:661 +#: ../../source/tutorial-quickstart-xgboost.rst:687 msgid "" "In addition to bagging aggregation, we offer a cyclic training scheme, " "which performs FL in a client-by-client fashion. Instead of aggregating " @@ -23685,178 +24235,176 @@ msgid "" "for next round's boosting." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:665 -msgid "" -"To do this, we first customise a :code:`ClientManager` in " -":code:`server_utils.py`:" +#: ../../source/tutorial-quickstart-xgboost.rst:693 +msgid "To do this, we first customise a ``ClientManager`` in ``server_utils.py``:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:705 +#: ../../source/tutorial-quickstart-xgboost.rst:733 msgid "" -"The customised :code:`ClientManager` samples all available clients in " -"each FL round based on the order of connection to the server. Then, we " -"define a new strategy :code:`FedXgbCyclic` in " -":code:`flwr.server.strategy.fedxgb_cyclic.py`, in order to sequentially " +"The customised ``ClientManager`` samples all available clients in each FL" +" round based on the order of connection to the server. Then, we define a " +"new strategy ``FedXgbCyclic`` in " +"``flwr.server.strategy.fedxgb_cyclic.py``, in order to sequentially " "select only one client in given round and pass the received model to next" " client." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:746 +#: ../../source/tutorial-quickstart-xgboost.rst:775 msgid "" -"Unlike the original :code:`FedAvg`, we don't perform aggregation here. " +"Unlike the original ``FedAvg``, we don't perform aggregation here. 
" "Instead, we just make a copy of the received client model as global model" -" by overriding :code:`aggregate_fit`." +" by overriding ``aggregate_fit``." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:749 +#: ../../source/tutorial-quickstart-xgboost.rst:778 msgid "" -"Also, the customised :code:`configure_fit` and :code:`configure_evaluate`" -" methods ensure the clients to be sequentially selected given FL round:" +"Also, the customised ``configure_fit`` and ``configure_evaluate`` methods" +" ensure the clients to be sequentially selected given FL round:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:813 +#: ../../source/tutorial-quickstart-xgboost.rst:840 msgid "Customised data partitioning" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:815 +#: ../../source/tutorial-quickstart-xgboost.rst:842 msgid "" -"In :code:`dataset.py`, we have a function :code:`instantiate_partitioner`" -" to instantiate the data partitioner based on the given " -":code:`num_partitions` and :code:`partitioner_type`. Currently, we " -"provide four supported partitioner type to simulate the uniformity/non-" -"uniformity in data quantity (uniform, linear, square, exponential)." +"In ``dataset.py``, we have a function ``instantiate_partitioner`` to " +"instantiate the data partitioner based on the given ``num_partitions`` " +"and ``partitioner_type``. Currently, we provide four supported " +"partitioner type to simulate the uniformity/non-uniformity in data " +"quantity (uniform, linear, square, exponential)." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:846 +#: ../../source/tutorial-quickstart-xgboost.rst:873 msgid "Customised centralised/distributed evaluation" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:848 +#: ../../source/tutorial-quickstart-xgboost.rst:875 msgid "" "To facilitate centralised evaluation, we define a function in " -":code:`server_utils.py`:" +"``server_utils.py``:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:880 +#: ../../source/tutorial-quickstart-xgboost.rst:907 msgid "" "This function returns a evaluation function which instantiates a " -":code:`Booster` object and loads the global model weights to it. The " -"evaluation is conducted by calling :code:`eval_set()` method, and the " -"tested AUC value is reported." +"``Booster`` object and loads the global model weights to it. The " +"evaluation is conducted by calling ``eval_set()`` method, and the tested " +"AUC value is reported." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:883 +#: ../../source/tutorial-quickstart-xgboost.rst:911 msgid "" "As for distributed evaluation on the clients, it's same as the quick-" -"start example by overriding the :code:`evaluate()` method insides the " -":code:`XgbClient` class in :code:`client_utils.py`." +"start example by overriding the ``evaluate()`` method insides the " +"``XgbClient`` class in ``client_utils.py``." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:887 +#: ../../source/tutorial-quickstart-xgboost.rst:916 msgid "Flower simulation" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:888 +#: ../../source/tutorial-quickstart-xgboost.rst:918 msgid "" -"We also provide an example code (:code:`sim.py`) to use the simulation " +"We also provide an example code (``sim.py``) to use the simulation " "capabilities of Flower to simulate federated XGBoost training on either a" " single machine or a cluster of machines." 
msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:922 +#: ../../source/tutorial-quickstart-xgboost.rst:954 msgid "" -"After importing all required packages, we define a :code:`main()` " -"function to perform the simulation process:" +"After importing all required packages, we define a ``main()`` function to" +" perform the simulation process:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:977 +#: ../../source/tutorial-quickstart-xgboost.rst:1010 msgid "" "We first load the dataset and perform data partitioning, and the pre-" -"processed data is stored in a :code:`list`. After the simulation begins, " -"the clients won't need to pre-process their partitions again." +"processed data is stored in a ``list``. After the simulation begins, the " +"clients won't need to pre-process their partitions again." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:980 +#: ../../source/tutorial-quickstart-xgboost.rst:1014 msgid "Then, we define the strategies and other hyper-parameters:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1031 +#: ../../source/tutorial-quickstart-xgboost.rst:1065 msgid "" "After that, we start the simulation by calling " -":code:`fl.simulation.start_simulation`:" +"``fl.simulation.start_simulation``:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1051 +#: ../../source/tutorial-quickstart-xgboost.rst:1085 msgid "" -"One of key parameters for :code:`start_simulation` is :code:`client_fn` " -"which returns a function to construct a client. We define it as follows:" +"One of key parameters for ``start_simulation`` is ``client_fn`` which " +"returns a function to construct a client. We define it as follows:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1094 +#: ../../source/tutorial-quickstart-xgboost.rst:1126 msgid "Arguments parser" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1096 +#: ../../source/tutorial-quickstart-xgboost.rst:1128 msgid "" -"In :code:`utils.py`, we define the arguments parsers for clients, server " -"and simulation, allowing users to specify different experimental " -"settings. Let's first see the sever side:" +"In ``utils.py``, we define the arguments parsers for clients, server and " +"simulation, allowing users to specify different experimental settings. " +"Let's first see the sever side:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1142 +#: ../../source/tutorial-quickstart-xgboost.rst:1175 msgid "" "This allows user to specify training strategies / the number of total " "clients / FL rounds / participating clients / clients for evaluation, and" -" evaluation fashion. Note that with :code:`--centralised-eval`, the sever" -" will do centralised evaluation and all functionalities for client " +" evaluation fashion. Note that with ``--centralised-eval``, the sever " +"will do centralised evaluation and all functionalities for client " "evaluation will be disabled." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1146 +#: ../../source/tutorial-quickstart-xgboost.rst:1180 msgid "Then, the argument parser on client side:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1200 +#: ../../source/tutorial-quickstart-xgboost.rst:1234 msgid "" "This defines various options for client data partitioning. Besides, " "clients also have an option to conduct evaluation on centralised test set" -" by setting :code:`--centralised-eval`, as well as an option to perform " -"scaled learning rate based on the number of clients by setting :code" -":`--scaled-lr`." 
+" by setting ``--centralised-eval``, as well as an option to perform " +"scaled learning rate based on the number of clients by setting " +"``--scaled-lr``." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1204 +#: ../../source/tutorial-quickstart-xgboost.rst:1239 msgid "We also have an argument parser for simulation:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1282 +#: ../../source/tutorial-quickstart-xgboost.rst:1317 msgid "This integrates all arguments for both client and server sides." msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1285 +#: ../../source/tutorial-quickstart-xgboost.rst:1320 msgid "Example commands" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1287 +#: ../../source/tutorial-quickstart-xgboost.rst:1322 msgid "" "To run a centralised evaluated experiment with bagging strategy on 5 " "clients with exponential distribution for 50 rounds, we first start the " "server as below:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1294 +#: ../../source/tutorial-quickstart-xgboost.rst:1329 msgid "Then, on each client terminal, we start the clients:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1300 +#: ../../source/tutorial-quickstart-xgboost.rst:1335 msgid "To run the same experiment with Flower simulation:" msgstr "" -#: ../../source/tutorial-quickstart-xgboost.rst:1306 +#: ../../source/tutorial-quickstart-xgboost.rst:1341 msgid "" "The full `code `_ for this comprehensive example can be found in" -" :code:`examples/xgboost-comprehensive`." +" ``examples/xgboost-comprehensive``." msgstr "" #: ../../source/tutorial-series-build-a-strategy-from-scratch-pytorch.ipynb:9 @@ -25217,7 +25765,7 @@ msgstr "" " 수도 있습니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:41 -msgid "|e5918c1c06a4434bbe4bf49235e40059|" +msgid "|3a7aceef05f0421794726ac54aaf12fd|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:109 @@ -25234,7 +25782,7 @@ msgstr "" " 바둑과 같은 게임을 하는 것일 수 있습니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:53 -msgid "|c0165741bd1944f09ec55ce49032377d|" +msgid "|d741075f8e624331b42c0746f7d258a0|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:111 @@ -25257,7 +25805,7 @@ msgstr "" "부르리는 것을 듣는 스마트 스피커에서 비롯됩니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:67 -msgid "|0a0ac9427ac7487b8e52d75ed514f04e|" +msgid "|8fc92d668bcb42b8bda55143847f2329|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:113 @@ -25275,7 +25823,7 @@ msgstr "" "있습니다. 하지만 여러 조직이 모두 같은 작업을 위해 데이터를 생성하는 것일 수도 있습니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:79 -msgid "|5defee3ea4ca40d99fcd3e4ea045be25|" +msgid "|1c705d833a024f22adcaeb8ae3d13b0b|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:115 @@ -25293,7 +25841,7 @@ msgstr "" "서버는 데이터 센터 어딘가에 있을 수도 있고 클라우드 어딘가에 있을 수도 있습니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:91 -msgid "|74f26ca701254d3db57d7899bd91eb55|" +msgid "|77a037b546a84262b608e04bc82a2c96|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:117 @@ -25310,7 +25858,7 @@ msgstr "" " 우리가 기본적으로 사용해 온 머신러닝 방법입니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:103 -msgid "|bda79f21f8154258a40e5766b2634ad7|" +msgid "|f568e24c9fb0435690ac628210a4be96|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:119 @@ -25332,7 +25880,7 @@ msgstr "" "트래픽을 분석하는 것이 있습니다. 
이러한 사례에서 모든 데이터는 자연스럽게 중앙 서버에 존재합니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:138 -msgid "|89d30862e62e4f9989e193483a08680a|" +msgid "|a7bf029981514e2593aa3a2b48c9d76a|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:173 @@ -25349,7 +25897,7 @@ msgstr "" "좋은 모델을 훈련하기에 충분하지 않을 수 있습니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:150 -msgid "|77e9918671c54b4f86e01369c0785ce8|" +msgid "|3f645ad807f84be8b1f8f3267173939c|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:175 @@ -25516,7 +26064,7 @@ msgstr "" "체크포인트에서 모델 매개변수를 초기화합니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:210 -msgid "|7e4ccef37cc94148a067107b34eb7447|" +msgid "|a06a9dbd603f45819afd8e8cfc3c4b8f|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:307 @@ -25543,7 +26091,7 @@ msgstr "" "개의 연결 노드만 사용합니다. 그 이유는 점점 더 많은 클라이언트 노드를 선택하면 학습의 효율성이 감소하기 때문입니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:225 -msgid "|28e47e4cded14479a0846c8e5f22c872|" +msgid "|edcf9a04d96e42608fd01a333375febe|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:309 @@ -25570,7 +26118,7 @@ msgstr "" "데이터에서 한 단계 정도로 짧거나 몇 단계(mini-batches)에 불과할 수 있습니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:240 -msgid "|4b8c5d1afa144294b76ffc76e4658a38|" +msgid "|3dae22fe797043968e2b7aa7073c78bd|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:311 @@ -25596,7 +26144,7 @@ msgstr "" "보냅니다. 보내는 모델 업데이트는 전체 모델 파라미터거나 로컬 교육 중에 누적된 그레디언트(gradient)일 수 있습니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:255 -msgid "|9dbdb3a0f6cb4a129fac863eaa414c17|" +msgid "|ba178f75267d4ad8aa7363f20709195f|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:313 @@ -25645,7 +26193,7 @@ msgstr "" "많은 영향을 미칩니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:273 -msgid "|81749d0ac0834c36a83bd38f433fea31|" +msgid "|c380c750bfd2444abce039a1c6fa8e60|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:315 @@ -25764,7 +26312,7 @@ msgstr "" "사용자는 모든 워크로드, 머신러닝 프레임워크 및 모든 프로그래밍 언어를 통합할 수 있습니다." #: ../../source/tutorial-series-what-is-federated-learning.ipynb:334 -msgid "|ed9aae51da70428eab7eef32f21e819e|" +msgid "|e7cec00a114b48359935c6510595132e|" msgstr "" #: ../../source/tutorial-series-what-is-federated-learning.ipynb:340 @@ -27424,3 +27972,1287 @@ msgstr "" #~ msgid "|c00bf2750bc24d229737a0fe1395f0fc|" #~ msgstr "|c00bf2750bc24d229737a0fe1395f0fc|" +#~ msgid "run\\_client\\_app" +#~ msgstr "run\\_client\\_app" + +#~ msgid "run\\_supernode" +#~ msgstr "run\\_supernode" + +#~ msgid "Retrieve the corresponding layout by the string key." +#~ msgstr "" + +#~ msgid "" +#~ "When there isn't an exact match, " +#~ "all the existing keys in the " +#~ "layout map will be treated as a" +#~ " regex and map against the input " +#~ "key again. The first match will be" +#~ " returned, based on the key insertion" +#~ " order. Return None if there isn't" +#~ " any match found." +#~ msgstr "" + +#~ msgid "the string key as the query for the layout." +#~ msgstr "" + +#~ msgid "Corresponding layout based on the query." +#~ msgstr "" + +#~ msgid "run\\_server\\_app" +#~ msgstr "" + +#~ msgid "run\\_superlink" +#~ msgstr "" + +#~ msgid "Start a Ray-based Flower simulation server." +#~ msgstr "" + +#~ msgid "" +#~ "A function creating `Client` instances. 
" +#~ "The function must have the signature " +#~ "`client_fn(context: Context). It should return" +#~ " a single client instance of type " +#~ "`Client`. Note that the created client" +#~ " instances are ephemeral and will " +#~ "often be destroyed after a single " +#~ "method invocation. Since client instances " +#~ "are not long-lived, they should " +#~ "not attempt to carry state over " +#~ "method invocations. Any state required " +#~ "by the instance (model, dataset, " +#~ "hyperparameters, ...) should be (re-)created" +#~ " in either the call to `client_fn`" +#~ " or the call to any of the " +#~ "client methods (e.g., load evaluation " +#~ "data in the `evaluate` method itself)." +#~ msgstr "" + +#~ msgid "The total number of clients in this simulation." +#~ msgstr "" + +#~ msgid "" +#~ "UNSUPPORTED, WILL BE REMOVED. USE " +#~ "`num_clients` INSTEAD. List `client_id`s for" +#~ " each client. This is only required" +#~ " if `num_clients` is not set. Setting" +#~ " both `num_clients` and `clients_ids` with" +#~ " `len(clients_ids)` not equal to " +#~ "`num_clients` generates an error. Using " +#~ "this argument will raise an error." +#~ msgstr "" + +#~ msgid "" +#~ "CPU and GPU resources for a single" +#~ " client. Supported keys are `num_cpus` " +#~ "and `num_gpus`. To understand the GPU" +#~ " utilization caused by `num_gpus`, as " +#~ "well as using custom resources, please" +#~ " consult the Ray documentation." +#~ msgstr "" + +#~ msgid "" +#~ "An implementation of the abstract base" +#~ " class `flwr.server.Server`. If no instance" +#~ " is provided, then `start_server` will " +#~ "create one." +#~ msgstr "" + +#~ msgid "" +#~ "An implementation of the abstract base" +#~ " class `flwr.server.Strategy`. If no " +#~ "strategy is provided, then `start_server` " +#~ "will use `flwr.server.strategy.FedAvg`." +#~ msgstr "" + +#~ msgid "" +#~ "An implementation of the abstract base" +#~ " class `flwr.server.ClientManager`. If no " +#~ "implementation is provided, then " +#~ "`start_simulation` will use " +#~ "`flwr.server.client_manager.SimpleClientManager`." +#~ msgstr "" + +#~ msgid "" +#~ "Optional dictionary containing arguments for" +#~ " the call to `ray.init`. If " +#~ "ray_init_args is None (the default), Ray" +#~ " will be initialized with the " +#~ "following default args: { " +#~ "\"ignore_reinit_error\": True, \"include_dashboard\": " +#~ "False } An empty dictionary can " +#~ "be used (ray_init_args={}) to prevent " +#~ "any arguments from being passed to " +#~ "ray.init." +#~ msgstr "" + +#~ msgid "" +#~ "Optional dictionary containing arguments for" +#~ " the call to `ray.init`. If " +#~ "ray_init_args is None (the default), Ray" +#~ " will be initialized with the " +#~ "following default args:" +#~ msgstr "" + +#~ msgid "{ \"ignore_reinit_error\": True, \"include_dashboard\": False }" +#~ msgstr "" + +#~ msgid "" +#~ "An empty dictionary can be used " +#~ "(ray_init_args={}) to prevent any arguments" +#~ " from being passed to ray.init." +#~ msgstr "" + +#~ msgid "" +#~ "Set to True to prevent `ray.shutdown()`" +#~ " in case `ray.is_initialized()=True`." +#~ msgstr "" + +#~ msgid "" +#~ "Optionally specify the type of actor " +#~ "to use. The actor object, which " +#~ "persists throughout the simulation, will " +#~ "be the process in charge of " +#~ "executing a ClientApp wrapping input " +#~ "argument `client_fn`." +#~ msgstr "" + +#~ msgid "" +#~ "If you want to create your own " +#~ "Actor classes, you might need to " +#~ "pass some input argument. 
You can " +#~ "use this dictionary for such purpose." +#~ msgstr "" + +#~ msgid "" +#~ "(default: \"DEFAULT\") Optional string " +#~ "(\"DEFAULT\" or \"SPREAD\") for the VCE" +#~ " to choose in which node the " +#~ "actor is placed. If you are an " +#~ "advanced user needed more control you" +#~ " can use lower-level scheduling " +#~ "strategies to pin actors to specific " +#~ "compute nodes (e.g. via " +#~ "NodeAffinitySchedulingStrategy). Please note this" +#~ " is an advanced feature. For all " +#~ "details, please refer to the Ray " +#~ "documentation: https://docs.ray.io/en/latest/ray-" +#~ "core/scheduling/index.html" +#~ msgstr "" + +#~ msgid "**hist** -- Object containing metrics from training." +#~ msgstr "" + +#~ msgid "" +#~ "Check out this Federated Learning " +#~ "quickstart tutorial for using Flower " +#~ "with FastAI to train a vision " +#~ "model on CIFAR-10." +#~ msgstr "" + +#~ msgid "Let's build a federated learning system using fastai and Flower!" +#~ msgstr "" + +#~ msgid "" +#~ "Please refer to the `full code " +#~ "example `_ to learn more." +#~ msgstr "" + +#~ msgid "" +#~ "Check out this Federating Learning " +#~ "quickstart tutorial for using Flower " +#~ "with HuggingFace Transformers in order " +#~ "to fine-tune an LLM." +#~ msgstr "" + +#~ msgid "" +#~ "Let's build a federated learning system" +#~ " using Hugging Face Transformers and " +#~ "Flower!" +#~ msgstr "" + +#~ msgid "" +#~ "We will leverage Hugging Face to " +#~ "federate the training of language models" +#~ " over multiple clients using Flower. " +#~ "More specifically, we will fine-tune " +#~ "a pre-trained Transformer model " +#~ "(distilBERT) for sequence classification over" +#~ " a dataset of IMDB ratings. The " +#~ "end goal is to detect if a " +#~ "movie rating is positive or negative." +#~ msgstr "" + +#~ msgid "Dependencies" +#~ msgstr "" + +#~ msgid "" +#~ "To follow along this tutorial you " +#~ "will need to install the following " +#~ "packages: :code:`datasets`, :code:`evaluate`, " +#~ ":code:`flwr`, :code:`torch`, and " +#~ ":code:`transformers`. This can be done " +#~ "using :code:`pip`:" +#~ msgstr "" + +#~ msgid "Standard Hugging Face workflow" +#~ msgstr "" + +#~ msgid "Handling the data" +#~ msgstr "" + +#~ msgid "" +#~ "To fetch the IMDB dataset, we will" +#~ " use Hugging Face's :code:`datasets` " +#~ "library. We then need to tokenize " +#~ "the data and create :code:`PyTorch` " +#~ "dataloaders, this is all done in " +#~ "the :code:`load_data` function:" +#~ msgstr "" + +#~ msgid "Training and testing the model" +#~ msgstr "" + +#~ msgid "" +#~ "Once we have a way of creating " +#~ "our trainloader and testloader, we can" +#~ " take care of the training and " +#~ "testing. This is very similar to " +#~ "any :code:`PyTorch` training or testing " +#~ "loop:" +#~ msgstr "" + +#~ msgid "Creating the model itself" +#~ msgstr "" + +#~ msgid "" +#~ "To create the model itself, we " +#~ "will just load the pre-trained " +#~ "distillBERT model using Hugging Face’s " +#~ ":code:`AutoModelForSequenceClassification` :" +#~ msgstr "" + +#~ msgid "Federating the example" +#~ msgstr "" + +#~ msgid "Creating the IMDBClient" +#~ msgstr "" + +#~ msgid "" +#~ "To federate our example to multiple " +#~ "clients, we first need to write " +#~ "our Flower client class (inheriting from" +#~ " :code:`flwr.client.NumPyClient`). 
This is very" +#~ " easy, as our model is a " +#~ "standard :code:`PyTorch` model:" +#~ msgstr "" + +#~ msgid "" +#~ "The :code:`get_parameters` function lets the" +#~ " server get the client's parameters. " +#~ "Inversely, the :code:`set_parameters` function " +#~ "allows the server to send its " +#~ "parameters to the client. Finally, the" +#~ " :code:`fit` function trains the model " +#~ "locally for the client, and the " +#~ ":code:`evaluate` function tests the model " +#~ "locally and returns the relevant " +#~ "metrics." +#~ msgstr "" + +#~ msgid "Starting the server" +#~ msgstr "" + +#~ msgid "" +#~ "Now that we have a way to " +#~ "instantiate clients, we need to create" +#~ " our server in order to aggregate " +#~ "the results. Using Flower, this can " +#~ "be done very easily by first " +#~ "choosing a strategy (here, we are " +#~ "using :code:`FedAvg`, which will define " +#~ "the global weights as the average " +#~ "of all the clients' weights at " +#~ "each round) and then using the " +#~ ":code:`flwr.server.start_server` function:" +#~ msgstr "" + +#~ msgid "" +#~ "The :code:`weighted_average` function is there" +#~ " to provide a way to aggregate " +#~ "the metrics distributed amongst the " +#~ "clients (basically this allows us to " +#~ "display a nice average accuracy and " +#~ "loss for every round)." +#~ msgstr "" + +#~ msgid "Putting everything together" +#~ msgstr "" + +#~ msgid "We can now start client instances using:" +#~ msgstr "" + +#~ msgid "" +#~ "And they will be able to connect" +#~ " to the server and start the " +#~ "federated training." +#~ msgstr "" + +#~ msgid "" +#~ "If you want to check out " +#~ "everything put together, you should " +#~ "check out the `full code example " +#~ "`_ ." +#~ msgstr "" + +#~ msgid "" +#~ "Of course, this is a very basic" +#~ " example, and a lot can be " +#~ "added or modified, it was just to" +#~ " showcase how simply we could " +#~ "federate a Hugging Face workflow using" +#~ " Flower." +#~ msgstr "" + +#~ msgid "" +#~ "Note that in this example we used" +#~ " :code:`PyTorch`, but we could have " +#~ "very well used :code:`TensorFlow`." +#~ msgstr "" + +#~ msgid "" +#~ "Check out this Federated Learning " +#~ "quickstart tutorial for using Flower " +#~ "with PyTorch Lightning to train an " +#~ "Auto Encoder model on MNIST." +#~ msgstr "" + +#~ msgid "" +#~ "Let's build a horizontal federated " +#~ "learning system using PyTorch Lightning " +#~ "and Flower!" +#~ msgstr "" + +#~ msgid "" +#~ "Please refer to the `full code " +#~ "example `_ to learn " +#~ "more." +#~ msgstr "" + +#~ msgid "" +#~ "Check out this Federated Learning " +#~ "quickstart tutorial for using Flower " +#~ "with TensorFlow to train a MobilNetV2" +#~ " model on CIFAR-10." +#~ msgstr "" + +#~ msgid "Let's build a federated learning system in less than 20 lines of code!" +#~ msgstr "" + +#~ msgid "Before Flower can be imported we have to install it:" +#~ msgstr "" + +#~ msgid "" +#~ "Since we want to use the Keras " +#~ "API of TensorFlow (TF), we have to" +#~ " install TF as well:" +#~ msgstr "" + +#~ msgid "Next, in a file called :code:`client.py`, import Flower and TensorFlow:" +#~ msgstr "" + +#~ msgid "" +#~ "We use the Keras utilities of TF" +#~ " to load CIFAR10, a popular colored" +#~ " image classification dataset for machine" +#~ " learning. The call to " +#~ ":code:`tf.keras.datasets.cifar10.load_data()` downloads " +#~ "CIFAR10, caches it locally, and then " +#~ "returns the entire training and test " +#~ "set as NumPy ndarrays." 
+#~ msgstr "" + +#~ msgid "" +#~ "Next, we need a model. For the " +#~ "purpose of this tutorial, we use " +#~ "MobilNetV2 with 10 output classes:" +#~ msgstr "" + +#~ msgid "" +#~ "The Flower server interacts with clients" +#~ " through an interface called " +#~ ":code:`Client`. When the server selects " +#~ "a particular client for training, it " +#~ "sends training instructions over the " +#~ "network. The client receives those " +#~ "instructions and calls one of the " +#~ ":code:`Client` methods to run your code" +#~ " (i.e., to train the neural network" +#~ " we defined earlier)." +#~ msgstr "" + +#~ msgid "" +#~ "Flower provides a convenience class " +#~ "called :code:`NumPyClient` which makes it " +#~ "easier to implement the :code:`Client` " +#~ "interface when your workload uses Keras." +#~ " The :code:`NumPyClient` interface defines " +#~ "three methods which can be implemented" +#~ " in the following way:" +#~ msgstr "" + +#~ msgid "" +#~ "We can now create an instance of" +#~ " our class :code:`CifarClient` and add " +#~ "one line to actually run this " +#~ "client:" +#~ msgstr "" + +#~ msgid "" +#~ "That's it for the client. We only" +#~ " have to implement :code:`Client` or " +#~ ":code:`NumPyClient` and call " +#~ ":code:`fl.client.start_client()`. If you implement" +#~ " a client of type :code:`NumPyClient` " +#~ "you'll need to first call its " +#~ ":code:`to_client()` method. The string " +#~ ":code:`\"[::]:8080\"` tells the client which" +#~ " server to connect to. In our " +#~ "case we can run the server and " +#~ "the client on the same machine, " +#~ "therefore we use :code:`\"[::]:8080\"`. If " +#~ "we run a truly federated workload " +#~ "with the server and clients running " +#~ "on different machines, all that needs" +#~ " to change is the :code:`server_address`" +#~ " we point the client at." +#~ msgstr "" + +#~ msgid "Each client will have its own dataset." +#~ msgstr "" + +#~ msgid "" +#~ "You should now see how the " +#~ "training does in the very first " +#~ "terminal (the one that started the " +#~ "server):" +#~ msgstr "" + +#~ msgid "" +#~ "Congratulations! You've successfully built and" +#~ " run your first federated learning " +#~ "system. The full `source code " +#~ "`_ for this can be " +#~ "found in :code:`examples/quickstart-" +#~ "tensorflow/client.py`." 
+#~ msgstr "" + +#~ msgid "|e5918c1c06a4434bbe4bf49235e40059|" +#~ msgstr "" + +#~ msgid "|c0165741bd1944f09ec55ce49032377d|" +#~ msgstr "" + +#~ msgid "|0a0ac9427ac7487b8e52d75ed514f04e|" +#~ msgstr "" + +#~ msgid "|5defee3ea4ca40d99fcd3e4ea045be25|" +#~ msgstr "" + +#~ msgid "|74f26ca701254d3db57d7899bd91eb55|" +#~ msgstr "" + +#~ msgid "|bda79f21f8154258a40e5766b2634ad7|" +#~ msgstr "" + +#~ msgid "|89d30862e62e4f9989e193483a08680a|" +#~ msgstr "" + +#~ msgid "|77e9918671c54b4f86e01369c0785ce8|" +#~ msgstr "" + +#~ msgid "|7e4ccef37cc94148a067107b34eb7447|" +#~ msgstr "" + +#~ msgid "|28e47e4cded14479a0846c8e5f22c872|" +#~ msgstr "" + +#~ msgid "|4b8c5d1afa144294b76ffc76e4658a38|" +#~ msgstr "" + +#~ msgid "|9dbdb3a0f6cb4a129fac863eaa414c17|" +#~ msgstr "" + +#~ msgid "|81749d0ac0834c36a83bd38f433fea31|" +#~ msgstr "" + +#~ msgid "|ed9aae51da70428eab7eef32f21e819e|" +#~ msgstr "" + +#~ msgid "|e87b69b2ada74ea49412df16f4a0b9cc|" +#~ msgstr "" + +#~ msgid "|33cacb7d985c4906b348515c1a5cd993|" +#~ msgstr "" + +#~ msgid "|cc080a555947492fa66131dc3a967603|" +#~ msgstr "" + +#~ msgid "|085c3e0fb8664c6aa06246636524b20b|" +#~ msgstr "" + +#~ msgid "|bfe69c74e48c45d49b50251c38c2a019|" +#~ msgstr "" + +#~ msgid "|ebbecd651f0348d99c6511ea859bf4ca|" +#~ msgstr "" + +#~ msgid "|163117eb654a4273babba413cf8065f5|" +#~ msgstr "" + +#~ msgid "|452ac3ba453b4cd1be27be1ba7560d64|" +#~ msgstr "" + +#~ msgid "|f403fcd69e4e44409627e748b404c086|" +#~ msgstr "" + +#~ msgid "|4b00fe63870145968f8443619a792a42|" +#~ msgstr "" + +#~ msgid "|368378731066486fa4397e89bc6b870c|" +#~ msgstr "" + +#~ msgid "|a66aa83d85bf4ffba7ed660b718066da|" +#~ msgstr "" + +#~ msgid "|82324b9af72a4582a81839d55caab767|" +#~ msgstr "" + +#~ msgid "|fbf2da0da3cc4f8ab3b3eff852d80c41|" +#~ msgstr "" + +#~ msgid "" +#~ "Some quickstart examples may have " +#~ "limitations or requirements that prevent " +#~ "them from running on every environment." +#~ " For more information, please see " +#~ "`Limitations`_." +#~ msgstr "" + +#~ msgid "" +#~ "Change the application code. For " +#~ "example, change the ``seed`` in " +#~ "``quickstart_docker/task.py`` to ``43`` and " +#~ "save it:" +#~ msgstr "" + +#~ msgid ":code:`fit`" +#~ msgstr ":code:`fit`" + +#~ msgid "" +#~ "Note that since version :code:`1.11.0`, " +#~ ":code:`flower-server-app` no longer " +#~ "supports passing a reference to a " +#~ "`ServerApp` attribute. Instead, you need " +#~ "to pass the path to Flower app " +#~ "via the argument :code:`--app`. This is" +#~ " the path to a directory containing" +#~ " a `pyproject.toml`. You can create a" +#~ " valid Flower app by executing " +#~ ":code:`flwr new` and following the " +#~ "prompt." +#~ msgstr "" + +#~ msgid "" +#~ "The following examples are available as" +#~ " standalone projects. Quickstart TensorFlow/Keras" +#~ " ---------------------------" +#~ msgstr "" + +#~ msgid "" +#~ "Let's create a new application project" +#~ " in Xcode and add :code:`flwr` as " +#~ "a dependency in your project. For " +#~ "our application, we will store the " +#~ "logic of our app in " +#~ ":code:`FLiOSModel.swift` and the UI elements" +#~ " in :code:`ContentView.swift`. We will " +#~ "focus more on :code:`FLiOSModel.swift` in " +#~ "this quickstart. Please refer to the " +#~ "`full code example " +#~ "`_ to " +#~ "learn more about the app." 
+#~ msgstr "" + +#~ msgid "Import Flower and CoreML related packages in :code:`FLiOSModel.swift`:" +#~ msgstr "" + +#~ msgid "" +#~ "Then add the mlmodel to the " +#~ "project simply by drag-and-drop, " +#~ "the mlmodel will be bundled inside " +#~ "the application during deployment to " +#~ "your iOS device. We need to pass" +#~ " the url to access mlmodel and " +#~ "run CoreML machine learning processes, " +#~ "it can be retrieved by calling the" +#~ " function :code:`Bundle.main.url`. For the " +#~ "MNIST dataset, we need to preprocess " +#~ "it into :code:`MLBatchProvider` object. The" +#~ " preprocessing is done inside " +#~ ":code:`DataLoader.swift`." +#~ msgstr "" + +#~ msgid "" +#~ "Since CoreML does not allow the " +#~ "model parameters to be seen before " +#~ "training, and accessing the model " +#~ "parameters during or after the training" +#~ " can only be done by specifying " +#~ "the layer name, we need to know" +#~ " this information beforehand, through " +#~ "looking at the model specification, " +#~ "which are written as proto files. " +#~ "The implementation can be seen in " +#~ ":code:`MLModelInspect`." +#~ msgstr "" + +#~ msgid "" +#~ "Then start the Flower gRPC client " +#~ "and start communicating to the server" +#~ " by passing our Flower client to " +#~ "the function :code:`startFlwrGRPC`." +#~ msgstr "" + +#~ msgid "" +#~ "That's it for the client. We only" +#~ " have to implement :code:`Client` or " +#~ "call the provided :code:`MLFlwrClient` and " +#~ "call :code:`startFlwrGRPC()`. The attribute " +#~ ":code:`hostname` and :code:`port` tells the" +#~ " client which server to connect to." +#~ " This can be done by entering " +#~ "the hostname and port in the " +#~ "application before clicking the start " +#~ "button to start the federated learning" +#~ " process." +#~ msgstr "" + +#~ msgid "" +#~ "For simple workloads we can start " +#~ "a Flower server and leave all the" +#~ " configuration possibilities at their " +#~ "default values. In a file named " +#~ ":code:`server.py`, import Flower and start " +#~ "the server:" +#~ msgstr "" + +#~ msgid "" +#~ "Congratulations! You've successfully built and" +#~ " run your first federated learning " +#~ "system in your ios device. The " +#~ "full `source code " +#~ "`_ for" +#~ " this example can be found in " +#~ ":code:`examples/ios`." +#~ msgstr "" + +#~ msgid "" +#~ "In this tutorial, we will learn " +#~ "how to train a :code:`Logistic " +#~ "Regression` model on MNIST using Flower" +#~ " and scikit-learn." +#~ msgstr "" + +#~ msgid "" +#~ "Now that we have all our " +#~ "dependencies installed, let's run a " +#~ "simple distributed training with two " +#~ "clients and one server. However, before" +#~ " setting up the client and server," +#~ " we will define all functionalities " +#~ "that we need for our federated " +#~ "learning setup within :code:`utils.py`. The" +#~ " :code:`utils.py` contains different functions" +#~ " defining all the machine learning " +#~ "basics:" +#~ msgstr "" + +#~ msgid ":code:`get_model_parameters()`" +#~ msgstr "" + +#~ msgid "Returns the parameters of a :code:`sklearn` LogisticRegression model" +#~ msgstr "" + +#~ msgid ":code:`set_model_params()`" +#~ msgstr "" + +#~ msgid "Sets the parameters of a :code:`sklearn` LogisticRegression model" +#~ msgstr "" + +#~ msgid ":code:`set_initial_params()`" +#~ msgstr "" + +#~ msgid "" +#~ "Please check out :code:`utils.py` `here " +#~ "`_ for more details. " +#~ "The pre-defined functions are used " +#~ "in the :code:`client.py` and imported. 
" +#~ "The :code:`client.py` also requires to " +#~ "import several packages such as Flower" +#~ " and scikit-learn:" +#~ msgstr "" + +#~ msgid "" +#~ "Prior to local training, we need " +#~ "to load the MNIST dataset, a " +#~ "popular image classification dataset of " +#~ "handwritten digits for machine learning, " +#~ "and partition the dataset for FL. " +#~ "This can be conveniently achieved using" +#~ " `Flower Datasets `_." +#~ " The :code:`FederatedDataset.load_partition()` method" +#~ " loads the partitioned training set " +#~ "for each partition ID defined in " +#~ "the :code:`--partition-id` argument." +#~ msgstr "" + +#~ msgid "" +#~ "Next, the logistic regression model is" +#~ " defined and initialized with " +#~ ":code:`utils.set_initial_params()`." +#~ msgstr "" + +#~ msgid "" +#~ "The Flower server interacts with clients" +#~ " through an interface called " +#~ ":code:`Client`. When the server selects " +#~ "a particular client for training, it " +#~ "sends training instructions over the " +#~ "network. The client receives those " +#~ "instructions and calls one of the " +#~ ":code:`Client` methods to run your code" +#~ " (i.e., to fit the logistic " +#~ "regression we defined earlier)." +#~ msgstr "" + +#~ msgid "" +#~ "Flower provides a convenience class " +#~ "called :code:`NumPyClient` which makes it " +#~ "easier to implement the :code:`Client` " +#~ "interface when your workload uses " +#~ "scikit-learn. Implementing :code:`NumPyClient` " +#~ "usually means defining the following " +#~ "methods (:code:`set_parameters` is optional " +#~ "though):" +#~ msgstr "" + +#~ msgid ":code:`set_parameters` (optional)" +#~ msgstr "" + +#~ msgid "is directly imported with :code:`utils.set_model_params()`" +#~ msgstr "" + +#~ msgid "" +#~ "We can now create an instance of" +#~ " our class :code:`MnistClient` and add " +#~ "one line to actually run this " +#~ "client:" +#~ msgstr "" + +#~ msgid "" +#~ "That's it for the client. We only" +#~ " have to implement :code:`Client` or " +#~ ":code:`NumPyClient` and call " +#~ ":code:`fl.client.start_client()`. If you implement" +#~ " a client of type :code:`NumPyClient` " +#~ "you'll need to first call its " +#~ ":code:`to_client()` method. The string " +#~ ":code:`\"0.0.0.0:8080\"` tells the client " +#~ "which server to connect to. In our" +#~ " case we can run the server and" +#~ " the client on the same machine, " +#~ "therefore we use :code:`\"0.0.0.0:8080\"`. If" +#~ " we run a truly federated workload" +#~ " with the server and clients running" +#~ " on different machines, all that " +#~ "needs to change is the " +#~ ":code:`server_address` we pass to the " +#~ "client." +#~ msgstr "" + +#~ msgid ":code:`server.py`, import Flower and start the server:" +#~ msgstr "" + +#~ msgid "" +#~ "The number of federated learning rounds" +#~ " is set in :code:`fit_round()` and " +#~ "the evaluation is defined in " +#~ ":code:`get_evaluate_fn()`. The evaluation function" +#~ " is called after each federated " +#~ "learning round and gives you information" +#~ " about loss and accuracy. Note that" +#~ " we also make use of Flower " +#~ "Datasets here to load the test " +#~ "split of the MNIST dataset for " +#~ "server-side evaluation." +#~ msgstr "" + +#~ msgid "" +#~ "The :code:`main` contains the server-" +#~ "side parameter initialization " +#~ ":code:`utils.set_initial_params()` as well as " +#~ "the aggregation strategy " +#~ ":code:`fl.server.strategy:FedAvg()`. 
The strategy is" +#~ " the default one, federated averaging " +#~ "(or FedAvg), with two clients and " +#~ "evaluation after each federated learning " +#~ "round. The server can be started " +#~ "with the command " +#~ ":code:`fl.server.start_server(server_address=\"0.0.0.0:8080\", " +#~ "strategy=strategy, " +#~ "config=fl.server.ServerConfig(num_rounds=3))`." +#~ msgstr "" + +#~ msgid "" +#~ "Congratulations! You've successfully built and" +#~ " run your first federated learning " +#~ "system. The full `source code " +#~ "`_ for this example can " +#~ "be found in :code:`examples/sklearn-logreg-" +#~ "mnist`." +#~ msgstr "" + +#~ msgid "" +#~ "In this tutorial we will learn how" +#~ " to train a federated XGBoost model" +#~ " on HIGGS dataset using Flower and" +#~ " :code:`xgboost` package. We use a " +#~ "simple example (`full code xgboost-" +#~ "quickstart `_) with two *clients* " +#~ "and one *server* to demonstrate how " +#~ "federated XGBoost works, and then we " +#~ "dive into a more complex example " +#~ "(`full code xgboost-comprehensive " +#~ "`_) to run various experiments." +#~ msgstr "" + +#~ msgid "" +#~ "Since we want to use :code:`xgboost` " +#~ "package to build up XGBoost trees, " +#~ "let's go ahead and install " +#~ ":code:`xgboost`:" +#~ msgstr "" + +#~ msgid "" +#~ "In a file called :code:`client.py`, " +#~ "import xgboost, Flower, Flower Datasets " +#~ "and other related functions:" +#~ msgstr "" + +#~ msgid "" +#~ "In this example, we split the " +#~ "dataset into 30 partitions with uniform" +#~ " distribution (:code:`IidPartitioner(num_partitions=30)`)." +#~ " Then, we load the partition for " +#~ "the given client based on " +#~ ":code:`partition_id`:" +#~ msgstr "" + +#~ msgid "" +#~ "After that, we do train/test splitting" +#~ " on the given partition (client's " +#~ "local data), and transform data format" +#~ " for :code:`xgboost` package." +#~ msgstr "" + +#~ msgid "" +#~ "The functions of :code:`train_test_split` and" +#~ " :code:`transform_dataset_to_dmatrix` are defined " +#~ "as below:" +#~ msgstr "" + +#~ msgid "" +#~ "The :code:`num_local_round` represents the " +#~ "number of iterations for local tree " +#~ "boost. We use CPU for the training" +#~ " in default. One can shift it " +#~ "to GPU by setting :code:`tree_method` to" +#~ " :code:`gpu_hist`. We use AUC as " +#~ "evaluation metric." +#~ msgstr "" + +#~ msgid "" +#~ "After loading the dataset we define " +#~ "the Flower client. We follow the " +#~ "general rule to define :code:`XgbClient` " +#~ "class inherited from :code:`fl.client.Client`." +#~ msgstr "" + +#~ msgid "" +#~ "All required parameters defined above " +#~ "are passed to :code:`XgbClient`'s constructor." +#~ msgstr "" + +#~ msgid "" +#~ "Then, we override :code:`get_parameters`, " +#~ ":code:`fit` and :code:`evaluate` methods " +#~ "insides :code:`XgbClient` class as follows." +#~ msgstr "" + +#~ msgid "" +#~ "Unlike neural network training, XGBoost " +#~ "trees are not started from a " +#~ "specified random weights. In this case," +#~ " we do not use :code:`get_parameters` " +#~ "and :code:`set_parameters` to initialise model" +#~ " parameters for XGBoost. As a result," +#~ " let's return an empty tensor in " +#~ ":code:`get_parameters` when it is called " +#~ "by the server at the first round." +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`fit`, at the first round, " +#~ "we call :code:`xgb.train()` to build up" +#~ " the first set of trees. 
From " +#~ "the second round, we load the " +#~ "global model sent from server to " +#~ "new build Booster object, and then " +#~ "update model weights on local training" +#~ " data with function :code:`local_boost` as" +#~ " follows:" +#~ msgstr "" + +#~ msgid "" +#~ "Given :code:`num_local_round`, we update trees" +#~ " by calling :code:`bst_input.update` method. " +#~ "After training, the last " +#~ ":code:`N=num_local_round` trees will be " +#~ "extracted to send to the server." +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`evaluate`, after loading the " +#~ "global model, we call :code:`bst.eval_set` " +#~ "function to conduct evaluation on valid" +#~ " set. The AUC value will be " +#~ "returned." +#~ msgstr "" + +#~ msgid "" +#~ "Now, we can create an instance of" +#~ " our class :code:`XgbClient` and add " +#~ "one line to actually run this " +#~ "client:" +#~ msgstr "" + +#~ msgid "" +#~ "That's it for the client. We only" +#~ " have to implement :code:`Client` and " +#~ "call :code:`fl.client.start_client()`. The string" +#~ " :code:`\"[::]:8080\"` tells the client " +#~ "which server to connect to. In our" +#~ " case we can run the server and" +#~ " the client on the same machine, " +#~ "therefore we use :code:`\"[::]:8080\"`. If " +#~ "we run a truly federated workload " +#~ "with the server and clients running " +#~ "on different machines, all that needs" +#~ " to change is the :code:`server_address`" +#~ " we point the client at." +#~ msgstr "" + +#~ msgid "" +#~ "In a file named :code:`server.py`, " +#~ "import Flower and FedXgbBagging from " +#~ ":code:`flwr.server.strategy`." +#~ msgstr "" + +#~ msgid "" +#~ "We use two clients for this " +#~ "example. An :code:`evaluate_metrics_aggregation` " +#~ "function is defined to collect and " +#~ "wighted average the AUC values from " +#~ "clients. The :code:`config_func` function is" +#~ " to return the current FL round " +#~ "number to client's :code:`fit()` and " +#~ ":code:`evaluate()` methods." +#~ msgstr "" + +#~ msgid "" +#~ "In file :code:`flwr.server.strategy.fedxgb_bagging.py`," +#~ " we define :code:`FedXgbBagging` inherited " +#~ "from :code:`flwr.server.strategy.FedAvg`. Then, we" +#~ " override the :code:`aggregate_fit`, " +#~ ":code:`aggregate_evaluate` and :code:`evaluate` " +#~ "methods as follows:" +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`aggregate_fit`, we sequentially " +#~ "aggregate the clients' XGBoost trees by" +#~ " calling :code:`aggregate()` function:" +#~ msgstr "" + +#~ msgid "" +#~ "In this function, we first fetch " +#~ "the number of trees and the number" +#~ " of parallel trees for the current" +#~ " and previous model by calling " +#~ ":code:`_get_tree_nums`. Then, the fetched " +#~ "information will be aggregated. After " +#~ "that, the trees (containing model " +#~ "weights) are aggregated to generate a" +#~ " new tree model." +#~ msgstr "" + +#~ msgid "" +#~ "Congratulations! You've successfully built and" +#~ " run your first federated XGBoost " +#~ "system. The AUC values can be " +#~ "checked in :code:`metrics_distributed`. One " +#~ "can see that the average AUC " +#~ "increases over FL rounds." +#~ msgstr "" + +#~ msgid "" +#~ "The full `source code " +#~ "`_ for this example can be" +#~ " found in :code:`examples/xgboost-quickstart`." 
+#~ msgstr "" + +#~ msgid "" +#~ "To do this, we first customise a" +#~ " :code:`ClientManager` in :code:`server_utils.py`:" +#~ msgstr "" + +#~ msgid "" +#~ "The customised :code:`ClientManager` samples " +#~ "all available clients in each FL " +#~ "round based on the order of " +#~ "connection to the server. Then, we " +#~ "define a new strategy :code:`FedXgbCyclic` " +#~ "in :code:`flwr.server.strategy.fedxgb_cyclic.py`, in " +#~ "order to sequentially select only one" +#~ " client in given round and pass " +#~ "the received model to next client." +#~ msgstr "" + +#~ msgid "" +#~ "Unlike the original :code:`FedAvg`, we " +#~ "don't perform aggregation here. Instead, " +#~ "we just make a copy of the " +#~ "received client model as global model" +#~ " by overriding :code:`aggregate_fit`." +#~ msgstr "" + +#~ msgid "" +#~ "Also, the customised :code:`configure_fit` and" +#~ " :code:`configure_evaluate` methods ensure the" +#~ " clients to be sequentially selected " +#~ "given FL round:" +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`dataset.py`, we have a " +#~ "function :code:`instantiate_partitioner` to " +#~ "instantiate the data partitioner based " +#~ "on the given :code:`num_partitions` and " +#~ ":code:`partitioner_type`. Currently, we provide " +#~ "four supported partitioner type to " +#~ "simulate the uniformity/non-uniformity in " +#~ "data quantity (uniform, linear, square, " +#~ "exponential)." +#~ msgstr "" + +#~ msgid "" +#~ "To facilitate centralised evaluation, we " +#~ "define a function in :code:`server_utils.py`:" +#~ msgstr "" + +#~ msgid "" +#~ "This function returns a evaluation " +#~ "function which instantiates a :code:`Booster`" +#~ " object and loads the global model" +#~ " weights to it. The evaluation is " +#~ "conducted by calling :code:`eval_set()` " +#~ "method, and the tested AUC value " +#~ "is reported." +#~ msgstr "" + +#~ msgid "" +#~ "As for distributed evaluation on the " +#~ "clients, it's same as the quick-" +#~ "start example by overriding the " +#~ ":code:`evaluate()` method insides the " +#~ ":code:`XgbClient` class in :code:`client_utils.py`." +#~ msgstr "" + +#~ msgid "" +#~ "We also provide an example code " +#~ "(:code:`sim.py`) to use the simulation " +#~ "capabilities of Flower to simulate " +#~ "federated XGBoost training on either a" +#~ " single machine or a cluster of " +#~ "machines." +#~ msgstr "" + +#~ msgid "" +#~ "After importing all required packages, " +#~ "we define a :code:`main()` function to" +#~ " perform the simulation process:" +#~ msgstr "" + +#~ msgid "" +#~ "We first load the dataset and " +#~ "perform data partitioning, and the " +#~ "pre-processed data is stored in a " +#~ ":code:`list`. After the simulation begins, " +#~ "the clients won't need to pre-" +#~ "process their partitions again." +#~ msgstr "" + +#~ msgid "" +#~ "After that, we start the simulation " +#~ "by calling :code:`fl.simulation.start_simulation`:" +#~ msgstr "" + +#~ msgid "" +#~ "One of key parameters for " +#~ ":code:`start_simulation` is :code:`client_fn` which" +#~ " returns a function to construct a" +#~ " client. We define it as follows:" +#~ msgstr "" + +#~ msgid "" +#~ "In :code:`utils.py`, we define the " +#~ "arguments parsers for clients, server " +#~ "and simulation, allowing users to " +#~ "specify different experimental settings. 
Let's" +#~ " first see the sever side:" +#~ msgstr "" + +#~ msgid "" +#~ "This allows user to specify training " +#~ "strategies / the number of total " +#~ "clients / FL rounds / participating " +#~ "clients / clients for evaluation, and" +#~ " evaluation fashion. Note that with " +#~ ":code:`--centralised-eval`, the sever will " +#~ "do centralised evaluation and all " +#~ "functionalities for client evaluation will " +#~ "be disabled." +#~ msgstr "" + +#~ msgid "" +#~ "This defines various options for client" +#~ " data partitioning. Besides, clients also" +#~ " have an option to conduct evaluation" +#~ " on centralised test set by setting" +#~ " :code:`--centralised-eval`, as well as " +#~ "an option to perform scaled learning " +#~ "rate based on the number of " +#~ "clients by setting :code:`--scaled-lr`." +#~ msgstr "" + +#~ msgid "" +#~ "The full `code " +#~ "`_ for this comprehensive " +#~ "example can be found in :code:`examples" +#~ "/xgboost-comprehensive`." +#~ msgstr "" + +#~ msgid "|b8714c45b74b4d8fb008e2ebb3bc1d44|" +#~ msgstr "" + +#~ msgid "|75f1561efcfd422ea67d28d1513120dc|" +#~ msgstr "" + +#~ msgid "|6a1f51b235304558a9bdaaabfc93b8d2|" +#~ msgstr "" + +#~ msgid "|35e70dab1fb544af9aa3a9c09c4f9797|" +#~ msgstr "" + +#~ msgid "|d7efb5705dd3467f991ed23746824a07|" +#~ msgstr "" + +#~ msgid "|94e7b021c7b540bfbedf7f082a41ff87|" +#~ msgstr "" + +#~ msgid "|a80714782dde439ab73936518f91fc3c|" +#~ msgstr "" + +#~ msgid "|c62080ca6197473da57d191c8225a9d9|" +#~ msgstr "" + +#~ msgid "|21a8f1e6a5b14a7bbb8559979d0e8a2b|" +#~ msgstr "" + +#~ msgid "|c310f2a22f7b4917bf42775aae7a1c09|" +#~ msgstr "" + +#~ msgid "|a0c5b43401194535a8460bcf02e65f9a|" +#~ msgstr "" + +#~ msgid "|aabfdbd5564e41a790f8ea93cc21a444|" +#~ msgstr "" + +#~ msgid "|c9cc8f160fa647b09e742fe4dc8edb54|" +#~ msgstr "" + +#~ msgid "|7e83aad011cd4907b2f02f907c6922e9|" +#~ msgstr "" + +#~ msgid "|4627c2bb6cc443ae9e079f81f33c9dd9|" +#~ msgstr "" + +#~ msgid "|131af8322dc5466b827afd24be98f8c0|" +#~ msgstr "" + +#~ msgid "|f92920b87f3a40179bf7ddd0b6144c53|" +#~ msgstr "" + +#~ msgid "|d62da263071d45a496f543e41fce3a19|" +#~ msgstr "" + +#~ msgid "|ad851971645b4e1fbf8d15bcc0b2ee11|" +#~ msgstr "" + +#~ msgid "|929e9a6de6b34edb8488e644e2bb5221|" +#~ msgstr "" + +#~ msgid "|404cf9c9e8d64784a55646c0f9479cbc|" +#~ msgstr "" + +#~ msgid "|b021ff9d25814458b1e631f8985a648b|" +#~ msgstr "" + +#~ msgid "|e6ca84e1df244f238288a768352678e5|" +#~ msgstr "" + +#~ msgid "|39c2422082554a21963baffb33a0d057|" +#~ msgstr "" + +#~ msgid "|07ecf5fcd6814e88906accec6fa0fbfb|" +#~ msgstr "" + +#~ msgid "|57e78c0ca8a94ba5a64a04b1f2280e55|" +#~ msgstr "" + +#~ msgid "|9819b40e59ee40a4921e1244e8c99bac|" +#~ msgstr "" + +#~ msgid "|797bf279c4894b5ead31dc9b0534ed62|" +#~ msgstr "" + diff --git a/doc/locales/pt_BR/LC_MESSAGES/framework-docs.po b/doc/locales/pt_BR/LC_MESSAGES/framework-docs.po index d5f52b193e87..9c7a59d09008 100644 --- a/doc/locales/pt_BR/LC_MESSAGES/framework-docs.po +++ b/doc/locales/pt_BR/LC_MESSAGES/framework-docs.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: Flower main\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-09-15 09:09+0200\n" +"POT-Creation-Date: 2024-09-27 00:30+0000\n" "PO-Revision-Date: 2024-05-25 11:09+0000\n" "Last-Translator: Gustavo Bertoli \n" "Language: pt_BR\n" @@ -17,7 +17,7 @@ msgstr "" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.15.0\n" +"Generated-By: Babel 2.16.0\n" #: 
../../source/contributor-explanation-public-and-private-apis.rst:2 msgid "Public and private APIs" @@ -62,22 +62,22 @@ msgid "" "or not by reading the Flower source code." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:22 +#: ../../source/contributor-explanation-public-and-private-apis.rst:23 msgid "Flower public API" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:24 +#: ../../source/contributor-explanation-public-and-private-apis.rst:25 msgid "Flower has a well-defined public API. Let's look at this in more detail." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:28 +#: ../../source/contributor-explanation-public-and-private-apis.rst:29 msgid "" "Every component that is reachable by recursively following " "``__init__.__all__`` starting from the root package (``flwr``) is part of" " the public API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:30 +#: ../../source/contributor-explanation-public-and-private-apis.rst:32 msgid "" "If you want to determine whether a component " "(class/function/generator/...) is part of the public API or not, you need" @@ -85,13 +85,13 @@ msgid "" "src/py/flwr`` to look at the Python sub-packages contained ``flwr``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:43 +#: ../../source/contributor-explanation-public-and-private-apis.rst:46 msgid "" "Contrast this with the definition of ``__all__`` in the root " "``src/py/flwr/__init__.py``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:55 +#: ../../source/contributor-explanation-public-and-private-apis.rst:59 msgid "" "You can see that ``flwr`` has six subpackages (``cli``, ``client``, " "``common``, ``proto``, ``server``, ``simulation``), but only four of them" @@ -99,7 +99,7 @@ msgid "" "``simulation``)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:57 +#: ../../source/contributor-explanation-public-and-private-apis.rst:63 msgid "" "What does this mean? It means that ``client``, ``common``, ``server`` and" " ``simulation`` are part of the public API, but ``cli`` and ``proto`` are" @@ -110,21 +110,21 @@ msgid "" "even be removed completely." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:62 +#: ../../source/contributor-explanation-public-and-private-apis.rst:70 msgid "Therefore, as a Flower user:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:64 +#: ../../source/contributor-explanation-public-and-private-apis.rst:72 msgid "``from flwr import client`` ✅ Ok, you're importing a public API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:65 +#: ../../source/contributor-explanation-public-and-private-apis.rst:73 msgid "" "``from flwr import proto`` ❌ Not recommended, you're importing a private " "API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:67 +#: ../../source/contributor-explanation-public-and-private-apis.rst:75 msgid "" "What about components that are nested deeper in the hierarchy? Let's look" " at Flower strategies to see another typical pattern. 
Flower strategies " @@ -133,7 +133,7 @@ msgid "" "``src/py/flwr/server/strategy/__init__.py``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:81 +#: ../../source/contributor-explanation-public-and-private-apis.rst:91 msgid "" "What's notable here is that all strategies are implemented in dedicated " "modules (e.g., ``fedavg.py``). In ``__init__.py``, we *import* the " @@ -145,33 +145,33 @@ msgid "" "the public API (as long as we update the import path in ``__init__.py``)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:86 +#: ../../source/contributor-explanation-public-and-private-apis.rst:99 msgid "Therefore:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:88 +#: ../../source/contributor-explanation-public-and-private-apis.rst:101 msgid "" "``from flwr.server.strategy import FedAvg`` ✅ Ok, you're importing a " "class that is part of the public API." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:89 +#: ../../source/contributor-explanation-public-and-private-apis.rst:103 msgid "" "``from flwr.server.strategy import fedavg`` ❌ Not recommended, you're " "importing a private module." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:91 +#: ../../source/contributor-explanation-public-and-private-apis.rst:106 msgid "" "This approach is also implemented in the tooling that automatically " "builds API reference docs." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:94 +#: ../../source/contributor-explanation-public-and-private-apis.rst:110 msgid "Flower public API of private packages" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:96 +#: ../../source/contributor-explanation-public-and-private-apis.rst:112 msgid "" "We also use this to define the public API of private subpackages. Public," " in this context, means the API that other ``flwr`` subpackages should " @@ -179,14 +179,14 @@ msgid "" "not exported via ``src/py/flwr/server/__init__.py``'s ``__all__``)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:100 +#: ../../source/contributor-explanation-public-and-private-apis.rst:117 msgid "" "Still, the private sub-package ``flwr.server.driver`` defines a " "\"public\" API using ``__all__`` in " "``src/py/flwr/server/driver/__init__.py``:" msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:114 +#: ../../source/contributor-explanation-public-and-private-apis.rst:132 msgid "" "The interesting part is that both ``GrpcDriver`` and ``InMemoryDriver`` " "are never used by Flower framework users, only by other parts of the " @@ -198,7 +198,7 @@ msgid "" "``InMemoryDriver`` class definition)." msgstr "" -#: ../../source/contributor-explanation-public-and-private-apis.rst:117 +#: ../../source/contributor-explanation-public-and-private-apis.rst:140 msgid "" "This is because ``flwr.server.driver`` defines a public interface for " "other ``flwr`` subpackages. This allows codeowners of " @@ -237,16 +237,16 @@ msgstr "" "Antes de começarmos, precisamos encontrar alguns pré-requisitos em nosso " "ambiente de desenvolvimento local." -#: ../../source/contributor-how-to-build-docker-images.rst:12 +#: ../../source/contributor-how-to-build-docker-images.rst:13 #, fuzzy msgid "Clone the ``flower`` repository." msgstr "Clone o repositório do flower." 
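The strings above describe how Flower marks its public API: each package's ``__init__.py`` re-exports components from private modules (e.g. ``fedavg.py``) and lists them in ``__all__``, and anything reachable by recursively following ``__all__`` from the root ``flwr`` package counts as public. Purely as an illustrative aid (this is not part of the translation catalog and not the verbatim contents of any ``flwr`` file), the following minimal Python sketch walks ``__all__`` recursively to list everything that is public under that definition; it assumes ``flwr`` is installed in the current environment.

    import importlib
    import types


    def list_public_api(module_name: str) -> list[str]:
        """Collect dotted names reachable by recursively following ``__all__``.

        Anything listed in ``__all__`` of a package's ``__init__.py`` (and, for
        exported subpackages, in their ``__all__`` in turn) counts as public;
        everything else is private by convention.
        """
        module = importlib.import_module(module_name)
        public: list[str] = []
        for name in getattr(module, "__all__", []):
            attr = getattr(module, name, None)
            if isinstance(attr, types.ModuleType):
                # An exported subpackage (e.g. ``client`` or ``server``): recurse.
                public.extend(list_public_api(attr.__name__))
            else:
                # An exported class or function (e.g. a strategy re-exported
                # from a private module such as ``fedavg.py``).
                public.append(f"{module_name}.{name}")
        return public


    if __name__ == "__main__":
        for dotted_name in sorted(list_public_api("flwr")):
            print(dotted_name)

With ``flwr`` installed, this should print names such as ``flwr.server.strategy.FedAvg`` while omitting private packages like ``flwr.proto``, matching the import recommendations given in the strings above.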
-#: ../../source/contributor-how-to-build-docker-images.rst:18 +#: ../../source/contributor-how-to-build-docker-images.rst:19 msgid "Verify the Docker daemon is running." msgstr "Verifique que o serviço Docker está rodando." -#: ../../source/contributor-how-to-build-docker-images.rst:20 +#: ../../source/contributor-how-to-build-docker-images.rst:21 msgid "" "The build instructions that assemble the images are located in the " "respective Dockerfiles. You can find them in the subdirectories of " @@ -256,7 +256,7 @@ msgstr "" "respectivos Dockerfiles. Você pode encontrá-los nos subdiretórios " "``src/docker```." -#: ../../source/contributor-how-to-build-docker-images.rst:23 +#: ../../source/contributor-how-to-build-docker-images.rst:24 #, fuzzy msgid "" "Flower Docker images are configured via build arguments. Through build " @@ -276,146 +276,146 @@ msgstr "" "Todos os argumentos de compilação disponíveis para cada imagem estão " "listados em uma das tabelas abaixo." -#: ../../source/contributor-how-to-build-docker-images.rst:30 +#: ../../source/contributor-how-to-build-docker-images.rst:32 #, fuzzy msgid "Building the Base Image" msgstr "Construindo a imagem base" -#: ../../source/contributor-how-to-build-docker-images.rst:36 -#: ../../source/contributor-how-to-build-docker-images.rst:98 +#: ../../source/contributor-how-to-build-docker-images.rst:38 +#: ../../source/contributor-how-to-build-docker-images.rst:104 msgid "Build argument" msgstr "Argumento de compilação" -#: ../../source/contributor-how-to-build-docker-images.rst:37 -#: ../../source/contributor-how-to-build-docker-images.rst:99 +#: ../../source/contributor-how-to-build-docker-images.rst:39 +#: ../../source/contributor-how-to-build-docker-images.rst:105 msgid "Description" msgstr "Descrição" -#: ../../source/contributor-how-to-build-docker-images.rst:38 -#: ../../source/contributor-how-to-build-docker-images.rst:100 +#: ../../source/contributor-how-to-build-docker-images.rst:40 +#: ../../source/contributor-how-to-build-docker-images.rst:106 msgid "Required" msgstr "Necessário" -#: ../../source/contributor-how-to-build-docker-images.rst:39 -#: ../../source/contributor-how-to-build-docker-images.rst:101 -#: ../../source/docker/persist-superlink-state.rst:18 -#: ../../source/docker/pin-version.rst:11 +#: ../../source/contributor-how-to-build-docker-images.rst:41 +#: ../../source/contributor-how-to-build-docker-images.rst:107 +#: ../../source/docker/persist-superlink-state.rst:19 +#: ../../source/docker/pin-version.rst:12 #: ../../source/docker/set-environment-variables.rst:8 msgid "Example" msgstr "Exemplo" -#: ../../source/contributor-how-to-build-docker-images.rst:40 +#: ../../source/contributor-how-to-build-docker-images.rst:42 msgid "``DISTRO``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:41 +#: ../../source/contributor-how-to-build-docker-images.rst:43 #, fuzzy msgid "The Linux distribution to use as the base image." msgstr "O nome do repositório da imagem base." 
-#: ../../source/contributor-how-to-build-docker-images.rst:42 -#: ../../source/contributor-how-to-build-docker-images.rst:46 -#: ../../source/contributor-how-to-build-docker-images.rst:50 -#: ../../source/contributor-how-to-build-docker-images.rst:66 -#: ../../source/contributor-how-to-build-docker-images.rst:70 -#: ../../source/contributor-how-to-build-docker-images.rst:104 +#: ../../source/contributor-how-to-build-docker-images.rst:44 +#: ../../source/contributor-how-to-build-docker-images.rst:48 +#: ../../source/contributor-how-to-build-docker-images.rst:52 +#: ../../source/contributor-how-to-build-docker-images.rst:68 +#: ../../source/contributor-how-to-build-docker-images.rst:75 +#: ../../source/contributor-how-to-build-docker-images.rst:110 msgid "No" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:43 +#: ../../source/contributor-how-to-build-docker-images.rst:45 #, fuzzy msgid "``ubuntu``" msgstr "``UBUNTU_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:44 +#: ../../source/contributor-how-to-build-docker-images.rst:46 #, fuzzy msgid "``DISTRO_VERSION``" msgstr "``PIP_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:45 +#: ../../source/contributor-how-to-build-docker-images.rst:47 msgid "Version of the Linux distribution." msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:47 +#: ../../source/contributor-how-to-build-docker-images.rst:49 msgid ":substitution-code:`|ubuntu_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:48 +#: ../../source/contributor-how-to-build-docker-images.rst:50 msgid "``PYTHON_VERSION``" msgstr "``PYTHON_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:49 +#: ../../source/contributor-how-to-build-docker-images.rst:51 msgid "Version of ``python`` to be installed." msgstr "Versão do ``python`` a ser instalada." -#: ../../source/contributor-how-to-build-docker-images.rst:51 +#: ../../source/contributor-how-to-build-docker-images.rst:53 msgid "``3.11`` or ``3.11.1``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:52 +#: ../../source/contributor-how-to-build-docker-images.rst:54 msgid "``PIP_VERSION``" msgstr "``PIP_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:53 +#: ../../source/contributor-how-to-build-docker-images.rst:55 msgid "Version of ``pip`` to be installed." msgstr "Versão do ``pip`` a ser instalada." -#: ../../source/contributor-how-to-build-docker-images.rst:54 -#: ../../source/contributor-how-to-build-docker-images.rst:58 -#: ../../source/contributor-how-to-build-docker-images.rst:62 -#: ../../source/contributor-how-to-build-docker-images.rst:108 +#: ../../source/contributor-how-to-build-docker-images.rst:56 +#: ../../source/contributor-how-to-build-docker-images.rst:60 +#: ../../source/contributor-how-to-build-docker-images.rst:64 +#: ../../source/contributor-how-to-build-docker-images.rst:114 msgid "Yes" msgstr "Sim" -#: ../../source/contributor-how-to-build-docker-images.rst:55 +#: ../../source/contributor-how-to-build-docker-images.rst:57 msgid ":substitution-code:`|pip_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:56 +#: ../../source/contributor-how-to-build-docker-images.rst:58 msgid "``SETUPTOOLS_VERSION``" msgstr "``SETUPTOOLS_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:57 +#: ../../source/contributor-how-to-build-docker-images.rst:59 msgid "Version of ``setuptools`` to be installed." 
msgstr "Versão do ``setuptools`` a ser instalada." -#: ../../source/contributor-how-to-build-docker-images.rst:59 +#: ../../source/contributor-how-to-build-docker-images.rst:61 #, fuzzy msgid ":substitution-code:`|setuptools_version|`" msgstr "``SETUPTOOLS_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:60 +#: ../../source/contributor-how-to-build-docker-images.rst:62 msgid "``FLWR_VERSION``" msgstr "``FLWR_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:61 +#: ../../source/contributor-how-to-build-docker-images.rst:63 msgid "Version of Flower to be installed." msgstr "Versão do Flower a ser instalada." -#: ../../source/contributor-how-to-build-docker-images.rst:63 +#: ../../source/contributor-how-to-build-docker-images.rst:65 msgid ":substitution-code:`|stable_flwr_version|`" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:64 +#: ../../source/contributor-how-to-build-docker-images.rst:66 #, fuzzy msgid "``FLWR_PACKAGE``" msgstr "``FLWR_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:65 +#: ../../source/contributor-how-to-build-docker-images.rst:67 #, fuzzy msgid "The Flower package to be installed." msgstr "Versão do Flower a ser instalada." -#: ../../source/contributor-how-to-build-docker-images.rst:67 +#: ../../source/contributor-how-to-build-docker-images.rst:69 msgid "``flwr`` or ``flwr-nightly``" msgstr "" -#: ../../source/contributor-how-to-build-docker-images.rst:68 +#: ../../source/contributor-how-to-build-docker-images.rst:70 #, fuzzy msgid "``FLWR_VERSION_REF``" msgstr "``FLWR_VERSION``" -#: ../../source/contributor-how-to-build-docker-images.rst:69 +#: ../../source/contributor-how-to-build-docker-images.rst:71 msgid "" "A `direct reference " "`_." -#: ../../source/contributor-how-to-contribute-translations.rst:29 +#: ../../source/contributor-how-to-contribute-translations.rst:28 msgid "" "Once you are signed in to Weblate, you can navigate to the `Flower " "Framework project `_. Aqui, você deve ver os diferentes idiomas existentes " "que podem ser encontrados no site." -#: ../../source/contributor-how-to-contribute-translations.rst:34 +#: ../../source/contributor-how-to-contribute-translations.rst:32 msgid "" "Once you have selected the language you want to contribute to, you should" " see a similar interface to this:" @@ -585,7 +585,7 @@ msgstr "" "Uma vez que você tenha selecionado o idioma que deseja contribuir, você " "deve ver uma interface semelhante a esta:" -#: ../../source/contributor-how-to-contribute-translations.rst:39 +#: ../../source/contributor-how-to-contribute-translations.rst:37 msgid "" "The most straight forward option here is to click on the ``Translate`` " "button on the top right (in the ``Translation status`` section). This " @@ -597,12 +597,12 @@ msgstr "" "automaticamente para a interface de tradução de strings ainda não " "traduzidas." 
-#: ../../source/contributor-how-to-contribute-translations.rst:43 +#: ../../source/contributor-how-to-contribute-translations.rst:41 #, fuzzy msgid "This is what the interface looks like:" msgstr "É assim que a interface se parece:" -#: ../../source/contributor-how-to-contribute-translations.rst:47 +#: ../../source/contributor-how-to-contribute-translations.rst:45 msgid "" "You input your translation in the text box at the top and then, once you " "are happy with it, you either press ``Save and continue`` (to save the " @@ -619,7 +619,7 @@ msgstr "" "ou ``Skip`` (para ir para a próxima string não traduzida sem salvar nada " "na atual)." -#: ../../source/contributor-how-to-contribute-translations.rst:54 +#: ../../source/contributor-how-to-contribute-translations.rst:51 msgid "" "In order to help with the translations, you can see on the bottom the " "``Nearby strings``, the ``Comments`` (from other contributors), the " @@ -634,7 +634,7 @@ msgstr "" "(outras línguas), e o ``History`` (histórico) de traduções para esta " "string." -#: ../../source/contributor-how-to-contribute-translations.rst:59 +#: ../../source/contributor-how-to-contribute-translations.rst:56 msgid "" "On the right, under the ``String information`` section, you can also " "click the link under ``Source string location`` in order to view the " @@ -644,7 +644,7 @@ msgstr "" " link sob ``Source string location`` para visualizar a fonte do arquivo " "doc que contém a string." -#: ../../source/contributor-how-to-contribute-translations.rst:63 +#: ../../source/contributor-how-to-contribute-translations.rst:60 msgid "" "For more information about translating using Weblate, you can check out " "this `in-depth guide " @@ -654,12 +654,12 @@ msgstr "" "pode conferir este `guia detalhado " "`_." -#: ../../source/contributor-how-to-contribute-translations.rst:67 +#: ../../source/contributor-how-to-contribute-translations.rst:64 #, fuzzy msgid "Add new languages" msgstr "Adicionar novos idiomas" -#: ../../source/contributor-how-to-contribute-translations.rst:69 +#: ../../source/contributor-how-to-contribute-translations.rst:66 msgid "" "If you want to add a new language, you will first have to contact us, " "either on `Slack `_, or by opening an issue" @@ -682,20 +682,19 @@ msgid "" "extension. What is it? Read the following quote:" msgstr "" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:7 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:8 msgid "" "The Visual Studio Code Remote - Containers extension lets you use a " "Docker container as a fully-featured development environment. It allows " "you to open any folder inside (or mounted into) a container and take " "advantage of Visual Studio Code's full feature set. A " -":code:`devcontainer.json` file in your project tells VS Code how to " -"access (or create) a development container with a well-defined tool and " -"runtime stack. This container can be used to run an application or to " -"separate tools, libraries, or runtimes needed for working with a " -"codebase." +"``devcontainer.json`` file in your project tells VS Code how to access " +"(or create) a development container with a well-defined tool and runtime " +"stack. This container can be used to run an application or to separate " +"tools, libraries, or runtimes needed for working with a codebase." 
msgstr "" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:9 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:16 msgid "" "Workspace files are mounted from the local file system or copied or " "cloned into the container. Extensions are installed and run inside the " @@ -704,28 +703,28 @@ msgid "" " environment just by connecting to a different container." msgstr "" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:11 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:22 msgid "" "Source: `Official VSCode documentation " "`_" msgstr "" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:15 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:26 msgid "Getting started" msgstr "" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:17 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:28 msgid "" -"Configuring and setting up the :code:`Dockerfile` as well the " -"configuration for the devcontainer can be a bit more involved. The good " -"thing is you don't have to do it. Usually it should be enough to install " -"`Docker `_ on your system and " -"ensure its available on your command line. Additionally, install the " -"`VSCode Containers Extension `_ on your system and ensure its" +" available on your command line. Additionally, install the `VSCode " +"Containers Extension `_." msgstr "" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:19 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:35 msgid "" "Now you should be good to go. When starting VSCode, it will ask you to " "run in the container environment and - if you confirm - automatically " @@ -735,20 +734,20 @@ msgid "" "option *(Re)Open Folder in Container*." msgstr "" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:21 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:41 msgid "" "In some cases your setup might be more involved. For those cases consult " "the following sources:" msgstr "" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:23 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:44 msgid "" "`Developing inside a Container " "`_" msgstr "" -#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:24 +#: ../../source/contributor-how-to-develop-in-vscode-dev-containers.rst:46 msgid "" "`Remote development in Containers " "`_" @@ -773,154 +772,154 @@ msgid "" "``poetry.lock`` (``rm poetry.lock``) before running ``poetry install``)." 
msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:12 +#: ../../source/contributor-how-to-install-development-versions.rst:14 msgid "" "``flwr = { version = \"1.0.0a0\", allow-prereleases = true }`` (without " "extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:13 +#: ../../source/contributor-how-to-install-development-versions.rst:15 msgid "" "``flwr = { version = \"1.0.0a0\", allow-prereleases = true, extras = " "[\"simulation\"] }`` (with extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:15 +#: ../../source/contributor-how-to-install-development-versions.rst:18 msgid "" "Install ``flwr`` from a local copy of the Flower source code via " "``pyproject.toml``:" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:17 +#: ../../source/contributor-how-to-install-development-versions.rst:20 msgid "``flwr = { path = \"../../\", develop = true }`` (without extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:18 +#: ../../source/contributor-how-to-install-development-versions.rst:21 msgid "" "``flwr = { path = \"../../\", develop = true, extras = [\"simulation\"] " "}`` (with extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:20 +#: ../../source/contributor-how-to-install-development-versions.rst:23 msgid "Install ``flwr`` from a local wheel file via ``pyproject.toml``:" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:22 +#: ../../source/contributor-how-to-install-development-versions.rst:25 msgid "" "``flwr = { path = \"../../dist/flwr-1.8.0-py3-none-any.whl\" }`` (without" " extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:23 +#: ../../source/contributor-how-to-install-development-versions.rst:26 msgid "" "``flwr = { path = \"../../dist/flwr-1.8.0-py3-none-any.whl\", extras = " "[\"simulation\"] }`` (with extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:25 +#: ../../source/contributor-how-to-install-development-versions.rst:29 msgid "" "Please refer to the Poetry documentation for further details: `Poetry " "Dependency Specification `_" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:28 +#: ../../source/contributor-how-to-install-development-versions.rst:33 msgid "Using pip (recommended on Colab)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:30 +#: ../../source/contributor-how-to-install-development-versions.rst:35 msgid "Install a ``flwr`` pre-release from PyPI:" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:32 +#: ../../source/contributor-how-to-install-development-versions.rst:37 msgid "``pip install -U --pre flwr`` (without extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:33 +#: ../../source/contributor-how-to-install-development-versions.rst:38 msgid "``pip install -U --pre 'flwr[simulation]'`` (with extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:35 +#: ../../source/contributor-how-to-install-development-versions.rst:40 msgid "" "Python packages can be installed from git repositories. Use one of the " "following commands to install the Flower directly from GitHub." 
msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:37 +#: ../../source/contributor-how-to-install-development-versions.rst:43 msgid "Install ``flwr`` from the default GitHub branch (``main``):" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:39 +#: ../../source/contributor-how-to-install-development-versions.rst:45 msgid "" "``pip install flwr@git+https://github.com/adap/flower.git`` (without " "extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:40 +#: ../../source/contributor-how-to-install-development-versions.rst:46 msgid "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git'``" " (with extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:42 +#: ../../source/contributor-how-to-install-development-versions.rst:49 msgid "Install ``flwr`` from a specific GitHub branch (``branch-name``):" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:44 +#: ../../source/contributor-how-to-install-development-versions.rst:51 msgid "" "``pip install flwr@git+https://github.com/adap/flower.git@branch-name`` " "(without extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:45 +#: ../../source/contributor-how-to-install-development-versions.rst:53 msgid "" "``pip install 'flwr[simulation]@git+https://github.com/adap/flower.git" "@branch-name'`` (with extras)" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:49 +#: ../../source/contributor-how-to-install-development-versions.rst:57 msgid "Open Jupyter Notebooks on Google Colab" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:51 +#: ../../source/contributor-how-to-install-development-versions.rst:59 msgid "" "Open the notebook ``doc/source/tutorial-series-get-started-with-flower-" "pytorch.ipynb``:" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:53 +#: ../../source/contributor-how-to-install-development-versions.rst:61 msgid "" "https://colab.research.google.com/github/adap/flower/blob/main/doc/source" "/tutorial-series-get-started-with-flower-pytorch.ipynb" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:55 +#: ../../source/contributor-how-to-install-development-versions.rst:63 msgid "" "Open a development version of the same notebook from branch `branch-name`" " by changing ``main`` to ``branch-name`` (right after ``blob``):" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:57 +#: ../../source/contributor-how-to-install-development-versions.rst:66 msgid "" "https://colab.research.google.com/github/adap/flower/blob/branch-" "name/doc/source/tutorial-series-get-started-with-flower-pytorch.ipynb" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:59 +#: ../../source/contributor-how-to-install-development-versions.rst:68 msgid "Install a `whl` on Google Colab:" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:61 +#: ../../source/contributor-how-to-install-development-versions.rst:70 msgid "" "In the vertical icon grid on the left hand side, select ``Files`` > " "``Upload to session storage``" msgstr "" -#: ../../source/contributor-how-to-install-development-versions.rst:62 +#: ../../source/contributor-how-to-install-development-versions.rst:72 msgid "Upload the whl (e.g., ``flwr-1.8.0-py3-none-any.whl``)" msgstr "" -#: 
../../source/contributor-how-to-install-development-versions.rst:63 +#: ../../source/contributor-how-to-install-development-versions.rst:73 msgid "" "Change ``!pip install -q 'flwr[simulation]' torch torchvision " "matplotlib`` to ``!pip install -q 'flwr-1.8.0-py3-none-" @@ -937,25 +936,25 @@ msgid "" "change in the future." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:7 +#: ../../source/contributor-how-to-release-flower.rst:8 msgid "During the release" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:9 +#: ../../source/contributor-how-to-release-flower.rst:10 msgid "" "The version number of a release is stated in ``pyproject.toml``. To " "release a new version of Flower, the following things need to happen (in " "that order):" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:11 +#: ../../source/contributor-how-to-release-flower.rst:13 msgid "" "Run ``python3 src/py/flwr_tool/update_changelog.py `` in " "order to add every new change to the changelog (feel free to make manual " "changes to the changelog afterwards until it looks good)." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:12 +#: ../../source/contributor-how-to-release-flower.rst:16 msgid "" "Once the changelog has been updated with all the changes, run ``./dev" "/prepare-release-changelog.sh v``, where ```` " @@ -965,7 +964,7 @@ msgid "" "the contributors. Open a pull request with those changes." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:13 +#: ../../source/contributor-how-to-release-flower.rst:22 msgid "" "Once the pull request is merged, tag the release commit with the version " "number as soon as the PR is merged: ``git tag v`` (notice " @@ -974,100 +973,100 @@ msgid "" "artifacts and the relevant part of the changelog." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:14 +#: ../../source/contributor-how-to-release-flower.rst:26 msgid "Check the draft release on GitHub, and if everything is good, publish it." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:17 +#: ../../source/contributor-how-to-release-flower.rst:29 msgid "After the release" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:19 +#: ../../source/contributor-how-to-release-flower.rst:31 msgid "Create a pull request which contains the following changes:" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:21 +#: ../../source/contributor-how-to-release-flower.rst:33 msgid "Increase the minor version in ``pyproject.toml`` by one." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:22 +#: ../../source/contributor-how-to-release-flower.rst:34 msgid "Update all files which contain the current version number if necessary." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:23 +#: ../../source/contributor-how-to-release-flower.rst:35 msgid "Add a new ``Unreleased`` section in ``changelog.md``." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:25 +#: ../../source/contributor-how-to-release-flower.rst:37 msgid "" "Merge the pull request on the same day (i.e., before a new nightly " "release gets published to PyPI)." 
msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:28 +#: ../../source/contributor-how-to-release-flower.rst:41 msgid "Publishing a pre-release" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:31 +#: ../../source/contributor-how-to-release-flower.rst:44 msgid "Pre-release naming" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:33 +#: ../../source/contributor-how-to-release-flower.rst:46 msgid "" "PyPI supports pre-releases (alpha, beta, release candidate). Pre-releases" " MUST use one of the following naming patterns:" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:35 +#: ../../source/contributor-how-to-release-flower.rst:49 msgid "Alpha: ``MAJOR.MINOR.PATCHaN``" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:36 +#: ../../source/contributor-how-to-release-flower.rst:50 msgid "Beta: ``MAJOR.MINOR.PATCHbN``" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:37 +#: ../../source/contributor-how-to-release-flower.rst:51 msgid "Release candidate (RC): ``MAJOR.MINOR.PATCHrcN``" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:39 +#: ../../source/contributor-how-to-release-flower.rst:53 msgid "Examples include:" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:41 +#: ../../source/contributor-how-to-release-flower.rst:55 msgid "``1.0.0a0``" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:42 +#: ../../source/contributor-how-to-release-flower.rst:56 msgid "``1.0.0b0``" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:43 +#: ../../source/contributor-how-to-release-flower.rst:57 msgid "``1.0.0rc0``" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:44 +#: ../../source/contributor-how-to-release-flower.rst:58 msgid "``1.0.0rc1``" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:46 +#: ../../source/contributor-how-to-release-flower.rst:60 msgid "" "This is in line with PEP-440 and the recommendations from the Python " "Packaging Authority (PyPA):" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:49 +#: ../../source/contributor-how-to-release-flower.rst:63 msgid "`PEP-440 `_" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:50 +#: ../../source/contributor-how-to-release-flower.rst:64 msgid "" "`PyPA Choosing a versioning scheme " "`_" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:52 +#: ../../source/contributor-how-to-release-flower.rst:67 msgid "" "Note that the approach defined by PyPA is not compatible with SemVer " "2.0.0 spec, for details consult the `Semantic Versioning Specification " @@ -1075,26 +1074,26 @@ msgid "" "11 on precedence)." msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:55 +#: ../../source/contributor-how-to-release-flower.rst:73 msgid "Pre-release classification" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:57 +#: ../../source/contributor-how-to-release-flower.rst:75 msgid "Should the next pre-release be called alpha, beta, or release candidate?" 
msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:59 +#: ../../source/contributor-how-to-release-flower.rst:77 msgid "" "RC: feature complete, no known issues (apart from issues that are " "classified as \"won't fix\" for the next stable release) - if no issues " "surface this will become the next stable release" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:60 +#: ../../source/contributor-how-to-release-flower.rst:80 msgid "Beta: feature complete, allowed to have known issues" msgstr "" -#: ../../source/contributor-how-to-release-flower.rst:61 +#: ../../source/contributor-how-to-release-flower.rst:81 msgid "Alpha: not feature complete, allowed to have known issues" msgstr "" @@ -1110,19 +1109,19 @@ msgid "" "the instructions or choose your preferred setup." msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:9 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:10 msgid "Python Version" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:11 -#: ../../source/how-to-install-flower.rst:8 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:12 +#: ../../source/how-to-install-flower.rst:7 msgid "" "Flower requires at least `Python 3.9 `_, " "but `Python 3.10 `_ or above is " "recommended." msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:14 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:17 msgid "" "Due to a known incompatibility with `ray " "`_, we currently recommend utilizing at " @@ -1130,11 +1129,11 @@ msgid "" "simulations." msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:19 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:22 msgid "Virtualenv with Pyenv/Virtualenv" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:21 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:24 msgid "" "One of the recommended virtual environment is `pyenv " "`_/`virtualenv `_ for details." msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:23 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:29 msgid "" "Once Pyenv is set up, you can use it to install `Python Version 3.10 " "`_ or above:" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:29 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:36 msgid "Create the virtualenv with:" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:36 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:42 msgid "Activate the virtualenv by running the following command:" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:44 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:49 msgid "Virtualenv with Poetry" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:46 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:51 msgid "" "The Flower examples are based on `Poetry `_ to manage dependencies. 
After installing Poetry you " "simply create a virtual environment with:" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:52 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:58 msgid "" "If you open a new terminal you can activate the previously created " "virtual environment with the following command:" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:60 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:66 msgid "Virtualenv with Anaconda" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:62 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:68 msgid "" "If you prefer to use Anaconda for your virtual environment then install " "and setup the `conda `_ package. After setting it up you can " +"/user-guide/install/index.html>`_ package. After setting it up you can " "create a virtual environment with:" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:68 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:76 msgid "and activate the virtual environment with:" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:76 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:83 msgid "And then?" msgstr "" -#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:78 +#: ../../source/contributor-how-to-set-up-a-virtual-env.rst:85 msgid "" "As soon as you created your virtual environment you clone one of the " "`Flower examples `_." @@ -1203,11 +1202,11 @@ msgstr "" msgid "Write documentation" msgstr "" -#: ../../source/contributor-how-to-write-documentation.rst:6 +#: ../../source/contributor-how-to-write-documentation.rst:5 msgid "Project layout" msgstr "" -#: ../../source/contributor-how-to-write-documentation.rst:8 +#: ../../source/contributor-how-to-write-documentation.rst:7 msgid "" "The Flower documentation lives in the ``doc`` directory. The Sphinx-based" " documentation system supports both reStructuredText (``.rst`` files) and" @@ -1215,7 +1214,7 @@ msgid "" msgstr "" #: ../../source/contributor-how-to-write-documentation.rst:10 -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:169 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:193 msgid "" "Note that, in order to build the documentation locally (with ``poetry run" " make html``, like described below), `Pandoc " @@ -1223,20 +1222,20 @@ msgid "" "system." msgstr "" -#: ../../source/contributor-how-to-write-documentation.rst:14 +#: ../../source/contributor-how-to-write-documentation.rst:15 msgid "Edit an existing page" msgstr "" -#: ../../source/contributor-how-to-write-documentation.rst:16 +#: ../../source/contributor-how-to-write-documentation.rst:17 msgid "Edit an existing ``.rst`` (or ``.md``) file under ``doc/source/``" msgstr "" -#: ../../source/contributor-how-to-write-documentation.rst:17 +#: ../../source/contributor-how-to-write-documentation.rst:18 #: ../../source/contributor-how-to-write-documentation.rst:27 msgid "Compile the docs: ``cd doc``, then ``poetry run make html``" msgstr "" -#: ../../source/contributor-how-to-write-documentation.rst:18 +#: ../../source/contributor-how-to-write-documentation.rst:19 #: ../../source/contributor-how-to-write-documentation.rst:28 msgid "Open ``doc/build/html/index.html`` in the browser to check the result" msgstr "" @@ -1269,41 +1268,41 @@ msgid "" "the Flower codebase." 
msgstr "" -#: ../../source/contributor-ref-good-first-contributions.rst:11 +#: ../../source/contributor-ref-good-first-contributions.rst:9 msgid "Where to start" msgstr "" -#: ../../source/contributor-ref-good-first-contributions.rst:13 +#: ../../source/contributor-ref-good-first-contributions.rst:11 msgid "" "Until the Flower core library matures it will be easier to get PR's " "accepted if they only touch non-core areas of the codebase. Good " "candidates to get started are:" msgstr "" -#: ../../source/contributor-ref-good-first-contributions.rst:17 +#: ../../source/contributor-ref-good-first-contributions.rst:14 msgid "Documentation: What's missing? What could be expressed more clearly?" msgstr "" -#: ../../source/contributor-ref-good-first-contributions.rst:18 +#: ../../source/contributor-ref-good-first-contributions.rst:15 msgid "Baselines: See below." msgstr "" -#: ../../source/contributor-ref-good-first-contributions.rst:19 +#: ../../source/contributor-ref-good-first-contributions.rst:16 msgid "Examples: See below." msgstr "" -#: ../../source/contributor-ref-good-first-contributions.rst:23 +#: ../../source/contributor-ref-good-first-contributions.rst:19 msgid "Request for Flower Baselines" msgstr "" -#: ../../source/contributor-ref-good-first-contributions.rst:25 +#: ../../source/contributor-ref-good-first-contributions.rst:21 msgid "" "If you are not familiar with Flower Baselines, you should probably check-" "out our `contributing guide for baselines " "`_." msgstr "" -#: ../../source/contributor-ref-good-first-contributions.rst:27 +#: ../../source/contributor-ref-good-first-contributions.rst:25 msgid "" "You should then check out the open `issues " "`_" @@ -1312,7 +1311,7 @@ msgid "" "working on it!" msgstr "" -#: ../../source/contributor-ref-good-first-contributions.rst:31 +#: ../../source/contributor-ref-good-first-contributions.rst:30 msgid "" "Otherwise, if you don't find a baseline you'd like to work on, be sure to" " open a new issue with the baseline request template!" @@ -1353,30 +1352,30 @@ msgid "" "special case of the SecAgg+ protocol." msgstr "" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:8 -msgid "The :code:`SecAgg+` abstraction" +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:9 +msgid "The ``SecAgg+`` abstraction" msgstr "" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:10 -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:161 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:11 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:163 msgid "" "In this implementation, each client will be assigned with a unique index " "(int) for secure aggregation, and thus many python dictionaries used have" " keys of int type rather than ClientProxy type." 
msgstr "" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:65 -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:198 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:67 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:204 msgid "" "The Flower server will execute and process received results in the " "following order:" msgstr "" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:159 -msgid "The :code:`LightSecAgg` abstraction" +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:161 +msgid "The ``LightSecAgg`` abstraction" msgstr "" -#: ../../source/contributor-ref-secure-aggregation-protocols.rst:271 +#: ../../source/contributor-ref-secure-aggregation-protocols.rst:277 msgid "Types" msgstr "" @@ -1390,22 +1389,22 @@ msgid "" "are not used to contributing to GitHub projects." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:6 +#: ../../source/contributor-tutorial-contribute-on-github.rst:7 msgid "" "If you're familiar with how contributing on GitHub works, you can " "directly checkout our :doc:`getting started guide for contributors " "`." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:10 +#: ../../source/contributor-tutorial-contribute-on-github.rst:12 msgid "Setting up the repository" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:12 +#: ../../source/contributor-tutorial-contribute-on-github.rst:29 msgid "**Create a GitHub account and setup Git**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:13 +#: ../../source/contributor-tutorial-contribute-on-github.rst:15 msgid "" "Git is a distributed version control tool. This allows for an entire " "codebase's history to be stored and every developer's machine. It is a " @@ -1414,20 +1413,20 @@ msgid "" "started-with-git/set-up-git>`_ to set it up." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:16 +#: ../../source/contributor-tutorial-contribute-on-github.rst:21 msgid "" "GitHub, itself, is a code hosting platform for version control and " "collaboration. It allows for everyone to collaborate and work from " "anywhere on remote repositories." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:18 +#: ../../source/contributor-tutorial-contribute-on-github.rst:25 msgid "" "If you haven't already, you will need to create an account on `GitHub " "`_." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:20 +#: ../../source/contributor-tutorial-contribute-on-github.rst:28 msgid "" "The idea behind the generic Git and GitHub workflow boils down to this: " "you download code from a remote repository on GitHub, make changes " @@ -1435,19 +1434,19 @@ msgid "" "history back to GitHub." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:23 +#: ../../source/contributor-tutorial-contribute-on-github.rst:42 msgid "**Forking the Flower repository**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:24 +#: ../../source/contributor-tutorial-contribute-on-github.rst:32 msgid "" "A fork is a personal copy of a GitHub repository. To create one for " -"Flower, you must navigate to ``_ (while " +"Flower, you must navigate to https://github.com/adap/flower (while " "connected to your GitHub account) and click the ``Fork`` button situated " "on the top right of the page." 
msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:29 +#: ../../source/contributor-tutorial-contribute-on-github.rst:38 msgid "" "You can change the name if you want, but this is not necessary as this " "version of Flower will be yours and will sit inside your own account " @@ -1455,11 +1454,11 @@ msgid "" " the top left corner that you are looking at your own version of Flower." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:34 +#: ../../source/contributor-tutorial-contribute-on-github.rst:59 msgid "**Cloning your forked repository**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:35 +#: ../../source/contributor-tutorial-contribute-on-github.rst:45 msgid "" "The next step is to download the forked repository on your machine to be " "able to make changes to it. On your forked repository page, you should " @@ -1467,27 +1466,27 @@ msgid "" "ability to copy the HTTPS link of the repository." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:41 +#: ../../source/contributor-tutorial-contribute-on-github.rst:52 msgid "" "Once you copied the \\, you can open a terminal on your machine, " "navigate to the place you want to download the repository to and type:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:47 +#: ../../source/contributor-tutorial-contribute-on-github.rst:59 msgid "" "This will create a ``flower/`` (or the name of your fork if you renamed " "it) folder in the current working directory." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:49 +#: ../../source/contributor-tutorial-contribute-on-github.rst:78 msgid "**Add origin**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:50 +#: ../../source/contributor-tutorial-contribute-on-github.rst:62 msgid "You can then go into the repository folder:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:56 +#: ../../source/contributor-tutorial-contribute-on-github.rst:68 msgid "" "And here we will need to add an origin to our repository. The origin is " "the \\ of the remote fork repository. To obtain it, we can do as " @@ -1495,27 +1494,27 @@ msgid "" "account and copying the link." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:61 +#: ../../source/contributor-tutorial-contribute-on-github.rst:75 msgid "" "Once the \\ is copied, we can type the following command in our " "terminal:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:68 +#: ../../source/contributor-tutorial-contribute-on-github.rst:102 msgid "**Add upstream**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:69 +#: ../../source/contributor-tutorial-contribute-on-github.rst:81 msgid "" "Now we will add an upstream address to our repository. Still in the same " "directory, we must run the following command:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:76 +#: ../../source/contributor-tutorial-contribute-on-github.rst:88 msgid "The following diagram visually explains what we did in the previous steps:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:80 +#: ../../source/contributor-tutorial-contribute-on-github.rst:92 msgid "" "The upstream is the GitHub remote address of the parent repository (in " "this case Flower), i.e. the one we eventually want to contribute to and " @@ -1524,17 +1523,17 @@ msgid "" "in our own account." 
msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:84 +#: ../../source/contributor-tutorial-contribute-on-github.rst:97 msgid "" "To make sure our local version of the fork is up-to-date with the latest " "changes from the Flower repository, we can execute the following command:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:93 +#: ../../source/contributor-tutorial-contribute-on-github.rst:105 msgid "Setting up the coding environment" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:95 +#: ../../source/contributor-tutorial-contribute-on-github.rst:107 msgid "" "This can be achieved by following this :doc:`getting started guide for " "contributors ` (note " @@ -1542,158 +1541,158 @@ msgid "" "code and test it, you can finally start making changes!" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:100 +#: ../../source/contributor-tutorial-contribute-on-github.rst:113 msgid "Making changes" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:102 +#: ../../source/contributor-tutorial-contribute-on-github.rst:115 msgid "" "Before making any changes make sure you are up-to-date with your " "repository:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:108 +#: ../../source/contributor-tutorial-contribute-on-github.rst:121 msgid "And with Flower's repository:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:114 +#: ../../source/contributor-tutorial-contribute-on-github.rst:134 msgid "**Create a new branch**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:115 +#: ../../source/contributor-tutorial-contribute-on-github.rst:128 msgid "" "To make the history cleaner and easier to work with, it is good practice " "to create a new branch for each feature/project that needs to be " "implemented." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:118 +#: ../../source/contributor-tutorial-contribute-on-github.rst:131 msgid "" "To do so, just run the following command inside the repository's " "directory:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:124 +#: ../../source/contributor-tutorial-contribute-on-github.rst:136 msgid "**Make changes**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:125 +#: ../../source/contributor-tutorial-contribute-on-github.rst:137 msgid "Write great code and create wonderful changes using your favorite editor!" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:127 +#: ../../source/contributor-tutorial-contribute-on-github.rst:149 msgid "**Test and format your code**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:128 +#: ../../source/contributor-tutorial-contribute-on-github.rst:139 msgid "" "Don't forget to test and format your code! Otherwise your code won't be " "able to be merged into the Flower repository. This is done so the " "codebase stays consistent and easy to understand." 
msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:131 +#: ../../source/contributor-tutorial-contribute-on-github.rst:143 msgid "To do so, we have written a few scripts that you can execute:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:140 +#: ../../source/contributor-tutorial-contribute-on-github.rst:162 msgid "**Stage changes**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:141 +#: ../../source/contributor-tutorial-contribute-on-github.rst:152 msgid "" "Before creating a commit that will update your history, you must specify " "to Git which files it needs to take into account." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:143 +#: ../../source/contributor-tutorial-contribute-on-github.rst:155 msgid "This can be done with:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:149 +#: ../../source/contributor-tutorial-contribute-on-github.rst:161 msgid "" "To check which files have been modified compared to the last version " "(last commit) and to see which files are staged for commit, you can use " -"the :code:`git status` command." +"the ``git status`` command." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:152 +#: ../../source/contributor-tutorial-contribute-on-github.rst:173 msgid "**Commit changes**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:153 +#: ../../source/contributor-tutorial-contribute-on-github.rst:165 msgid "" -"Once you have added all the files you wanted to commit using :code:`git " -"add`, you can finally create your commit using this command:" +"Once you have added all the files you wanted to commit using ``git add``," +" you can finally create your commit using this command:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:159 +#: ../../source/contributor-tutorial-contribute-on-github.rst:172 msgid "" "The \\ is there to explain to others what the commit " "does. It should be written in an imperative style and be concise. An " -"example would be :code:`git commit -m \"Add images to README\"`." +"example would be ``git commit -m \"Add images to README\"``." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:162 +#: ../../source/contributor-tutorial-contribute-on-github.rst:185 msgid "**Push the changes to the fork**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:163 +#: ../../source/contributor-tutorial-contribute-on-github.rst:176 msgid "" "Once we have committed our changes, we have effectively updated our local" " history, but GitHub has no way of knowing this unless we push our " "changes to our origin's remote address:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:170 +#: ../../source/contributor-tutorial-contribute-on-github.rst:184 msgid "" "Once this is done, you will see on the GitHub that your forked repo was " "updated with the changes you have made." 
msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:174 +#: ../../source/contributor-tutorial-contribute-on-github.rst:188 msgid "Creating and merging a pull request (PR)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:176 +#: ../../source/contributor-tutorial-contribute-on-github.rst:226 msgid "**Create the PR**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:177 +#: ../../source/contributor-tutorial-contribute-on-github.rst:191 msgid "" "Once you have pushed changes, on the GitHub webpage of your repository " "you should see the following message:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:181 +#: ../../source/contributor-tutorial-contribute-on-github.rst:196 msgid "Otherwise you can always find this option in the ``Branches`` page." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:183 +#: ../../source/contributor-tutorial-contribute-on-github.rst:198 msgid "" "Once you click the ``Compare & pull request`` button, you should see " "something similar to this:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:187 +#: ../../source/contributor-tutorial-contribute-on-github.rst:203 msgid "At the top you have an explanation of which branch will be merged where:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:191 +#: ../../source/contributor-tutorial-contribute-on-github.rst:207 msgid "" "In this example you can see that the request is to merge the branch " "``doc-fixes`` from my forked repository to branch ``main`` from the " "Flower repository." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:193 +#: ../../source/contributor-tutorial-contribute-on-github.rst:210 msgid "" "The title should be changed to adhere to the :ref:`pr_title_format` " "guidelines, otherwise it won't be possible to merge the PR. So in this " "case, a correct title might be ``docs(framework:skip) Fix typos``." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:196 +#: ../../source/contributor-tutorial-contribute-on-github.rst:214 msgid "" "The input box in the middle is there for you to describe what your PR " "does and to link it to existing issues. We have placed comments (that " @@ -1701,167 +1700,167 @@ msgid "" "process." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:199 +#: ../../source/contributor-tutorial-contribute-on-github.rst:218 msgid "It is important to follow the instructions described in comments." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:201 +#: ../../source/contributor-tutorial-contribute-on-github.rst:220 msgid "" "At the bottom you will find the button to open the PR. This will notify " "reviewers that a new PR has been opened and that they should look over it" " to merge or to request changes." 
msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:204 +#: ../../source/contributor-tutorial-contribute-on-github.rst:224 msgid "" "If your PR is not yet ready for review, and you don't want to notify " "anyone, you have the option to create a draft pull request:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:208 +#: ../../source/contributor-tutorial-contribute-on-github.rst:230 msgid "**Making new changes**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:209 +#: ../../source/contributor-tutorial-contribute-on-github.rst:229 msgid "" "Once the PR has been opened (as draft or not), you can still push new " "commits to it the same way we did before, by making changes to the branch" " associated with the PR." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:211 +#: ../../source/contributor-tutorial-contribute-on-github.rst:253 msgid "**Review the PR**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:212 +#: ../../source/contributor-tutorial-contribute-on-github.rst:233 msgid "" "Once the PR has been opened or once the draft PR has been marked as " "ready, a review from code owners will be automatically requested:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:216 +#: ../../source/contributor-tutorial-contribute-on-github.rst:238 msgid "" "Code owners will then look into the code, ask questions, request changes " "or validate the PR." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:218 +#: ../../source/contributor-tutorial-contribute-on-github.rst:241 msgid "Merging will be blocked if there are ongoing requested changes." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:222 +#: ../../source/contributor-tutorial-contribute-on-github.rst:245 msgid "" "To resolve them, just push the necessary changes to the branch associated" " with the PR:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:226 +#: ../../source/contributor-tutorial-contribute-on-github.rst:250 msgid "And resolve the conversation:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:230 +#: ../../source/contributor-tutorial-contribute-on-github.rst:254 msgid "" "Once all the conversations have been resolved, you can re-request a " "review." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:233 +#: ../../source/contributor-tutorial-contribute-on-github.rst:274 msgid "**Once the PR is merged**" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:234 +#: ../../source/contributor-tutorial-contribute-on-github.rst:256 msgid "" "If all the automatic tests have passed and reviewers have no more changes" " to request, they can approve the PR and merge it." 
msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:238 +#: ../../source/contributor-tutorial-contribute-on-github.rst:261 msgid "" "Once it is merged, you can delete the branch on GitHub (a button should " "appear to do so) and also delete it locally by doing:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:245 +#: ../../source/contributor-tutorial-contribute-on-github.rst:269 msgid "Then you should update your forked repository by doing:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:254 +#: ../../source/contributor-tutorial-contribute-on-github.rst:277 msgid "Example of first contribution" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:257 +#: ../../source/contributor-tutorial-contribute-on-github.rst:280 msgid "Problem" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:259 +#: ../../source/contributor-tutorial-contribute-on-github.rst:282 msgid "" "For our documentation, we've started to use the `Diàtaxis framework " "`_." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:261 +#: ../../source/contributor-tutorial-contribute-on-github.rst:285 msgid "" "Our \"How to\" guides should have titles that continue the sentence \"How" " to …\", for example, \"How to upgrade to Flower 1.0\"." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:263 +#: ../../source/contributor-tutorial-contribute-on-github.rst:288 msgid "" "Most of our guides do not follow this new format yet, and changing their " "title is (unfortunately) more involved than one might think." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:265 +#: ../../source/contributor-tutorial-contribute-on-github.rst:291 msgid "" "This issue is about changing the title of a doc from present continuous " "to present simple." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:267 +#: ../../source/contributor-tutorial-contribute-on-github.rst:294 msgid "" "Let's take the example of \"Saving Progress\" which we changed to \"Save " "Progress\". Does this pass our check?" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:269 +#: ../../source/contributor-tutorial-contribute-on-github.rst:297 msgid "Before: \"How to saving progress\" ❌" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:271 +#: ../../source/contributor-tutorial-contribute-on-github.rst:299 msgid "After: \"How to save progress\" ✅" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:274 +#: ../../source/contributor-tutorial-contribute-on-github.rst:302 msgid "Solution" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:276 +#: ../../source/contributor-tutorial-contribute-on-github.rst:304 msgid "" "This is a tiny change, but it'll allow us to test your end-to-end setup. 
" "After cloning and setting up the Flower repo, here's what you should do:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:278 +#: ../../source/contributor-tutorial-contribute-on-github.rst:307 msgid "Find the source file in ``doc/source``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:279 +#: ../../source/contributor-tutorial-contribute-on-github.rst:308 msgid "" "Make the change in the ``.rst`` file (beware, the dashes under the title " "should be the same length as the title itself)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:280 +#: ../../source/contributor-tutorial-contribute-on-github.rst:310 msgid "" "Build the docs and `check the result `_" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:283 +#: ../../source/contributor-tutorial-contribute-on-github.rst:314 msgid "Rename file" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:285 +#: ../../source/contributor-tutorial-contribute-on-github.rst:316 msgid "" "You might have noticed that the file name still reflects the old wording." " If we just change the file, then we break all existing links to it - it " @@ -1869,68 +1868,68 @@ msgid "" "engine ranking." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:288 +#: ../../source/contributor-tutorial-contribute-on-github.rst:320 msgid "Here's how to change the file name:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:290 +#: ../../source/contributor-tutorial-contribute-on-github.rst:322 msgid "Change the file name to ``save-progress.rst``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:291 +#: ../../source/contributor-tutorial-contribute-on-github.rst:323 msgid "Add a redirect rule to ``doc/source/conf.py``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:293 +#: ../../source/contributor-tutorial-contribute-on-github.rst:325 msgid "" "This will cause a redirect from ``saving-progress.html`` to ``save-" "progress.html``, old links will continue to work." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:296 +#: ../../source/contributor-tutorial-contribute-on-github.rst:329 msgid "Apply changes in the index file" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:298 +#: ../../source/contributor-tutorial-contribute-on-github.rst:331 msgid "" "For the lateral navigation bar to work properly, it is very important to " "update the ``index.rst`` file as well. This is where we define the whole " "arborescence of the navbar." 
msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:301 +#: ../../source/contributor-tutorial-contribute-on-github.rst:335 msgid "Find and modify the file name in ``index.rst``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:304 +#: ../../source/contributor-tutorial-contribute-on-github.rst:338 msgid "Open PR" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:306 +#: ../../source/contributor-tutorial-contribute-on-github.rst:340 msgid "" "Commit the changes (commit messages are always imperative: \"Do " "something\", in this case \"Change …\")" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:307 +#: ../../source/contributor-tutorial-contribute-on-github.rst:342 msgid "Push the changes to your fork" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:308 +#: ../../source/contributor-tutorial-contribute-on-github.rst:343 msgid "" "Open a PR (as shown above) with title ``docs(framework) Update how-to " "guide title``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:309 +#: ../../source/contributor-tutorial-contribute-on-github.rst:344 msgid "Wait for it to be approved!" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:310 +#: ../../source/contributor-tutorial-contribute-on-github.rst:345 msgid "Congrats! 🥳 You're now officially a Flower contributor!" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:314 +#: ../../source/contributor-tutorial-contribute-on-github.rst:348 #: ../../source/tutorial-series-build-a-strategy-from-scratch-pytorch.ipynb:573 #: ../../source/tutorial-series-customize-the-client-pytorch.ipynb:1012 #: ../../source/tutorial-series-get-started-with-flower-pytorch.ipynb:811 @@ -1939,39 +1938,39 @@ msgstr "" msgid "Next steps" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:316 +#: ../../source/contributor-tutorial-contribute-on-github.rst:350 msgid "" "Once you have made your first PR, and want to contribute more, be sure to" " check out the following :" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:318 +#: ../../source/contributor-tutorial-contribute-on-github.rst:353 msgid "" ":doc:`Good first contributions `, where you should particularly look into the " -":code:`baselines` contributions." +"``baselines`` contributions." msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:322 +#: ../../source/contributor-tutorial-contribute-on-github.rst:357 #: ../../source/fed/0000-20200102-fed-template.md:60 msgid "Appendix" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:327 +#: ../../source/contributor-tutorial-contribute-on-github.rst:362 msgid "PR title format" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:329 +#: ../../source/contributor-tutorial-contribute-on-github.rst:364 msgid "We enforce the following PR title format:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:335 +#: ../../source/contributor-tutorial-contribute-on-github.rst:370 msgid "" "(or ``(:skip) `` to ignore the PR in the " "changelog)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:337 +#: ../../source/contributor-tutorial-contribute-on-github.rst:372 msgid "" "Where ```` needs to be in ``{ci, fix, feat, docs, refactor, " "break}``, ```` should be in ``{framework, baselines, datasets, " @@ -1980,50 +1979,50 @@ msgid "" "verb in the imperative mood." 
msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:341 +#: ../../source/contributor-tutorial-contribute-on-github.rst:377 #, fuzzy msgid "Valid examples:" msgstr "Exemplo" -#: ../../source/contributor-tutorial-contribute-on-github.rst:343 +#: ../../source/contributor-tutorial-contribute-on-github.rst:379 msgid "``feat(framework) Add flwr build CLI command``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:344 +#: ../../source/contributor-tutorial-contribute-on-github.rst:380 msgid "``refactor(examples:skip) Improve quickstart-pytorch logging``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:345 +#: ../../source/contributor-tutorial-contribute-on-github.rst:381 msgid "``ci(*:skip) Enforce PR title format``" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:347 +#: ../../source/contributor-tutorial-contribute-on-github.rst:383 msgid "Invalid examples:" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:349 +#: ../../source/contributor-tutorial-contribute-on-github.rst:385 msgid "``feat(framework): Add flwr build CLI command`` (extra ``:``)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:350 +#: ../../source/contributor-tutorial-contribute-on-github.rst:386 msgid "" "``feat(*) Add flwr build CLI command`` (missing ``skip`` flag along with " "``*``)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:351 +#: ../../source/contributor-tutorial-contribute-on-github.rst:387 msgid "``feat(skip) Add flwr build CLI command`` (missing ````)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:352 +#: ../../source/contributor-tutorial-contribute-on-github.rst:388 msgid "``feat(framework) add flwr build CLI command`` (non capitalised verb)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:353 +#: ../../source/contributor-tutorial-contribute-on-github.rst:389 msgid "``feat(framework) Add flwr build CLI command.`` (dot at the end)" msgstr "" -#: ../../source/contributor-tutorial-contribute-on-github.rst:354 +#: ../../source/contributor-tutorial-contribute-on-github.rst:390 msgid "``Add flwr build CLI command.`` (missing ``()``)" msgstr "" @@ -2033,7 +2032,9 @@ msgstr "" #: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:5 #: ../../source/docker/run-as-subprocess.rst:11 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:12 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:16 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:18 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:13 #: ../../source/docker/tutorial-quickstart-docker.rst:11 msgid "Prerequisites" msgstr "" @@ -2056,16 +2057,16 @@ msgstr "" #: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:12 msgid "" -"Flower uses :code:`pyproject.toml` to manage dependencies and configure " +"Flower uses ``pyproject.toml`` to manage dependencies and configure " "development tools (the ones which support it). Poetry is a build tool " "which supports `PEP 517 `_." 
msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:18 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:17 msgid "Developer Machine Setup" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:21 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:20 msgid "Preliminaries" msgstr "" @@ -2083,94 +2084,93 @@ msgid "" "installation actions to add `brew` to your PATH." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:28 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:29 msgid "" "Install `xz` (to install different Python versions) and `pandoc` to build" -" the docs::" +" the docs:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:34 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:36 msgid "For Ubuntu" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:35 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:38 msgid "" "Ensure your system (Ubuntu 22.04+) is up-to-date, and you have all " -"necessary packages::" +"necessary packages:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:44 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:47 msgid "Create Flower Dev Environment" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:46 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:49 msgid "" -"1. Clone the `Flower repository <https://github.com/adap/flower>`_ from " -"GitHub::" +"Clone the `Flower repository <https://github.com/adap/flower>`_ from " +"GitHub:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:52 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:56 msgid "" "Let's create the Python environment for all-things Flower. If you wish to" -" use :code:`pyenv`, we provide two convenience scripts that you can use. " -"If you prefer using something else than :code:`pyenv`, create a new " -"environment, activate and skip to the last point where all packages are " -"installed." +" use ``pyenv``, we provide two convenience scripts that you can use. If " +"you prefer using something other than ``pyenv``, create a new environment," +" activate it, and skip to the last point where all packages are installed."
msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:54 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:61 msgid "" -"If you don't have :code:`pyenv` installed, the following script that will" -" install it, set it up, and create the virtual environment (with " -":code:`Python 3.9.20` by default)::" +"If you don't have ``pyenv`` installed, the following script will " +"install it, set it up, and create the virtual environment (with ``Python " +"3.9.20`` by default):" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:58 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:68 msgid "" -"If you already have :code:`pyenv` installed (along with the :code:`pyenv-" -"virtualenv` plugin), you can use the following convenience script (with " -":code:`Python 3.9.20` by default)::" +"If you already have ``pyenv`` installed (along with the ``pyenv-" +"virtualenv`` plugin), you can use the following convenience script (with " +"``Python 3.9.20`` by default):" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:62 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:75 msgid "" -"3. Install the Flower package in development mode (think :code:`pip " -"install -e`) along with all necessary dependencies::" +"3. Install the Flower package in development mode (think ``pip install " +"-e``) along with all necessary dependencies:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:69 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:83 msgid "Convenience Scripts" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:71 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:85 msgid "" "The Flower repository contains a number of convenience scripts to make " -"recurring development tasks easier and less error-prone. See the " -":code:`/dev` subdirectory for a full list. The following scripts are " -"amongst the most important ones:" +"recurring development tasks easier and less error-prone. See the ``/dev``" +" subdirectory for a full list. The following scripts are amongst the most" +" important ones:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:77 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:90 msgid "Create/Delete Virtual Environment" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:85 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:98 msgid "Compile ProtoBuf Definitions" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:92 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:105 msgid "Auto-Format Code" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:99 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:112 msgid "Run Linters and Tests" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:106 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:119 msgid "Add a pre-commit hook" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:108 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:121 msgid "" "Developers may integrate a pre-commit hook into their workflow utilizing " "the `pre-commit <https://pre-commit.com/>`_ library.
The pre- @@ -2178,85 +2178,85 @@ msgid "" "``./dev/format.sh`` and ``./dev/test.sh`` scripts." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:110 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:125 msgid "There are multiple ways developers can use this:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:112 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:127 msgid "Install the pre-commit hook to your local git directory by simply running:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:118 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:133 msgid "" "Each ``git commit`` will trigger the execution of formatting and " "linting/test scripts." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:119 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:135 msgid "" "If in a hurry, bypass the hook using ``--no-verify`` with the ``git " -"commit`` command. ::" +"commit`` command." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:124 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:142 msgid "" "For developers who prefer not to install the hook permanently, it is " "possible to execute a one-time check prior to committing changes by using" " the following command:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:130 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:149 msgid "" "This executes the formatting and linting checks/tests on all the files " "without modifying the default behavior of ``git commit``." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:133 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:153 msgid "Run GitHub Actions (CI) locally" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:135 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:155 msgid "" "Developers could run the full set of GitHub Actions workflows under their" " local environment by using `Act <https://github.com/nektos/act>`_. " "Please refer to the installation instructions under the linked repository" -" and run the next command under Flower main cloned repository folder::" +" and run the next command under Flower main cloned repository folder:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:142 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:164 msgid "" "The Flower default workflow would run by setting up the required Docker " "machines underneath." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:147 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:168 msgid "Build Release" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:149 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:170 msgid "" "Flower uses Poetry to build releases. The necessary command is wrapped in" -" a simple script::" +" a simple script:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:154 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:177 msgid "" -"The resulting :code:`.whl` and :code:`.tar.gz` releases will be stored in" -" the :code:`/dist` subdirectory."
+"The resulting ``.whl`` and ``.tar.gz`` releases will be stored in the " +"``/dist`` subdirectory." msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:159 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:181 msgid "Build Documentation" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:161 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:183 msgid "" "Flower's documentation uses `Sphinx `_. " "There's no convenience script to re-build the documentation yet, but it's" -" pretty easy::" +" pretty easy:" msgstr "" -#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:167 +#: ../../source/contributor-tutorial-get-started-as-a-contributor.rst:191 msgid "This will generate HTML documentation in ``doc/build/html``." msgstr "" @@ -2299,7 +2299,7 @@ msgid "" msgstr "" #: ../../source/docker/enable-tls.rst:23 -#: ../../source/docker/persist-superlink-state.rst:14 +#: ../../source/docker/persist-superlink-state.rst:15 msgid "" "If you later want to delete the directory, you can change the user ID " "back to the current user ID by running ``sudo chown -R $USER:$(id -gn) " @@ -2321,21 +2321,21 @@ msgstr "" msgid "Understanding the command" msgstr "" -#: ../../source/docker/enable-tls.rst:44 ../../source/docker/enable-tls.rst:91 +#: ../../source/docker/enable-tls.rst:45 ../../source/docker/enable-tls.rst:92 #: ../../source/docker/enable-tls.rst:125 #: ../../source/docker/tutorial-quickstart-docker.rst:66 #: ../../source/docker/tutorial-quickstart-docker.rst:103 -#: ../../source/docker/tutorial-quickstart-docker.rst:213 -#: ../../source/docker/tutorial-quickstart-docker.rst:300 +#: ../../source/docker/tutorial-quickstart-docker.rst:217 +#: ../../source/docker/tutorial-quickstart-docker.rst:305 msgid "``docker run``: This tells Docker to run a container from an image." msgstr "" -#: ../../source/docker/enable-tls.rst:45 ../../source/docker/enable-tls.rst:92 +#: ../../source/docker/enable-tls.rst:46 ../../source/docker/enable-tls.rst:93 #: ../../source/docker/enable-tls.rst:126 #: ../../source/docker/tutorial-quickstart-docker.rst:67 #: ../../source/docker/tutorial-quickstart-docker.rst:104 -#: ../../source/docker/tutorial-quickstart-docker.rst:214 -#: ../../source/docker/tutorial-quickstart-docker.rst:301 +#: ../../source/docker/tutorial-quickstart-docker.rst:218 +#: ../../source/docker/tutorial-quickstart-docker.rst:306 msgid "``--rm``: Remove the container once it is stopped or the command exits." msgstr "" @@ -2438,18 +2438,18 @@ msgstr "" msgid "the network." msgstr "" -#: ../../source/docker/enable-tls.rst:71 +#: ../../source/docker/enable-tls.rst:72 msgid "SuperNode" msgstr "" -#: ../../source/docker/enable-tls.rst:73 +#: ../../source/docker/enable-tls.rst:74 msgid "" "Assuming that the ``ca.crt`` certificate already exists locally, we can " "use the flag ``--volume`` to mount the local certificate into the " "container's ``/app/`` directory." 
msgstr "" -#: ../../source/docker/enable-tls.rst:78 +#: ../../source/docker/enable-tls.rst:79 msgid "" "If you're generating self-signed certificates and the ``ca.crt`` " "certificate doesn't exist on the SuperNode, you can copy it over after " @@ -2562,15 +2562,15 @@ msgstr "" msgid "Getting Started" msgstr "" -#: ../../source/docker/index.rst:20 +#: ../../source/docker/index.rst:19 msgid "Running in Production" msgstr "" -#: ../../source/docker/index.rst:29 +#: ../../source/docker/index.rst:28 msgid "Advanced Options" msgstr "" -#: ../../source/docker/index.rst:41 +#: ../../source/docker/index.rst:40 msgid "Run Flower using Docker Compose" msgstr "" @@ -2592,7 +2592,7 @@ msgid "" " on your host system and a name for the database file." msgstr "" -#: ../../source/docker/persist-superlink-state.rst:10 +#: ../../source/docker/persist-superlink-state.rst:11 msgid "" "By default, the SuperLink container runs with a non-root user called " "``app`` with the user ID ``49999``. It is recommended to create a new " @@ -2600,7 +2600,7 @@ msgid "" "the mounted directory has the proper permissions." msgstr "" -#: ../../source/docker/persist-superlink-state.rst:20 +#: ../../source/docker/persist-superlink-state.rst:21 msgid "" "In the example below, we create a new directory called ``state``, change " "the user ID and tell Docker via the flag ``--volume`` to mount the local " @@ -2609,7 +2609,7 @@ msgid "" "database file." msgstr "" -#: ../../source/docker/persist-superlink-state.rst:35 +#: ../../source/docker/persist-superlink-state.rst:36 msgid "" "As soon as the SuperLink starts, the file ``state.db`` is created in the " "``state`` directory on your host system. If the file already exists, the " @@ -2634,17 +2634,17 @@ msgid "" "by-digest-immutable-identifier>`_ of the image instead of the tag." msgstr "" -#: ../../source/docker/pin-version.rst:13 +#: ../../source/docker/pin-version.rst:14 msgid "" "The following command returns the current image digest referenced by the " ":substitution-code:`superlink:|stable_flwr_version|` tag:" msgstr "" -#: ../../source/docker/pin-version.rst:22 +#: ../../source/docker/pin-version.rst:23 msgid "This will output" msgstr "" -#: ../../source/docker/pin-version.rst:29 +#: ../../source/docker/pin-version.rst:30 msgid "Next, we can pin the digest when running a new SuperLink container:" msgstr "" @@ -2691,7 +2691,7 @@ msgid "" "``USER root`` directive within your Dockerfile." msgstr "" -#: ../../source/docker/run-as-root-user.rst:29 +#: ../../source/docker/run-as-root-user.rst:30 #, fuzzy msgid "SuperNode Dockerfile" msgstr "Construindo a imagem do servidor" @@ -2717,11 +2717,11 @@ msgid "" "done by extending the SuperNode image:" msgstr "" -#: ../../source/docker/run-as-subprocess.rst:16 +#: ../../source/docker/run-as-subprocess.rst:17 msgid "Dockerfile.supernode" msgstr "" -#: ../../source/docker/run-as-subprocess.rst:30 +#: ../../source/docker/run-as-subprocess.rst:31 msgid "" "Next, build the SuperNode Docker image by running the following command " "in the directory where Dockerfile is located:" @@ -2737,6 +2737,222 @@ msgid "" " the SuperNode to execute the ClientApp as a subprocess:" msgstr "" +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:2 +msgid "Run Flower Quickstart Examples with Docker Compose" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:4 +msgid "" +"Flower provides a set of `quickstart examples " +"`_ to help you get " +"started with the framework. 
These examples are designed to demonstrate " +"the capabilities of Flower and by default run using the Simulation " +"Engine. This guide demonstrates how to run them using Flower's Deployment" +" Engine via Docker Compose." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:12 +msgid "" +"Some quickstart examples may have limitations or requirements that " +"prevent them from running on every environment. For more information, " +"please see Limitations_." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:18 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:15 +#: ../../source/docker/tutorial-quickstart-docker.rst:13 +msgid "Before you start, make sure that:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:20 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:22 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:17 +#: ../../source/docker/tutorial-quickstart-docker.rst:15 +msgid "The ``flwr`` CLI is :doc:`installed <../how-to-install-flower>` locally." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:21 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:18 +#: ../../source/docker/tutorial-quickstart-docker.rst:16 +#, fuzzy +msgid "The Docker daemon is running." +msgstr "Verifique que o serviço Docker está rodando." + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:22 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:19 +msgid "Docker Compose is `installed `_." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:25 +msgid "Run the Quickstart Example" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:27 +msgid "" +"Clone the quickstart example you like to run. For example, ``quickstart-" +"pytorch``:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:35 +msgid "" +"Download the `compose.yml " +"`_" +" file into the example directory:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:44 +msgid "Build and start the services using the following command:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:50 +msgid "" +"Append the following lines to the end of the ``pyproject.toml`` file and " +"save it:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:52 +#: ../../source/docker/tutorial-quickstart-docker.rst:324 +msgid "pyproject.toml" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:61 +msgid "" +"You can customize the string that follows ``tool.flwr.federations.`` to " +"fit your needs. However, please note that the string cannot contain a dot" +" (``.``)." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:64 +msgid "" +"In this example, ``local-deployment`` has been used. Just remember to " +"replace ``local-deployment`` with your chosen name in both the " +"``tool.flwr.federations.`` string and the corresponding ``flwr run .`` " +"command." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:68 +#, fuzzy +msgid "Run the example:" +msgstr "Exemplo" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:74 +msgid "Follow the logs of the SuperExec service:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:80 +msgid "" +"That is all it takes! 
You can monitor the progress of the run through the" +" logs of the SuperExec." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:84 +msgid "Run a Different Quickstart Example" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:86 +msgid "" +"To run a different quickstart example, such as ``quickstart-tensorflow``," +" first, shut down the Docker Compose services of the current example:" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:93 +msgid "After that, you can repeat the steps above." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:96 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:102 +msgid "Limitations" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:101 +msgid "Quickstart Example" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:103 +msgid "quickstart-fastai" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:104 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:106 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:115 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:117 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:121 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:123 +#: ../../source/ref-changelog.md:33 ../../source/ref-changelog.md:399 +#: ../../source/ref-changelog.md:676 ../../source/ref-changelog.md:740 +#: ../../source/ref-changelog.md:798 ../../source/ref-changelog.md:867 +#: ../../source/ref-changelog.md:929 +msgid "None" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:105 +msgid "quickstart-huggingface" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:107 +msgid "quickstart-jax" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:108 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:110 +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:125 +msgid "" +"The example has not yet been updated to work with the latest ``flwr`` " +"version." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:109 +msgid "quickstart-mlcube" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:111 +msgid "quickstart-mlx" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:112 +msgid "" +"`Requires to run on macOS with Apple Silicon `_." +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:114 +msgid "quickstart-monai" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:116 +msgid "quickstart-pandas" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:118 +msgid "quickstart-pytorch-lightning" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:119 +msgid "" +"Requires an older pip version that is not supported by the Flower Docker " +"images." 
+msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:120 +msgid "quickstart-pytorch" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:122 +msgid "quickstart-sklearn-tabular" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:124 +msgid "quickstart-tabnet" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:126 +msgid "quickstart-tensorflow" +msgstr "" + +#: ../../source/docker/run-quickstart-examples-docker-compose.rst:127 +msgid "Only runs on AMD64." +msgstr "" + #: ../../source/docker/set-environment-variables.rst:2 msgid "Set Environment Variables" msgstr "" @@ -2748,45 +2964,221 @@ msgid "" "environment variables for a container." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:2 -msgid "Quickstart with Docker" +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:2 +msgid "Deploy Flower on Multiple Machines with Docker Compose" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:4 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:4 msgid "" -"This quickstart aims to guide you through the process of containerizing a" -" Flower project and running it end to end using Docker on your local " -"machine." +"This guide will help you set up a Flower project on multiple machines " +"using Docker Compose." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:7 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:7 msgid "" -"This tutorial does not use production-ready settings, so you can focus on" -" understanding the basic workflow that uses the minimum configurations." +"You will learn how to run the Flower client and server components on two " +"separate machines, with Flower configured to use TLS encryption and " +"persist SuperLink state across restarts. A server consists of a SuperLink" +" and ``SuperExec``. For more details about the Flower architecture, refer" +" to the :doc:`../explanation-flower-architecture` explainer page." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:14 -#: ../../source/docker/tutorial-quickstart-docker.rst:13 -msgid "Before you start, make sure that:" +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:13 +msgid "" +"This guide assumes you have completed the :doc:`tutorial-quickstart-" +"docker-compose` tutorial. It is highly recommended that you follow and " +"understand the contents of that tutorial before proceeding with this " +"guide." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:16 -#: ../../source/docker/tutorial-quickstart-docker.rst:15 -msgid "The ``flwr`` CLI is :doc:`installed <../how-to-install-flower>` locally." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:20 +msgid "Before you begin, make sure you have the following prerequisites:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:17 -#: ../../source/docker/tutorial-quickstart-docker.rst:16 -#, fuzzy -msgid "The Docker daemon is running." -msgstr "Verifique que o serviço Docker está rodando." +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:23 +msgid "The Docker daemon is running on your local machine and the remote machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:24 +msgid "" +"Docker Compose V2 is installed on both your local machine and the remote " +"machine." 
+msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:25 +msgid "You can connect to the remote machine from your local machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:26 +msgid "Ports ``9091`` and ``9093`` are accessible on the remote machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:30 +msgid "" +"The guide uses the |quickstart_sklearn_tabular|_ example as an example " +"project." +msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:21 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:32 +msgid "" +"If your project has a different name or location, please remember to " +"adjust the commands/paths accordingly." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:36 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:22 #: ../../source/docker/tutorial-quickstart-docker.rst:19 msgid "Step 1: Set Up" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:31 +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:38 +msgid "Clone the Flower repository and change to the ``distributed`` directory:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:45 +msgid "Get the IP address from the remote machine and save it for later." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:46 +msgid "" +"Use the ``certs.yml`` Compose file to generate your own self-signed " +"certificates. If you have certificates, you can continue with Step 2." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:51 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:221 +msgid "These certificates should be used only for development purposes." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:53 +msgid "" +"For production environments, you may have to use dedicated services to " +"obtain your certificates." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:56 +msgid "" +"First, set the environment variables ``SUPERLINK_IP`` and " +"``SUPEREXEC_IP`` with the IP address from the remote machine. For " +"example, if the IP is ``192.168.2.33``, execute:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:65 +msgid "Next, generate the self-signed certificates:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:72 +msgid "Step 2: Copy the Server Compose Files" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:74 +msgid "" +"Use the method that works best for you to copy the ``server`` directory, " +"the certificates, and your Flower project to the remote machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:77 +msgid "For example, you can use ``scp`` to copy the directories:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:87 +msgid "Step 3: Start the Flower Server Components" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:89 +msgid "" +"Log into the remote machine using ``ssh`` and run the following command " +"to start the SuperLink and SuperExec services:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:102 +msgid "" +"The Path of the ``PROJECT_DIR`` should be relative to the location of the" +" ``server`` Docker Compose files." 
+msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:105 +msgid "Go back to your terminal on your local machine." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:108 +msgid "Step 4: Start the Flower Client Components" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:110 +msgid "" +"On your local machine, run the following command to start the client " +"components:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:120 +msgid "" +"The Path of the ``PROJECT_DIR`` should be relative to the location of the" +" ``client`` Docker Compose files." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:124 +msgid "Step 5: Run Your Flower Project" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:126 +msgid "" +"Specify the remote SuperExec IP addresses and the path to the root " +"certificate in the ``[tool.flwr.federations.remote-superexec]`` table in " +"the ``pyproject.toml`` file. Here, we have named our remote federation " +"``remote-superexec``:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:130 +msgid "examples/quickstart-sklearn-tabular/pyproject.toml" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:139 +msgid "" +"The Path of the ``root-certificates`` should be relative to the location " +"of the ``pyproject.toml`` file." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:142 +msgid "To run the project, execute:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:148 +msgid "" +"That's it! With these steps, you've set up Flower on two separate " +"machines and are ready to start using it." +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:152 +msgid "Step 6: Clean Up" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:154 +msgid "Shut down the Flower client components:" +msgstr "" + +#: ../../source/docker/tutorial-deploy-on-multiple-machines.rst:161 +msgid "Shut down the Flower server components and delete the SuperLink state:" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:2 +msgid "Quickstart with Docker" +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:4 +msgid "" +"This quickstart aims to guide you through the process of containerizing a" +" Flower project and running it end to end using Docker on your local " +"machine." +msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker.rst:7 +msgid "" +"This tutorial does not use production-ready settings, so you can focus on" +" understanding the basic workflow that uses the minimum configurations." 
+msgstr "" + +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:32 #: ../../source/docker/tutorial-quickstart-docker.rst:21 msgid "Create a new Flower project (PyTorch):" msgstr "" @@ -2807,7 +3199,7 @@ msgstr "" msgid "Step 2: Start the SuperLink" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:60 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:62 #: ../../source/docker/tutorial-quickstart-docker.rst:52 msgid "Open your terminal and run:" msgstr "" @@ -2833,8 +3225,8 @@ msgstr "" #: ../../source/docker/tutorial-quickstart-docker.rst:71 #: ../../source/docker/tutorial-quickstart-docker.rst:108 -#: ../../source/docker/tutorial-quickstart-docker.rst:215 -#: ../../source/docker/tutorial-quickstart-docker.rst:304 +#: ../../source/docker/tutorial-quickstart-docker.rst:219 +#: ../../source/docker/tutorial-quickstart-docker.rst:309 msgid "" "``--network flwr-network``: Make the container join the network named " "``flwr-network``." @@ -2846,8 +3238,8 @@ msgstr "" #: ../../source/docker/tutorial-quickstart-docker.rst:73 #: ../../source/docker/tutorial-quickstart-docker.rst:110 -#: ../../source/docker/tutorial-quickstart-docker.rst:216 -#: ../../source/docker/tutorial-quickstart-docker.rst:306 +#: ../../source/docker/tutorial-quickstart-docker.rst:220 +#: ../../source/docker/tutorial-quickstart-docker.rst:311 msgid "" "``--detach``: Run the container in the background, freeing up the " "terminal." @@ -2963,13 +3355,13 @@ msgid "" "extends the ClientApp image and installs the required dependencies." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:148 +#: ../../source/docker/tutorial-quickstart-docker.rst:149 msgid "" "Create a ClientApp Dockerfile called ``Dockerfile.clientapp`` and paste " "the following code into it:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:150 +#: ../../source/docker/tutorial-quickstart-docker.rst:152 msgid "Dockerfile.clientapp" msgstr "" @@ -3051,7 +3443,7 @@ msgstr "" msgid "the default command run when the container is started." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:184 +#: ../../source/docker/tutorial-quickstart-docker.rst:186 msgid "" "Note that `flwr `__ is already installed " "in the ``flwr/clientapp`` base image, so only other package dependencies " @@ -3060,20 +3452,20 @@ msgid "" "after it has been copied into the Docker image (see line 5)." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:189 +#: ../../source/docker/tutorial-quickstart-docker.rst:192 msgid "" "Next, build the ClientApp Docker image by running the following command " "in the directory where the Dockerfile is located:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:198 +#: ../../source/docker/tutorial-quickstart-docker.rst:201 msgid "" "The image name was set as ``flwr_clientapp`` with the tag ``0.0.1``. " "Remember that these values are merely examples, and you can customize " "them according to your requirements." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:201 +#: ../../source/docker/tutorial-quickstart-docker.rst:205 msgid "Start the first ClientApp container:" msgstr "" @@ -3093,33 +3485,33 @@ msgstr "" msgid "``supernode-1:9094``." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:222 +#: ../../source/docker/tutorial-quickstart-docker.rst:226 msgid "Start the second ClientApp container:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:233 +#: ../../source/docker/tutorial-quickstart-docker.rst:237 msgid "Step 5: Start the SuperExec" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:235 +#: ../../source/docker/tutorial-quickstart-docker.rst:239 msgid "" "The procedure for building and running a SuperExec image is almost " "identical to the ClientApp image." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:237 +#: ../../source/docker/tutorial-quickstart-docker.rst:242 msgid "" "Similar to the ClientApp image, you will need to create a Dockerfile that" " extends the SuperExec image and installs the required FAB dependencies." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:240 +#: ../../source/docker/tutorial-quickstart-docker.rst:245 msgid "" "Create a SuperExec Dockerfile called ``Dockerfile.superexec`` and paste " "the following code in:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:242 +#: ../../source/docker/tutorial-quickstart-docker.rst:248 msgid "Dockerfile.superexec" msgstr "" @@ -3149,13 +3541,13 @@ msgstr "" msgid "``flwr.superexec.deployment:executor`` executor to run the ServerApps." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:277 +#: ../../source/docker/tutorial-quickstart-docker.rst:283 msgid "" "Afterward, in the directory that holds the Dockerfile, execute this " "Docker command to build the SuperExec image:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:285 +#: ../../source/docker/tutorial-quickstart-docker.rst:290 msgid "Start the SuperExec container:" msgstr "" @@ -3169,7 +3561,7 @@ msgid "" "``http://localhost:9093``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:305 +#: ../../source/docker/tutorial-quickstart-docker.rst:310 msgid "``--name superexec``: Assign the name ``superexec`` to the container." msgstr "" @@ -3189,78 +3581,75 @@ msgstr "" msgid "connect to the SuperLink running on port ``9091``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:315 +#: ../../source/docker/tutorial-quickstart-docker.rst:320 msgid "Step 6: Run the Quickstart Project" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:317 +#: ../../source/docker/tutorial-quickstart-docker.rst:322 msgid "Add the following lines to the ``pyproject.toml``:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:319 -msgid "pyproject.toml" -msgstr "" - -#: ../../source/docker/tutorial-quickstart-docker.rst:326 +#: ../../source/docker/tutorial-quickstart-docker.rst:331 msgid "Run the ``quickstart-docker`` project by executing the command:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:332 +#: ../../source/docker/tutorial-quickstart-docker.rst:337 msgid "Follow the SuperExec logs to track the execution of the run:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:339 +#: ../../source/docker/tutorial-quickstart-docker.rst:344 msgid "Step 7: Update the Application" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:341 +#: ../../source/docker/tutorial-quickstart-docker.rst:346 msgid "" -"Change the application code. For example, change the ``seed`` in " +"Change the application code. 
For example, change the ``seed`` in " "``quickstart_docker/task.py`` to ``43`` and save it:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:344 +#: ../../source/docker/tutorial-quickstart-docker.rst:349 msgid "quickstart_docker/task.py" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:351 +#: ../../source/docker/tutorial-quickstart-docker.rst:356 msgid "Stop the current ClientApp containers:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:357 +#: ../../source/docker/tutorial-quickstart-docker.rst:362 #, fuzzy msgid "Rebuild the FAB and ClientApp image:" msgstr "Construindo a imagem base" -#: ../../source/docker/tutorial-quickstart-docker.rst:363 +#: ../../source/docker/tutorial-quickstart-docker.rst:368 msgid "Launch two new ClientApp containers based on the newly built image:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:378 +#: ../../source/docker/tutorial-quickstart-docker.rst:383 msgid "Run the updated project:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:385 +#: ../../source/docker/tutorial-quickstart-docker.rst:390 msgid "Step 8: Clean Up" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:387 +#: ../../source/docker/tutorial-quickstart-docker.rst:392 msgid "Remove the containers and the bridge network:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:399 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:408 +#: ../../source/docker/tutorial-quickstart-docker.rst:404 msgid "Where to Go Next" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:401 +#: ../../source/docker/tutorial-quickstart-docker.rst:406 msgid ":doc:`enable-tls`" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:402 +#: ../../source/docker/tutorial-quickstart-docker.rst:407 msgid ":doc:`persist-superlink-state`" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker.rst:403 +#: ../../source/docker/tutorial-quickstart-docker.rst:408 msgid ":doc:`tutorial-quickstart-docker-compose`" msgstr "" @@ -3282,180 +3671,176 @@ msgid "" "configuration that best suits your project's needs." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:18 -msgid "Docker Compose is `installed `_." +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:24 +msgid "Clone the Docker Compose ``complete`` directory:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:23 -msgid "Clone the Docker Compose ``complete`` directory:" -msgstr "" - -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:37 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:38 msgid "" "Export the path of the newly created project. The path should be relative" " to the location of the Docker Compose files:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:44 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:45 msgid "" "Setting the ``PROJECT_DIR`` helps Docker Compose locate the " "``pyproject.toml`` file, allowing it to install dependencies in the " "SuperExec and SuperNode images correctly." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:48 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:49 msgid "Step 2: Run Flower in Insecure Mode" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:50 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:51 msgid "" "To begin, start Flower with the most basic configuration. 
In this setup, " "Flower will run without TLS and without persisting the state." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:55 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:56 msgid "" "Without TLS, the data sent between the services remains **unencrypted**. " "Use it only for development purposes." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:58 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:59 msgid "" "For production-oriented use cases, :ref:`enable TLS` for secure data" " transmission." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:68 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:179 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:70 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:184 msgid "``docker compose``: The Docker command to run the Docker Compose tool." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:69 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:180 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:71 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:185 msgid "" "``-f compose.yml``: Specify the YAML file that contains the basic Flower " "service definitions." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:70 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:185 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:72 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:190 msgid "" "``--build``: Rebuild the images for each service if they don't already " "exist." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:71 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:186 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:73 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:191 msgid "" "``-d``: Detach the containers from the terminal and run them in the " "background." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:74 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:76 msgid "Step 3: Run the Quickstart Project" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:76 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:78 msgid "" "Now that the Flower services have been started via Docker Compose, it is " "time to run the quickstart example." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:79 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:81 msgid "" "To ensure the ``flwr`` CLI connects to the SuperExec, you need to specify" " the SuperExec addresses in the ``pyproject.toml`` file." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:82 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:226 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:84 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:232 msgid "Add the following lines to the ``quickstart-compose/pyproject.toml``:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:84 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:228 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:86 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:234 msgid "quickstart-compose/pyproject.toml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:91 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:93 msgid "Execute the command to run the quickstart example:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:97 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:99 msgid "Monitor the SuperExec logs and wait for the summary to appear:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:104 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:106 msgid "Step 4: Update the Application" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:106 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:108 msgid "In the next step, change the application code." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:108 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:110 msgid "" "For example, go to the ``task.py`` file in the ``quickstart-" "compose/quickstart_compose/`` directory and add a ``print`` call in the " "``get_weights`` function:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:111 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:114 msgid "quickstart-compose/quickstart_compose/task.py" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:120 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:125 msgid "Rebuild and restart the services." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:124 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:129 msgid "" "If you have modified the dependencies listed in your ``pyproject.toml`` " "file, it is essential to rebuild images." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:127 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:132 msgid "If you haven't made any changes, you can skip this step." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:129 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:134 msgid "Run the following command to rebuild and restart the services:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:135 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:140 msgid "Run the updated quickstart example:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:142 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:147 msgid "In the SuperExec logs, you should find the ``Get weights`` line:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:159 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:164 msgid "Step 5: Persisting the SuperLink State" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:161 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:166 msgid "" "In this step, Flower services are configured to persist the state of the " "SuperLink service, ensuring that it maintains its state even after a " "restart." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:166 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:171 msgid "" "When working with Docker Compose on Linux, you may need to create the " "``state`` directory first and change its ownership to ensure proper " "access and permissions." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:169 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:174 msgid "" "For more information, consult the following page: :doc:`persist-" "superlink-state`." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:171 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:220 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:176 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:226 msgid "Run the command:" msgstr "" @@ -3476,17 +3861,17 @@ msgid "" "rules>`_." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:188 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:241 -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:362 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:193 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:247 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:375 msgid "Rerun the ``quickstart-compose`` project:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:194 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:199 msgid "Check the content of the ``state`` directory:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:201 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:206 msgid "" "You should see a ``state.db`` file in the ``state`` directory. If you " "restart the service, the state file will be used to restore the state " @@ -3494,122 +3879,122 @@ msgid "" "if the containers are stopped and started again." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:208 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:214 msgid "Step 6: Run Flower with TLS" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:210 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:216 msgid "" "To demonstrate how to enable TLS, generate self-signed certificates using" " the ``certs.yml`` Compose file." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:215 -msgid "These certificates should be used only for development purposes." -msgstr "" - -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:217 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:223 msgid "" "For production environments, use a service like `Let's Encrypt " "`_ to obtain your certificates." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:235 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:241 msgid "Restart the services with TLS enabled:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:249 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:255 msgid "Step 7: Add another SuperNode" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:251 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:257 msgid "" "You can add more SuperNodes and ClientApps by duplicating their " "definitions in the ``compose.yml`` file." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:254 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:260 msgid "" "Just give each new SuperNode and ClientApp service a unique service name " "like ``supernode-3``, ``clientapp-3``, etc." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:257 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:263 msgid "In ``compose.yml``, add the following:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:259 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:265 msgid "compose.yml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:303 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:316 msgid "" "If you also want to enable TLS for the new SuperNodes, duplicate the " "SuperNode definition for each new SuperNode service in the ``with-" "tls.yml`` file." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:306 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:319 msgid "" "Make sure that the names of the services match with the one in the " "``compose.yml`` file." 
msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:308 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:321 msgid "In ``with-tls.yml``, add the following:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:310 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:323 msgid "with-tls.yml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:332 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:345 msgid "Step 8: Persisting the SuperLink State and Enabling TLS" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:334 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:347 msgid "" "To run Flower with persisted SuperLink state and enabled TLS, a slight " "change in the ``with-state.yml`` file is required:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:337 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:350 msgid "Comment out the lines 2-4 and uncomment the lines 5-9:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:339 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:352 msgid "with-state.yml" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:356 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:369 msgid "Restart the services:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:370 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:383 msgid "Step 9: Merge Multiple Compose Files" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:372 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:385 msgid "" "You can merge multiple Compose files into a single file. For instance, if" " you wish to combine the basic configuration with the TLS configuration, " "execute the following command:" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:380 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:394 msgid "" "This will merge the contents of ``compose.yml`` and ``with-tls.yml`` into" " a new file called ``my_compose.yml``." msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:384 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:398 msgid "Step 10: Clean Up" msgstr "" -#: ../../source/docker/tutorial-quickstart-docker-compose.rst:386 +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:400 msgid "Remove all services and volumes:" msgstr "" +#: ../../source/docker/tutorial-quickstart-docker-compose.rst:410 +msgid ":doc:`run-quickstart-examples-docker-compose`" +msgstr "" + #: ../../source/docker/use-a-different-version.rst:2 msgid "Use a Different Flower Version" msgstr "" @@ -3621,7 +4006,7 @@ msgid "" " `Docker Hub `__." msgstr "" -#: ../../source/docker/use-a-different-version.rst:9 +#: ../../source/docker/use-a-different-version.rst:10 msgid "" "When using Flower nightly, the SuperLink nightly image must be paired " "with the corresponding SuperNode and ServerApp nightly images released on" @@ -3645,31 +4030,31 @@ msgid "" "centralized-to-federated>`." 
msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:9 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:10 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:12 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:12 msgid "Centralized Training" msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:10 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:14 msgid "" "All files are revised based on :doc:`Example: PyTorch - From Centralized " "To Federated `. The only " -"thing to do is modifying the file called :code:`cifar.py`, revised part " -"is shown below:" +"thing to do is modifying the file called ``cifar.py``, revised part is " +"shown below:" msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:13 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:18 msgid "" "The model architecture defined in class Net() is added with Batch " "Normalization layers accordingly." msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:41 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:157 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:47 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:171 msgid "You can now run your machine learning workload:" msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:47 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:53 msgid "" "So far this should all look fairly familiar if you've used PyTorch " "before. Let's take the next step and use what we've built to create a " @@ -3677,53 +4062,53 @@ msgid "" " and two clients." msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:51 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:167 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:58 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:182 msgid "Federated Training" msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:53 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:60 msgid "" "If you have read :doc:`Example: PyTorch - From Centralized To Federated " "`, the following parts are" -" easy to follow, only :code:`get_parameters` and :code:`set_parameters` " -"function in :code:`client.py` needed to revise. If not, please read the " -":doc:`Example: PyTorch - From Centralized To Federated `. first." +" easy to follow, only ``get_parameters`` and ``set_parameters`` function " +"in ``client.py`` needed to revise. If not, please read the :doc:`Example:" +" PyTorch - From Centralized To Federated `. first." msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:56 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:66 msgid "" "Our example consists of one *server* and two *clients*. In FedBN, " -":code:`server.py` keeps unchanged, we can start the server directly." +"``server.py`` keeps unchanged, we can start the server directly." 
msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:62 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:73 msgid "" -"Finally, we will revise our *client* logic by changing " -":code:`get_parameters` and :code:`set_parameters` in :code:`client.py`, " -"we will exclude batch normalization parameters from model parameter list " -"when sending to or receiving from the server." +"Finally, we will revise our *client* logic by changing ``get_parameters``" +" and ``set_parameters`` in ``client.py``, we will exclude batch " +"normalization parameters from model parameter list when sending to or " +"receiving from the server." msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:85 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:102 msgid "Now, you can now open two additional terminal windows and run" msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:91 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:108 msgid "" "in each window (make sure that the server is still running before you do " "so) and see your (previously centralized) PyTorch project run federated " "learning with FedBN strategy across two clients. Congratulations!" msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:94 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:310 -#: ../../source/tutorial-quickstart-jax.rst:283 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:113 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:349 +#: ../../source/tutorial-quickstart-jax.rst:319 msgid "Next Steps" msgstr "" -#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:96 +#: ../../source/example-fedbn-pytorch-from-centralized-to-federated.rst:115 msgid "" "The full source code for this example can be found `here " "`_." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:15 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:18 msgid "" -"Let's create a new file called :code:`cifar.py` with all the components " +"Let's create a new file called ``cifar.py`` with all the components " "required for a traditional (centralized) training on CIFAR-10. First, all" -" required packages (such as :code:`torch` and :code:`torchvision`) need " -"to be imported. You can see that we do not import any package for " -"federated learning. You can keep all these imports as they are even when " -"we add the federated learning components at a later point." +" required packages (such as ``torch`` and ``torchvision``) need to be " +"imported. You can see that we do not import any package for federated " +"learning. You can keep all these imports as they are even when we add the" +" federated learning components at a later point." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:32 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:36 msgid "" "As already mentioned we will use the CIFAR-10 dataset for this machine " "learning workload. The model architecture (a very simple Convolutional " -"Neural Network) is defined in :code:`class Net()`." +"Neural Network) is defined in ``class Net()``." 
msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:56 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:62 msgid "" -"The :code:`load_data()` function loads the CIFAR-10 training and test " -"sets. The :code:`transform` normalized the data after loading." +"The ``load_data()`` function loads the CIFAR-10 training and test sets. " +"The ``transform`` normalized the data after loading." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:74 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:84 msgid "" -"We now need to define the training (function :code:`train()`) which loops" -" over the training set, measures the loss, backpropagates it, and then " +"We now need to define the training (function ``train()``) which loops " +"over the training set, measures the loss, backpropagates it, and then " "takes one optimizer step for each batch of training examples." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:76 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:88 msgid "" -"The evaluation of the model is defined in the function :code:`test()`. " -"The function loops over all test samples and measures the loss of the " -"model based on the test dataset." +"The evaluation of the model is defined in the function ``test()``. The " +"function loops over all test samples and measures the loss of the model " +"based on the test dataset." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:136 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:149 msgid "" "Having defined the data loading, model architecture, training, and " "evaluation we can put everything together and train our CNN on CIFAR-10." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:163 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:177 msgid "" "So far, this should all look fairly familiar if you've used PyTorch " "before. Let's take the next step and use what we've built to create a " @@ -3809,7 +4194,7 @@ msgid "" "clients." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:169 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:184 msgid "" "The simple machine learning project discussed in the previous section " "trains the model on a single dataset (CIFAR-10), we call this centralized" @@ -3820,162 +4205,161 @@ msgid "" "everything up from scratch. This can be a considerable effort." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:173 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:191 msgid "" "However, with Flower you can evolve your pre-existing code into a " "federated learning setup without the need for a major rewrite." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:175 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:194 msgid "" "The concept is easy to understand. We have to start a *server* and then " -"use the code in :code:`cifar.py` for the *clients* that are connected to " -"the *server*. The *server* sends model parameters to the clients. The " +"use the code in ``cifar.py`` for the *clients* that are connected to the " +"*server*. The *server* sends model parameters to the clients. The " "*clients* run the training and update the parameters. The updated " "parameters are sent back to the *server* which averages all received " "parameter updates. 
This describes one round of the federated learning " "process and we repeat this for multiple rounds." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:181 -#: ../../source/tutorial-quickstart-jax.rst:129 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:201 +#: ../../source/tutorial-quickstart-jax.rst:147 msgid "" "Our example consists of one *server* and two *clients*. Let's set up " -":code:`server.py` first. The *server* needs to import the Flower package " -":code:`flwr`. Next, we use the :code:`start_server` function to start a " -"server and tell it to perform three rounds of federated learning." +"``server.py`` first. The *server* needs to import the Flower package " +"``flwr``. Next, we use the ``start_server`` function to start a server " +"and tell it to perform three rounds of federated learning." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:191 -#: ../../source/tutorial-quickstart-jax.rst:139 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:215 +#: ../../source/tutorial-quickstart-jax.rst:161 msgid "We can already start the *server*:" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:197 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:221 msgid "" -"Finally, we will define our *client* logic in :code:`client.py` and build" -" upon the previously defined centralized training in :code:`cifar.py`. " -"Our *client* needs to import :code:`flwr`, but also :code:`torch` to " -"update the parameters on our PyTorch model:" +"Finally, we will define our *client* logic in ``client.py`` and build " +"upon the previously defined centralized training in ``cifar.py``. Our " +"*client* needs to import ``flwr``, but also ``torch`` to update the " +"parameters on our PyTorch model:" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:213 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:238 msgid "" "Implementing a Flower *client* basically means implementing a subclass of" -" either :code:`flwr.client.Client` or :code:`flwr.client.NumPyClient`. " -"Our implementation will be based on :code:`flwr.client.NumPyClient` and " -"we'll call it :code:`CifarClient`. :code:`NumPyClient` is slightly easier" -" to implement than :code:`Client` if you use a framework with good NumPy " -"interoperability (like PyTorch or TensorFlow/Keras) because it avoids " -"some of the boilerplate that would otherwise be necessary. " -":code:`CifarClient` needs to implement four methods, two methods for " -"getting/setting model parameters, one method for training the model, and " -"one method for testing the model:" +" either ``flwr.client.Client`` or ``flwr.client.NumPyClient``. Our " +"implementation will be based on ``flwr.client.NumPyClient`` and we'll " +"call it ``CifarClient``. ``NumPyClient`` is slightly easier to implement " +"than ``Client`` if you use a framework with good NumPy interoperability " +"(like PyTorch or TensorFlow/Keras) because it avoids some of the " +"boilerplate that would otherwise be necessary. 
``CifarClient`` needs to " +"implement four methods, two methods for getting/setting model parameters," +" one method for training the model, and one method for testing the model:" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:218 -msgid ":code:`set_parameters`" -msgstr "" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:249 +#, fuzzy +msgid "``set_parameters``" +msgstr "``SETUPTOOLS_VERSION``" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:219 -#: ../../source/tutorial-quickstart-jax.rst:166 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:248 +#: ../../source/tutorial-quickstart-jax.rst:192 msgid "" "set the model parameters on the local model that are received from the " "server" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:220 -#: ../../source/tutorial-quickstart-jax.rst:168 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:249 +#: ../../source/tutorial-quickstart-jax.rst:194 msgid "" -"loop over the list of model parameters received as NumPy " -":code:`ndarray`'s (think list of neural network layers)" +"loop over the list of model parameters received as NumPy ``ndarray``'s " +"(think list of neural network layers)" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:221 -#: ../../source/tutorial-quickstart-jax.rst:169 -#: ../../source/tutorial-quickstart-scikitlearn.rst:118 -msgid ":code:`get_parameters`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:252 +#: ../../source/tutorial-quickstart-jax.rst:197 +#: ../../source/tutorial-quickstart-scikitlearn.rst:129 +msgid "``get_parameters``" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:222 -#: ../../source/tutorial-quickstart-jax.rst:170 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:252 +#: ../../source/tutorial-quickstart-jax.rst:197 msgid "" -"get the model parameters and return them as a list of NumPy " -":code:`ndarray`'s (which is what :code:`flwr.client.NumPyClient` expects)" +"get the model parameters and return them as a list of NumPy ``ndarray``'s" +" (which is what ``flwr.client.NumPyClient`` expects)" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:223 -#: ../../source/tutorial-quickstart-jax.rst:171 -#: ../../source/tutorial-quickstart-scikitlearn.rst:123 -msgid ":code:`fit`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:257 +#: ../../source/tutorial-quickstart-jax.rst:202 +#: ../../source/tutorial-quickstart-scikitlearn.rst:136 +msgid "``fit``" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:224 -#: ../../source/example-pytorch-from-centralized-to-federated.rst:228 -#: ../../source/tutorial-quickstart-jax.rst:172 -#: ../../source/tutorial-quickstart-jax.rst:176 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:255 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:260 +#: ../../source/tutorial-quickstart-jax.rst:200 +#: ../../source/tutorial-quickstart-jax.rst:205 msgid "" "update the parameters of the local model with the parameters received " "from the server" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:225 -#: ../../source/tutorial-quickstart-jax.rst:173 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:257 +#: ../../source/tutorial-quickstart-jax.rst:202 msgid "train the model on the local training set" msgstr "" -#: 
../../source/example-pytorch-from-centralized-to-federated.rst:226 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:258 msgid "get the updated local model weights and return them to the server" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:227 -#: ../../source/tutorial-quickstart-jax.rst:175 -#: ../../source/tutorial-quickstart-scikitlearn.rst:127 -msgid ":code:`evaluate`" +#: ../../source/example-pytorch-from-centralized-to-federated.rst:263 +#: ../../source/tutorial-quickstart-jax.rst:208 +#: ../../source/tutorial-quickstart-scikitlearn.rst:139 +msgid "``evaluate``" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:229 -#: ../../source/tutorial-quickstart-jax.rst:177 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:262 +#: ../../source/tutorial-quickstart-jax.rst:207 msgid "evaluate the updated model on the local test set" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:230 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:263 msgid "return the local loss and accuracy to the server" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:232 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:265 msgid "" -"The two :code:`NumPyClient` methods :code:`fit` and :code:`evaluate` make" -" use of the functions :code:`train()` and :code:`test()` previously " -"defined in :code:`cifar.py`. So what we really do here is we tell Flower " -"through our :code:`NumPyClient` subclass which of our already defined " -"functions to call for training and evaluation. We included type " -"annotations to give you a better understanding of the data types that get" -" passed around." +"The two ``NumPyClient`` methods ``fit`` and ``evaluate`` make use of the " +"functions ``train()`` and ``test()`` previously defined in ``cifar.py``. " +"So what we really do here is we tell Flower through our ``NumPyClient`` " +"subclass which of our already defined functions to call for training and " +"evaluation. We included type annotations to give you a better " +"understanding of the data types that get passed around." msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:280 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:315 msgid "" "All that's left to do it to define a function that loads both model and " -"data, creates a :code:`CifarClient`, and starts this client. You load " -"your data and model by using :code:`cifar.py`. Start :code:`CifarClient` " -"with the function :code:`fl.client.start_client()` by pointing it at the " -"same IP address we used in :code:`server.py`:" +"data, creates a ``CifarClient``, and starts this client. You load your " +"data and model by using ``cifar.py``. Start ``CifarClient`` with the " +"function ``fl.client.start_client()`` by pointing it at the same IP " +"address we used in ``server.py``:" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:301 -#: ../../source/tutorial-quickstart-jax.rst:274 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:338 +#: ../../source/tutorial-quickstart-jax.rst:309 msgid "And that's it. 
You can now open two additional terminal windows and run" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:307 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:344 msgid "" "in each window (make sure that the server is running before you do so) " "and see your (previously centralized) PyTorch project run federated " "learning across two clients. Congratulations!" msgstr "" -#: ../../source/example-pytorch-from-centralized-to-federated.rst:312 +#: ../../source/example-pytorch-from-centralized-to-federated.rst:351 msgid "" "The full source code for this example: `PyTorch: From Centralized To " "Federated (Code)
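The ``CifarClient`` referenced in the entries above follows the standard ``flwr.client.NumPyClient`` pattern. The sketch below is illustrative rather than the tutorial's verbatim code: it assumes the ``Net``, ``load_data``, ``train``, and ``test`` helpers from ``cifar.py`` with plausible signatures, a server listening on ``127.0.0.1:8080``, and a recent Flower release that provides ``NumPyClient.to_client()`` and ``fl.client.start_client()``. Comments mark where the FedBN variant would exclude batch normalization parameters::

    from collections import OrderedDict

    import flwr as fl
    import torch

    # Assumed helpers from the cifar.py module described above; the exact
    # signatures (epochs, device handling, return values) may differ from
    # the original example.
    from cifar import Net, load_data, train, test

    DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")


    class CifarClient(fl.client.NumPyClient):
        def __init__(self, model, trainloader, testloader, num_examples):
            self.model = model
            self.trainloader = trainloader
            self.testloader = testloader
            self.num_examples = num_examples

        def get_parameters(self, config):
            # Return the local model weights as a list of NumPy ndarrays.
            # (The FedBN variant would skip keys belonging to BatchNorm layers.)
            return [val.cpu().numpy() for _, val in self.model.state_dict().items()]

        def set_parameters(self, parameters):
            # Load the weights received from the server into the local model.
            # (The FedBN variant would load with strict=False to keep local BN stats.)
            params_dict = zip(self.model.state_dict().keys(), parameters)
            state_dict = OrderedDict({k: torch.tensor(v) for k, v in params_dict})
            self.model.load_state_dict(state_dict, strict=True)

        def fit(self, parameters, config):
            # Update the local model, train on the local set, return new weights.
            self.set_parameters(parameters)
            train(self.model, self.trainloader, epochs=1, device=DEVICE)
            return self.get_parameters(config={}), self.num_examples["trainset"], {}

        def evaluate(self, parameters, config):
            # Update the local model and report loss/accuracy on the local test set.
            self.set_parameters(parameters)
            loss, accuracy = test(self.model, self.testloader, device=DEVICE)
            return float(loss), self.num_examples["testset"], {"accuracy": float(accuracy)}


    if __name__ == "__main__":
        model = Net().to(DEVICE)
        trainloader, testloader, num_examples = load_data()
        fl.client.start_client(
            server_address="127.0.0.1:8080",  # assumed address; match server.py
            client=CifarClient(model, trainloader, testloader, num_examples).to_client(),
        )

On the server side, the entries above describe starting three rounds of federated learning with ``fl.server.start_server``; once that server is running, launching this script in two terminals reproduces the two-client setup.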