fix linting (#1270)
* fix linting

* fix
milocress authored Jun 9, 2024
1 parent dd92abf commit 5571101
Showing 2 changed files with 3 additions and 4 deletions.
llmfoundry/data/dataloader.py (4 changes: 2 additions & 2 deletions)
@@ -3,7 +3,7 @@
 
 """Dataloader builder utilities."""
 
-from typing import Any, Dict
+from typing import Any, Dict, Union
 
 from composer import DataSpec
 from transformers import PreTrainedTokenizerBase
@@ -19,7 +19,7 @@
 def build_dataloader(
     cfg: Dict[str, Any],
     tokenizer: PreTrainedTokenizerBase,
-    device_batch_size: int,
+    device_batch_size: Union[int, float],
 ) -> DataSpec:
     """Builds a dataloader from a config.
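The widened annotation lets build_dataloader accept fractional per-device batch sizes. A minimal sketch of how such a value can arise when a global batch size does not divide evenly across devices; the helper name and the device counts below are illustrative and not part of this commit:

from typing import Union


def per_device_batch_size(global_batch_size: int, num_devices: int) -> Union[int, float]:
    """Split a global batch size across devices, returning a float when the split is not exact."""
    per_device = global_batch_size / num_devices
    return int(per_device) if per_device.is_integer() else per_device


# 96 samples over 8 devices -> 12 (int); 100 over 8 -> 12.5 (float)
print(per_device_batch_size(96, 8), per_device_batch_size(100, 8))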
llmfoundry/utils/config_utils.py (3 changes: 1 addition & 2 deletions)
@@ -100,7 +100,7 @@ class TrainConfig:
     optimizer: Dict[str, Any] = MISSING
     scheduler: Dict[str, Any] = MISSING
     train_loader: Dict[str, Any] = MISSING
-    device_train_batch_size: int = MISSING
+    device_train_batch_size: Union[int, float] = MISSING
     device_eval_batch_size: int = MISSING
     max_duration: Union[int, str] = MISSING
     eval_interval: Union[int, str] = MISSING
@@ -183,7 +183,6 @@ class TrainConfig:
 
     # Fields created by `update_batch_size_info`
     n_gpus: int = MISSING
-    device_train_batch_size: int = MISSING
     device_train_grad_accum: str = MISSING
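Dropping the second declaration likely matters beyond the lint warning: in a Python class body, a later annotation of the same name overwrites the earlier one, so the leftover int annotation would have masked the Union[int, float] change made above. An illustrative sketch of that behavior (not code from the repository):

from dataclasses import dataclass
from typing import Union


@dataclass
class Example:
    # First declaration, analogous to the widened device_train_batch_size field.
    batch_size: Union[int, float] = 1
    # Re-annotating the same name overwrites the earlier annotation.
    batch_size: int = 1


print(Example.__annotations__["batch_size"])  # <class 'int'>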

