
Commit

Remove useless pass and abc (Lightning-AI#11522)
chunyang-wen authored Jan 24, 2022
1 parent 350c88e commit fe34bf2
Showing 1 changed file with 2 additions and 50 deletions.
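
Why both removals are safe, in brief: a docstring is itself a complete statement, so a method whose body is only a docstring needs no trailing `pass`; and `abc.ABC` only changes behavior when a class declares `@abc.abstractmethod` members, which `Callback` never did, so dropping the base class changes nothing for subclasses. A minimal sketch of both points (illustrative class names, not part of the commit):

    import abc


    class WithPass:
        def hook(self) -> None:
            """A no-op hook."""
            pass  # redundant: the docstring above is already a valid body


    class WithoutPass:
        def hook(self) -> None:
            """A no-op hook."""  # a docstring alone completes the function


    class NoAbstracts(abc.ABC):
        """Inherits ABC but declares no @abc.abstractmethod members."""


    # ABC only blocks instantiation when abstract methods exist, so this
    # constructs without error; the abc.ABC base added nothing here.
    WithoutPass().hook()
    NoAbstracts()
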
52 changes: 2 additions & 50 deletions pytorch_lightning/callbacks/base.py
@@ -12,11 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 r"""
-Abstract base class used to build new callbacks.
+Base class used to build new callbacks.
 """

-import abc
 from typing import Any, Dict, List, Optional, Type

 import torch
@@ -26,7 +25,7 @@
 from pytorch_lightning.utilities.types import STEP_OUTPUT


-class Callback(abc.ABC):
+class Callback:
     r"""
     Abstract base class used to build new callbacks.
@@ -62,15 +61,12 @@ def on_configure_sharded_model(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:

     def on_before_accelerator_backend_setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called before accelerator is being setup."""
-        pass

     def setup(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: Optional[str] = None) -> None:
         """Called when fit, validate, test, predict, or tune begins."""
-        pass

     def teardown(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", stage: Optional[str] = None) -> None:
         """Called when fit, validate, test, predict, or tune ends."""
-        pass

     def on_init_start(self, trainer: "pl.Trainer") -> None:
         r"""
@@ -79,7 +75,6 @@ def on_init_start(self, trainer: "pl.Trainer") -> None:

         Called when the trainer initialization begins, model has not yet been set.
         """
-        pass

     def on_init_end(self, trainer: "pl.Trainer") -> None:
         r"""
@@ -88,23 +83,18 @@ def on_init_end(self, trainer: "pl.Trainer") -> None:

         Called when the trainer initialization ends, model has not yet been set.
         """
-        pass

     def on_fit_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when fit begins."""
-        pass

     def on_fit_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when fit ends."""
-        pass

     def on_sanity_check_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the validation sanity check starts."""
-        pass

     def on_sanity_check_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the validation sanity check ends."""
-        pass

     def on_train_batch_start(
         self,
@@ -115,7 +105,6 @@ def on_train_batch_start(
         unused: int = 0,
     ) -> None:
         """Called when the train batch begins."""
-        pass

     def on_train_batch_end(
         self,
@@ -127,11 +116,9 @@ def on_train_batch_end(
         unused: int = 0,
     ) -> None:
         """Called when the train batch ends."""
-        pass

     def on_train_epoch_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the train epoch begins."""
-        pass

     def on_train_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the train epoch ends.
@@ -141,53 +128,41 @@ def on_train_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         1. Implement `training_epoch_end` in the `LightningModule` and access outputs via the module OR
         2. Cache data across train batch hooks inside the callback implementation to post-process in this hook.
         """
-        pass

     def on_validation_epoch_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the val epoch begins."""
-        pass

     def on_validation_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the val epoch ends."""
-        pass

     def on_test_epoch_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the test epoch begins."""
-        pass

     def on_test_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the test epoch ends."""
-        pass

     def on_predict_epoch_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the predict epoch begins."""
-        pass

     def on_predict_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", outputs: List[Any]) -> None:
         """Called when the predict epoch ends."""
-        pass

     def on_epoch_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when either of train/val/test epoch begins."""
-        pass

     def on_epoch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when either of train/val/test epoch ends."""
-        pass

     def on_batch_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the training batch begins."""
-        pass

     def on_batch_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the training batch ends."""
-        pass

     def on_validation_batch_start(
         self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", batch: Any, batch_idx: int, dataloader_idx: int
     ) -> None:
         """Called when the validation batch begins."""
-        pass

     def on_validation_batch_end(
         self,
@@ -199,13 +174,11 @@ def on_validation_batch_end(
         dataloader_idx: int,
     ) -> None:
         """Called when the validation batch ends."""
-        pass

     def on_test_batch_start(
         self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", batch: Any, batch_idx: int, dataloader_idx: int
     ) -> None:
         """Called when the test batch begins."""
-        pass

     def on_test_batch_end(
         self,
@@ -217,13 +190,11 @@ def on_test_batch_end(
         dataloader_idx: int,
     ) -> None:
         """Called when the test batch ends."""
-        pass

     def on_predict_batch_start(
         self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", batch: Any, batch_idx: int, dataloader_idx: int
     ) -> None:
         """Called when the predict batch begins."""
-        pass

     def on_predict_batch_end(
         self,
@@ -235,47 +206,36 @@ def on_predict_batch_end(
         dataloader_idx: int,
     ) -> None:
         """Called when the predict batch ends."""
-        pass

     def on_train_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the train begins."""
-        pass

     def on_train_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the train ends."""
-        pass

     def on_pretrain_routine_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the pretrain routine begins."""
-        pass

     def on_pretrain_routine_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the pretrain routine ends."""
-        pass

     def on_validation_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the validation loop begins."""
-        pass

     def on_validation_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the validation loop ends."""
-        pass

     def on_test_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the test begins."""
-        pass

     def on_test_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the test ends."""
-        pass

     def on_predict_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when the predict begins."""
-        pass

     def on_predict_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called when predict ends."""
-        pass

     def on_keyboard_interrupt(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         r"""
@@ -284,11 +244,9 @@ def on_keyboard_interrupt(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:

         Called when any trainer execution is interrupted by KeyboardInterrupt.
         """
-        pass

     def on_exception(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", exception: BaseException) -> None:
         """Called when any trainer execution is interrupted by an exception."""
-        pass

     def on_save_checkpoint(
         self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", checkpoint: Dict[str, Any]
@@ -303,7 +261,6 @@ def on_save_checkpoint(
         Returns:
             The callback state.
         """
-        pass

     def on_load_checkpoint(
         self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", callback_state: Dict[str, Any]
@@ -320,22 +277,17 @@ def on_load_checkpoint(
         If your ``on_load_checkpoint`` hook behavior doesn't rely on a state,
         you will still need to override ``on_save_checkpoint`` to return a ``dummy state``.
         """
-        pass

     def on_before_backward(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", loss: torch.Tensor) -> None:
         """Called before ``loss.backward()``."""
-        pass

     def on_after_backward(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
         """Called after ``loss.backward()`` and before optimizers are stepped."""
-        pass

     def on_before_optimizer_step(
         self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", optimizer: Optimizer, opt_idx: int
     ) -> None:
         """Called before ``optimizer.step()``."""
-        pass

     def on_before_zero_grad(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule", optimizer: Optimizer) -> None:
         """Called before ``optimizer.zero_grad()``."""
-        pass
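
For users, the slimmed-down `Callback` behaves exactly as before: every hook is a no-op method that subclasses override selectively. A minimal usage sketch (the `MyPrintingCallback` name and the commented-out Trainer wiring are illustrative, not part of this commit):

    import pytorch_lightning as pl
    from pytorch_lightning.callbacks import Callback


    class MyPrintingCallback(Callback):
        """Prints a message when training starts and ends."""

        def on_train_start(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
            print("Training is starting")

        def on_train_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") -> None:
            print("Training is ending")


    # Register the callback with a Trainer as usual:
    # trainer = pl.Trainer(callbacks=[MyPrintingCallback()])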
