Remove unused imports
borzunov committed Jul 24, 2024
1 parent: 5321af8 · commit: dc1a0f4
Showing 3 changed files with 2 additions and 4 deletions.
src/petals/models/bloom/block.py (1 addition, 1 deletion)

@@ -7,7 +7,7 @@
 
 import torch
 from transformers.modeling_attn_mask_utils import _prepare_4d_causal_attention_mask
-from transformers.models.bloom.modeling_bloom import BloomBlock, BloomModel, build_alibi_tensor
+from transformers.models.bloom.modeling_bloom import BloomBlock, build_alibi_tensor
 
 from petals.utils.misc import is_dummy
src/petals/models/llama/block.py (0 additions, 1 deletion)

@@ -15,7 +15,6 @@
     LlamaConfig,
     LlamaDecoderLayer,
     LlamaMLP,
-    LlamaModel,
     LlamaRMSNorm,
     repeat_kv,
     rotate_half,
src/petals/models/mixtral/block.py (1 addition, 2 deletions)

@@ -1,4 +1,3 @@
-import json
 from typing import Optional, Tuple
 
 import torch
@@ -8,7 +7,7 @@
     _prepare_4d_causal_attention_mask,
     _prepare_4d_causal_attention_mask_for_sdpa,
 )
-from transformers.models.mixtral.modeling_mixtral import MixtralDecoderLayer, MixtralModel
+from transformers.models.mixtral.modeling_mixtral import MixtralDecoderLayer
 
 
 class WrappedMixtralBlock(MixtralDecoderLayer):
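
As a quick sanity check (not part of the commit itself), removals like these can be confirmed with a static checker. A minimal sketch using the pyflakes API, assuming pyflakes is installed and the script runs from the repository root:

    # Count pyflakes warnings (e.g. "imported but unused") in the touched files.
    from pyflakes.api import checkPath

    for path in [
        "src/petals/models/bloom/block.py",
        "src/petals/models/llama/block.py",
        "src/petals/models/mixtral/block.py",
    ]:
        # checkPath prints any findings and returns the warning count.
        print(path, "->", checkPath(path), "warning(s)")

A zero count for each file means the deleted imports were indeed unused; the command-line equivalent is `python -m pyflakes <files>`.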
