From f1e28dde73b8a1dd1b52e8b5175fac29f0c2ed2d Mon Sep 17 00:00:00 2001
From: Shomy
Date: Tue, 8 Oct 2024 17:46:55 +0000
Subject: [PATCH] isort fix

---
 vllm/model_executor/models/llama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vllm/model_executor/models/llama.py b/vllm/model_executor/models/llama.py
index 7dcfb3a0b7d26..6b6f8e165b5ca 100644
--- a/vllm/model_executor/models/llama.py
+++ b/vllm/model_executor/models/llama.py
@@ -27,6 +27,7 @@
 from torch import nn
 from transformers import LlamaConfig
 
+import vllm.envs as envs
 from vllm import _custom_ops as ops
 from vllm.attention import Attention, AttentionMetadata
 from vllm.config import CacheConfig, LoRAConfig
@@ -55,7 +56,6 @@
 from .interfaces import SupportsLoRA
 from .utils import PPMissingLayer, is_pp_missing_parameter, make_layers
 
-import vllm.envs as envs
 
 class LlamaMLP(nn.Module):