From ea0ee3d37f39726c5b265584b9f386baee61d3f2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=9D=A8=E4=BA=9A=E6=9D=B0?= <39705429+JieGenius@users.noreply.github.com>
Date: Sun, 3 Mar 2024 23:38:13 +0800
Subject: [PATCH] Add more output for log tracing
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Run `ls /usr/local` instead of printing the literal string, stop
redirecting the lmdeploy api_server output to /dev/null, print the
torch/CUDA environment and the `nvidia-smi` output when web_demo.py
starts, and comment torch/torchvision out of requirements.txt.
---
 app.py           | 4 ++--
 requirements.txt | 4 ++--
 web_demo.py      | 7 ++++++-
 3 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/app.py b/app.py
index 30cb248..310a5b2 100644
--- a/app.py
+++ b/app.py
@@ -7,7 +7,7 @@ MODEL_REPOSITORY_OPENXLAB = "OpenLMLab/internlm2-chat-7b"
 
 
 if __name__ == '__main__':
-    print("ls /usr/local")
+    os.system("ls /usr/local")
     if not os.path.exists(MODEL_DIR):
         from openxlab.model import download
 
@@ -16,6 +16,6 @@
         print("Directory contents after extraction:")
         print(os.listdir(MODEL_DIR))
     os.system(f"lmdeploy convert internlm2-chat-7b {MODEL_DIR}")
-    os.system("lmdeploy serve api_server ./workspace --server-name 0.0.0.0 --server-port 23333 --tp 1 --cache-max-entry-count 0.5 >/dev/null 2>&1 &")
+    os.system("lmdeploy serve api_server ./workspace --server-name 0.0.0.0 --server-port 23333 --tp 1 --cache-max-entry-count 0.5 &")
     time.sleep(5)
     os.system('streamlit run web_demo.py --server.address=0.0.0.0 --server.port 7860 --server.enableStaticServing true')
diff --git a/requirements.txt b/requirements.txt
index 0b6e012..637333a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
 pandas
 python-docx
-torch
-torchvision
+# torch
+# torchvision
 modelscope
 transformers
 xtuner[all]
diff --git a/web_demo.py b/web_demo.py
index 6e84584..60af605 100644
--- a/web_demo.py
+++ b/web_demo.py
@@ -3,7 +3,7 @@
 import json
 import os
 import re
-
+import torch
 import streamlit as st
 
 from lagent.actions import ActionExecutor
@@ -356,6 +356,11 @@ def main():
 
 
 if __name__ == '__main__':
+    print("torch.cuda.is_available():", torch.cuda.is_available())
+    print("torch.__version__:", torch.__version__)
+    print("torch.version.cuda:", torch.version.cuda)
+    print("torch.cuda.get_device_name(0):", torch.cuda.get_device_name(0) if torch.cuda.is_available() else "no CUDA device")
+    print("nvidia-smi:", os.popen('nvidia-smi').read())
     root_dir = 'static'
     os.makedirs(root_dir, exist_ok=True)
     main()
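
The startup diagnostics added to web_demo.py can also be run on their own. A minimal standalone sketch follows, with the CPU-only and missing-nvidia-smi cases guarded; the helper name print_cuda_diagnostics is illustrative and not part of the repository:

    import os
    import shutil

    import torch


    # Illustrative helper, not part of web_demo.py.
    def print_cuda_diagnostics():
        """Print the torch/CUDA environment, tolerating CPU-only hosts."""
        print("torch.__version__:", torch.__version__)
        print("torch.version.cuda:", torch.version.cuda)  # None on CPU-only builds
        available = torch.cuda.is_available()
        print("torch.cuda.is_available():", available)
        if available:
            # get_device_name() raises RuntimeError unless a CUDA device is visible.
            print("torch.cuda.get_device_name(0):", torch.cuda.get_device_name(0))
        if shutil.which("nvidia-smi"):
            print("nvidia-smi:", os.popen("nvidia-smi").read())
        else:
            print("nvidia-smi: not found on PATH")


    if __name__ == '__main__':
        print_cuda_diagnostics()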