Skip to content

Commit

Permalink
Fix endpoint bug: derive the OpenAI API base from the parsed `v1_url` instead of `config.API_URL.split("chat")[0]`
Browse files Browse the repository at this point in the history
  • Loading branch information
yym68686 committed Nov 26, 2023
1 parent 38f4a3e commit b133bde
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 11 deletions.
5 changes: 3 additions & 2 deletions chatgpt2api/chatgpt2api.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ def __init__(
self.source_api_url: str = api_url
parsed_url = urlparse(self.source_api_url)
self.base_url: str = urlunparse(parsed_url[:2] + ("",) * 4)
self.v1_url: str = urlunparse(parsed_url[:2] + ("/v1",) + ("",) * 3)
self.chat_url: str = urlunparse(parsed_url[:2] + ("/v1/chat/completions",) + ("",) * 3)
self.image_url: str = urlunparse(parsed_url[:2] + ("/v1/images/generations",) + ("",) * 3)

Expand Down Expand Up @@ -545,8 +546,8 @@ def search_summary(
chatllm = EducationalLLM(callback_manager=CallbackManager([chainStreamHandler]))
chainllm = EducationalLLM()
else:
chatllm = ChatOpenAI(streaming=True, callback_manager=CallbackManager([chainStreamHandler]), temperature=config.temperature, openai_api_base=config.API_URL.split("chat")[0], model_name=self.engine, openai_api_key=config.API)
chainllm = ChatOpenAI(temperature=config.temperature, openai_api_base=config.API_URL.split("chat")[0], model_name=config.GPT_ENGINE, openai_api_key=config.API)
chatllm = ChatOpenAI(streaming=True, callback_manager=CallbackManager([chainStreamHandler]), temperature=config.temperature, openai_api_base=bot_api_url.v1_url, model_name=self.engine, openai_api_key=config.API)
chainllm = ChatOpenAI(temperature=config.temperature, openai_api_base=bot_api_url.v1_url, model_name=config.GPT_ENGINE, openai_api_key=config.API)

if config.SEARCH_USE_GPT:
gpt_search_thread = ThreadWithReturnValue(target=gptsearch, args=(prompt, chainllm,))
Expand Down
3 changes: 2 additions & 1 deletion test/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,9 +42,10 @@ def __init__(
self.source_api_url: str = api_url
parsed_url = urlparse(self.source_api_url)
self.base_url: str = urlunparse(parsed_url[:2] + ("",) * 4)
self.v1_url: str = urlunparse(parsed_url[:2] + ("/v1",) + ("",) * 3)
self.chat_url: str = urlunparse(parsed_url[:2] + ("/v1/chat/completions",) + ("",) * 3)
self.image_url: str = urlunparse(parsed_url[:2] + ("/v1/images/generations",) + ("",) * 3)


a = openaiAPI()
print(a.chat_url)
print(a.v1_url)
17 changes: 9 additions & 8 deletions utils/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
from langchain.utilities import WikipediaAPIWrapper
from utils.googlesearch import GoogleSearchAPIWrapper
from langchain.document_loaders import UnstructuredPDFLoader
from chatgpt2api.chatgpt2api import bot_api_url

def getmd5(string):
import hashlib
Expand Down Expand Up @@ -95,8 +96,8 @@ def get_chain(store, llm):
return chain

async def docQA(docpath, query_message, persist_db_path="db", model = "gpt-3.5-turbo"):
chatllm = ChatOpenAI(temperature=0.5, openai_api_base=config.API_URL.split("chat")[0], model_name=model, openai_api_key=config.API)
embeddings = OpenAIEmbeddings(openai_api_base=config.API_URL.split("chat")[0], openai_api_key=config.API)
chatllm = ChatOpenAI(temperature=0.5, openai_api_base=bot_api_url.v1_url, model_name=model, openai_api_key=config.API)
embeddings = OpenAIEmbeddings(openai_api_base=bot_api_url.v1_url, openai_api_key=config.API)

sitemap = "sitemap.xml"
match = re.match(r'^(https?|ftp)://[^\s/$.?#].[^\s]*$', docpath)
Expand Down Expand Up @@ -135,7 +136,7 @@ def get_doc_from_url(url):
return filename

def persist_emdedding_pdf(docurl, persist_db_path):
embeddings = OpenAIEmbeddings(openai_api_base=config.API_URL.split("chat")[0], openai_api_key=os.environ.get('API', None))
embeddings = OpenAIEmbeddings(openai_api_base=bot_api_url.v1_url, openai_api_key=os.environ.get('API', None))
filename = get_doc_from_url(docurl)
docpath = os.getcwd() + "/" + filename
loader = UnstructuredPDFLoader(docpath)
Expand All @@ -150,8 +151,8 @@ def persist_emdedding_pdf(docurl, persist_db_path):
return vector_store

async def pdfQA(docurl, docpath, query_message, model="gpt-3.5-turbo"):
chatllm = ChatOpenAI(temperature=0.5, openai_api_base=config.API_URL.split("chat")[0], model_name=model, openai_api_key=os.environ.get('API', None))
embeddings = OpenAIEmbeddings(openai_api_base=config.API_URL.split("chat")[0], openai_api_key=os.environ.get('API', None))
chatllm = ChatOpenAI(temperature=0.5, openai_api_base=bot_api_url.v1_url, model_name=model, openai_api_key=os.environ.get('API', None))
embeddings = OpenAIEmbeddings(openai_api_base=bot_api_url.v1_url, openai_api_key=os.environ.get('API', None))
persist_db_path = getmd5(docpath)
if not os.path.exists(persist_db_path):
vector_store = persist_emdedding_pdf(docurl, persist_db_path)
Expand All @@ -163,8 +164,8 @@ async def pdfQA(docurl, docpath, query_message, model="gpt-3.5-turbo"):
return result['result']

def pdf_search(docurl, query_message, model="gpt-3.5-turbo"):
chatllm = ChatOpenAI(temperature=0.5, openai_api_base=config.API_URL.split("chat")[0], model_name=model, openai_api_key=os.environ.get('API', None))
embeddings = OpenAIEmbeddings(openai_api_base=config.API_URL.split("chat")[0], openai_api_key=os.environ.get('API', None))
chatllm = ChatOpenAI(temperature=0.5, openai_api_base=bot_api_url.v1_url, model_name=model, openai_api_key=os.environ.get('API', None))
embeddings = OpenAIEmbeddings(openai_api_base=bot_api_url.v1_url, openai_api_key=os.environ.get('API', None))
filename = get_doc_from_url(docurl)
docpath = os.getcwd() + "/" + filename
loader = UnstructuredPDFLoader(docpath)
Expand Down Expand Up @@ -319,7 +320,7 @@ def get_google_search_results(prompt: str, context_max_tokens: int):
if config.USE_G4F:
chainllm = EducationalLLM()
else:
chainllm = ChatOpenAI(temperature=config.temperature, openai_api_base=config.API_URL.split("chat")[0], model_name=config.GPT_ENGINE, openai_api_key=config.API)
chainllm = ChatOpenAI(temperature=config.temperature, openai_api_base=bot_api_url.v1_url, model_name=config.GPT_ENGINE, openai_api_key=config.API)

if config.SEARCH_USE_GPT:
gpt_search_thread = ThreadWithReturnValue(target=gptsearch, args=(prompt, chainllm,))
Expand Down

0 comments on commit b133bde

Please sign in to comment.