# 4.2.ChatPromptTemplate.py
# For reference: the raw OpenAI Chat Completions call that the LangChain
# chat prompt template below is modeled on.
#
# import openai
#
# openai.ChatCompletion.create(
#     model="gpt-3.5-turbo",
#     messages=[
#         {"role": "system", "content": "You are a helpful assistant."},
#         {"role": "user", "content": "Who won the world series in 2020?"},
#         {"role": "assistant", "content": "The Los Angeles Dodgers won the World Series in 2020."},
#         {"role": "user", "content": "Where was it played?"}
#     ]
# )
# Load the API key from a .env file (expects OPENAI_API_KEY)
from dotenv import load_dotenv
load_dotenv(override=True)
# Import the chat message prompt template classes
from langchain.prompts import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
)
# Build the prompt templates
# System message: defines the assistant's role
template = "You are a professional consultant who names companies specializing in {product}."
system_message_prompt = SystemMessagePromptTemplate.from_template(template)
# Human message: supplies the product details
human_template = "The company's flagship product is {product_detail}."
human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)
# Combine the system and human messages into a single chat prompt template
prompt_template = ChatPromptTemplate.from_messages([system_message_prompt, human_message_prompt])
# Fill in the template variables and convert the prompt to a list of messages
prompt = prompt_template.format_prompt(product="flower decoration", product_detail="innovative floral designs.").to_messages()
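# prompt is now a list of LangChain message objects, roughly:
#   [SystemMessage(content="You are a professional consultant ..."),
#    HumanMessage(content="The company's flagship product is ...")]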
# Call the chat model with the formatted prompt and print the result
from langchain.chat_models import ChatOpenAI
chat = ChatOpenAI()
result = chat(prompt)
print(result)
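
# A minimal sketch of the same model call on newer LangChain releases (assumes
# the separate langchain-openai package is installed); the prompt-building
# code above is unchanged, only the import and the invocation differ:
#
# from langchain_openai import ChatOpenAI
#
# chat = ChatOpenAI(model="gpt-3.5-turbo")
# result = chat.invoke(prompt)
# print(result.content)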