diff --git a/docs/components/ai_classifier.md b/docs/components/ai_classifier.md
index 6452fa174..be24b1ce8 100644
--- a/docs/components/ai_classifier.md
+++ b/docs/components/ai_classifier.md
@@ -194,10 +194,8 @@ By default, `@ai_classifier` uses the global LLM settings. To specify a particul
 ```python
-from marvin.engine.language_models import chat_llm
-
-@ai_classifier(model=chat_llm("openai/gpt-3.5-turbo-0613"))
+@ai_classifier(model="openai/gpt-3.5-turbo-0613", temperature=0)
 class Sentiment(Enum):
     POSITIVE = 1
     NEGATIVE = -1
diff --git a/docs/components/ai_model.md b/docs/components/ai_model.md
index 749057856..f0c433184 100644
--- a/docs/components/ai_model.md
+++ b/docs/components/ai_model.md
@@ -143,26 +143,19 @@ Note that the kwarg is `model_` with a trailing underscore; this is to avoid con
 ```python
-from marvin.engine.language_models import chat_llm
-
-@ai_model(model=chat_llm(model="openai/gpt-3.5-turbo", temperature=0))
+@ai_model(model="openai/gpt-3.5-turbo", temperature=0)
 class Location(BaseModel):
     city: str
     state: str

 print(Location("The Big Apple"))
-print(
-    Location(
-        "The Big Apple",
-        model_=chat_llm(model="openai/gpt-3.5-turbo", temperature=1),
-    )
-)
+
 ```

 city='New York' state='New York'
-city='New York' state='New York'
+

 ## Features
diff --git a/docs/components/overview.md b/docs/components/overview.md
index 085fc79b8..e0a2544eb 100644
--- a/docs/components/overview.md
+++ b/docs/components/overview.md
@@ -10,9 +10,8 @@ Marvin introduces a number of components that can become the building blocks of
     ```python
-    from marvin.engine.language_models.openai import OpenAIChatLLM
-    @ai_fn(model=OpenAIChatLLM(model="openai/gpt-3.5-turbo-16k"))
+    @ai_fn(model="openai/gpt-3.5-turbo-16k", temperature=0)
     def my_ai_fn():
        """..."""
    ```
diff --git a/docs/llms/llms.md b/docs/llms/llms.md
index 1f92e88b0..52c4ed1f7 100644
--- a/docs/llms/llms.md
+++ b/docs/llms/llms.md
@@ -38,13 +38,13 @@ specify a model name.
     # Call claude-2 simply by specifying it inside of ChatCompletion.
     anthropic = ChatCompletion('claude-2').create(messages = messages)
     ```
-    We can now access both results as we would with OpenAI.
+    We can now access both results as we would with OpenAI (after calling `.response`).
     ```python
-    print(openai.choices[0].message.content)
+    print(openai.response.choices[0].message.content)
     # Hello! I'm an AI, so I don't have feelings, but I'm here to help you. How can I assist you?

-    print(anthropic.choices[0].message.content)
+    print(anthropic.response.choices[0].message.content)
     # I'm doing well, thanks for asking!
     ```
@@ -87,7 +87,7 @@ keyword arguments passed to ChatCompletion will be persisted and passed to subse
     from marvin import openai

-    openai.ChatCompletion.create(
+    openai.ChatCompletion().create(
         messages = [{
             'role': 'user',
             'content': 'Hey! How are you?'
@@ -124,7 +124,7 @@ Let's consider two examples.
         with_sugar: bool = False

-    response = openai.ChatCompletion.create(
+    response = openai.ChatCompletion().create(
         messages = [{
             'role': 'user',
             'content': 'Can I get a small soymilk latte?'
@@ -154,7 +154,7 @@ Let's consider two examples.
         swedish: str

-    response = openai.ChatCompletion.create(
+    response = openai.ChatCompletion().create(
         messages = [
             {
                 'role': 'system',
@@ -214,7 +214,7 @@ Let's consider an example.
     ```python
-    response = openai.ChatCompletion.create(
+    response = openai.ChatCompletion().create(
         messages = [{
             'role': 'user',
             'content': 'What if I put it $100 every month for 60 months at 12%?'
@@ -348,11 +348,11 @@ Let's consider an example.
     conversation.send(messages = [{'role': 'user', 'content': prompt}])

     # While the most recent turn has a function call, evaluate it.
-    while conversation.last_response.has_function_call():
+    while conversation.last_turn.has_function_call():

         # Send the most recent function call to the conversation.
         conversation.send(messages = [
-            conversation.last_response.call_function()
+            conversation.last_turn.call_function()
         ])
     ```
@@ -361,7 +361,7 @@ Let's consider an example.
     ```python
-    conversation.last_response.choices[0].message.content
+    conversation.last_turn.response.choices[0].message.content

     # The result of adding 4124124 and 424242 is 4548366. When this result is divided by 48124,
     # the answer is approximately 94.51346521486161.
@@ -371,29 +371,33 @@ Let's consider an example.
     If we want to see the entire state, every `[request, response]` pair is held in the conversation's `turns`.

     ```python
-    [response.choices[0].message for response in conversation.turns]
-
-    # [ JSON: {
-    #     "role": "assistant",
-    #     "content": null,
-    #     "function_call": {
-    #         "name": "add",
-    #         "arguments": "{\n  \"x\": 4124124,\n  \"y\": 424242\n}"
-    #     }
-    # },
-    # JSON: {
-    #     "role": "assistant",
-    #     "content": null,
-    #     "function_call": {
-    #         "name": "divide",
-    #         "arguments": "{\n  \"x\": 4548366,\n  \"y\": 48124\n}"
-    #     }
-    # },
-    # JSON: {
-    #     "role": "assistant",
-    #     "content": "The result of adding 4124124 and 424242 is 4548366.
-    #                 When this result is divided by 48124, the answer is
-    #                 approximately 94.51346521486161."
-    # }]
+    [turn.response.choices[0].message.dict() for turn in conversation.turns]
+
+    [
+        {
+            "content": null,
+            "role": "assistant",
+            "name": null,
+            "function_call": {
+                "name": "add",
+                "arguments": "{\n  \"x\": 4124124,\n  \"y\": 424242\n}"
+            }
+        },
+        {
+            "content": null,
+            "role": "assistant",
+            "name": null,
+            "function_call": {
+                "name": "divide",
+                "arguments": "{\n  \"x\": 4548366,\n  \"y\": 48124\n}"
+            }
+        },
+        {
+            "content": "The result of adding 4124124 and 424242 is 4548366, which divided by 48124 is approximately 94.51346521486161.",
+            "role": "assistant",
+            "name": null,
+            "function_call": null
+        }
+    ]
 ```
diff --git a/docs/prompting/prompt_function.md b/docs/prompting/prompt_function.md
index 76b54798b..a8d0617bc 100644
--- a/docs/prompting/prompt_function.md
+++ b/docs/prompting/prompt_function.md
@@ -244,7 +244,7 @@ This is the easiest way to use Azure / OpenAI's function calling API.
     @prompt_fn(response_model_name = 'Fruits')
     def list_fruits(n: int, color: str = 'red') -> list[Fruit]:
-        '''Generates a list of {{n}} {{color}} {{response_model.__name__.lower()}}'''
+        '''Generates a list of {{n}} {{color}} {{'{{ response_model.__name__.lower() }}'}}'''

     list_fruits(3, color = 'blue').serialize()
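Taken together, the `llms.md` hunks above describe a single function-calling loop. The sketch below stitches the documented pieces into one place; it is a minimal illustration that assumes `conversation`, `last_turn`, `has_function_call()`, `call_function()`, `turns`, and `.response` behave exactly as the updated docs describe, and it elides the construction of `conversation`, just as the excerpt does.

```python
# Minimal sketch of the loop documented in docs/llms/llms.md.
# `conversation` is assumed to be the object built earlier in that doc
# (construction elided here, as it is in the diff above).
def run_until_done(conversation, prompt: str) -> str:
    """Send a prompt, evaluate any requested function calls, and
    return the final assistant message."""
    conversation.send(messages=[{'role': 'user', 'content': prompt}])

    # While the most recent turn has a function call, evaluate it and
    # feed the resulting function message back into the conversation.
    while conversation.last_turn.has_function_call():
        conversation.send(messages=[conversation.last_turn.call_function()])

    # Per the docs above, content is read through the turn's underlying
    # response object.
    return conversation.last_turn.response.choices[0].message.content
```

Every intermediate `[request, response]` pair remains available on `conversation.turns` for inspection, as shown in the final `llms.md` hunk.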