Skip to content

Commit

Permalink
change docs
Browse files Browse the repository at this point in the history
  • Loading branch information
aaazzam committed Sep 22, 2023
1 parent 1c8cab8 commit 9495a2e
Show file tree
Hide file tree
Showing 5 changed files with 44 additions and 50 deletions.
4 changes: 1 addition & 3 deletions docs/components/ai_classifier.md
Original file line number Diff line number Diff line change
Expand Up @@ -194,10 +194,8 @@ By default, `@ai_classifier` uses the global LLM settings. To specify a particul


```python
from marvin.engine.language_models import chat_llm


@ai_classifier(model=chat_llm("openai/gpt-3.5-turbo-0613"))
@ai_classifier(model="openai/gpt-3.5-turbo-0613", temperature = 0)
class Sentiment(Enum):
POSITIVE = 1
NEGATIVE = -1
Expand Down
13 changes: 3 additions & 10 deletions docs/components/ai_model.md
Original file line number Diff line number Diff line change
Expand Up @@ -143,26 +143,19 @@ Note that the kwarg is `model_` with a trailing underscore; this is to avoid con


```python
from marvin.engine.language_models import chat_llm


@ai_model(model=chat_llm(model="openai/gpt-3.5-turbo", temperature=0))
@ai_model(model="openai/gpt-3.5-turbo", temperature=0)
class Location(BaseModel):
city: str
state: str


print(Location("The Big Apple"))
print(
Location(
"The Big Apple",
model_=chat_llm(model="openai/gpt-3.5-turbo", temperature=1),
)
)

```

city='New York' state='New York'
city='New York' state='New York'



## Features
Expand Down
3 changes: 1 addition & 2 deletions docs/components/overview.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,8 @@ Marvin introduces a number of components that can become the building blocks of


```python
from marvin.engine.language_models.openai import OpenAIChatLLM

@ai_fn(model=OpenAIChatLLM(model="openai/gpt-3.5-turbo-16k"))
@ai_fn(model="openai/gpt-3.5-turbo-16k", temperature = 0)
def my_ai_fn():
"""..."""
```
Expand Down
72 changes: 38 additions & 34 deletions docs/llms/llms.md
Original file line number Diff line number Diff line change
Expand Up @@ -38,13 +38,13 @@ specify a model name.
# Call claude-2 simply by specifying it inside of ChatCompletion.
anthropic = ChatCompletion('claude-2').create(messages = messages)
```
We can now access both results as we would with OpenAI.
We can now access both results as we would with OpenAI (after calling `.response`).

```python
print(openai.choices[0].message.content)
print(openai.response.choices[0].message.content)
# Hello! I'm an AI, so I don't have feelings, but I'm here to help you. How can I assist you?

print(anthropic.choices[0].message.content)
print(anthropic.response.choices[0].message.content)
# I'm doing well, thanks for asking!
```

Expand Down Expand Up @@ -87,7 +87,7 @@ keyword arguments passed to ChatCompletion will be persisted and passed to subse
from marvin import openai


openai.ChatCompletion.create(
openai.ChatCompletion().create(
messages = [{
'role': 'user',
'content': 'Hey! How are you?'
Expand Down Expand Up @@ -124,7 +124,7 @@ Let's consider two examples.
with_sugar: bool = False


response = openai.ChatCompletion.create(
response = openai.ChatCompletion().create(
messages = [{
'role': 'user',
'content': 'Can I get a small soymilk latte?'
Expand Down Expand Up @@ -154,7 +154,7 @@ Let's consider two examples.
swedish: str


response = openai.ChatCompletion.create(
response = openai.ChatCompletion().create(
messages = [
{
'role': 'system',
Expand Down Expand Up @@ -214,7 +214,7 @@ Let's consider an example.

```python

response = openai.ChatCompletion.create(
response = openai.ChatCompletion().create(
messages = [{
'role': 'user',
'content': 'What if I put it $100 every month for 60 months at 12%?'
Expand Down Expand Up @@ -348,11 +348,11 @@ Let's consider an example.
conversation.send(messages = [{'role': 'user', 'content': prompt}])

# While the most recent turn has a function call, evaluate it.
while conversation.last_response.has_function_call():
while conversation.last_turn.has_function_call():

# Send the most recent function call to the conversation.
conversation.send(messages = [
conversation.last_response.call_function()
conversation.last_turn.call_function()
])

```
Expand All @@ -361,7 +361,7 @@ Let's consider an example.

```python

conversation.last_response.choices[0].message.content
conversation.last_turn.choices[0].message.content

# The result of adding 4124124 and 424242 is 4548366. When this result is divided by 48124,
# the answer is approximately 94.51346521486161.
Expand All @@ -371,29 +371,33 @@ Let's consider an example.
If we want to see the entire state, every `[request, response]` pair is held in the conversation's
`turns`.
```python
[response.choices[0].message for response in conversation.turns]

# [<OpenAIObject at 0x120667c50> JSON: {
# "role": "assistant",
# "content": null,
# "function_call": {
# "name": "add",
# "arguments": "{\n \"x\": 4124124,\n \"y\": 424242\n}"
# }
# },
# <OpenAIObject at 0x1206f4830> JSON: {
# "role": "assistant",
# "content": null,
# "function_call": {
# "name": "divide",
# "arguments": "{\n \"x\": 4548366,\n \"y\": 48124\n}"
# }
# },
# <OpenAIObject at 0x1206f4b90> JSON: {
# "role": "assistant",
# "content": "The result of adding 4124124 and 424242 is 4548366.
# When this result is divided by 48124, the answer is
# approximately 94.51346521486161."
# }]
[turn.response.choices[0].message.dict() for turn in conversation.turns]

[
{
"content": null,
"role": "assistant",
"name": null,
"function_call": {
"name": "add",
"arguments": "{\n \"x\": 4124124,\n \"y\": 424242\n}"
}
},
{
"content": null,
"role": "assistant",
"name": null,
"function_call": {
"name": "divide",
"arguments": "{\n \"x\": 4548366,\n \"y\": 48124\n}"
}
},
{
"content": "4124124 + 424242 divided by 48124 is approximately 94.51346521486161.",
"role": "assistant",
"name": null,
"function_call": null
}
]

```
2 changes: 1 addition & 1 deletion docs/prompting/prompt_function.md
Original file line number Diff line number Diff line change
Expand Up @@ -244,7 +244,7 @@ This is the easiest way to use Azure / OpenAI's function calling API.

@prompt_fn(response_model_name = 'Fruits')
def list_fruits(n: int, color: str = 'red') -> list[Fruit]:
'''Generates a list of {{n}} {{color}} {{response_model.__name__.lower()}}'''
'''Generates a list of {{n}} {{color}} {{'{{ response_model.__name__.lower() }}'}}'''
list_fruits(3, color = 'blue').serialize()

Expand Down

0 comments on commit 9495a2e

Please sign in to comment.