-
Notifications
You must be signed in to change notification settings - Fork 569
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
example: update create example (#418)
1 parent
2cad1f5
commit 02495ff
Showing
2 changed files
with
6 additions
and
33 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,30 +1,5 @@ | ||
import sys | ||
from ollama import Client | ||
|
||
from ollama import create | ||
|
||
|
||
args = sys.argv[1:] | ||
if len(args) == 2: | ||
# create from local file | ||
path = args[1] | ||
else: | ||
print('usage: python create.py <name> <filepath>') | ||
sys.exit(1) | ||
|
||
# TODO: update to real Modelfile values | ||
modelfile = f""" | ||
FROM {path} | ||
""" | ||
example_modelfile = """ | ||
FROM llama3.2 | ||
# sets the temperature to 1 [higher is more creative, lower is more coherent] | ||
PARAMETER temperature 1 | ||
# sets the context window size to 4096; this controls how many tokens the LLM can use as context to generate the next token | ||
PARAMETER num_ctx 4096 | ||
# sets a custom system message to specify the behavior of the chat assistant | ||
SYSTEM You are Mario from super mario bros, acting as an assistant. | ||
""" | ||
|
||
for response in create(model=args[0], modelfile=modelfile, stream=True): | ||
print(response['status']) | ||
client = Client() | ||
response = client.create(model='my-assistant', from_='llama3.2', stream=False) | ||
print(response.status) |