Commit 336dd4f
reduced LLM temperatures
emcf committed Sep 4, 2024
1 parent e69a00a · commit 336dd4f
Showing 2 changed files with 3 additions and 3 deletions.
thepipe/extract.py (2 changes: 1 addition & 1 deletion)
@@ -75,7 +75,7 @@ def extract_from_chunk(chunk: Chunk, chunk_index: int, schema: str, ai_model: st
         model=ai_model,
         messages=messages,
         response_format={"type": "json_object"},
-        temperature=0.2
+        temperature=0.1,
     )
     llm_response = response.choices[0].message.content
     input_tokens = calculate_tokens([chunk])
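
For context, this call runs in JSON mode: response_format={"type": "json_object"} constrains the model to emit parseable JSON, and the lowered temperature makes the extracted values more repeatable across runs. Below is a minimal sketch of the same call pattern, assuming the standard openai Python client pointed at OpenRouter; the model name, credentials, and schema prompt are placeholders for illustration, not thepipe's actual configuration:

    import json
    from openai import OpenAI

    # Placeholder client setup; thepipe configures its own OpenRouter client.
    client = OpenAI(
        base_url="https://openrouter.ai/api/v1",  # OpenRouter's OpenAI-compatible endpoint
        api_key="YOUR_OPENROUTER_API_KEY",        # placeholder credential
    )

    response = client.chat.completions.create(
        model="openai/gpt-4o-mini",  # placeholder model name, not thepipe's default
        messages=[
            {"role": "system", "content": 'Reply with a JSON object like {"title": "..."}.'},
            {"role": "user", "content": "Page text to extract from goes here."},
        ],
        response_format={"type": "json_object"},  # constrains output to valid JSON
        temperature=0.1,  # low temperature for stable, repeatable extractions
    )
    data = json.loads(response.choices[0].message.content)

Because JSON mode constrains the model to syntactically valid JSON, json.loads on the message content should not raise for a successful response.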
thepipe/scraper.py (4 changes: 2 additions & 2 deletions)
@@ -190,7 +190,7 @@ def process_page(page_num):
         response = openrouter_client.chat.completions.create(
             model=ai_model,
             messages=messages,
-            temperature=0.2
+            temperature=0.1
         )
         try:
             llm_response = response.choices[0].message.content.strip()
@@ -361,7 +361,7 @@ def ai_extract_webpage_content(url: str, text_only: Optional[bool] = False, verb
     response = openrouter_client.chat.completions.create(
         model=ai_model,
         messages=messages,
-        temperature=0.2
+        temperature=0.1
     )
     llm_response = response.choices[0].message.content
     chunk = Chunk(path=url, texts=[llm_response], images=[stacked_image])
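
Why lowering the temperature helps here: sampling temperature divides the model's logits before the softmax, so values below 1 sharpen the token distribution toward the most likely continuation, which suits deterministic tasks like extraction and scraping. A toy illustration of that scaling follows; the logits are invented for illustration, and this is not the provider's actual sampling code:

    import math

    def softmax_with_temperature(logits, temperature):
        """Toy illustration: lower temperature sharpens the token distribution."""
        scaled = [l / temperature for l in logits]
        m = max(scaled)  # subtract the max for numerical stability
        exps = [math.exp(s - m) for s in scaled]
        total = sum(exps)
        return [e / total for e in exps]

    logits = [2.0, 1.0, 0.5]  # invented logits for three candidate tokens
    print(softmax_with_temperature(logits, 0.2))  # top token ~0.99
    print(softmax_with_temperature(logits, 0.1))  # top token ~0.9999

At temperature 0.1 the top token's probability is driven even closer to 1 than at 0.2, which is the practical effect this commit is after: more stable, repeatable outputs from the same prompts.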
