diff --git a/thepipe/extract.py b/thepipe/extract.py
index 7323f64..b13b1ca 100644
--- a/thepipe/extract.py
+++ b/thepipe/extract.py
@@ -75,7 +75,7 @@ def extract_from_chunk(chunk: Chunk, chunk_index: int, schema: str, ai_model: st
         model=ai_model,
         messages=messages,
         response_format={"type": "json_object"},
-        temperature=0.2
+        temperature=0.1,
     )
     llm_response = response.choices[0].message.content
     input_tokens = calculate_tokens([chunk])
diff --git a/thepipe/scraper.py b/thepipe/scraper.py
index 0a7418d..b9c25a2 100644
--- a/thepipe/scraper.py
+++ b/thepipe/scraper.py
@@ -190,7 +190,7 @@ def process_page(page_num):
         response = openrouter_client.chat.completions.create(
             model=ai_model,
             messages=messages,
-            temperature=0.2
+            temperature=0.1
         )
         try:
             llm_response = response.choices[0].message.content.strip()
@@ -361,7 +361,7 @@ def ai_extract_webpage_content(url: str, text_only: Optional[bool] = False, verb
         response = openrouter_client.chat.completions.create(
             model=ai_model,
             messages=messages,
-            temperature=0.2
+            temperature=0.1
         )
         llm_response = response.choices[0].message.content
         chunk = Chunk(path=url, texts=[llm_response], images=[stacked_image])