From b17c9d31a9e94f2a31eadf5ec47a551b3775658c Mon Sep 17 00:00:00 2001
From: Matt Eng
Date: Fri, 14 Mar 2025 20:39:23 -0700
Subject: [PATCH 1/2] Fix temperature using default if 0

---
 app/llm.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/app/llm.py b/app/llm.py
index 8c085ae..98343e6 100644
--- a/app/llm.py
+++ b/app/llm.py
@@ -151,7 +151,7 @@ class LLM:
             params["max_completion_tokens"] = self.max_tokens
         else:
             params["max_tokens"] = self.max_tokens
-        params["temperature"] = temperature or self.temperature
+        params["temperature"] = temperature if temperature is not None else self.temperature
 
         if not stream:
             # Non-streaming request
@@ -255,7 +255,7 @@ class LLM:
             params["max_completion_tokens"] = self.max_tokens
         else:
             params["max_tokens"] = self.max_tokens
-        params["temperature"] = temperature or self.temperature
+        params["temperature"] = temperature if temperature is not None else self.temperature
 
         response = await self.client.chat.completions.create(**params)
 

From 49ccd72815487a67ac72cadea7919ab1e9b4ec1a Mon Sep 17 00:00:00 2001
From: Matt Eng
Date: Fri, 14 Mar 2025 21:41:43 -0700
Subject: [PATCH 2/2] Reformat

---
 app/llm.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/app/llm.py b/app/llm.py
index 98343e6..39871f6 100644
--- a/app/llm.py
+++ b/app/llm.py
@@ -151,7 +151,9 @@ class LLM:
             params["max_completion_tokens"] = self.max_tokens
         else:
             params["max_tokens"] = self.max_tokens
-        params["temperature"] = temperature if temperature is not None else self.temperature
+        params["temperature"] = (
+            temperature if temperature is not None else self.temperature
+        )
 
         if not stream:
             # Non-streaming request
@@ -255,7 +257,9 @@ class LLM:
             params["max_completion_tokens"] = self.max_tokens
         else:
             params["max_tokens"] = self.max_tokens
-        params["temperature"] = temperature if temperature is not None else self.temperature
+        params["temperature"] = (
+            temperature if temperature is not None else self.temperature
+        )
 
         response = await self.client.chat.completions.create(**params)
 
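Note on the fix (not part of the patches above): `temperature or self.temperature`
falls back to the default whenever `temperature` is falsy, and 0 is falsy in
Python, so an explicitly requested temperature of 0 was silently replaced by the
default. Below is a minimal, self-contained sketch of the pitfall and the fix;
DEFAULT_TEMPERATURE and the helper names are hypothetical stand-ins for
illustration, not code from app/llm.py:

    DEFAULT_TEMPERATURE = 1.0  # hypothetical stand-in for self.temperature

    def resolve_buggy(temperature=None):
        # Old behavior: `or` discards any falsy value, including 0.
        return temperature or DEFAULT_TEMPERATURE

    def resolve_fixed(temperature=None):
        # New behavior: fall back only when no value was provided at all.
        return temperature if temperature is not None else DEFAULT_TEMPERATURE

    assert resolve_buggy(0) == 1.0    # bug: explicit 0 is ignored
    assert resolve_fixed(0) == 0      # fix: explicit 0 is respected
    assert resolve_fixed(None) == 1.0 # default still applies when unset

The second patch only rewraps the fixed line in parentheses to satisfy line-length
formatting; behavior is unchanged.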