Merge pull request #642 from matengm1/bug/fix-temperature-defaulting

Fix temperature falling back to the default when explicitly set to 0 (the `or` operator treated 0 as falsy; replaced with an explicit `is not None` check)
This commit is contained in:
mannaandpoem 2025-03-15 14:16:46 +08:00 committed by GitHub
commit 3bb8f8fe71
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -240,7 +240,9 @@ class LLM:
params["max_completion_tokens"] = self.max_tokens
else:
params["max_tokens"] = self.max_tokens
params["temperature"] = temperature or self.temperature
params["temperature"] = (
temperature if temperature is not None else self.temperature
)
if not stream:
# Non-streaming request
@ -379,7 +381,9 @@ class LLM:
params["max_completion_tokens"] = self.max_tokens
else:
params["max_tokens"] = self.max_tokens
params["temperature"] = temperature or self.temperature
params["temperature"] = (
temperature if temperature is not None else self.temperature
)
response = await self.client.chat.completions.create(**params)