Merge pull request #642 from matengm1/bug/fix-temperature-defaulting
Fix temperature falling back to the default when explicitly set to 0 (truthiness bug: `temperature or self.temperature` treats 0 as unset)
This commit is contained in:
commit
3bb8f8fe71
@ -240,7 +240,9 @@ class LLM:
|
||||
params["max_completion_tokens"] = self.max_tokens
|
||||
else:
|
||||
params["max_tokens"] = self.max_tokens
|
||||
params["temperature"] = temperature or self.temperature
|
||||
params["temperature"] = (
|
||||
temperature if temperature is not None else self.temperature
|
||||
)
|
||||
|
||||
if not stream:
|
||||
# Non-streaming request
|
||||
@ -379,7 +381,9 @@ class LLM:
|
||||
params["max_completion_tokens"] = self.max_tokens
|
||||
else:
|
||||
params["max_tokens"] = self.max_tokens
|
||||
params["temperature"] = temperature or self.temperature
|
||||
params["temperature"] = (
|
||||
temperature if temperature is not None else self.temperature
|
||||
)
|
||||
|
||||
response = await self.client.chat.completions.create(**params)
|
||||
|
||||
|
Loading…
x
Reference in New Issue
Block a user