Add config support for Ollama
This commit is contained in:
parent 7db0b2fbf0
commit 7a5de55615
@@ -15,6 +15,14 @@ temperature = 0.0 # Controls randomness
 # temperature = 0.0
 # api_version="AZURE API VERSION" #"2024-08-01-preview"
 
+# [llm] #OLLAMA:
+# api_type = 'ollama'
+# model = "llama3.2"
+# base_url = "http://localhost:11434/v1"
+# api_key = "ollama"
+# max_tokens = 4096
+# temperature = 0.0
+
 # Optional configuration for specific LLM models
 [llm.vision]
 model = "claude-3-7-sonnet" # The vision model to use
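
To run against a local Ollama server, these lines would be uncommented in place of the default [llm] section. A minimal sketch of the resulting block, with values taken straight from the diff (Ollama's OpenAI-compatible endpoint at /v1 does not check the API key, so "ollama" is just a placeholder):

[llm]
api_type = 'ollama'                       # Route requests through the Ollama-compatible client
model = "llama3.2"                        # Any model already pulled into the local Ollama server
base_url = "http://localhost:11434/v1"    # Ollama's default OpenAI-compatible endpoint
api_key = "ollama"                        # Placeholder; Ollama ignores the key
max_tokens = 4096
temperature = 0.0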
@@ -23,6 +31,14 @@ api_key = "sk-..." # Your API key for vision model
 max_tokens = 8192 # Maximum number of tokens in the response
 temperature = 0.0 # Controls randomness for vision model
 
+# [llm.vision] #OLLAMA VISION:
+# api_type = 'ollama'
+# model = "llama3.2-vision"
+# base_url = "http://localhost:11434/v1"
+# api_key = "ollama"
+# max_tokens = 4096
+# temperature = 0.0
+
 # Optional configuration for specific browser configuration
 # [browser]
 # Whether to run browser in headless mode (default: false)
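
The second block does the same for vision: uncommenting it points [llm.vision] at a multimodal Ollama model instead of claude-3-7-sonnet. A sketch of the active section, assuming the model has been pulled locally first (e.g. with `ollama pull llama3.2-vision`):

[llm.vision]
api_type = 'ollama'                       # Same Ollama routing as the main [llm] block
model = "llama3.2-vision"                 # A multimodal model available in the local Ollama library
base_url = "http://localhost:11434/v1"
api_key = "ollama"                        # Placeholder; unused by Ollama
max_tokens = 4096
temperature = 0.0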