Add config support for Ollama

the0807 2025-03-14 14:02:32 +09:00
parent 7db0b2fbf0
commit 7a5de55615


@@ -15,6 +15,14 @@ temperature = 0.0 # Controls randomness
# temperature = 0.0
# api_version="AZURE API VERSION" #"2024-08-01-preview"

# [llm] #OLLAMA:
# api_type = 'ollama'
# model = "llama3.2"
# base_url = "http://localhost:11434/v1"
# api_key = "ollama"
# max_tokens = 4096
# temperature = 0.0

# Optional configuration for specific LLM models
[llm.vision]
model = "claude-3-7-sonnet" # The vision model to use
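
The commented-out [llm] block added above routes requests through Ollama's OpenAI-compatible endpoint. A minimal sketch to sanity-check those settings before enabling them, assuming the `openai` Python package is installed, a local Ollama server is running, and `llama3.2` has been pulled:

# Sketch: verify the Ollama endpoint with the settings from the config above.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:11434/v1", api_key="ollama")
resp = client.chat.completions.create(
    model="llama3.2",
    messages=[{"role": "user", "content": "Reply with one word: hello"}],
    max_tokens=4096,
    temperature=0.0,
)
print(resp.choices[0].message.content)

If this prints a reply, uncommenting the block should work unchanged, since the config mirrors the same base_url, api_key, and model values.
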
@@ -23,6 +31,14 @@ api_key = "sk-..." # Your API key for vision model
max_tokens = 8192 # Maximum number of tokens in the response
temperature = 0.0 # Controls randomness for vision model

# [llm.vision] #OLLAMA VISION:
# api_type = 'ollama'
# model = "llama3.2-vision"
# base_url = "http://localhost:11434/v1"
# api_key = "ollama"
# max_tokens = 4096
# temperature = 0.0

# Optional configuration for specific browser behavior
# [browser]
# Whether to run browser in headless mode (default: false)
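
The [llm.vision] block works the same way through the OpenAI-compatible endpoint. A hedged sketch of a quick check, assuming `llama3.2-vision` has been pulled, that Ollama's /v1 endpoint accepts images as base64 data URLs in the OpenAI message format, and a hypothetical image file `screenshot.png`:

# Sketch: quick check of the Ollama vision settings above.
import base64
from openai import OpenAI

client = OpenAI(base_url="http://localhost:11434/v1", api_key="ollama")
with open("screenshot.png", "rb") as f:  # hypothetical test image
    image_b64 = base64.b64encode(f.read()).decode()

resp = client.chat.completions.create(
    model="llama3.2-vision",
    messages=[{
        "role": "user",
        "content": [
            {"type": "text", "text": "Describe this image briefly."},
            {"type": "image_url",
             "image_url": {"url": f"data:image/png;base64,{image_b64}"}},
        ],
    }],
    max_tokens=4096,
    temperature=0.0,
)
print(resp.choices[0].message.content)
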