diff --git a/config/config.example.toml b/config/config.example.toml index 762f42c..e9a9620 100644 --- a/config/config.example.toml +++ b/config/config.example.toml @@ -15,6 +15,14 @@ temperature = 0.0 # Controls randomness # temperature = 0.0 # api_version="AZURE API VERSION" #"2024-08-01-preview" +# [llm] # OLLAMA: +# api_type = "ollama" +# model = "llama3.2" +# base_url = "http://localhost:11434/v1" +# api_key = "ollama" +# max_tokens = 4096 +# temperature = 0.0 + # Optional configuration for specific LLM models [llm.vision] model = "claude-3-7-sonnet" # The vision model to use @@ -23,6 +31,14 @@ api_key = "sk-..." # Your API key for vision model max_tokens = 8192 # Maximum number of tokens in the response temperature = 0.0 # Controls randomness for vision model +# [llm.vision] # OLLAMA VISION: +# api_type = "ollama" +# model = "llama3.2-vision" +# base_url = "http://localhost:11434/v1" +# api_key = "ollama" +# max_tokens = 4096 +# temperature = 0.0 + # Optional configuration for specific browser configuration # [browser] # Whether to run browser in headless mode (default: false)