# Global LLM configuration
[llm]
model = "claude-3-5-sonnet"
base_url = "https://api.openai.com/v1"
api_key = "sk-..."
max_tokens = 4096
temperature = 0.0

# Azure OpenAI configuration (uncomment to use instead of the [llm] settings above)
# [llm]
# api_type = "azure"
# model = "YOUR_MODEL_NAME"  # e.g. "gpt-4o-mini"
# base_url = "{YOUR_AZURE_ENDPOINT.rstrip('/')}/openai/deployments/{AZURE_DEPLOYMENT_ID}"
# api_key = "AZURE API KEY"
# max_tokens = 8096
# temperature = 0.0
# api_version = "AZURE API VERSION"  # e.g. "2024-08-01-preview"

# Optional configuration for specific LLM models
[llm.vision]
model = "claude-3-5-sonnet"
base_url = "https://api.openai.com/v1"
api_key = "sk-..."

# Optional browser configuration
# [browser]
# Whether to run the browser in headless mode (default: false)
# headless = false
# Disable browser security features (default: true)
# disable_security = true
# Extra arguments to pass to the browser
# extra_chromium_args = []
# Path to a Chrome instance to connect to your normal browser,
# e.g. '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome'
# chrome_instance_path = ""
# Connect to a browser instance via WebSocket
# wss_url = ""
# Connect to a browser instance via CDP
# cdp_url = ""

# Optional proxy settings for the browser
# [browser.proxy]
# server = "http://proxy-server:port"
# username = "proxy-username"
# password = "proxy-password"