From 114bd467206fb34c53696903e9d3001863bc342d Mon Sep 17 00:00:00 2001
From: liangxinbing <1580466765@qq.com>
Date: Mon, 17 Mar 2025 00:04:17 +0800
Subject: [PATCH] update config.example.toml and format file_saver.py

---
 app/tool/file_saver.py     |  2 +-
 config/config.example.toml | 21 ++++++++++-----------
 2 files changed, 11 insertions(+), 12 deletions(-)

diff --git a/app/tool/file_saver.py b/app/tool/file_saver.py
index 96d64b3..7d92a02 100644
--- a/app/tool/file_saver.py
+++ b/app/tool/file_saver.py
@@ -2,8 +2,8 @@ import os
 
 import aiofiles
 
-from app.tool.base import BaseTool
 from app.config import WORKSPACE_ROOT
+from app.tool.base import BaseTool
 
 
 class FileSaver(BaseTool):
diff --git a/config/config.example.toml b/config/config.example.toml
index 2eecdfb..51b8ead 100644
--- a/config/config.example.toml
+++ b/config/config.example.toml
@@ -1,11 +1,10 @@
 # Global LLM configuration
 [llm]
-model = "gpt-4o" # The LLM model to use
-base_url = "https://api.openai.com/v1" # API endpoint URL
-api_key = "sk-..." # Your API key
-max_tokens = 8192 # Maximum number of tokens in the response
-temperature = 0.0 # Controls randomness
-# max_input_tokens = 100000 # Maximum input tokens to use across all requests (set to null or delete this line for unlimited)
+model = "claude-3-7-sonnet-20250219" # The LLM model to use
+base_url = "https://api.anthropic.com/v1/" # API endpoint URL
+api_key = "YOUR_API_KEY" # Your API key
+max_tokens = 8192 # Maximum number of tokens in the response
+temperature = 0.0 # Controls randomness
 
 # [llm] #AZURE OPENAI:
 # api_type= 'azure'
@@ -26,11 +25,11 @@ temperature = 0.0 # Controls randomness
 
 # Optional configuration for specific LLM models
 [llm.vision]
-model = "gpt-4o" # The vision model to use
-base_url = "https://api.openai.com/v1" # API endpoint URL for vision model
-api_key = "sk-..." # Your API key for vision model
-max_tokens = 8192 # Maximum number of tokens in the response
-temperature = 0.0 # Controls randomness for vision model
+model = "claude-3-7-sonnet-20250219" # The vision model to use
+base_url = "https://api.anthropic.com/v1/" # API endpoint URL for vision model
+api_key = "YOUR_API_KEY" # Your API key for vision model
+max_tokens = 8192 # Maximum number of tokens in the response
+temperature = 0.0 # Controls randomness for vision model
 
 # [llm.vision] #OLLAMA VISION:
 # api_type = 'ollama'
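
Note (not part of the patch): a minimal sketch of how the updated [llm] table in
config.example.toml could be read, assuming Python 3.11+ (for the standard-library
tomllib) and that the example file has been copied to config/config.toml. The path
and the load_llm_config helper name are illustrative, not taken from this repository.

# Hypothetical sketch: read the [llm] section of the TOML config shown above.
import tomllib
from pathlib import Path

CONFIG_PATH = Path("config/config.toml")  # assumed location of the user's copy


def load_llm_config(path: Path = CONFIG_PATH) -> dict:
    """Return the [llm] table: model, base_url, api_key, max_tokens, temperature."""
    with path.open("rb") as f:  # tomllib requires a binary file handle
        config = tomllib.load(f)
    return config["llm"]


if __name__ == "__main__":
    llm = load_llm_config()
    print(llm["model"], llm["base_url"])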