Merge pull request #772 from a-holm/fix-for-search-rate-limits
feat(search): Add configurable fallback engines and retry logic for robust web search
This commit is contained in:
commit
d63e88f089
@ -37,6 +37,18 @@ class ProxySettings(BaseModel):
|
||||
|
||||
class SearchSettings(BaseModel):
    """Settings controlling web-search engine selection and retry behavior."""

    # Primary search engine; tried first on every query.
    # Fix: original description read "Search engine the llm to use" (broken grammar).
    engine: str = Field(
        default="Google", description="Search engine for the LLM to use"
    )
    # Engines tried, in order, when the primary engine fails.
    fallback_engines: List[str] = Field(
        default_factory=lambda: ["DuckDuckGo", "Baidu"],
        description="Fallback search engines to try if the primary engine fails",
    )
    # Back-off between full retry rounds once every engine has failed.
    retry_delay: int = Field(
        default=60,
        description="Seconds to wait before retrying all engines again after they all fail",
    )
    # Number of full retry rounds before giving up entirely.
    max_retries: int = Field(
        default=3,
        description="Maximum number of times to retry all engines when all fail",
    )
|
||||
|
||||
|
||||
class BrowserSettings(BaseModel):
|
||||
|
@ -4,6 +4,7 @@ from typing import List
|
||||
from tenacity import retry, stop_after_attempt, wait_exponential
|
||||
|
||||
from app.config import config
|
||||
from app.logger import logger
|
||||
from app.tool.base import BaseTool
|
||||
from app.tool.search import (
|
||||
BaiduSearchEngine,
|
||||
@ -44,6 +45,8 @@ class WebSearch(BaseTool):
|
||||
async def execute(self, query: str, num_results: int = 10) -> List[str]:
    """
    Execute a Web search and return a list of URLs.

    Tries engines in order based on configuration, falling back if an engine
    fails with errors. If all engines fail, it will wait and retry up to the
    configured number of times.

    Args:
        query (str): The search query to submit to the search engine.
        num_results (int): The number of search results to return.

    Returns:
        List[str]: A list of URLs matching the search query.
    """
    # Pull retry tuning from config when present; fall back to built-in defaults.
    search_cfg = config.search_config
    retry_delay = getattr(search_cfg, "retry_delay", 60) if search_cfg else 60
    max_retries = getattr(search_cfg, "max_retries", 3) if search_cfg else 3

    # One initial attempt plus up to max_retries additional rounds.
    for attempt in range(max_retries + 1):
        links = await self._try_all_engines(query, num_results)
        if links:
            return links

        if attempt >= max_retries:
            # Final round also failed — stop retrying.
            logger.error(
                f"All search engines failed after {max_retries} retries. Giving up."
            )
            break

        # Every engine failed this round; back off before the next round.
        logger.warning(
            f"All search engines failed. Waiting {retry_delay} seconds before retry {attempt + 1}/{max_retries}..."
        )
        await asyncio.sleep(retry_delay)

    return []
|
||||
|
||||
async def _try_all_engines(self, query: str, num_results: int) -> List[str]:
    """
    Try all search engines in the configured order.

    Args:
        query (str): The search query to submit to the search engine.
        num_results (int): The number of search results to return.

    Returns:
        List[str]: A list of URLs matching the search query, or empty list if all engines fail.
    """
    engine_order = self._get_engine_order()
    failed_engines = []  # capitalized names of engines that raised, for reporting

    for engine_name in engine_order:
        engine = self._search_engine[engine_name]
        try:
            logger.info(f"🔎 Attempting search with {engine_name.capitalize()}...")
            links = await self._perform_search_with_engine(
                engine, query, num_results
            )
            if links:
                if failed_engines:
                    logger.info(
                        f"Search successful with {engine_name.capitalize()} after trying: {', '.join(failed_engines)}"
                    )
                return links
        except Exception as e:
            # Fix: removed a stray `print(...)` debug line that duplicated the
            # logger warning below and bypassed the project's logging setup.
            failed_engines.append(engine_name.capitalize())
            # Distinguish rate limiting from other failures so operators can
            # tell throttling apart from genuine engine errors.
            is_rate_limit = "429" in str(e) or "Too Many Requests" in str(e)

            if is_rate_limit:
                logger.warning(
                    f"⚠️ {engine_name.capitalize()} search engine rate limit exceeded, trying next engine..."
                )
            else:
                logger.warning(
                    f"⚠️ {engine_name.capitalize()} search failed with error: {e}"
                )

    if failed_engines:
        logger.error(f"All search engines failed: {', '.join(failed_engines)}")
    return []
|
||||
|
||||
def _get_engine_order(self) -> List[str]:
    """
    Determines the order in which to try search engines.

    Preferred engine is first (based on configuration), followed by fallback
    engines, and then the remaining engines.

    Returns:
        List[str]: Ordered list of search engine names.
    """
    # Fix: merge-conflict residue removed — the docstring sentence and the
    # `preferred` assignment were each duplicated in the original.
    preferred = "google"
    fallbacks = []

    if config.search_config:
        if config.search_config.engine:
            preferred = config.search_config.engine.lower()
        if config.search_config.fallback_engines:
            fallbacks = [
                engine.lower() for engine in config.search_config.fallback_engines
            ]

    engine_order = []
    # Fix: the original appended all remaining engines *before* the configured
    # fallbacks, so every fallback was already present and the configured
    # fallback order was never honored. Correct order is:
    # preferred -> fallbacks -> remaining.
    if preferred in self._search_engine:
        engine_order.append(preferred)
    for fallback in fallbacks:
        if fallback in self._search_engine and fallback not in engine_order:
            engine_order.append(fallback)
    for key in self._search_engine:
        if key not in engine_order:
            engine_order.append(key)

    return engine_order
|
||||
|
||||
@retry(
|
||||
|
@ -73,6 +73,13 @@ temperature = 0.0 # Controls randomness for vision mod
|
||||
# [search]
|
||||
# Search engine for agent to use. Default is "Google", can be set to "Baidu" or "DuckDuckGo".
|
||||
#engine = "Google"
|
||||
# Fallback engine order. Default is ["DuckDuckGo", "Baidu"] - will try in this order after primary engine fails.
|
||||
#fallback_engines = ["DuckDuckGo", "Baidu"]
|
||||
# Seconds to wait before retrying all engines again when they all fail due to rate limits. Default is 60.
|
||||
#retry_delay = 60
|
||||
# Maximum number of times to retry all engines when all fail. Default is 3.
|
||||
#max_retries = 3
|
||||
|
||||
|
||||
## Sandbox configuration
|
||||
#[sandbox]
|
||||
@ -83,3 +90,4 @@ temperature = 0.0 # Controls randomness for vision mod
|
||||
#cpu_limit = 2.0
|
||||
#timeout = 300
|
||||
#network_enabled = true
|
||||
|
||||
|
Loading…
x
Reference in New Issue
Block a user