diff --git a/README.md b/README.md
index 6b6700b..32df2fc 100644
--- a/README.md
+++ b/README.md
@@ -143,8 +143,7 @@ Join our networking group on Feishu and share your experience with other develop
 Thanks to [anthropic-computer-use](https://github.com/anthropics/anthropic-quickstarts/tree/main/computer-use-demo)
 and [browser-use](https://github.com/browser-use/browser-use) for providing basic support for this project!
 
-Additionally, we are grateful to [AAAJ](https://github.com/metauto-ai/agent-as-a-judge)
-and [MetaGPT](https://github.com/mannaandpoem/MetaGPT).
+Additionally, we are grateful to [AAAJ](https://github.com/metauto-ai/agent-as-a-judge), [MetaGPT](https://github.com/geekan/MetaGPT) and [OpenHands](https://github.com/All-Hands-AI/OpenHands).
 
 OpenManus is built by contributors from MetaGPT. Huge thanks to this agent community!
 
diff --git a/README_zh.md b/README_zh.md
index 33ba0a1..43506b0 100644
--- a/README_zh.md
+++ b/README_zh.md
@@ -145,7 +145,6 @@ python run_flow.py
 Special thanks to [anthropic-computer-use](https://github.com/anthropics/anthropic-quickstarts/tree/main/computer-use-demo)
 and [browser-use](https://github.com/browser-use/browser-use) for providing basic support for this project!
 
-Additionally, we thank [AAAJ](https://github.com/metauto-ai/agent-as-a-judge)
-and [MetaGPT](https://github.com/mannaandpoem/MetaGPT).
+Additionally, we thank [AAAJ](https://github.com/metauto-ai/agent-as-a-judge), [MetaGPT](https://github.com/geekan/MetaGPT) and [OpenHands](https://github.com/All-Hands-AI/OpenHands).
 
 OpenManus is built by contributors from the MetaGPT community. Thanks to this vibrant community of agent developers!
diff --git a/app/llm/inference.py b/app/llm/inference.py
index 6785e82..2abc881 100644
--- a/app/llm/inference.py
+++ b/app/llm/inference.py
@@ -38,7 +38,7 @@ class LLM:
         self, config_name: str = "default", llm_config: Optional[LLMSettings] = None
     ):
         if not hasattr(
-                self, "initialized"
+            self, "initialized"
         ):  # Only initialize if not already initialized
             llm_config = llm_config or config.llm
             llm_config = llm_config.get(config_name, llm_config["default"])
@@ -214,7 +214,7 @@ class LLM:
             for substring in ["localhost", "127.0.0.1", "0.0.0.0"]
         )
         if self.model and (
-                self.model.startswith("ollama") or "local" in self.model.lower()
+            self.model.startswith("ollama") or "local" in self.model.lower()
         ):
             return True
         return False
@@ -248,7 +248,7 @@ class LLM:
         return base64.b64encode(image_file.read()).decode("utf-8")
 
     def prepare_messages(
-            self, text: str, image_path: Optional[str] = None
+        self, text: str, image_path: Optional[str] = None
     ) -> List[dict]:
         """
         Prepare messages for completion, including multimodal content if needed.
@@ -273,7 +273,7 @@ class LLM:
         return messages
 
     def do_multimodal_completion(
-            self, text: str, image_path: str
+        self, text: str, image_path: str
     ) -> Tuple[Any, float, float]:
         """
         Perform a multimodal completion with text and image.