From fbd4e06df5d185d05d1daed1a0ee2d9db2c9b947 Mon Sep 17 00:00:00 2001
From: BillSchumacher <34168009+BillSchumacher@users.noreply.github.com>
Date: Sun, 16 Apr 2023 23:39:33 -0500
Subject: [PATCH] Add early abort functions.

---
 autogpt/agent/agent.py         |  4 ++++
 autogpt/agent/agent_manager.py | 12 ++++++++++++
 autogpt/chat.py                |  2 ++
 autogpt/llm_utils.py           |  2 ++
 4 files changed, 20 insertions(+)

diff --git a/autogpt/agent/agent.py b/autogpt/agent/agent.py
index e65c7e61..7b1b5e15 100644
--- a/autogpt/agent/agent.py
+++ b/autogpt/agent/agent.py
@@ -180,6 +180,8 @@ class Agent:
                 result = f"Human feedback: {user_input}"
             else:
                 for plugin in cfg.plugins:
+                    if not plugin.can_handle_pre_command():
+                        continue
                     command_name, arguments = plugin.pre_command(
                         command_name, arguments
                     )
@@ -192,6 +194,8 @@ class Agent:
                 result = f"Command {command_name} returned: " f"{command_result}"
 
                 for plugin in cfg.plugins:
+                    if not plugin.can_handle_post_command():
+                        continue
                     result = plugin.post_command(command_name, result)
                 if self.next_action_count > 0:
                     self.next_action_count -= 1
diff --git a/autogpt/agent/agent_manager.py b/autogpt/agent/agent_manager.py
index e1353e03..d2648150 100644
--- a/autogpt/agent/agent_manager.py
+++ b/autogpt/agent/agent_manager.py
@@ -31,6 +31,8 @@ class AgentManager(metaclass=Singleton):
             {"role": "user", "content": prompt},
         ]
         for plugin in self.cfg.plugins:
+            if not plugin.can_handle_pre_instruction():
+                continue
             plugin_messages = plugin.pre_instruction(messages)
             if plugin_messages:
                 for plugin_message in plugin_messages:
@@ -46,6 +48,8 @@ class AgentManager(metaclass=Singleton):
 
         plugins_reply = ""
         for i, plugin in enumerate(self.cfg.plugins):
+            if not plugin.can_handle_on_instruction():
+                continue
             plugin_result = plugin.on_instruction(messages)
             if plugin_result:
                 sep = "" if not i else "\n"
@@ -61,6 +65,8 @@ class AgentManager(metaclass=Singleton):
         self.agents[key] = (task, messages, model)
 
         for plugin in self.cfg.plugins:
+            if not plugin.can_handle_post_instruction():
+                continue
             agent_reply = plugin.post_instruction(agent_reply)
 
         return key, agent_reply
@@ -81,6 +87,8 @@ class AgentManager(metaclass=Singleton):
         messages.append({"role": "user", "content": message})
 
         for plugin in self.cfg.plugins:
+            if not plugin.can_handle_pre_instruction():
+                continue
             plugin_messages = plugin.pre_instruction(messages)
             if plugin_messages:
                 for plugin_message in plugin_messages:
@@ -96,6 +104,8 @@ class AgentManager(metaclass=Singleton):
 
         plugins_reply = agent_reply
         for i, plugin in enumerate(self.cfg.plugins):
+            if not plugin.can_handle_on_instruction():
+                continue
             plugin_result = plugin.on_instruction(messages)
             if plugin_result:
                 sep = "" if not i else "\n"
@@ -105,6 +115,8 @@ class AgentManager(metaclass=Singleton):
         messages.append({"role": "assistant", "content": plugins_reply})
 
         for plugin in self.cfg.plugins:
+            if not plugin.can_handle_post_instruction():
+                continue
             agent_reply = plugin.post_instruction(agent_reply)
 
         return agent_reply
diff --git a/autogpt/chat.py b/autogpt/chat.py
index 16693040..22fe636c 100644
--- a/autogpt/chat.py
+++ b/autogpt/chat.py
@@ -137,6 +137,8 @@
 
         plugin_count = len(cfg.plugins)
         for i, plugin in enumerate(cfg.plugins):
+            if not plugin.can_handle_on_planning():
+                continue
             plugin_response = plugin.on_planning(
                 agent.prompt_generator, current_context
             )
diff --git a/autogpt/llm_utils.py b/autogpt/llm_utils.py
index 7aac703c..4fb0e1f5 100644
--- a/autogpt/llm_utils.py
+++ b/autogpt/llm_utils.py
@@ -131,6 +131,8 @@ def create_chat_completion(
         raise
     RuntimeError(f"Failed to get response after {num_retries} retries")
     resp = response.choices[0].message["content"]
     for plugin in CFG.plugins:
+        if not plugin.can_handle_on_response():
+            continue
         resp = plugin.on_response(resp)
     return resp