Use correct reference to prompt_generator in autogpt/llm/chat.py (#4011)

Author: Tomasz Kasperczyk
Date: 2023-05-08 14:59:12 +02:00
Committed by: GitHub
Parent: 33a3e6f998
Commit: 0166eacb2b


@@ -200,7 +200,7 @@ def chat_with_ai(
             if not plugin.can_handle_on_planning():
                 continue
             plugin_response = plugin.on_planning(
-                agent.prompt_generator, current_context
+                agent.config.prompt_generator, current_context
             )
             if not plugin_response or plugin_response == "":
                 continue
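
For context, here is a minimal runnable sketch of the call site this hunk fixes. The hook names (can_handle_on_planning, on_planning) and the corrected attribute path (agent.config.prompt_generator) come from the diff itself; everything else (ExamplePlugin, the PromptGenerator stub, gather_plugin_responses) is an illustrative assumption, not Auto-GPT's actual code.

from types import SimpleNamespace
from typing import Optional


class PromptGenerator:
    """Stand-in for autogpt's PromptGenerator (assumed shape)."""


class ExamplePlugin:
    """Hypothetical plugin showing the on_planning hook used in the diff."""

    def can_handle_on_planning(self) -> bool:
        # Opt in to the planning hook.
        return True

    def on_planning(
        self, prompt_generator: PromptGenerator, messages: list
    ) -> Optional[str]:
        # Return extra text to inject into the planning context, or None.
        return "Remember to cite sources."


def gather_plugin_responses(agent, plugins, current_context):
    """Mirrors the fixed call site in chat_with_ai."""
    responses = []
    for plugin in plugins:
        if not plugin.can_handle_on_planning():
            continue
        # The bug: `agent.prompt_generator` does not exist; the
        # PromptGenerator lives on the agent's config object.
        plugin_response = plugin.on_planning(
            agent.config.prompt_generator, current_context
        )
        if not plugin_response or plugin_response == "":
            continue
        responses.append(plugin_response)
    return responses


if __name__ == "__main__":
    # Minimal demo: an agent whose config carries the prompt generator.
    agent = SimpleNamespace(
        config=SimpleNamespace(prompt_generator=PromptGenerator())
    )
    print(gather_plugin_responses(agent, [ExamplePlugin()], current_context=[]))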