Add API via agent-protocol SDK (#5044)
* Add API via agent-protocol
* Fix linter formatting errors
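For context, a minimal client-side sketch of exercising the new API is shown below. The task-creation route and payload shape are assumptions based on the public Agent Protocol spec (the diff only shows that the SDK server is started on port 8080 via Agent.start); older agent-protocol releases may expose different paths, so treat this as a sketch rather than the SDK's documented interface.

# Hedged sketch only: endpoint path and payload are assumptions, not taken from this diff.
import requests

BASE_URL = "http://localhost:8080"  # port taken from the new `server` command below

resp = requests.post(
    f"{BASE_URL}/ap/v1/agent/tasks",  # assumed route; some SDK versions use /agent/tasks
    json={"input": "Write 'hello world' to a file"},
)
resp.raise_for_status()
print(resp.json())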
@@ -1,14 +1,8 @@
import contextlib
import pathlib
import shlex
import subprocess
import sys
import time

import click
import requests
import uvicorn
import yaml
from agent_protocol import Agent

from autogpt.core.runner.client_lib.shared_click_commands import (
    DEFAULT_SETTINGS_FILE,
@@ -29,13 +23,6 @@ autogpt.add_command(status)


@autogpt.command()
@click.option(
    "host",
    "--host",
    default="localhost",
    help="The host for the webserver.",
    type=click.STRING,
)
@click.option(
    "port",
    "--port",
@@ -43,16 +30,11 @@ autogpt.add_command(status)
    help="The port of the webserver.",
    type=click.INT,
)
def server(host: str, port: int) -> None:
def server(port: int) -> None:
    """Run the Auto-GPT runner httpserver."""
    click.echo("Running Auto-GPT runner httpserver...")
    uvicorn.run(
        "autogpt.core.runner.cli_web_app.server.api:app",
        workers=1,
        host=host,
        port=port,
        reload=True,
    )
    port = 8080
    Agent.start(port)


@autogpt.command()
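For reference, a minimal standalone sketch of the pattern the new command follows: agent_protocol's Agent.start() brings up the SDK's own HTTP server, so the explicit uvicorn.run() call is dropped. Only names that appear in this diff are used; how the SDK serves requests internally is not shown here, so this is a sketch under that assumption, not repository code.

# Sketch, not Auto-GPT code: start the agent-protocol SDK server on a fixed port,
# mirroring the new `server` command (which assigns port = 8080 before calling start).
from agent_protocol import Agent

if __name__ == "__main__":
    Agent.start(8080)  # serves whatever task handler has been registered with the SDK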
@@ -69,32 +51,7 @@ async def client(settings_file) -> None:
    if settings_file.exists():
        settings = yaml.safe_load(settings_file.read_text())

    from autogpt.core.runner.cli_web_app.client.client import run

    with autogpt_server():
        run()


@contextlib.contextmanager
def autogpt_server():
    host = "localhost"
    port = 8080
    cmd = shlex.split(
        f"{sys.executable} autogpt/core/runner/cli_web_app/cli.py server --host {host} --port {port}"
    )
    server_process = subprocess.Popen(
        args=cmd,
    )
    started = False

    while not started:
        try:
            requests.get(f"http://{host}:{port}")
            started = True
        except requests.exceptions.ConnectionError:
            time.sleep(0.2)
    yield server_process
    server_process.terminate()
    # TODO: Call the API server with the settings and task, using the Python API client for agent protocol.


if __name__ == "__main__":

@@ -1,16 +0,0 @@
import json

import requests


def run():
    body = json.dumps(
        {"ai_name": "HelloBot", "ai_role": "test", "ai_goals": ["goal1", "goal2"]}
    )

    header = {"Content-Type": "application/json", "openai_api_key": "asdf"}
    print("Sending: ", header, body)
    response = requests.post(
        "http://localhost:8080/api/v1/agents", data=body, headers=header
    )
    print(response.content.decode("utf-8"))

@@ -1,48 +1,114 @@
import uuid
from pathlib import Path

from fastapi import APIRouter, FastAPI, Request
from agent_protocol import Agent as AgentProtocol
from agent_protocol import StepHandler, StepResult
from colorama import Fore

from autogpt.core.runner.cli_web_app.server.schema import InteractRequestBody
from autogpt.agents import Agent
from autogpt.app.main import UserFeedback
from autogpt.commands import COMMAND_CATEGORIES
from autogpt.config import AIConfig, Config, ConfigBuilder
from autogpt.logs import logger
from autogpt.memory.vector import get_memory
from autogpt.models.command_registry import CommandRegistry
from autogpt.prompts.prompt import DEFAULT_TRIGGERING_PROMPT
from autogpt.workspace import Workspace

router = APIRouter()
PROJECT_DIR = Path().resolve()


@router.post("/agents")
async def create_agent(request: Request):
    """Create a new agent."""
    agent_id = uuid.uuid4().hex
    return {"agent_id": agent_id}
async def task_handler(task_input) -> StepHandler:
    agent = bootstrap_agent(task_input)

    next_command_name: str | None
    next_command_args: dict[str, str] | None

    async def step_handler(step_input) -> StepResult:
        result = await interaction_step(
            agent,
            step_input["user_input"],
            step_input["user_feedback"],
            next_command_name,
            next_command_args,
        )

        nonlocal next_command_name, next_command_args
        next_command_name = result["next_step_command_name"] if result else None
        next_command_args = result["next_step_command_args"] if result else None

        if not result:
            return StepResult(output=None, is_last=True)
        return StepResult(output=result)

    return step_handler


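The step handler above carries per-task state (the next command name and args) in closure variables that it rebinds with `nonlocal` on each step. A minimal, self-contained illustration of that pattern follows; note that Python requires the `nonlocal` declaration to appear before the names are first used inside the nested function.

# Minimal illustration of the closure-based per-task state used by step_handler.
def make_step_counter():
    count = 0

    def step() -> int:
        nonlocal count  # must precede any use of `count` in this function
        count += 1
        return count

    return step

step = make_step_counter()
assert step() == 1
assert step() == 2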
@router.post("/agents/{agent_id}")
async def interact(request: Request, agent_id: str, body: InteractRequestBody):
    """Interact with an agent."""
async def interaction_step(
    agent: Agent,
    user_input,
    user_feedback: UserFeedback | None,
    command_name: str | None,
    command_args: dict[str, str] | None,
):
    """Run one step of the interaction loop."""
    if user_feedback == UserFeedback.EXIT:
        return
    if user_feedback == UserFeedback.TEXT:
        command_name = "human_feedback"

    # check headers
    result: str | None = None

    # check if agent_id exists
    if command_name is not None:
        result = agent.execute(command_name, command_args, user_input)
        if result is None:
            logger.typewriter_log("SYSTEM: ", Fore.YELLOW, "Unable to execute command")
            return

    # get agent object from somewhere, e.g. a database/disk/global dict

    # continue agent interaction with user input
    next_command_name, next_command_args, assistant_reply_dict = agent.think()

    return {
        "thoughts": {
            "thoughts": {
                "text": "text",
                "reasoning": "reasoning",
                "plan": "plan",
                "criticism": "criticism",
                "speak": "speak",
            },
            "commands": {
                "name": "name",
                "args": {"arg_1": "value_1", "arg_2": "value_2"},
            },
        },
        "messages": ["message1", agent_id],
        "config": agent.config,
        "ai_config": agent.ai_config,
        "result": result,
        "assistant_reply_dict": assistant_reply_dict,
        "next_step_command_name": next_command_name,
        "next_step_command_args": next_command_args,
    }


app = FastAPI()
app.include_router(router, prefix="/api/v1")
def bootstrap_agent(task):
    config = ConfigBuilder.build_config_from_env(workdir=PROJECT_DIR)
    config.continuous_mode = False
    config.temperature = 0
    config.plain_output = True
    command_registry = get_command_registry(config)
    config.memory_backend = "no_memory"
    Workspace.set_workspace_directory(config)
    Workspace.build_file_logger_path(config, config.workspace_path)
    ai_config = AIConfig(
        ai_name="Auto-GPT",
        ai_role="a multi-purpose AI assistant.",
        ai_goals=[task.user_input],
    )
    ai_config.command_registry = command_registry
    return Agent(
        memory=get_memory(config),
        command_registry=command_registry,
        ai_config=ai_config,
        config=config,
        triggering_prompt=DEFAULT_TRIGGERING_PROMPT,
        workspace_directory=str(config.workspace_path),
    )


def get_command_registry(config: Config):
    command_registry = CommandRegistry()
    enabled_command_categories = [
        x for x in COMMAND_CATEGORIES if x not in config.disabled_command_categories
    ]
    for command_category in enabled_command_categories:
        command_registry.import_commands(command_category)
    return command_registry


AgentProtocol.handle_task(task_handler)

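To make the control flow concrete: AgentProtocol.handle_task(task_handler) registers a factory at import time, the SDK calls it once per task to obtain a step handler, and a StepResult with is_last=True ends the task. Below is a toy, dependency-free rendering of that flow; the class and function names are stand-ins, not the SDK's API.

# Toy dispatch loop (no SDK, no HTTP) approximating the task/step flow set up above.
import asyncio
from dataclasses import dataclass


@dataclass
class FakeStepResult:  # stand-in for agent_protocol.StepResult
    output: object
    is_last: bool = False


async def demo_task_handler(task_input):
    steps_left = 2  # pretend the agent finishes after two steps

    async def demo_step_handler(step_input) -> FakeStepResult:
        nonlocal steps_left
        steps_left -= 1
        return FakeStepResult(output={"echo": step_input}, is_last=steps_left == 0)

    return demo_step_handler


async def main():
    handler = await demo_task_handler({"input": "demo task"})
    while True:
        result = await handler({"user_input": "", "user_feedback": None})
        print(result)
        if result.is_last:
            break


asyncio.run(main())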
@@ -1,36 +0,0 @@
from uuid import UUID

from pydantic import BaseModel, validator


class AgentInfo(BaseModel):
    id: UUID = None
    objective: str = ""
    name: str = ""
    role: str = ""
    goals: list[str] = []


class AgentConfiguration(BaseModel):
    """Configuration for creation of a new agent."""

    # We'll want to get this schema from the configuration, so it needs to be dynamic.
    user_configuration: dict
    agent_goals: AgentInfo

    @validator("agent_goals")
    def only_objective_or_name_role_goals(cls, agent_goals):
        goals_specification = [agent_goals.name, agent_goals.role, agent_goals.goals]
        if agent_goals.objective and any(goals_specification):
            raise ValueError("Cannot specify both objective and name, role, or goals")
        if not agent_goals.objective and not all(goals_specification):
            raise ValueError("Must specify either objective or name, role, and goals")


class InteractRequestBody(BaseModel):
    user_input: str = ""


class InteractResponseBody(BaseModel):
    thoughts: dict[str, str]  # TBD
    messages: list[str]  # for example

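The removed AgentConfiguration validator enforced mutual exclusion between objective and the name/role/goals triple. A short usage sketch of that (now deleted) behaviour, using the classes shown above and assuming pydantic v1 semantics to match the `validator` import:

# Sketch of how the removed validator behaved (pydantic v1 style, per the import above).
from pydantic import ValidationError

try:
    AgentConfiguration(
        user_configuration={},
        agent_goals=AgentInfo(objective="Do X", name="Bot", role="helper", goals=["g"]),
    )
except ValidationError as e:
    print(e)  # raises: "Cannot specify both objective and name, role, or goals"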
@@ -1,20 +0,0 @@
import uuid

from fastapi import Request


class UserService:
    def __init__(self):
        self.users = {}

    def get_user_id(self, request: Request) -> uuid.UUID:
        # TODO: something real. I don't know how this works.
        hostname = request.client.host
        port = request.client.port
        user = f"{hostname}:{port}"
        if user not in self.users:
            self.users[user] = uuid.uuid4()
        return self.users[user]


USER_SERVICE = UserService()

@@ -32,6 +32,7 @@ prompt_toolkit>=3.0.38
pydantic
inflection
agbenchmark
agent-protocol>=0.1.1

# web server
fastapi