mirror of
https://github.com/aljazceru/Auto-GPT.git
synced 2025-12-25 18:04:28 +01:00
Rough sketching out of a hello world using our refactored autogpt library. See the tracking issue here: #4770. # Run instructions There are two client applications for Auto-GPT included. ## CLI Application 🌟 **This is the reference application I'm working with for now** 🌟 The first app is a straight CLI application. I have not done anything yet to port all the friendly display stuff from the `logger.typewriter_log` logic. - [Entry Point](https://github.com/Significant-Gravitas/Auto-GPT/blob/re-arch/hello-world/autogpt/core/runner/cli_app/cli.py) - [Client Application](https://github.com/Significant-Gravitas/Auto-GPT/blob/re-arch/hello-world/autogpt/core/runner/cli_app/main.py) To run, you first need a settings file. Run ``` python REPOSITORY_ROOT/autogpt/core/runner/cli_app/cli.py make-settings ``` where `REPOSITORY_ROOT` is the root of the Auto-GPT repository on your machine. This will write a file called `default_agent_settings.yml` with all the user-modifiable configuration keys to `~/auto-gpt/default_agent_settings.yml` (and make the `auto-gpt` directory in your user directory if it doesn't exist). At a bare minimum, you'll need to set `openai.credentials.api_key` to your OpenAI API Key to run the model. You can then run Auto-GPT with ``` python REPOSITORY_ROOT/autogpt/core/runner/cli_app/cli.py run ``` to launch the interaction loop. ## CLI Web App The second app is still a CLI, but it sets up a local webserver that the client application talks to rather than invoking calls to the Agent library code directly. This application is essentially a sketch at this point as the folks who were driving it have had less time (and likely not enough clarity) to proceed. 
- [Entry Point](https://github.com/Significant-Gravitas/Auto-GPT/blob/re-arch/hello-world/autogpt/core/runner/cli_web_app/cli.py) - [Client Application](https://github.com/Significant-Gravitas/Auto-GPT/blob/re-arch/hello-world/autogpt/core/runner/cli_web_app/client/client.py) - [Server API](https://github.com/Significant-Gravitas/Auto-GPT/blob/re-arch/hello-world/autogpt/core/runner/cli_web_app/server/api.py) To run, you still need to generate a default configuration. You can do ``` python REPOSITORY_ROOT/autogpt/core/runner/cli_web_app/cli.py make-settings ``` It invokes the same command as the bare CLI app, so follow the instructions above about setting your API key. To run, do ``` python REPOSITORY_ROOT/autogpt/core/runner/cli_web_app/cli.py client ``` This will launch a webserver and then start the client cli application to communicate with it. ⚠️ I am not actively developing this application. It is a very good place to get involved if you have web application design experience and are looking to get involved in the re-arch. --------- Co-authored-by: David Wurtz <davidjwurtz@gmail.com> Co-authored-by: Media <12145726+rihp@users.noreply.github.com> Co-authored-by: Richard Beales <rich@richbeales.net> Co-authored-by: Daryl Rodrigo <darylrodrigo@gmail.com> Co-authored-by: Daryl Rodrigo <daryl@orkestro.com> Co-authored-by: Swifty <craigswift13@gmail.com> Co-authored-by: Nicholas Tindle <nick@ntindle.com> Co-authored-by: Merwane Hamadi <merwanehamadi@gmail.com>
156 lines
4.4 KiB
Python
156 lines
4.4 KiB
Python
import abc
import enum
from typing import TYPE_CHECKING, Type, TypeAlias

from pydantic import BaseModel

from autogpt.core.configuration import SystemConfiguration, UserConfigurable
|
|
|
|
if TYPE_CHECKING:
|
|
from autogpt.core.ability import Ability, AbilityRegistry
|
|
from autogpt.core.memory import Memory
|
|
from autogpt.core.resource.model_providers import (
|
|
EmbeddingModelProvider,
|
|
LanguageModelProvider,
|
|
)
|
|
|
|
# Expand to other types as needed.
# NOTE: every member of this union is imported only under TYPE_CHECKING above,
# so the alias must be guarded as well -- evaluating it unconditionally at
# module import time would raise NameError.
if TYPE_CHECKING:
    PluginType = (
        Type[Ability]  # Swappable now
        | Type[AbilityRegistry]  # Swappable maybe never
        | Type[LanguageModelProvider]  # Swappable soon
        | Type[EmbeddingModelProvider]  # Swappable soon
        | Type[Memory]  # Swappable now
        # | Type[Planner]  # Swappable soon
    )
else:
    # Runtime placeholder: annotations in this module reference "PluginType"
    # as a string, so a plain `Type` keeps get_type_hints() and introspection
    # working without importing the heavy plugin modules.
    PluginType = Type
|
|
|
|
|
|
class PluginStorageFormat(str, enum.Enum):
    """Locations from which a plugin can be loaded.

    Each member names one supported storage backend; the commented-out
    members are candidates that are not implemented yet.
    """

    INSTALLED_PACKAGE = "installed_package"  # Required now, loads system defaults
    WORKSPACE = "workspace"  # Required now
    # Candidate future formats:
    # OPENAPI_URL = "open_api_url"  # Soon (requires some tooling we don't have yet).
    # OTHER_FILE_PATH = "other_file_path"  # Maybe later (maybe now)
    # GIT = "git"  # Maybe later (or soon)
    # PYPI = "pypi"  # Maybe later
    # AUTOGPT_PLUGIN_SERVICE = "autogpt_plugin_service"  # Long term solution, requires design
    # AUTO = "auto"  # Feature for later maybe, automatically find plugin.
|
|
|
|
|
|
# Installed package example
|
|
# PluginLocation(
|
|
# storage_format='installed_package',
|
|
# storage_route='autogpt_plugins.twitter.SendTwitterMessage'
|
|
# )
|
|
# Workspace example
|
|
# PluginLocation(
|
|
# storage_format='workspace',
|
|
# storage_route='relative/path/to/plugin.pkl'
|
|
# OR
|
|
# storage_route='relative/path/to/plugin.py'
|
|
# )
|
|
# Git
|
|
# PluginLocation(
|
|
# storage_format='git',
|
|
# Exact format TBD.
|
|
# storage_route='https://github.com/gravelBridge/AutoGPT-WolframAlpha/blob/main/autogpt-wolframalpha/wolfram_alpha.py'
|
|
# )
|
|
# PyPI
|
|
# PluginLocation(
|
|
# storage_format='pypi',
|
|
# storage_route='package_name'
|
|
# )
|
|
|
|
|
|
# PluginLocation(
|
|
# storage_format='installed_package',
|
|
# storage_route='autogpt_plugins.twitter.SendTwitterMessage'
|
|
# )
|
|
|
|
|
|
# A plugin storage route.
|
|
#
|
|
# This is a string that specifies where to load a plugin from
|
|
# (e.g. an import path or file path).
|
|
PluginStorageRoute = str
|
|
|
|
|
|
class PluginLocation(SystemConfiguration):
    """Where to find a single plugin.

    Pairs a storage format (how the plugin is stored) with a storage route
    (where, within that format, the plugin lives). The PluginService uses
    this pair to load plugins.
    """

    # How the plugin is stored (e.g. installed package, workspace file).
    storage_format: PluginStorageFormat = UserConfigurable()
    # Where the plugin lives within that storage format.
    storage_route: PluginStorageRoute = UserConfigurable()
|
|
|
|
|
|
class PluginMetadata(BaseModel):
    """Descriptive information about a single plugin."""

    # Human-readable plugin name.
    name: str
    # Short summary of what the plugin does.
    description: str
    # Where the plugin can be loaded from.
    location: PluginLocation
|
|
|
|
|
|
class PluginService(abc.ABC):
    """Stateless interface for loading plugins.

    Implementations resolve a PluginLocation to a concrete plugin type,
    dispatching to the appropriate loader for each storage format.
    """

    @staticmethod
    @abc.abstractmethod
    def get_plugin(plugin_location: PluginLocation) -> "PluginType":
        """Load the plugin described by the given location."""
        ...

    ####################################
    # Low-level storage format loaders #
    ####################################

    @staticmethod
    @abc.abstractmethod
    def load_from_file_path(plugin_route: PluginStorageRoute) -> "PluginType":
        """Load a plugin from a path to a file on disk."""
        ...

    @staticmethod
    @abc.abstractmethod
    def load_from_import_path(plugin_route: PluginStorageRoute) -> "PluginType":
        """Load a plugin from a dotted import path."""
        ...

    @staticmethod
    @abc.abstractmethod
    def resolve_name_to_path(
        plugin_route: PluginStorageRoute, path_type: str
    ) -> PluginStorageRoute:
        """Translate a plugin name into a concrete file or import path."""
        ...

    #####################################
    # High-level storage format loaders #
    #####################################

    @staticmethod
    @abc.abstractmethod
    def load_from_workspace(plugin_route: PluginStorageRoute) -> "PluginType":
        """Load a plugin stored in the agent's workspace."""
        ...

    @staticmethod
    @abc.abstractmethod
    def load_from_installed_package(plugin_route: PluginStorageRoute) -> "PluginType":
        """Load a plugin provided by an installed package."""
        ...