mirror of
https://github.com/aljazceru/Auto-GPT.git
synced 2025-12-17 22:14:28 +01:00
Add extra documentation
This commit is contained in:
@@ -3,7 +3,9 @@ import data
|
||||
|
||||
|
||||
class AIConfig:
|
||||
"""Class to store the AI's name, role, and goals."""
|
||||
def __init__(self, ai_name="", ai_role="", ai_goals=[]):
|
||||
"""Initialize the AIConfig class"""
|
||||
self.ai_name = ai_name
|
||||
self.ai_role = ai_role
|
||||
self.ai_goals = ai_goals
|
||||
@@ -13,7 +15,7 @@ class AIConfig:
|
||||
|
||||
@classmethod
|
||||
def load(cls, config_file=SAVE_FILE):
|
||||
# Load variables from yaml file if it exists
|
||||
"""Load variables from yaml file if it exists, otherwise use defaults."""
|
||||
try:
|
||||
with open(config_file) as file:
|
||||
config_params = yaml.load(file, Loader=yaml.FullLoader)
|
||||
@@ -27,11 +29,13 @@ class AIConfig:
|
||||
return cls(ai_name, ai_role, ai_goals)
|
||||
|
||||
def save(self, config_file=SAVE_FILE):
    """Persist the AI's name, role and goals to a YAML file.

    Args:
        config_file (str): Path of the YAML file to write; defaults to
            SAVE_FILE.
    """
    with open(config_file, "w") as fh:
        yaml.dump(
            {
                "ai_name": self.ai_name,
                "ai_role": self.ai_role,
                "ai_goals": self.ai_goals,
            },
            fh,
        )
|
||||
|
||||
def construct_full_prompt(self):
|
||||
"""Construct the full prompt for the AI to use."""
|
||||
prompt_start = """Your decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications."""
|
||||
|
||||
# Construct full prompt
|
||||
|
||||
@@ -6,6 +6,7 @@ from llm_utils import create_chat_completion
|
||||
# This is a magic function that can do anything with no-code. See
|
||||
# https://github.com/Torantulino/AI-Functions for more info.
|
||||
def call_ai_function(function, args, description, model=cfg.smart_llm_model):
|
||||
"""Call an AI function with the given args and description."""
|
||||
# For each arg, if any are None, convert to "None":
|
||||
args = [str(arg) if arg is not None else "None" for arg in args]
|
||||
# parse args to comma seperated string
|
||||
|
||||
@@ -51,21 +51,27 @@ class Config(metaclass=Singleton):
|
||||
self.speak_mode = value
|
||||
|
||||
def set_fast_llm_model(self, value: str):
    """Store *value* as the model name used for fast LLM calls."""
    self.fast_llm_model = value
|
||||
|
||||
def set_smart_llm_model(self, value: str):
    """Store *value* as the model name used for smart LLM calls."""
    self.smart_llm_model = value
|
||||
|
||||
def set_fast_token_limit(self, value: int):
    """Store *value* as the token limit for the fast LLM model."""
    self.fast_token_limit = value
|
||||
|
||||
def set_smart_token_limit(self, value: int):
    """Store *value* as the token limit for the smart LLM model."""
    self.smart_token_limit = value
|
||||
|
||||
def set_openai_api_key(self, value: str):
    """Set the OpenAI API key value.

    Args:
        value (str): The OpenAI API key to store.
    """
    # Fix: this previously assigned to a garbled attribute name
    # (self.apiopenai_api_key_key), so the key set here was never seen
    # by consumers reading cfg.openai_api_key (e.g. the llm_utils
    # module, which does `openai.api_key = cfg.openai_api_key`).
    self.openai_api_key = value
|
||||
|
||||
def set_elevenlabs_api_key(self, value: str):
    """Store *value* as the ElevenLabs API key."""
    self.elevenlabs_api_key = value
|
||||
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import os.path
|
||||
|
||||
# Set a dedicated folder for file I/O
working_directory = "auto_gpt_workspace"

# Create the directory if it doesn't exist. exist_ok=True removes the
# check-then-create race (TOCTOU): with the previous
# `if not os.path.exists(...)` guard, another process creating the
# directory between the check and makedirs would raise FileExistsError.
os.makedirs(working_directory, exist_ok=True)
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ from config import Config
|
||||
cfg = Config()
|
||||
|
||||
def fix_and_parse_json(json_str: str, try_to_fix_with_gpt: bool = True):
|
||||
"""Fix and parse the given JSON string."""
|
||||
json_schema = """
|
||||
{
|
||||
"command": {
|
||||
@@ -50,6 +51,7 @@ def fix_and_parse_json(json_str: str, try_to_fix_with_gpt: bool = True):
|
||||
|
||||
# TODO: Make debug a global config var
|
||||
def fix_json(json_str: str, schema: str, debug=False) -> str:
|
||||
"""Fix the given JSON string to make it parseable and fully complient with the provided schema."""
|
||||
# Try to fix the JSON using gpt:
|
||||
function_string = "def fix_json(json_str: str, schema:str=None) -> str:"
|
||||
args = [json_str, schema]
|
||||
|
||||
@@ -6,6 +6,7 @@ openai.api_key = cfg.openai_api_key
|
||||
|
||||
# Overly simple abstraction until we create something better
|
||||
def create_chat_completion(messages, model=None, temperature=None, max_tokens=None)->str:
|
||||
"""Create a chat completion using the OpenAI API."""
|
||||
response = openai.ChatCompletion.create(
|
||||
model=model,
|
||||
messages=messages,
|
||||
|
||||
@@ -117,7 +117,7 @@ def print_assistant_thoughts(assistant_reply):
|
||||
print_to_console("Error: \n", Fore.RED, call_stack)
|
||||
|
||||
def load_variables(config_file="config.yaml"):
|
||||
# Load variables from yaml file if it exists
|
||||
"""Load variables from yaml file if it exists, otherwise prompt the user for input"""
|
||||
try:
|
||||
with open(config_file) as file:
|
||||
config = yaml.load(file, Loader=yaml.FullLoader)
|
||||
@@ -200,6 +200,7 @@ Continue (y/n): """)
|
||||
|
||||
|
||||
def prompt_user():
|
||||
"""Prompt the user for input"""
|
||||
ai_name = ""
|
||||
# Construct the prompt
|
||||
print_to_console(
|
||||
|
||||
Reference in New Issue
Block a user