diff --git a/scripts/ai_config.py b/scripts/ai_config.py index 945fcfb2..42d227d0 100644 --- a/scripts/ai_config.py +++ b/scripts/ai_config.py @@ -3,7 +3,9 @@ import data class AIConfig: + """Class to store the AI's name, role, and goals.""" def __init__(self, ai_name="", ai_role="", ai_goals=[]): + """Initialize the AIConfig class""" self.ai_name = ai_name self.ai_role = ai_role self.ai_goals = ai_goals @@ -13,7 +15,7 @@ class AIConfig: @classmethod def load(cls, config_file=SAVE_FILE): - # Load variables from yaml file if it exists + """Load variables from yaml file if it exists, otherwise use defaults.""" try: with open(config_file) as file: config_params = yaml.load(file, Loader=yaml.FullLoader) @@ -27,11 +29,13 @@ class AIConfig: return cls(ai_name, ai_role, ai_goals) def save(self, config_file=SAVE_FILE): + """Save variables to yaml file.""" config = {"ai_name": self.ai_name, "ai_role": self.ai_role, "ai_goals": self.ai_goals} with open(config_file, "w") as file: yaml.dump(config, file) def construct_full_prompt(self): + """Construct the full prompt for the AI to use.""" prompt_start = """Your decisions must always be made independently without seeking user assistance. Play to your strengths as an LLM and pursue simple strategies with no legal complications.""" # Construct full prompt diff --git a/scripts/call_ai_function.py b/scripts/call_ai_function.py index 0c864b49..83c87687 100644 --- a/scripts/call_ai_function.py +++ b/scripts/call_ai_function.py @@ -6,6 +6,7 @@ from llm_utils import create_chat_completion # This is a magic function that can do anything with no-code. See # https://github.com/Torantulino/AI-Functions for more info. 
def call_ai_function(function, args, description, model=cfg.smart_llm_model): + """Call an AI function with the given args and description.""" # For each arg, if any are None, convert to "None": args = [str(arg) if arg is not None else "None" for arg in args] # parse args to comma seperated string diff --git a/scripts/config.py b/scripts/config.py index fa3bf7cc..d98cb698 100644 --- a/scripts/config.py +++ b/scripts/config.py @@ -51,21 +51,27 @@ class Config(metaclass=Singleton): self.speak_mode = value def set_fast_llm_model(self, value: str): + """Set the fast LLM model value.""" self.fast_llm_model = value def set_smart_llm_model(self, value: str): + """Set the smart LLM model value.""" self.smart_llm_model = value def set_fast_token_limit(self, value: int): + """Set the fast token limit value.""" self.fast_token_limit = value def set_smart_token_limit(self, value: int): + """Set the smart token limit value.""" self.smart_token_limit = value def set_openai_api_key(self, value: str): + """Set the OpenAI API key value.""" self.openai_api_key = value def set_elevenlabs_api_key(self, value: str): + """Set the ElevenLabs API key value.""" self.elevenlabs_api_key = value diff --git a/scripts/file_operations.py b/scripts/file_operations.py index d92709fb..f58d2cbc 100644 --- a/scripts/file_operations.py +++ b/scripts/file_operations.py @@ -3,7 +3,7 @@ import os.path # Set a dedicated folder for file I/O working_directory = "auto_gpt_workspace" - +# Create the directory if it doesn't exist if not os.path.exists(working_directory): os.makedirs(working_directory) diff --git a/scripts/json_parser.py b/scripts/json_parser.py index 8154b584..edd67305 100644 --- a/scripts/json_parser.py +++ b/scripts/json_parser.py @@ -4,6 +4,7 @@ from config import Config cfg = Config() def fix_and_parse_json(json_str: str, try_to_fix_with_gpt: bool = True): + """Fix and parse the given JSON string.""" json_schema = """ { "command": { @@ -50,6 +51,7 @@ def fix_and_parse_json(json_str: 
str, try_to_fix_with_gpt: bool = True): # TODO: Make debug a global config var def fix_json(json_str: str, schema: str, debug=False) -> str: + """Fix the given JSON string to make it parseable and fully compliant with the provided schema.""" # Try to fix the JSON using gpt: function_string = "def fix_json(json_str: str, schema:str=None) -> str:" args = [json_str, schema] diff --git a/scripts/llm_utils.py b/scripts/llm_utils.py index 41f39625..c512e997 100644 --- a/scripts/llm_utils.py +++ b/scripts/llm_utils.py @@ -6,6 +6,7 @@ openai.api_key = cfg.openai_api_key # Overly simple abstraction until we create something better def create_chat_completion(messages, model=None, temperature=None, max_tokens=None)->str: + """Create a chat completion using the OpenAI API.""" response = openai.ChatCompletion.create( model=model, messages=messages, diff --git a/scripts/main.py b/scripts/main.py index b4236c52..b0aef178 100644 --- a/scripts/main.py +++ b/scripts/main.py @@ -117,7 +117,7 @@ def print_assistant_thoughts(assistant_reply): print_to_console("Error: \n", Fore.RED, call_stack) def load_variables(config_file="config.yaml"): - # Load variables from yaml file if it exists + """Load variables from yaml file if it exists, otherwise prompt the user for input""" try: with open(config_file) as file: config = yaml.load(file, Loader=yaml.FullLoader) @@ -200,6 +200,7 @@ Continue (y/n): """) def prompt_user(): + """Prompt the user for input""" ai_name = "" # Construct the prompt print_to_console(