mirror of https://github.com/aljazceru/Auto-GPT.git
unify annotations to future syntax
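This commit replaces typing.List, Tuple, Dict, Optional and Union annotations with the builtin-generic (PEP 585) and X | Y union (PEP 604) spellings, and keeps them legal on older interpreters by adding "from __future__ import annotations" (PEP 563) to every touched module. The future import must be the first statement after the module docstring; once it is active, annotations are stored as strings and never evaluated at import time. A minimal illustration of the rewrite; the function names below are illustrative, not from the commit:

# Illustrative module, not part of the commit.
from __future__ import annotations  # must precede all other imports

from typing import Dict, List, Optional, Union  # old spellings, kept for contrast


def old_style(items: List[str], limit: Optional[int] = None) -> Union[str, Dict[str, str]]:
    return {"count": str(len(items))} if limit is None else ""


def new_style(items: list[str], limit: int | None = None) -> str | dict[str, str]:
    return {"count": str(len(items))} if limit is None else ""


# With postponed evaluation the annotation survives as a plain string, so the
# new spellings parse even on Python 3.7/3.8, where list[str] would fail at runtime.
print(new_style.__annotations__["limit"])  # prints: int | None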
@@ -1,5 +1,6 @@
 """Agent manager for managing GPT agents"""
-from typing import List, Tuple, Union
+from __future__ import annotations
+
 from autogpt.llm_utils import create_chat_completion
 from autogpt.config.config import Singleton

@@ -14,7 +15,7 @@ class AgentManager(metaclass=Singleton):
     # Create new GPT agent
     # TODO: Centralise use of create_chat_completion() to globally enforce token limit

-    def create_agent(self, task: str, prompt: str, model: str) -> Tuple[int, str]:
+    def create_agent(self, task: str, prompt: str, model: str) -> tuple[int, str]:
         """Create a new agent and return its key

         Args:
@@ -47,7 +48,7 @@ class AgentManager(metaclass=Singleton):

         return key, agent_reply

-    def message_agent(self, key: Union[str, int], message: str) -> str:
+    def message_agent(self, key: str | int, message: str) -> str:
         """Send a message to an agent and return its response

         Args:
@@ -73,7 +74,7 @@ class AgentManager(metaclass=Singleton):

         return agent_reply

-    def list_agents(self) -> List[Tuple[Union[str, int], str]]:
+    def list_agents(self) -> list[tuple[str | int, str]]:
         """Return a list of all agents

         Returns:
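Postponed evaluation only defers the annotation, it does not backport the syntax: anything that resolves annotations at runtime (typing.get_type_hints, dataclass introspection, validation libraries) still needs an interpreter where the expression is valid. A hedged sketch against the message_agent signature above; the body is a stand-in, not the real AgentManager method:

from __future__ import annotations

import sys
import typing


def message_agent(key: str | int, message: str) -> str:
    # Stand-in body; the real method dispatches to a GPT agent.
    return f"agent {key} received: {message}"


# The annotation is kept as the literal string "str | int" on any Python 3.7+.
print(message_agent.__annotations__["key"])

# Resolving it evaluates that string, which needs 3.10+ for "|" unions.
if sys.version_info >= (3, 10):
    print(typing.get_type_hints(message_agent))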
@@ -1,10 +1,10 @@
 """Code evaluation module."""
-from typing import List
+from __future__ import annotations

 from autogpt.llm_utils import call_ai_function


-def evaluate_code(code: str) -> List[str]:
+def evaluate_code(code: str) -> list[str]:
     """
     A function that takes in a string and returns a response from create chat
     completion api call.
@@ -1,8 +1,10 @@
 """File operations for AutoGPT"""
+from __future__ import annotations
+
 import os
 import os.path
 from pathlib import Path
-from typing import Generator, List
+from typing import Generator

 # Set a dedicated folder for file I/O
 WORKING_DIRECTORY = Path(os.getcwd()) / "auto_gpt_workspace"

@@ -214,14 +216,14 @@ def delete_file(filename: str) -> str:
         return f"Error: {str(e)}"


-def search_files(directory: str) -> List[str]:
+def search_files(directory: str) -> list[str]:
     """Search for files in a directory

     Args:
         directory (str): The directory to search in

     Returns:
-        List[str]: A list of files found in the directory
+        list[str]: A list of files found in the directory
     """
     found_files = []

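Note that the rewrite only touches typing names that have builtin equivalents; file_operations keeps its typing.Generator import here, just as ai_config keeps Type and the JSON modules keep Any, because those names have no builtin spelling. A small sketch of the retained style; iter_file_names is a hypothetical helper, not a function from this commit:

from __future__ import annotations

from pathlib import Path
from typing import Generator  # no builtin equivalent, so the import stays


def iter_file_names(directory: str) -> Generator[str, None, None]:
    # Hypothetical helper: yield the names of regular files under a directory.
    for path in Path(directory).rglob("*"):
        if path.is_file():
            yield path.name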
@@ -1,6 +1,7 @@
 """Google search command for Autogpt."""
+from __future__ import annotations
+
 import json
-from typing import List, Union

 from duckduckgo_search import ddg

@@ -33,7 +34,7 @@ def google_search(query: str, num_results: int = 8) -> str:
     return json.dumps(search_results, ensure_ascii=False, indent=4)


-def google_official_search(query: str, num_results: int = 8) -> Union[str, List[str]]:
+def google_official_search(query: str, num_results: int = 8) -> str | list[str]:
     """Return the results of a google search using the official Google API

     Args:
@@ -1,10 +1,11 @@
+from __future__ import annotations
+
 import json
-from typing import List

 from autogpt.llm_utils import call_ai_function


-def improve_code(suggestions: List[str], code: str) -> str:
+def improve_code(suggestions: list[str], code: str) -> str:
     """
     A function that takes in code and suggestions and returns a response from create
     chat completion api call.
@@ -1,4 +1,6 @@
 """Web scraping commands using Playwright"""
+from __future__ import annotations
+
 try:
     from playwright.sync_api import sync_playwright
 except ImportError:

@@ -7,7 +9,6 @@ except ImportError:
     )
 from bs4 import BeautifulSoup
 from autogpt.processing.html import extract_hyperlinks, format_hyperlinks
-from typing import List, Union


 def scrape_text(url: str) -> str:

@@ -45,7 +46,7 @@ def scrape_text(url: str) -> str:
     return text


-def scrape_links(url: str) -> Union[str, List[str]]:
+def scrape_links(url: str) -> str | list[str]:
     """Scrape links from a webpage

     Args:
@@ -1,5 +1,6 @@
 """Browse a webpage and summarize it using the LLM model"""
-from typing import List, Tuple, Union
+from __future__ import annotations
+
 from urllib.parse import urljoin, urlparse

 import requests

@@ -66,7 +67,7 @@ def check_local_file_access(url: str) -> bool:

 def get_response(
     url: str, timeout: int = 10
-) -> Union[Tuple[None, str], Tuple[Response, None]]:
+) -> tuple[None, str] | tuple[Response, None]:
     """Get the response from a URL

     Args:
@@ -74,7 +75,7 @@ def get_response(
         timeout (int): The timeout for the HTTP request

     Returns:
-        Tuple[None, str] | Tuple[Response, None]: The response and error message
+        tuple[None, str] | tuple[Response, None]: The response and error message

     Raises:
         ValueError: If the URL is invalid
@@ -136,14 +137,14 @@ def scrape_text(url: str) -> str:
     return text


-def scrape_links(url: str) -> Union[str, List[str]]:
+def scrape_links(url: str) -> str | list[str]:
     """Scrape links from a webpage

     Args:
         url (str): The URL to scrape links from

     Returns:
-        Union[str, List[str]]: The scraped links
+        str | list[str]: The scraped links
     """
     response, error_message = get_response(url)
     if error_message:
@@ -1,4 +1,6 @@
 """Selenium web scraping module."""
+from __future__ import annotations
+
 from selenium import webdriver
 from autogpt.processing.html import extract_hyperlinks, format_hyperlinks
 import autogpt.processing.text as summary

@@ -15,13 +17,12 @@ from selenium.webdriver.safari.options import Options as SafariOptions
 import logging
 from pathlib import Path
 from autogpt.config import Config
-from typing import List, Tuple, Union

 FILE_DIR = Path(__file__).parent.parent
 CFG = Config()


-def browse_website(url: str, question: str) -> Tuple[str, WebDriver]:
+def browse_website(url: str, question: str) -> tuple[str, WebDriver]:
     """Browse a website and return the answer and links to the user

     Args:
@@ -43,7 +44,7 @@ def browse_website(url: str, question: str) -> Tuple[str, WebDriver]:
     return f"Answer gathered from website: {summary_text} \n \n Links: {links}", driver


-def scrape_text_with_selenium(url: str) -> Tuple[WebDriver, str]:
+def scrape_text_with_selenium(url: str) -> tuple[WebDriver, str]:
     """Scrape text from a website using selenium

     Args:
@@ -97,7 +98,7 @@ def scrape_text_with_selenium(url: str) -> Tuple[WebDriver, str]:
     return driver, text


-def scrape_links_with_selenium(driver: WebDriver, url: str) -> List[str]:
+def scrape_links_with_selenium(driver: WebDriver, url: str) -> list[str]:
     """Scrape links from a website using selenium

     Args:
@@ -1,16 +1,17 @@
 """A module that contains a function to generate test cases for the submitted code."""
+from __future__ import annotations
+
 import json
-from typing import List
 from autogpt.llm_utils import call_ai_function


-def write_tests(code: str, focus: List[str]) -> str:
+def write_tests(code: str, focus: list[str]) -> str:
     """
     A function that takes in code and focus topics and returns a response from create
     chat completion api call.

     Parameters:
-        focus (List): A list of suggestions around what needs to be improved.
+        focus (list): A list of suggestions around what needs to be improved.
         code (str): Code for test cases to be generated against.
     Returns:
         A result string from create chat completion. Test cases for the submitted code
@@ -2,8 +2,10 @@
 """
 A module that contains the AIConfig class object that contains the configuration
 """
+from __future__ import annotations
+
 import os
-from typing import List, Optional, Type
+from typing import Type
 import yaml


@@ -18,7 +20,7 @@ class AIConfig:
     """

     def __init__(
-        self, ai_name: str = "", ai_role: str = "", ai_goals: Optional[List] = None
+        self, ai_name: str = "", ai_role: str = "", ai_goals: list | None = None
     ) -> None:
         """
         Initialize a class instance
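The ai_goals: list | None = None signature above is the usual sentinel idiom: a bare list default would be created once and shared by every AIConfig instance. A minimal sketch of the pattern, assuming the constructor normalizes None to a fresh list:

from __future__ import annotations


class AIConfig:
    # Minimal sketch of the constructor pattern; only the attributes are kept.
    def __init__(
        self, ai_name: str = "", ai_role: str = "", ai_goals: list | None = None
    ) -> None:
        if ai_goals is None:  # avoid a mutable default shared between instances
            ai_goals = []
        self.ai_name = ai_name
        self.ai_role = ai_role
        self.ai_goals = ai_goals


a, b = AIConfig(), AIConfig()
a.ai_goals.append("write tests")
print(b.ai_goals)  # prints: [] (each instance gets its own list)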
@@ -1,7 +1,8 @@
 """Fix JSON brackets."""
+from __future__ import annotations
+
 import contextlib
 import json
-from typing import Optional
 import regex
 from colorama import Fore

@@ -46,7 +47,7 @@ def attempt_to_fix_json_by_finding_outermost_brackets(json_string: str):
     return json_string


-def balance_braces(json_string: str) -> Optional[str]:
+def balance_braces(json_string: str) -> str | None:
     """
     Balance the braces in a JSON string.

@@ -1,8 +1,9 @@
 """Fix and parse JSON strings."""
+from __future__ import annotations
+
 import contextlib
 import json
-from typing import Any, Dict, Union
+from typing import Any

 from autogpt.config import Config
 from autogpt.json_fixes.auto_fix import fix_json

@@ -71,7 +72,7 @@ def correct_json(json_to_load: str) -> str:

 def fix_and_parse_json(
     json_to_load: str, try_to_fix_with_gpt: bool = True
-) -> Union[str, Dict[Any, Any]]:
+) -> str | dict[Any, Any]:
     """Fix and parse JSON string

     Args:
@@ -80,7 +81,7 @@ def fix_and_parse_json(
         Defaults to True.

     Returns:
-        Union[str, Dict[Any, Any]]: The parsed JSON.
+        str or dict[Any, Any]: The parsed JSON.
     """

     with contextlib.suppress(json.JSONDecodeError):
@@ -109,7 +110,7 @@ def fix_and_parse_json(

 def try_ai_fix(
     try_to_fix_with_gpt: bool, exception: Exception, json_to_load: str
-) -> Union[str, Dict[Any, Any]]:
+) -> str | dict[Any, Any]:
     """Try to fix the JSON with the AI

     Args:
@@ -121,7 +122,7 @@ def try_ai_fix(
         exception: If try_to_fix_with_gpt is False.

     Returns:
-        Union[str, Dict[Any, Any]]: The JSON string or dictionary.
+        str or dict[Any, Any]: The JSON string or dictionary.
     """
     if not try_to_fix_with_gpt:
         raise exception
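Callers of fix_and_parse_json still have to narrow the str | dict[Any, Any] union before indexing into the result. A hedged stand-in showing the shape of that contract; the real function also runs several repair passes before giving up:

from __future__ import annotations

import json
from typing import Any


def fix_and_parse_json_stub(json_to_load: str) -> str | dict[Any, Any]:
    # Stand-in: return the parsed dict, or the raw string when parsing fails.
    try:
        parsed = json.loads(json_to_load)
    except json.JSONDecodeError:
        return json_to_load
    return parsed if isinstance(parsed, dict) else json_to_load


result = fix_and_parse_json_stub('{"command": "google", "args": {}}')
if isinstance(result, dict):  # narrow the union before use
    print(result["command"])
else:
    print(f"could not parse: {result}")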
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
 from ast import List
 import time
-from typing import Dict, Optional

 import openai
 from openai.error import APIError, RateLimitError

@@ -14,7 +15,7 @@ openai.api_key = CFG.openai_api_key


 def call_ai_function(
-    function: str, args: List, description: str, model: Optional[str] = None
+    function: str, args: list, description: str, model: str | None = None
 ) -> str:
     """Call an AI function

@@ -51,15 +52,15 @@ def call_ai_function(
 # Overly simple abstraction until we create something better
 # simple retry mechanism when getting a rate error or a bad gateway
 def create_chat_completion(
-    messages: List,  # type: ignore
-    model: Optional[str] = None,
+    messages: list,  # type: ignore
+    model: str | None = None,
     temperature: float = CFG.temperature,
-    max_tokens: Optional[int] = None,
+    max_tokens: int | None = None,
 ) -> str:
     """Create a chat completion using the OpenAI API

     Args:
-        messages (List[Dict[str, str]]): The messages to send to the chat completion
+        messages (list[dict[str, str]]): The messages to send to the chat completion
         model (str, optional): The model to use. Defaults to None.
         temperature (float, optional): The temperature to use. Defaults to 0.9.
         max_tokens (int, optional): The max tokens to use. Defaults to None.
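A pre-existing wart shows in the context lines above: llm_utils imports List from ast, the AST node for list literals, not typing.List. The old args: List annotation therefore pointed at the wrong class (harmlessly, since annotations are not enforced), and the new args: list spelling sidesteps the bad import altogether. A small demonstration; the function is a stub, not the real call_ai_function:

from __future__ import annotations

from ast import List  # the AST node class, nothing to do with typing

print(List)  # prints: <class 'ast.List'>


def call_ai_function_stub(function: str, args: list, description: str) -> str:
    # Stub only: format the pseudo-call the way a prompt builder might.
    return f"{function}({', '.join(map(str, args))})  # {description}"


print(call_ai_function_stub("add", [1, 2], "adds two numbers"))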
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
 import dataclasses
 import os
-from typing import Any, List, Optional, Tuple
+from typing import Any

 import numpy as np
 import orjson

@@ -97,7 +99,7 @@ class LocalCache(MemoryProviderSingleton):
         self.data = CacheContent()
         return "Obliviated"

-    def get(self, data: str) -> Optional[List[Any]]:
+    def get(self, data: str) -> list[Any] | None:
         """
         Gets the data from the memory that is most relevant to the given data.

@@ -108,7 +110,7 @@ class LocalCache(MemoryProviderSingleton):
         """
         return self.get_relevant(data, 1)

-    def get_relevant(self, text: str, k: int) -> List[Any]:
+    def get_relevant(self, text: str, k: int) -> list[Any]:
         """ "
         matrix-vector mult to find score-for-each-row-of-matrix
         get indices for top-k winning scores

@@ -127,7 +129,7 @@ class LocalCache(MemoryProviderSingleton):

         return [self.data.texts[i] for i in top_k_indices]

-    def get_stats(self) -> Tuple[int, Tuple[int, ...]]:
+    def get_stats(self) -> tuple[int, tuple[int, ...]]:
         """
         Returns: The stats of the local cache.
         """
@@ -1,5 +1,7 @@
 """A class that does not store any data. This is the default memory provider."""
-from typing import Optional, List, Any
+from __future__ import annotations
+
+from typing import Any

 from autogpt.memory.base import MemoryProviderSingleton

@@ -31,7 +33,7 @@ class NoMemory(MemoryProviderSingleton):
         """
         return ""

-    def get(self, data: str) -> Optional[List[Any]]:
+    def get(self, data: str) -> list[Any] | None:
         """
         Gets the data from the memory that is most relevant to the given data.
         NoMemory always returns None.

@@ -51,7 +53,7 @@ class NoMemory(MemoryProviderSingleton):
         """
         return ""

-    def get_relevant(self, data: str, num_relevant: int = 5) -> Optional[List[Any]]:
+    def get_relevant(self, data: str, num_relevant: int = 5) -> list[Any] | None:
         """
         Returns all the data in the memory that is relevant to the given data.
         NoMemory always returns None.
@@ -1,5 +1,7 @@
 """Redis memory provider."""
-from typing import Any, List, Optional
+from __future__ import annotations
+
+from typing import Any

 import numpy as np
 import redis

@@ -99,7 +101,7 @@ class RedisMemory(MemoryProviderSingleton):
         pipe.execute()
         return _text

-    def get(self, data: str) -> Optional[List[Any]]:
+    def get(self, data: str) -> list[Any] | None:
         """
         Gets the data from the memory that is most relevant to the given data.

@@ -119,7 +121,7 @@ class RedisMemory(MemoryProviderSingleton):
         self.redis.flushall()
         return "Obliviated"

-    def get_relevant(self, data: str, num_relevant: int = 5) -> Optional[List[Any]]:
+    def get_relevant(self, data: str, num_relevant: int = 5) -> list[Any] | None:
         """
         Returns all the data in the memory that is relevant to the given data.
         Args:
@@ -1,10 +1,11 @@
 """HTML processing functions"""
+from __future__ import annotations
+
 from requests.compat import urljoin
-from typing import List, Tuple
 from bs4 import BeautifulSoup


-def extract_hyperlinks(soup: BeautifulSoup, base_url: str) -> List[Tuple[str, str]]:
+def extract_hyperlinks(soup: BeautifulSoup, base_url: str) -> list[tuple[str, str]]:
     """Extract hyperlinks from a BeautifulSoup object

     Args:
@@ -20,7 +21,7 @@ def extract_hyperlinks(soup: BeautifulSoup, base_url: str) -> List[Tuple[str, str]]:
     ]


-def format_hyperlinks(hyperlinks: List[Tuple[str, str]]) -> List[str]:
+def format_hyperlinks(hyperlinks: list[tuple[str, str]]) -> list[str]:
     """Format hyperlinks to be displayed to the user

     Args:
@@ -1,6 +1,8 @@
 """ A module for generating custom prompt strings."""
+from __future__ import annotations
+
 import json
-from typing import Any, Dict, List
+from typing import Any


 class PromptGenerator:

@@ -61,7 +63,7 @@ class PromptGenerator:

         self.commands.append(command)

-    def _generate_command_string(self, command: Dict[str, Any]) -> str:
+    def _generate_command_string(self, command: dict[str, Any]) -> str:
         """
         Generate a formatted string representation of a command.

@@ -94,7 +96,7 @@ class PromptGenerator:
         """
         self.performance_evaluation.append(evaluation)

-    def _generate_numbered_list(self, items: List[Any], item_type="list") -> str:
+    def _generate_numbered_list(self, items: list[Any], item_type="list") -> str:
         """
         Generate a numbered list from given items based on the item_type.

@@ -1,5 +1,5 @@
 """Functions for counting the number of tokens in a message or string."""
-from typing import Dict, List
+from __future__ import annotations

 import tiktoken

@@ -7,7 +7,7 @@ from autogpt.logs import logger


 def count_message_tokens(
-    messages: List[Dict[str, str]], model: str = "gpt-3.5-turbo-0301"
+    messages: list[dict[str, str]], model: str = "gpt-3.5-turbo-0301"
 ) -> int:
     """
     Returns the number of tokens used by a list of messages.
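For reference, the messages parameter retyped to list[dict[str, str]] above is the chat format whose tokens get counted. A hedged sketch of the counting approach, using tiktoken calls that do exist but flattening the per-message overhead into a rough constant; the real function uses exact per-model numbers:

from __future__ import annotations

import tiktoken


def count_message_tokens_approx(
    messages: list[dict[str, str]], model: str = "gpt-3.5-turbo-0301"
) -> int:
    # Rough sketch: encode every field and add about 4 tokens of framing per message.
    encoding = tiktoken.encoding_for_model(model)
    return sum(
        4 + sum(len(encoding.encode(value)) for value in message.values())
        for message in messages
    )


print(count_message_tokens_approx([{"role": "user", "content": "hello"}]))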