mirror of https://github.com/aljazceru/Auto-GPT.git, synced 2025-12-22 08:24:26 +01:00
isort implemented
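This commit reorders imports across the codebase with isort: standard-library, third-party, and first-party `autogpt` imports are split into groups and alphabetized, together with the Black-style quote and line-wrapping normalization visible in the same diff. As a rough illustration (not part of the commit; it assumes isort 5.x and that `autogpt` is treated as first-party), the same transformation is available from isort's Python API:

import isort  # assumes isort 5.x is installed

messy = (
    "from autogpt.logs import logger\n"
    "import os\n"
    "from colorama import Fore\n"
)

# isort splits imports into stdlib / third-party / first-party groups and
# sorts each group alphabetically -- the same reshuffle seen throughout
# this diff. known_first_party is an assumed setting here.
print(isort.code(messy, known_first_party=["autogpt"]))
# import os
#
# from colorama import Fore
#
# from autogpt.logs import logger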
@@ -1,12 +1,15 @@
 """Main script for the autogpt package."""
 import logging

 from colorama import Fore

 from autogpt.agent.agent import Agent
 from autogpt.args import parse_arguments
 from autogpt.config import Config, check_openai_api_key
 from autogpt.logs import logger
 from autogpt.memory import get_memory
 from autogpt.prompt import construct_prompt

 # Load environment variables from .env file

@@ -1,6 +1,6 @@
 from colorama import Fore, Style
-from autogpt.app import execute_command, get_command

+from autogpt.app import execute_command, get_command
 from autogpt.chat import chat_with_ai, create_chat_message
 from autogpt.config import Config
 from autogpt.json_fixes.master_json_fix_method import fix_json_using_multiple_techniques

@@ -84,7 +84,7 @@ class Agent:

            # Print Assistant thoughts
            if assistant_reply_json != {}:
-                validate_json(assistant_reply_json, 'llm_response_format_1')
+                validate_json(assistant_reply_json, "llm_response_format_1")
                # Get command name and arguments
                try:
                    print_assistant_thoughts(self.ai_name, assistant_reply_json)

@@ -1,8 +1,10 @@
 """Agent manager for managing GPT agents"""
 from __future__ import annotations

 from typing import Union
-from autogpt.llm_utils import create_chat_completion
 from autogpt.config.config import Singleton
+from autogpt.llm_utils import create_chat_completion


 class AgentManager(metaclass=Singleton):

@@ -1,15 +1,10 @@
 """ Command and Control """
 import json
-from typing import List, NoReturn, Union, Dict
+from typing import Dict, List, NoReturn, Union

 from autogpt.agent.agent_manager import AgentManager
-from autogpt.commands.evaluate_code import evaluate_code
-from autogpt.commands.google_search import google_official_search, google_search
-from autogpt.commands.improve_code import improve_code
-from autogpt.commands.write_tests import write_tests
-from autogpt.config import Config
-from autogpt.commands.image_gen import generate_image
 from autogpt.commands.audio_text import read_audio_from_file
-from autogpt.commands.web_requests import scrape_links, scrape_text
+from autogpt.commands.evaluate_code import evaluate_code
 from autogpt.commands.execute_code import (
     execute_python_file,
     execute_shell,

@@ -18,19 +13,24 @@ from autogpt.commands.execute_code import (
 from autogpt.commands.file_operations import (
     append_to_file,
     delete_file,
+    download_file,
     read_file,
     search_files,
     write_to_file,
-    download_file
 )
+from autogpt.commands.git_operations import clone_repository
+from autogpt.commands.google_search import google_official_search, google_search
+from autogpt.commands.image_gen import generate_image
+from autogpt.commands.improve_code import improve_code
+from autogpt.commands.twitter import send_tweet
+from autogpt.commands.web_requests import scrape_links, scrape_text
+from autogpt.commands.web_selenium import browse_website
+from autogpt.commands.write_tests import write_tests
+from autogpt.config import Config
 from autogpt.json_fixes.parsing import fix_and_parse_json
 from autogpt.memory import get_memory
 from autogpt.processing.text import summarize_text
 from autogpt.speech import say_text
-from autogpt.commands.web_selenium import browse_website
-from autogpt.commands.git_operations import clone_repository
-from autogpt.commands.twitter import send_tweet


 CFG = Config()
 AGENT_MANAGER = AgentManager()

@@ -133,11 +133,14 @@ def execute_command(command_name: str, arguments):

            # google_result can be a list or a string depending on the search results
            if isinstance(google_result, list):
-                safe_message = [google_result_single.encode('utf-8', 'ignore') for google_result_single in google_result]
+                safe_message = [
+                    google_result_single.encode("utf-8", "ignore")
+                    for google_result_single in google_result
+                ]
            else:
-                safe_message = google_result.encode('utf-8', 'ignore')
+                safe_message = google_result.encode("utf-8", "ignore")

-            return safe_message.decode('utf-8')
+            return safe_message.decode("utf-8")
        elif command_name == "memory_add":
            memory = get_memory(CFG)
            return memory.add(arguments["string"])

@@ -1,7 +1,8 @@
 """This module contains the argument parsing logic for the script."""
 import argparse

-from colorama import Fore, Back, Style
+from colorama import Back, Fore, Style

 from autogpt import utils
 from autogpt.config import Config
 from autogpt.logs import logger

@@ -64,10 +65,10 @@ def parse_arguments() -> None:
         " skip the re-prompt.",
     )
     parser.add_argument(
-        '--allow-downloads',
-        action='store_true',
-        dest='allow_downloads',
-        help='Dangerous: Allows Auto-GPT to download files natively.'
+        "--allow-downloads",
+        action="store_true",
+        dest="allow_downloads",
+        help="Dangerous: Allows Auto-GPT to download files natively.",
     )
     args = parser.parse_args()

@@ -141,10 +142,17 @@ def parse_arguments() -> None:

     if args.allow_downloads:
         logger.typewriter_log("Native Downloading:", Fore.GREEN, "ENABLED")
-        logger.typewriter_log("WARNING: ", Fore.YELLOW,
-            f"{Back.LIGHTYELLOW_EX}Auto-GPT will now be able to download and save files to your machine.{Back.RESET} " +
-            "It is recommended that you monitor any files it downloads carefully.")
-        logger.typewriter_log("WARNING: ", Fore.YELLOW, f"{Back.RED + Style.BRIGHT}ALWAYS REMEMBER TO NEVER OPEN FILES YOU AREN'T SURE OF!{Style.RESET_ALL}")
+        logger.typewriter_log(
+            "WARNING: ",
+            Fore.YELLOW,
+            f"{Back.LIGHTYELLOW_EX}Auto-GPT will now be able to download and save files to your machine.{Back.RESET} "
+            + "It is recommended that you monitor any files it downloads carefully.",
+        )
+        logger.typewriter_log(
+            "WARNING: ",
+            Fore.YELLOW,
+            f"{Back.RED + Style.BRIGHT}ALWAYS REMEMBER TO NEVER OPEN FILES YOU AREN'T SURE OF!{Style.RESET_ALL}",
+        )
         CFG.allow_downloads = True

     if args.browser_name:

@@ -1,6 +1,7 @@
-import requests
 import json

+import requests

 from autogpt.config import Config
 from autogpt.workspace import path_in_workspace

@@ -5,7 +5,7 @@ import subprocess
 import docker
 from docker.errors import ImageNotFound

-from autogpt.workspace import path_in_workspace, WORKSPACE_PATH
+from autogpt.workspace import WORKSPACE_PATH, path_in_workspace


 def execute_python_file(file: str) -> str:

@@ -5,14 +5,14 @@ import os
 import os.path
 from pathlib import Path
 from typing import Generator, List

 import requests
-from requests.adapters import HTTPAdapter
-from requests.adapters import Retry
-from colorama import Fore, Back
+from colorama import Back, Fore
+from requests.adapters import HTTPAdapter, Retry

 from autogpt.spinner import Spinner
 from autogpt.utils import readable_file_size
-from autogpt.workspace import path_in_workspace, WORKSPACE_PATH
+from autogpt.workspace import WORKSPACE_PATH, path_in_workspace


 LOG_FILE = "file_logger.txt"
 LOG_FILE_PATH = WORKSPACE_PATH / LOG_FILE

@@ -241,18 +241,18 @@ def download_file(url, filename):
        session = requests.Session()
        retry = Retry(total=3, backoff_factor=1, status_forcelist=[502, 503, 504])
        adapter = HTTPAdapter(max_retries=retry)
-        session.mount('http://', adapter)
-        session.mount('https://', adapter)
+        session.mount("http://", adapter)
+        session.mount("https://", adapter)

        total_size = 0
        downloaded_size = 0

        with session.get(url, allow_redirects=True, stream=True) as r:
            r.raise_for_status()
-            total_size = int(r.headers.get('Content-Length', 0))
+            total_size = int(r.headers.get("Content-Length", 0))
            downloaded_size = 0

-            with open(safe_filename, 'wb') as f:
+            with open(safe_filename, "wb") as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
                    downloaded_size += len(chunk)

@@ -1,5 +1,6 @@
 """Git operations for autogpt"""
 import git

 from autogpt.config import Config
 from autogpt.workspace import path_in_workspace

@@ -7,6 +7,7 @@ from base64 import b64decode
 import openai
 import requests
 from PIL import Image

 from autogpt.config import Config
 from autogpt.workspace import path_in_workspace

@@ -1,5 +1,6 @@
-import tweepy
 import os

+import tweepy
 from dotenv import load_dotenv

 load_dotenv()

@@ -8,6 +8,7 @@ except ImportError:
        "Playwright not installed. Please install it with 'pip install playwright' to use."
    )
 from bs4 import BeautifulSoup

 from autogpt.processing.html import extract_hyperlinks, format_hyperlinks


@@ -4,9 +4,9 @@ from __future__ import annotations
 from urllib.parse import urljoin, urlparse

 import requests
-from requests.compat import urljoin
-from requests import Response
 from bs4 import BeautifulSoup
+from requests import Response
+from requests.compat import urljoin

 from autogpt.config import Config
 from autogpt.memory import get_memory

@@ -79,7 +79,7 @@ def check_local_file_access(url: str) -> bool:
        "http://0000",
        "http://0000/",
        "https://0000",
-        "https://0000/"
+        "https://0000/",
    ]
    return any(url.startswith(prefix) for prefix in local_prefixes)

@@ -1,24 +1,26 @@
 """Selenium web scraping module."""
 from __future__ import annotations

-from selenium import webdriver
-from autogpt.processing.html import extract_hyperlinks, format_hyperlinks
-import autogpt.processing.text as summary
-from bs4 import BeautifulSoup
-from selenium.webdriver.remote.webdriver import WebDriver
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support.wait import WebDriverWait
-from selenium.webdriver.support import expected_conditions as EC
-from webdriver_manager.chrome import ChromeDriverManager
-from webdriver_manager.firefox import GeckoDriverManager
-from selenium.webdriver.chrome.options import Options as ChromeOptions
-from selenium.webdriver.firefox.options import Options as FirefoxOptions
-from selenium.webdriver.safari.options import Options as SafariOptions
 import logging
 from pathlib import Path
-from autogpt.config import Config
 from sys import platform

+from bs4 import BeautifulSoup
+from selenium import webdriver
+from selenium.webdriver.chrome.options import Options as ChromeOptions
+from selenium.webdriver.common.by import By
+from selenium.webdriver.firefox.options import Options as FirefoxOptions
+from selenium.webdriver.remote.webdriver import WebDriver
+from selenium.webdriver.safari.options import Options as SafariOptions
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.support.wait import WebDriverWait
+from webdriver_manager.chrome import ChromeDriverManager
+from webdriver_manager.firefox import GeckoDriverManager

+import autogpt.processing.text as summary
+from autogpt.config import Config
+from autogpt.processing.html import extract_hyperlinks, format_hyperlinks

 FILE_DIR = Path(__file__).parent.parent
 CFG = Config()

@@ -2,6 +2,7 @@
 from __future__ import annotations

 import json

 from autogpt.llm_utils import call_ai_function


@@ -2,7 +2,7 @@
 This module contains the configuration classes for AutoGPT.
 """
 from autogpt.config.ai_config import AIConfig
-from autogpt.config.config import check_openai_api_key, Config
+from autogpt.config.config import Config, check_openai_api_key
 from autogpt.config.singleton import AbstractSingleton, Singleton

 __all__ = [

@@ -6,6 +6,7 @@ from __future__ import annotations

 import os
 from typing import Type

 import yaml


@@ -1,14 +1,13 @@
 """Configuration class to store the state of bools for different scripts access."""
 import os
-from colorama import Fore

-from autogpt.config.singleton import Singleton

 import openai
 import yaml
+from colorama import Fore
 from dotenv import load_dotenv

+from autogpt.config.singleton import Singleton

 load_dotenv(verbose=True)


@@ -74,7 +73,9 @@ class Config(metaclass=Singleton):
        self.weaviate_scopes = os.getenv("WEAVIATE_SCOPES", None)
        self.weaviate_embedded_path = os.getenv("WEAVIATE_EMBEDDED_PATH")
        self.weaviate_api_key = os.getenv("WEAVIATE_API_KEY", None)
-        self.use_weaviate_embedded = os.getenv("USE_WEAVIATE_EMBEDDED", "False") == "True"
+        self.use_weaviate_embedded = (
+            os.getenv("USE_WEAVIATE_EMBEDDED", "False") == "True"
+        )

        # milvus configuration, e.g., localhost:19530.
        self.milvus_addr = os.getenv("MILVUS_ADDR", "localhost:19530")

@@ -1,9 +1,9 @@
 """This module contains the function to fix JSON strings using GPT-3."""
 import json

+from autogpt.config import Config
 from autogpt.llm_utils import call_ai_function
 from autogpt.logs import logger
-from autogpt.config import Config

 CFG = Config()

@@ -4,6 +4,7 @@ from __future__ import annotations
 import contextlib
 import json
 from typing import Optional

 from autogpt.config import Config

 CFG = Config()

@@ -3,13 +3,15 @@ from typing import Any, Dict
 from autogpt.config import Config
 from autogpt.logs import logger
 from autogpt.speech import say_text

 CFG = Config()


 def fix_json_using_multiple_techniques(assistant_reply: str) -> Dict[Any, Any]:
-    from autogpt.json_fixes.parsing import attempt_to_fix_json_by_finding_outermost_brackets
-    from autogpt.json_fixes.parsing import fix_and_parse_json
+    from autogpt.json_fixes.parsing import (
+        attempt_to_fix_json_by_finding_outermost_brackets,
+        fix_and_parse_json,
+    )

     # Parse and print Assistant response
     assistant_reply_json = fix_and_parse_json(assistant_reply)

@@ -21,7 +23,10 @@ def fix_json_using_multiple_techniques(assistant_reply: str) -> Dict[Any, Any]:
     if assistant_reply_json != {}:
         return assistant_reply_json

-    logger.error("Error: The following AI output couldn't be converted to a JSON:\n", assistant_reply)
+    logger.error(
+        "Error: The following AI output couldn't be converted to a JSON:\n",
+        assistant_reply,
+    )
     if CFG.speak_mode:
         say_text("I have received an invalid JSON response from the OpenAI API.")

@@ -4,8 +4,10 @@ from __future__ import annotations
 import contextlib
 import json
 from typing import Any, Dict, Union

 from colorama import Fore
 from regex import regex

 from autogpt.config import Config
 from autogpt.json_fixes.auto_fix import fix_json
 from autogpt.json_fixes.bracket_termination import balance_braces

@@ -1,5 +1,7 @@
 import json

 from jsonschema import Draft7Validator

 from autogpt.config import Config
 from autogpt.logs import logger

@@ -19,7 +21,9 @@ def validate_json(json_object: object, schema_name: object) -> object:
    if errors := sorted(validator.iter_errors(json_object), key=lambda e: e.path):
        logger.error("The JSON object is invalid.")
        if CFG.debug_mode:
-            logger.error(json.dumps(json_object, indent=4)) # Replace 'json_object' with the variable containing the JSON data
+            logger.error(
+                json.dumps(json_object, indent=4)
+            ) # Replace 'json_object' with the variable containing the JSON data
            logger.error("The following issues were found:")

            for error in errors:

@@ -1,11 +1,11 @@
 from __future__ import annotations

-from ast import List
 import time
+from ast import List

 import openai
-from openai.error import APIError, RateLimitError
 from colorama import Fore, Style
+from openai.error import APIError, RateLimitError

 from autogpt.config import Config
 from autogpt.logs import logger

@@ -105,8 +105,9 @@ def create_chat_completion(
                )
                if not warned_user:
                    logger.double_check(
-                        f"Please double check that you have setup a {Fore.CYAN + Style.BRIGHT}PAID{Style.RESET_ALL} OpenAI API Account. " +
-                        f"You can read more here: {Fore.CYAN}https://github.com/Significant-Gravitas/Auto-GPT#openai-api-keys-configuration{Fore.RESET}")
+                        f"Please double check that you have setup a {Fore.CYAN + Style.BRIGHT}PAID{Style.RESET_ALL} OpenAI API Account. "
+                        + f"You can read more here: {Fore.CYAN}https://github.com/Significant-Gravitas/Auto-GPT#openai-api-keys-configuration{Fore.RESET}"
+                    )
                    warned_user = True
        except APIError as e:
            if e.http_status == 502:

@@ -125,8 +126,8 @@ def create_chat_completion(
        logger.typewriter_log(
            "FAILED TO GET RESPONSE FROM OPENAI",
            Fore.RED,
-            "Auto-GPT has failed to get a response from OpenAI's services. " +
-            f"Try running Auto-GPT again, and if the problem the persists try running it with `{Fore.CYAN}--debug{Fore.RESET}`."
+            "Auto-GPT has failed to get a response from OpenAI's services. "
+            + f"Try running Auto-GPT again, and if the problem the persists try running it with `{Fore.CYAN}--debug{Fore.RESET}`.",
        )
        logger.double_check()
        if CFG.debug_mode:

@@ -5,13 +5,13 @@ import os
 import random
 import re
 import time
-from logging import LogRecord
 import traceback
+from logging import LogRecord

 from colorama import Fore, Style

-from autogpt.speech import say_text
 from autogpt.config import Config, Singleton
+from autogpt.speech import say_text

 CFG = Config()

@@ -47,7 +47,7 @@ class Logger(metaclass=Singleton):

        # Info handler in activity.log
        self.file_handler = logging.FileHandler(
-            os.path.join(log_dir, log_file), 'a', 'utf-8'
+            os.path.join(log_dir, log_file), "a", "utf-8"
        )
        self.file_handler.setLevel(logging.DEBUG)
        info_formatter = AutoGptFormatter(

@@ -57,7 +57,7 @@ class Logger(metaclass=Singleton):

        # Error handler error.log
        error_handler = logging.FileHandler(
-            os.path.join(log_dir, error_file), 'a', 'utf-8'
+            os.path.join(log_dir, error_file), "a", "utf-8"
        )
        error_handler.setLevel(logging.ERROR)
        error_formatter = AutoGptFormatter(

@@ -294,7 +294,9 @@ def print_assistant_thoughts(ai_name, assistant_reply):
        logger.error("Error: \n", call_stack)


-def print_assistant_thoughts(ai_name: object, assistant_reply_json_valid: object) -> None:
+def print_assistant_thoughts(
+    ai_name: object, assistant_reply_json_valid: object
+) -> None:
    assistant_thoughts_reasoning = None
    assistant_thoughts_plan = None
    assistant_thoughts_speak = None

@@ -310,9 +312,7 @@ def print_assistant_thoughts(ai_name: object, assistant_reply_json_valid: object
    logger.typewriter_log(
        f"{ai_name.upper()} THOUGHTS:", Fore.YELLOW, f"{assistant_thoughts_text}"
    )
-    logger.typewriter_log(
-        "REASONING:", Fore.YELLOW, f"{assistant_thoughts_reasoning}"
-    )
+    logger.typewriter_log("REASONING:", Fore.YELLOW, f"{assistant_thoughts_reasoning}")
    if assistant_thoughts_plan:
        logger.typewriter_log("PLAN:", Fore.YELLOW, "")
        # If it's a list, join it into a string

@@ -326,9 +326,7 @@ def print_assistant_thoughts(ai_name: object, assistant_reply_json_valid: object
            for line in lines:
                line = line.lstrip("- ")
                logger.typewriter_log("- ", Fore.GREEN, line.strip())
-    logger.typewriter_log(
-        "CRITICISM:", Fore.YELLOW, f"{assistant_thoughts_criticism}"
-    )
+    logger.typewriter_log("CRITICISM:", Fore.YELLOW, f"{assistant_thoughts_criticism}")
    # Speak the assistant's thoughts
    if CFG.speak_mode and assistant_thoughts_speak:
        say_text(assistant_thoughts_speak)

@@ -1,11 +1,5 @@
 """ Milvus memory storage provider."""
-from pymilvus import (
-    connections,
-    FieldSchema,
-    CollectionSchema,
-    DataType,
-    Collection,
-)
+from pymilvus import Collection, CollectionSchema, DataType, FieldSchema, connections

 from autogpt.memory.base import MemoryProviderSingleton, get_ada_embedding

@@ -1,9 +1,9 @@
 import pinecone
 from colorama import Fore, Style

+from autogpt.llm_utils import create_embedding_with_ada
 from autogpt.logs import logger
 from autogpt.memory.base import MemoryProviderSingleton
-from autogpt.llm_utils import create_embedding_with_ada


 class PineconeMemory(MemoryProviderSingleton):

@@ -10,9 +10,9 @@ from redis.commands.search.field import TextField, VectorField
 from redis.commands.search.indexDefinition import IndexDefinition, IndexType
 from redis.commands.search.query import Query

+from autogpt.llm_utils import create_embedding_with_ada
 from autogpt.logs import logger
 from autogpt.memory.base import MemoryProviderSingleton
-from autogpt.llm_utils import create_embedding_with_ada

 SCHEMA = [
     TextField("data"),

@@ -1,11 +1,13 @@
-from autogpt.config import Config
-from autogpt.memory.base import MemoryProviderSingleton, get_ada_embedding
 import uuid

 import weaviate
 from weaviate import Client
 from weaviate.embedded import EmbeddedOptions
 from weaviate.util import generate_uuid5

+from autogpt.config import Config
+from autogpt.memory.base import MemoryProviderSingleton, get_ada_embedding


 def default_schema(weaviate_index):
     return {

@@ -14,7 +16,7 @@ def default_schema(weaviate_index):
            {
                "name": "raw_text",
                "dataType": ["text"],
-                "description": "original text for the embedding"
+                "description": "original text for the embedding",
            }
        ],
    }

@@ -24,16 +26,20 @@ class WeaviateMemory(MemoryProviderSingleton):
    def __init__(self, cfg):
        auth_credentials = self._build_auth_credentials(cfg)

-        url = f'{cfg.weaviate_protocol}://{cfg.weaviate_host}:{cfg.weaviate_port}'
+        url = f"{cfg.weaviate_protocol}://{cfg.weaviate_host}:{cfg.weaviate_port}"

        if cfg.use_weaviate_embedded:
-            self.client = Client(embedded_options=EmbeddedOptions(
-                hostname=cfg.weaviate_host,
-                port=int(cfg.weaviate_port),
-                persistence_data_path=cfg.weaviate_embedded_path
-            ))
+            self.client = Client(
+                embedded_options=EmbeddedOptions(
+                    hostname=cfg.weaviate_host,
+                    port=int(cfg.weaviate_port),
+                    persistence_data_path=cfg.weaviate_embedded_path,
+                )
+            )

-            print(f"Weaviate Embedded running on: {url} with persistence path: {cfg.weaviate_embedded_path}")
+            print(
+                f"Weaviate Embedded running on: {url} with persistence path: {cfg.weaviate_embedded_path}"
+            )
        else:
            self.client = Client(url, auth_client_secret=auth_credentials)

@@ -56,7 +62,9 @@ class WeaviateMemory(MemoryProviderSingleton):

    def _build_auth_credentials(self, cfg):
        if cfg.weaviate_username and cfg.weaviate_password:
-            return weaviate.AuthClientPassword(cfg.weaviate_username, cfg.weaviate_password)
+            return weaviate.AuthClientPassword(
+                cfg.weaviate_username, cfg.weaviate_password
+            )
        if cfg.weaviate_api_key:
            return weaviate.AuthApiKey(api_key=cfg.weaviate_api_key)
        else:

@@ -66,16 +74,14 @@ class WeaviateMemory(MemoryProviderSingleton):
        vector = get_ada_embedding(data)

        doc_uuid = generate_uuid5(data, self.index)
-        data_object = {
-            'raw_text': data
-        }
+        data_object = {"raw_text": data}

        with self.client.batch as batch:
            batch.add_data_object(
                uuid=doc_uuid,
                data_object=data_object,
                class_name=self.index,
-                vector=vector
+                vector=vector,
            )

        return f"Inserting data into memory at uuid: {doc_uuid}:\n data: {data}"

@@ -91,29 +97,31 @@ class WeaviateMemory(MemoryProviderSingleton):
        # after a call to delete_all
        self._create_schema()

-        return 'Obliterated'
+        return "Obliterated"

    def get_relevant(self, data, num_relevant=5):
        query_embedding = get_ada_embedding(data)
        try:
-            results = self.client.query.get(self.index, ['raw_text']) \
-                .with_near_vector({'vector': query_embedding, 'certainty': 0.7}) \
-                .with_limit(num_relevant) \
-                .do()
+            results = (
+                self.client.query.get(self.index, ["raw_text"])
+                .with_near_vector({"vector": query_embedding, "certainty": 0.7})
+                .with_limit(num_relevant)
+                .do()
+            )

-            if len(results['data']['Get'][self.index]) > 0:
-                return [str(item['raw_text']) for item in results['data']['Get'][self.index]]
+            if len(results["data"]["Get"][self.index]) > 0:
+                return [
+                    str(item["raw_text"]) for item in results["data"]["Get"][self.index]
+                ]
            else:
                return []

        except Exception as err:
-            print(f'Unexpected error {err=}, {type(err)=}')
+            print(f"Unexpected error {err=}, {type(err)=}")
            return []

    def get_stats(self):
-        result = self.client.query.aggregate(self.index) \
-            .with_meta_count() \
-            .do()
-        class_data = result['data']['Aggregate'][self.index]
+        result = self.client.query.aggregate(self.index).with_meta_count().do()
+        class_data = result["data"]["Aggregate"][self.index]

-        return class_data[0]['meta'] if class_data else {}
+        return class_data[0]["meta"] if class_data else {}

@@ -1,8 +1,8 @@
 """HTML processing functions"""
 from __future__ import annotations

-from requests.compat import urljoin
 from bs4 import BeautifulSoup
+from requests.compat import urljoin


 def extract_hyperlinks(soup: BeautifulSoup, base_url: str) -> list[tuple[str, str]]:

@@ -1,9 +1,11 @@
 """Text processing functions"""
-from typing import Generator, Optional, Dict
+from typing import Dict, Generator, Optional

 from selenium.webdriver.remote.webdriver import WebDriver
-from autogpt.memory import get_memory
 from autogpt.config import Config
 from autogpt.llm_utils import create_chat_completion
+from autogpt.memory import get_memory

 CFG = Config()
 MEMORY = get_memory(CFG)

@@ -1,9 +1,10 @@
 from colorama import Fore

+from autogpt.config import Config
 from autogpt.config.ai_config import AIConfig
 from autogpt.config.config import Config
 from autogpt.logs import logger
 from autogpt.promptgenerator import PromptGenerator
-from autogpt.config import Config
 from autogpt.setup import prompt_user
 from autogpt.utils import clean_input

@@ -92,11 +93,7 @@ def get_prompt() -> str:
    # Only add the audio to text command if the model is specified
    if cfg.huggingface_audio_to_text_model:
        commands.append(
-            (
-                "Convert Audio to text",
-                "read_audio_from_file",
-                {"file": "<file>"}
-            ),
+            ("Convert Audio to text", "read_audio_from_file", {"file": "<file>"}),
        )

    # Only add shell command to the prompt if the AI is allowed to execute it

@@ -112,7 +109,7 @@ def get_prompt() -> str:
        (
            "Execute Shell Command Popen, non-interactive commands only",
            "execute_shell_popen",
-            {"command_line": "<command_line>"}
+            {"command_line": "<command_line>"},
        ),
    )

@@ -122,7 +119,7 @@ def get_prompt() -> str:
        (
            "Downloads a file from the internet, and stores it locally",
            "download_file",
-            {"url": "<file_url>", "file": "<saved_filename>"}
+            {"url": "<file_url>", "file": "<saved_filename>"},
        ),
    )

@@ -1,5 +1,6 @@
 """Set up the AI and its goals"""
 from colorama import Fore, Style

 from autogpt import utils
 from autogpt.config.ai_config import AIConfig
 from autogpt.logs import logger

@@ -1,5 +1,6 @@
 """ Brian speech module for autogpt """
 import os

 import requests
 from playsound import playsound

@@ -1,8 +1,8 @@
 """ElevenLabs speech module"""
 import os
-from playsound import playsound

 import requests
+from playsound import playsound

 from autogpt.config import Config
 from autogpt.speech.base import VoiceBase

@@ -1,7 +1,8 @@
 """ GTTS Voice. """
 import os
-from playsound import playsound
 import gtts
+from playsound import playsound

 from autogpt.speech.base import VoiceBase

@@ -1,13 +1,12 @@
 """ Text to speech module """
-from autogpt.config import Config

 import threading
 from threading import Semaphore
-from autogpt.speech.brian import BrianSpeech
-from autogpt.speech.macos_tts import MacOSTTS
-from autogpt.speech.gtts import GTTSVoice
-from autogpt.speech.eleven_labs import ElevenLabsSpeech

+from autogpt.config import Config
+from autogpt.speech.brian import BrianSpeech
+from autogpt.speech.eleven_labs import ElevenLabsSpeech
+from autogpt.speech.gtts import GTTSVoice
+from autogpt.speech.macos_tts import MacOSTTS

 CFG = Config()
 DEFAULT_VOICE_ENGINE = GTTSVoice()

@@ -1,8 +1,8 @@
 import argparse
 import logging

-from autogpt.config import Config
 from autogpt.commands.file_operations import ingest_file, search_files
+from autogpt.config import Config
 from autogpt.memory import get_memory

 cfg = Config()

@@ -36,4 +36,5 @@ skip = '''
 .venv/*
 reports/*
 dist/*
+
 '''
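The hunk above touches the isort skip block of what appears to be the project's pyproject.toml. For reference, a hedged sketch of the same exclusions through isort's Python API (the key names and target path here are assumptions, not code from the commit):

import isort  # assumes isort 5.x

# skip_glob mirrors the skip patterns listed above; the project itself
# keeps them in its config file, so this is illustrative only.
config = isort.Config(skip_glob=[".venv/*", "reports/*", "dist/*"])

# Sort one file in place under that configuration.
isort.file("autogpt/app.py", config=config)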
@@ -1,6 +1,7 @@
-import pkg_resources
 import sys

+import pkg_resources


 def main():
     requirements_file = sys.argv[1]

tests.py
@@ -1,4 +1,5 @@
 import unittest

 import coverage

 if __name__ == "__main__":

@@ -1,6 +1,6 @@
-import unittest
 import os
 import sys
+import unittest

 from bs4 import BeautifulSoup

@@ -1,15 +1,15 @@
+import os
+import sys
 import unittest
 from unittest import mock
-import sys
-import os
+from uuid import uuid4

 from weaviate import Client
 from weaviate.util import get_valid_uuid
-from uuid import uuid4

 from autogpt.config import Config
-from autogpt.memory.weaviate import WeaviateMemory
 from autogpt.memory.base import get_ada_embedding
+from autogpt.memory.weaviate import WeaviateMemory


 class TestWeaviateMemory(unittest.TestCase):

@@ -25,13 +25,17 @@ class TestWeaviateMemory(unittest.TestCase):
        if cls.cfg.use_weaviate_embedded:
            from weaviate.embedded import EmbeddedOptions

-            cls.client = Client(embedded_options=EmbeddedOptions(
-                hostname=cls.cfg.weaviate_host,
-                port=int(cls.cfg.weaviate_port),
-                persistence_data_path=cls.cfg.weaviate_embedded_path
-            ))
+            cls.client = Client(
+                embedded_options=EmbeddedOptions(
+                    hostname=cls.cfg.weaviate_host,
+                    port=int(cls.cfg.weaviate_port),
+                    persistence_data_path=cls.cfg.weaviate_embedded_path,
+                )
+            )
        else:
-            cls.client = Client(f"{cls.cfg.weaviate_protocol}://{cls.cfg.weaviate_host}:{self.cfg.weaviate_port}")
+            cls.client = Client(
+                f"{cls.cfg.weaviate_protocol}://{cls.cfg.weaviate_host}:{self.cfg.weaviate_port}"
+            )

        cls.index = WeaviateMemory.format_classname(cls.cfg.memory_index)

@@ -44,6 +48,7 @@ class TestWeaviateMemory(unittest.TestCase):
    USE_WEAVIATE_EMBEDDED=True
    WEAVIATE_EMBEDDED_PATH="/home/me/.local/share/weaviate"
    """

    def setUp(self):
        try:
            self.client.schema.delete_class(self.index)

@@ -53,23 +58,23 @@ class TestWeaviateMemory(unittest.TestCase):
        self.memory = WeaviateMemory(self.cfg)

    def test_add(self):
-        doc = 'You are a Titan name Thanos and you are looking for the Infinity Stones'
+        doc = "You are a Titan name Thanos and you are looking for the Infinity Stones"
        self.memory.add(doc)
-        result = self.client.query.get(self.index, ['raw_text']).do()
-        actual = result['data']['Get'][self.index]
+        result = self.client.query.get(self.index, ["raw_text"]).do()
+        actual = result["data"]["Get"][self.index]

        self.assertEqual(len(actual), 1)
-        self.assertEqual(actual[0]['raw_text'], doc)
+        self.assertEqual(actual[0]["raw_text"], doc)

    def test_get(self):
-        doc = 'You are an Avenger and swore to defend the Galaxy from a menace called Thanos'
+        doc = "You are an Avenger and swore to defend the Galaxy from a menace called Thanos"

        with self.client.batch as batch:
            batch.add_data_object(
                uuid=get_valid_uuid(uuid4()),
-                data_object={'raw_text': doc},
+                data_object={"raw_text": doc},
                class_name=self.index,
-                vector=get_ada_embedding(doc)
+                vector=get_ada_embedding(doc),
            )

            batch.flush()

@@ -81,8 +86,8 @@ class TestWeaviateMemory(unittest.TestCase):

    def test_get_stats(self):
        docs = [
-            'You are now about to count the number of docs in this index',
-            'And then you about to find out if you can count correctly'
+            "You are now about to count the number of docs in this index",
+            "And then you about to find out if you can count correctly",
        ]

        [self.memory.add(doc) for doc in docs]

@@ -90,23 +95,23 @@ class TestWeaviateMemory(unittest.TestCase):
        stats = self.memory.get_stats()

        self.assertTrue(stats)
-        self.assertTrue('count' in stats)
-        self.assertEqual(stats['count'], 2)
+        self.assertTrue("count" in stats)
+        self.assertEqual(stats["count"], 2)

    def test_clear(self):
        docs = [
-            'Shame this is the last test for this class',
-            'Testing is fun when someone else is doing it'
+            "Shame this is the last test for this class",
+            "Testing is fun when someone else is doing it",
        ]

        [self.memory.add(doc) for doc in docs]

-        self.assertEqual(self.memory.get_stats()['count'], 2)
+        self.assertEqual(self.memory.get_stats()["count"], 2)

        self.memory.clear()

-        self.assertEqual(self.memory.get_stats()['count'], 0)
+        self.assertEqual(self.memory.get_stats()["count"], 0)


-if __name__ == '__main__':
+if __name__ == "__main__":
    unittest.main()

@@ -1,4 +1,5 @@
 import unittest

 import tests.context
 from autogpt.token_counter import count_message_tokens, count_string_tokens

@@ -1,6 +1,6 @@
 # Generated by CodiumAI
-import unittest
 import time
+import unittest
 from unittest.mock import patch

 from autogpt.chat import create_chat_message, generate_context