Merge branch 'master' into fix/default-config
@@ -1,10 +1,10 @@
# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3-bullseye, 3.10-bullseye, 3-buster, 3.10-buster
ARG VARIANT=3-bullseye
FROM --platform=linux/amd64 python:3.8
FROM --platform=linux/amd64 python:3.10

RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
    # Remove imagemagick due to https://security-tracker.debian.org/tracker/CVE-2019-10131
    && apt-get purge -y imagemagick imagemagick-6-common

    # Temporary: Upgrade python packages due to https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-40897
    # They are installed by the base image (python) which does not have the patch.
@@ -25,4 +25,4 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>

# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
@@ -1,8 +1,11 @@
################################################################################
### AUTO-GPT - GENERAL SETTINGS
################################################################################
## EXECUTE_LOCAL_COMMANDS - Allow local command execution (Example: False)
## EXECUTE_LOCAL_COMMANDS - Allow local command execution (Default: False)
## RESTRICT_TO_WORKSPACE - Restrict file operations to workspace ./auto_gpt_workspace (Default: True)
# EXECUTE_LOCAL_COMMANDS=False
# RESTRICT_TO_WORKSPACE=True

## BROWSE_CHUNK_MAX_LENGTH - When browsing website, define the length of chunk stored in memory
# BROWSE_CHUNK_MAX_LENGTH=8192
@@ -13,10 +16,6 @@
## AI_SETTINGS_FILE - Specifies which AI Settings file to use (defaults to ai_settings.yaml)
# AI_SETTINGS_FILE=ai_settings.yaml

## USE_WEB_BROWSER - Sets the web-browser drivers to use with selenium (defaults to chrome).
## Note: set this to either 'chrome', 'firefox', or 'safari' depending on your current browser
# USE_WEB_BROWSER=chrome

################################################################################
### LLM PROVIDER
################################################################################
@@ -98,7 +97,7 @@ OPENAI_API_KEY=your-openai-api-key

### MILVUS
## MILVUS_ADDR - Milvus remote address (e.g. localhost:19530)
## MILVUS_COLLECTION - Milvus collection,
## change it if you want to start a new memory and retain the old memory.
# MILVUS_ADDR=your-milvus-cluster-host-port
# MILVUS_COLLECTION=autogpt
@@ -136,9 +135,14 @@ OPENAI_API_KEY=your-openai-api-key
# GITHUB_USERNAME=your-github-username

################################################################################
### SEARCH PROVIDER
### WEB BROWSING
################################################################################

### BROWSER
## USE_WEB_BROWSER - Sets the web-browser driver to use with selenium (default: chrome).
## Note: set this to either 'chrome', 'firefox', or 'safari' depending on your current browser
# USE_WEB_BROWSER=chrome

### GOOGLE
## GOOGLE_API_KEY - Google API key (Example: my-google-api-key)
## CUSTOM_SEARCH_ENGINE_ID - Custom search engine ID (Example: my-custom-search-engine-id)
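For orientation, a minimal sketch (not part of this commit) of how options like the ones documented above are typically consumed at runtime. It assumes python-dotenv is available and mirrors the os.getenv pattern that appears in the Config hunk further down; it is not the project's exact loading code.

# Sketch: load a local .env file and read the flags documented in the template.
import os

from dotenv import load_dotenv  # assumption: python-dotenv is installed

load_dotenv()  # copies values from a local .env file into os.environ

execute_local_commands = os.getenv("EXECUTE_LOCAL_COMMANDS", "False") == "True"
restrict_to_workspace = os.getenv("RESTRICT_TO_WORKSPACE", "True") == "True"
browser = os.getenv("USE_WEB_BROWSER", "chrome")

print(execute_local_commands, restrict_to_workspace, browser)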
.github/workflows/benchmark.yml (vendored): 4 changes
@@ -9,11 +9,11 @@ jobs:
    environment: benchmark
    strategy:
      matrix:
        python-version: [3.8]
        python-version: ['3.10', '3.11']

    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
.github/workflows/ci.yml (vendored): 67 changes
@@ -2,26 +2,54 @@ name: Python CI

on:
  push:
    branches:
      - master
    branches: [master]
  pull_request:
    branches:
      - '**'
  pull_request_target:
    branches:
      - '**'
    branches: [master]

concurrency:
  group: ${{ format('ci-{0}', format('pr-{0}', github.event.pull_request.number) || github.sha) }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
  build:
  lint:
    runs-on: ubuntu-latest

    strategy:
      matrix:
        python-version: [3.8]
    env:
      min-python-version: '3.10'

    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        uses: actions/checkout@v3

      - name: Set up Python ${{ env.min-python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ env.min-python-version }}

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Lint with flake8
        run: flake8

      - name: Check black formatting
        run: black . --check
        if: success() || failure()

      - name: Check isort formatting
        run: isort . --check
        if: success() || failure()

  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.10', '3.11']

    steps:
      - name: Check out repository
        uses: actions/checkout@v3

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
@@ -33,18 +61,6 @@ jobs:
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      - name: Lint with flake8
        continue-on-error: false
        run: flake8

      - name: Check black formatting
        continue-on-error: false
        run: black . --check

      - name: Check isort formatting
        continue-on-error: false
        run: isort . --check

      - name: Run unittest tests with coverage
        run: |
          pytest --cov=autogpt --without-integration --without-slow-integration
@@ -53,3 +69,4 @@ jobs:
        run: |
          coverage report
          coverage xml
        if: success() || failure()
.gitignore (vendored): 3 changes
@@ -157,3 +157,6 @@ vicuna-*

# mac
.DS_Store

# news
CURRENT_BULLETIN.md
@@ -30,7 +30,7 @@ rule_settings:
    - refactoring
    - suggestion
    - comment
  python_version: '3.9' # A string specifying the lowest Python version your project supports. Sourcery will not suggest refactorings requiring a higher Python version.
  python_version: '3.10' # A string specifying the lowest Python version your project supports. Sourcery will not suggest refactorings requiring a higher Python version.

# rules: # A list of custom rules Sourcery will include in its analysis.
# - id: no-print-statements
@@ -68,4 +68,4 @@ rule_settings:
# proxy:
# url:
# ssl_certs_file:
# no_ssl_verify: false
BULLETIN.md (new file): 2 changes
@@ -0,0 +1,2 @@
Welcome to Auto-GPT! We'll keep you informed of the latest news and features by printing messages here.
If you don't wish to see this message, you can run Auto-GPT with the --skip-news flag
@@ -1,5 +1,5 @@
# Use an official Python base image from the Docker Hub
FROM python:3.11-slim
FROM python:3.10-slim

# Install git
RUN apt-get -y update
@@ -42,6 +42,11 @@ import click
    is_flag=True,
    help="Dangerous: Allows Auto-GPT to download files natively.",
)
@click.option(
    "--skip-news",
    is_flag=True,
    help="Specifies whether to suppress the output of latest news on startup.",
)
@click.pass_context
def main(
    ctx: click.Context,
@@ -56,6 +61,7 @@ def main(
    memory_type: str,
    browser_name: str,
    allow_downloads: bool,
    skip_news: bool,
) -> None:
    """
    Welcome to AutoGPT an experimental open-source application showcasing the capabilities of the GPT-4 pushing the boundaries of AI.
@@ -73,6 +79,7 @@ def main(
    from autogpt.logs import logger
    from autogpt.memory import get_memory
    from autogpt.prompt import construct_prompt
    from autogpt.utils import get_latest_bulletin

    if ctx.invoked_subcommand is None:
        cfg = Config()
@@ -90,9 +97,14 @@ def main(
            memory_type,
            browser_name,
            allow_downloads,
            skip_news,
        )
        logger.set_level(logging.DEBUG if cfg.debug_mode else logging.INFO)
        ai_name = ""
        if not cfg.skip_news:
            motd = get_latest_bulletin()
            if motd:
                logger.typewriter_log("NEWS: ", Fore.GREEN, motd)
        system_prompt = construct_prompt()
        # print(prompt)
        # Initialize variables
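To illustrate the flag pattern used above, here is a small self-contained sketch (hypothetical names, not Auto-GPT's CLI) showing how a Click boolean option like --skip-news behaves: present on the command line it becomes True, absent it defaults to False.

# Sketch: a standalone Click command mirroring the --skip-news option.
import click


@click.command()
@click.option(
    "--skip-news",
    is_flag=True,
    help="Suppress the news bulletin on startup.",
)
def demo(skip_news: bool) -> None:
    # is_flag=True makes the option boolean: no value is expected on the CLI.
    if not skip_news:
        click.echo("NEWS: latest bulletin would be printed here")
    click.echo(f"skip_news={skip_news}")


if __name__ == "__main__":
    demo()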
@@ -84,6 +84,12 @@ def execute_python_file(file: str) -> str:

        return logs

    except docker.errors.DockerException as e:
        print(
            "Could not run the script in a container. If you haven't already, please install Docker https://docs.docker.com/get-docker/"
        )
        return f"Error: {str(e)}"

    except Exception as e:
        return f"Error: {str(e)}"
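The new except branch above reports the case where the Docker daemon is unavailable. A minimal sketch of that situation, assuming the docker SDK for Python is installed (this is illustrative, not code from the repository):

# Sketch: docker.from_env() raises docker.errors.DockerException when the
# daemon cannot be reached, which is exactly what the branch above handles.
import docker


def docker_available() -> bool:
    try:
        client = docker.from_env()
        client.ping()
        return True
    except docker.errors.DockerException as e:
        print(f"Docker is not available: {e}")
        return False


if __name__ == "__main__":
    print(docker_available())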
@@ -3,8 +3,7 @@ from __future__ import annotations

import os
import os.path
from pathlib import Path
from typing import Generator, List
from typing import Generator

import requests
from colorama import Back, Fore
@@ -81,7 +81,12 @@ def scrape_text_with_selenium(url: str) -> tuple[WebDriver, str]:
    if platform == "linux" or platform == "linux2":
        options.add_argument("--disable-dev-shm-usage")
        options.add_argument("--remote-debugging-port=9222")

    options.add_argument("--no-sandbox")
    if CFG.selenium_headless:
        options.add_argument("--headless")
        options.add_argument("--disable-gpu")

    driver = webdriver.Chrome(
        executable_path=ChromeDriverManager().install(), options=options
    )
@@ -24,8 +24,8 @@ class Config(metaclass=Singleton):
        self.speak_mode = False
        self.skip_reprompt = False
        self.allow_downloads = False
        self.skip_news = False

        self.selenium_web_browser = os.getenv("USE_WEB_BROWSER", "chrome")
        self.ai_settings_file = os.getenv("AI_SETTINGS_FILE", "ai_settings.yaml")
        self.fast_llm_model = os.getenv("FAST_LLM_MODEL", "gpt-3.5-turbo")
        self.smart_llm_model = os.getenv("SMART_LLM_MODEL", "gpt-4")
@@ -39,6 +39,9 @@ class Config(metaclass=Singleton):
        self.execute_local_commands = (
            os.getenv("EXECUTE_LOCAL_COMMANDS", "False") == "True"
        )
        self.restrict_to_workspace = (
            os.getenv("RESTRICT_TO_WORKSPACE", "True") == "True"
        )

        if self.use_azure:
            self.load_azure_config()
@@ -87,7 +90,11 @@ class Config(metaclass=Singleton):
            "HUGGINGFACE_AUDIO_TO_TEXT_MODEL"
        )

        # User agent headers to use when browsing web
        # Selenium browser settings
        self.selenium_web_browser = os.getenv("USE_WEB_BROWSER", "chrome")
        self.selenium_headless = os.getenv("HEADLESS_BROWSER", "True") == "True"

        # User agent header to use when making HTTP requests
        # Some websites might just completely deny request with an error code if
        # no user agent was found.
        self.user_agent = os.getenv(
@@ -95,6 +102,7 @@ class Config(metaclass=Singleton):
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36"
            " (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36",
        )

        self.redis_host = os.getenv("REDIS_HOST", "localhost")
        self.redis_port = os.getenv("REDIS_PORT", "6379")
        self.redis_password = os.getenv("REDIS_PASSWORD", "")
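A short worked example of the os.getenv(...) == "True" pattern used in the Config hunk above: only the exact string "True" enables a flag, so spellings like "true", "1", or "yes" are read as disabled. This sketch is illustrative and not part of the commit.

# Sketch: string comparison semantics of the boolean env-var pattern above.
import os


def env_flag(name: str, default: str) -> bool:
    return os.getenv(name, default) == "True"


os.environ["RESTRICT_TO_WORKSPACE"] = "true"   # lowercase on purpose
os.environ["EXECUTE_LOCAL_COMMANDS"] = "True"

print(env_flag("RESTRICT_TO_WORKSPACE", "True"))    # False: "true" != "True"
print(env_flag("EXECUTE_LOCAL_COMMANDS", "False"))  # True
print(env_flag("UNSET_FLAG", "True"))               # True: default string is used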
@@ -22,6 +22,7 @@ def create_config(
    memory_type: str,
    browser_name: str,
    allow_downloads: bool,
    skip_news: bool,
) -> None:
    """Updates the config object with the given arguments.

@@ -37,7 +38,7 @@ def create_config(
        memory_type (str): The type of memory backend to use
        browser_name (str): The name of the browser to use when using selenium to scrape the web
        allow_downloads (bool): Whether to allow Auto-GPT to download files natively
        skip_news (bool): Whether to suppress the output of latest news on startup
    """
    CFG.set_debug_mode(False)
    CFG.set_continuous_mode(False)
@@ -126,5 +127,8 @@ def create_config(
        )
        CFG.allow_downloads = True

    if skip_news:
        CFG.skip_news = True

    if browser_name:
        CFG.selenium_web_browser = browser_name
@@ -1,3 +1,6 @@
import os

import requests
import yaml
from colorama import Fore

@@ -37,3 +40,28 @@ def readable_file_size(size, decimal_places=2):
            break
        size /= 1024.0
    return f"{size:.{decimal_places}f} {unit}"


def get_bulletin_from_web() -> str:
    try:
        response = requests.get(
            "https://raw.githubusercontent.com/Significant-Gravitas/Auto-GPT/master/BULLETIN.md"
        )
        if response.status_code == 200:
            return response.text
    except:
        return ""


def get_latest_bulletin() -> str:
    exists = os.path.exists("CURRENT_BULLETIN.md")
    current_bulletin = ""
    if exists:
        current_bulletin = open("CURRENT_BULLETIN.md", "r", encoding="utf-8").read()
    new_bulletin = get_bulletin_from_web()
    is_new_news = new_bulletin != current_bulletin

    if new_bulletin and is_new_news:
        open("CURRENT_BULLETIN.md", "w", encoding="utf-8").write(new_bulletin)
        return f" {Fore.RED}::UPDATED:: {Fore.CYAN}{new_bulletin}{Fore.RESET}"
    return current_bulletin
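A self-contained sketch (not from the repository; fetch_remote is a hypothetical stand-in for get_bulletin_from_web) of the cache-and-compare behaviour implemented above: the first run with new remote content rewrites CURRENT_BULLETIN.md and marks it as updated, while later runs with unchanged content return the cached text.

# Sketch: cache the remote bulletin locally and flag it only when it changed.
import os

CACHE_FILE = "CURRENT_BULLETIN.md"


def latest_bulletin(fetch_remote) -> str:
    cached = ""
    if os.path.exists(CACHE_FILE):
        with open(CACHE_FILE, "r", encoding="utf-8") as f:
            cached = f.read()
    fresh = fetch_remote()
    if fresh and fresh != cached:
        with open(CACHE_FILE, "w", encoding="utf-8") as f:
            f.write(fresh)
        return f"::UPDATED:: {fresh}"
    return cached


print(latest_bulletin(lambda: "Welcome to Auto-GPT!"))  # first run: ::UPDATED::
print(latest_bulletin(lambda: "Welcome to Auto-GPT!"))  # second run: cached text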
@@ -3,6 +3,10 @@ from __future__ import annotations
import os
from pathlib import Path

from autogpt.config import Config

CFG = Config()

# Set a dedicated folder for file I/O
WORKSPACE_PATH = Path(os.getcwd()) / "auto_gpt_workspace"

@@ -35,9 +39,9 @@ def safe_path_join(base: Path, *paths: str | Path) -> Path:
    """
    joined_path = base.joinpath(*paths).resolve()

    if not joined_path.is_relative_to(base):
    if CFG.restrict_to_workspace and not joined_path.is_relative_to(base):
        raise ValueError(
            f"Attempted to access path '{joined_path}' outside of working directory '{base}'."
            f"Attempted to access path '{joined_path}' outside of workspace '{base}'."
        )

    return joined_path
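To show what the new CFG.restrict_to_workspace guard changes in practice, here is a self-contained sketch (hypothetical names, not the repository's code) where the restriction is a plain boolean argument instead of a Config attribute: with it enabled, paths that resolve outside the workspace raise ValueError; with it disabled, they are returned unchanged.

# Sketch: workspace path confinement mirroring safe_path_join above.
from pathlib import Path


def safe_join(base: Path, *paths: str, restrict_to_workspace: bool = True) -> Path:
    joined = base.joinpath(*paths).resolve()
    if restrict_to_workspace and not joined.is_relative_to(base):
        raise ValueError(
            f"Attempted to access path '{joined}' outside of workspace '{base}'."
        )
    return joined


workspace = Path("/tmp/auto_gpt_workspace")
print(safe_join(workspace, "notes.txt"))  # stays inside the workspace: allowed
try:
    safe_join(workspace, "../etc/passwd")  # escapes the workspace: raises
except ValueError as err:
    print(err)
print(safe_join(workspace, "../etc/passwd", restrict_to_workspace=False))  # allowed when disabled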