From 65fdbaf27f5b4b2d1c4aa7e11fd7f6cb0ac9f83b Mon Sep 17 00:00:00 2001 From: SwiftyOS Date: Tue, 15 Aug 2023 13:20:47 +0200 Subject: [PATCH] Switched to sqlalchemy --- .env.example | 2 +- autogpt/__main__.py | 3 +- autogpt/agent.py | 5 +- autogpt/db.py | 320 ++++++++++++++++++-------------------- autogpt/db_test.py | 55 +++----- poetry.lock | 173 +++++++++++++++++++++++- pyproject.toml | 2 + pytest.ini | 2 - 8 files changed, 336 insertions(+), 226 deletions(-) delete mode 100644 pytest.ini diff --git a/.env.example b/.env.example index 088ff34f..4ce8b983 100644 --- a/.env.example +++ b/.env.example @@ -6,6 +6,6 @@ OPENAI_API_BASE=https://oai.hconeai.com/v1 # Control log level LOG_LEVEL=INFO -DATABASE_NAME="./agent.db" +DATABASE_STRING="sqlite:///agent.db" PORT=8000 AGENT_WORKSPACE="agbenchmark/workspace" \ No newline at end of file diff --git a/autogpt/__main__.py b/autogpt/__main__.py index 8c27bcac..3c0f857a 100644 --- a/autogpt/__main__.py +++ b/autogpt/__main__.py @@ -9,7 +9,8 @@ import autogpt.db if __name__ == "__main__": """Runs the agent server""" load_dotenv() - database_name = os.getenv("DATABASE_NAME") + database_name = os.getenv("DATABASE_STRING") + print(database_name) port = os.getenv("PORT") workspace = os.getenv("AGENT_WORKSPACE") auto_gpt = autogpt.agent.AutoGPT() diff --git a/autogpt/agent.py b/autogpt/agent.py index 4f951c2f..db79e5b5 100644 --- a/autogpt/agent.py +++ b/autogpt/agent.py @@ -24,5 +24,8 @@ class AutoGPT: agent_step.task_id, agent_step.step_id, status="completed" ) updated_step.output = agent_step.input - print(f"Step completed: {updated_step}") + if step.is_last: + print(f"Task completed: {updated_step.task_id}") + else: + print(f"Step completed: {updated_step}") return updated_step diff --git a/autogpt/db.py b/autogpt/db.py index d699a612..aa46d58b 100644 --- a/autogpt/db.py +++ b/autogpt/db.py @@ -1,234 +1,180 @@ """ This is an example implementation of the Agent Protocol DB for development Purposes -It uses SQlite as the 
database and file store backend. +It uses SQLite as the database and file store backend. IT IS NOT ADVISED TO USE THIS IN PRODUCTION! """ - -import sqlite3 from typing import Dict, List, Optional +from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Boolean, LargeBinary +from sqlalchemy.orm import relationship, sessionmaker, DeclarativeBase from agent_protocol import Artifact, Step, Task, TaskDB from agent_protocol.models import Status, TaskInput +class Base(DeclarativeBase): + pass + class DataNotFoundError(Exception): pass +class TaskModel(Base): + __tablename__ = 'tasks' + + task_id = Column(Integer, primary_key=True, autoincrement=True) + input = Column(String) + additional_input = Column(String) + + steps = relationship("StepModel", back_populates="task") + artifacts = relationship("ArtifactModel", back_populates="task") + + +class StepModel(Base): + __tablename__ = 'steps' + + step_id = Column(Integer, primary_key=True, autoincrement=True) + task_id = Column(Integer, ForeignKey('tasks.task_id')) + name = Column(String) + status = Column(String) + is_last = Column(Boolean, default=False) + additional_properties = Column(String) + + task = relationship("TaskModel", back_populates="steps") + + +class ArtifactModel(Base): + __tablename__ = 'artifacts' + + artifact_id = Column(Integer, primary_key=True, autoincrement=True) + task_id = Column(Integer, ForeignKey('tasks.task_id')) + step_id = Column(Integer, ForeignKey('steps.step_id')) + file_name = Column(String) + relative_path = Column(String) + file_data = Column(LargeBinary) + + task = relationship("TaskModel", back_populates="artifacts") + +#sqlite:///{database_name} class AgentDB(TaskDB): - def __init__(self, database_name) -> None: + def __init__(self, database_string) -> None: super().__init__() - self.conn = sqlite3.connect(database_name) - cursor = self.conn.cursor() - - # Create tasks table - cursor.execute( - """ - CREATE TABLE IF NOT EXISTS tasks ( - task_id INTEGER PRIMARY KEY 
AUTOINCREMENT, - input TEXT, - additional_input TEXT - ) - """ - ) - - # Create steps table - cursor.execute( - """ - CREATE TABLE IF NOT EXISTS steps ( - step_id INTEGER PRIMARY KEY AUTOINCREMENT, - task_id INTEGER, - name TEXT, - status TEXT, - is_last INTEGER DEFAULT 0, - additional_properties TEXT, - FOREIGN KEY (task_id) REFERENCES tasks(task_id) - ) - """ - ) - - # Create artifacts table - cursor.execute( - """ - CREATE TABLE IF NOT EXISTS artifacts ( - artifact_id INTEGER PRIMARY KEY AUTOINCREMENT, - task_id INTEGER, - step_id INTEGER, - file_name TEXT, - relative_path TEXT, - file_data BLOB, - FOREIGN KEY (task_id) REFERENCES tasks(task_id) - ) - """ - ) - - # Commit the changes - self.conn.commit() + self.engine = create_engine(database_string) + Base.metadata.create_all(self.engine) + self.Session = sessionmaker(bind=self.engine) print("Databases Created") - async def create_task( - self, - input: Optional[str], - additional_input: Optional[TaskInput] = None, - artifacts: List[Artifact] = None, - steps: List[Step] = None, - ) -> Task: - """Create a task""" - cursor = self.conn.cursor() - cursor.execute( - "INSERT INTO tasks (input, additional_input) VALUES (?, ?)", - (input, additional_input.json() if additional_input else None), - ) - task_id = cursor.lastrowid - self.conn.commit() - if task_id: - return await self.get_task(task_id) - else: - raise DataNotFoundError("Task not found") + async def create_task(self, input: Optional[str], additional_input: Optional[TaskInput] = None, + artifacts: List[Artifact] = None, steps: List[Step] = None) -> Task: + session = self.Session() + new_task = TaskModel(input=input, additional_input=additional_input.json() if additional_input else None) + session.add(new_task) + session.commit() + session.refresh(new_task) + return await self.get_task(new_task.task_id) - async def create_step( - self, - task_id: str, - name: Optional[str] = None, - is_last: bool = False, - additional_properties: Optional[Dict[str, str]] = 
None, - ) -> Step: - """Create a step for a given task""" - cursor = self.conn.cursor() - cursor.execute( - "INSERT INTO steps (task_id, name, status, is_last, additional_properties) VALUES (?, ?, ?, ?, ?)", - (task_id, name, "created", is_last, additional_properties), - ) - step_id = cursor.lastrowid - self.conn.commit() - if step_id and task_id: - return await self.get_step(task_id, step_id) - else: - raise DataNotFoundError("Step not found") + async def create_step(self, task_id: str, name: Optional[str] = None, is_last: bool = False, + additional_properties: Optional[Dict[str, str]] = None) -> Step: + session = self.Session() + new_step = StepModel(task_id=task_id, name=name, status="created", is_last=is_last, + additional_properties=additional_properties) + session.add(new_step) + session.commit() + session.refresh(new_step) + return await self.get_step(task_id, new_step.step_id) - async def create_artifact( - self, - task_id: str, - file_name: str, - relative_path: Optional[str] = None, - step_id: Optional[str] = None, - file_data: bytes | None = None, - ) -> Artifact: - """Create an artifact for a given task""" - cursor = self.conn.cursor() - cursor.execute( - "INSERT INTO artifacts (task_id, step_id, file_name, relative_path, file_data) VALUES (?, ?, ?, ?, ?)", - (task_id, step_id, file_name, relative_path, file_data), - ) - artifact_id = cursor.lastrowid - self.conn.commit() - return await self.get_artifact(task_id, artifact_id) + async def create_artifact(self, task_id: str, file_name: str, relative_path: Optional[str] = None, + step_id: Optional[str] = None, file_data: bytes | None = None) -> Artifact: + session = self.Session() + new_artifact = ArtifactModel(task_id=task_id, step_id=step_id, file_name=file_name, + relative_path=relative_path, file_data=file_data) + session.add(new_artifact) + session.commit() + session.refresh(new_artifact) + return await self.get_artifact(task_id, new_artifact.artifact_id) async def get_task(self, task_id: int) -> 
Task: """Get a task by its id""" - cursor = self.conn.cursor() - cursor.execute("SELECT * FROM tasks WHERE task_id=?", (task_id,)) - if task := cursor.fetchone(): - task = Task(task_id=task[0], input=task[1], additional_input=task[2]) - cursor.execute("SELECT * FROM steps WHERE task_id=?", (task_id,)) - steps = cursor.fetchall() - if steps: - for step in steps: - status = ( - Status.created if step[3] == "created" else Status.completed - ) + session = self.Session() + task_obj = session.query(TaskModel).filter_by(task_id=task_id).first() + if task_obj: + task = Task(task_id=task_obj.task_id, input=task_obj.input, additional_input=task_obj.additional_input, steps=[]) + steps_obj = session.query(StepModel).filter_by(task_id=task_id).all() + if steps_obj: + for step in steps_obj: + status = Status.created if step.status == "created" else Status.completed task.steps.append( Step( - task_id=step[1], - step_id=step[0], - name=step[2], + task_id=step.task_id, + step_id=step.step_id, + name=step.name, status=status, - is_last=True if step[4] == 1 else False, - additional_properties=step[5], + is_last=step.is_last == 1, + additional_properties=step.additional_properties, ) ) - # print(f"Getting task {task_id}.... Task details: {task}") return task else: raise DataNotFoundError("Task not found") + async def get_step(self, task_id: int, step_id: int) -> Step: - """Get a step by its id""" - cursor = self.conn.cursor() - cursor.execute( - "SELECT * FROM steps WHERE task_id=? 
AND step_id=?", (task_id, step_id) - ) - if step := cursor.fetchone(): - return Step( - task_id=task_id, - step_id=step_id, - name=step[2], - status=step[3], - is_last=step[4] == 1, - additional_properties=step[5], - ) + session = self.Session() + if ( + step := session.query(StepModel) + .filter_by(task_id=task_id, step_id=step_id) + .first() + ): + status = Status.completed if step.status == "completed" else Status.created + return Step(task_id=task_id, step_id=step_id, name=step.name, status=status, is_last=step.is_last == 1, + additional_properties=step.additional_properties) else: raise DataNotFoundError("Step not found") - async def update_step( - self, - task_id: str, - step_id: str, - status: str, - additional_properties: Optional[Dict[str, str]] = None, - ) -> Step: - """Update a step by its id""" - cursor = self.conn.cursor() - cursor.execute( - "UPDATE steps SET status=?, additional_properties=? WHERE task_id=? AND step_id=?", - (status, additional_properties, task_id, step_id), - ) - self.conn.commit() - return await self.get_step(task_id, step_id) + async def update_step(self, task_id: str, step_id: str, status: str, + additional_properties: Optional[Dict[str, str]] = None) -> Step: + session = self.Session() + if ( + step := session.query(StepModel) + .filter_by(task_id=task_id, step_id=step_id) + .first() + ): + step.status = status + step.additional_properties = additional_properties + session.commit() + return await self.get_step(task_id, step_id) + else: + raise DataNotFoundError("Step not found") async def get_artifact(self, task_id: str, artifact_id: str) -> Artifact: - """Get an artifact by its id""" - cursor = self.conn.cursor() - cursor.execute( - "SELECT artifact_id, file_name, relative_path FROM artifacts WHERE task_id=? 
AND artifact_id=?", - (task_id, artifact_id), - ) - if artifact := cursor.fetchone(): - return Artifact( - artifact_id=artifact[0], - file_name=artifact[1], - relative_path=artifact[2], - ) + session = self.Session() + if ( + artifact := session.query(ArtifactModel) + .filter_by(task_id=task_id, artifact_id=artifact_id) + .first() + ): + return Artifact(artifact_id=artifact.artifact_id, file_name=artifact.file_name, + relative_path=artifact.relative_path) else: raise DataNotFoundError("Artifact not found") async def get_artifact_file(self, task_id: str, artifact_id: str) -> bytes: - """Get an artifact file by its id""" - cursor = self.conn.cursor() - cursor.execute( - "SELECT file_data, file_name FROM artifacts WHERE task_id=? AND artifact_id=?", - (task_id, artifact_id), - ) - if artifact := cursor.fetchone(): - return artifact[0] + session = self.Session() + if ( + artifact := session.query(ArtifactModel.file_data) + .filter_by(task_id=task_id, artifact_id=artifact_id) + .first() + ): + return artifact.file_data + else: + raise DataNotFoundError("Artifact not found") async def list_tasks(self) -> List[Task]: - """List all tasks""" - cursor = self.conn.cursor() - cursor.execute("SELECT * FROM tasks") - tasks = cursor.fetchall() - return [ - Task(task_id=task[0], input=task[1], additional_input=task[2]) - for task in tasks - ] + session = self.Session() + tasks = session.query(TaskModel).all() + return [Task(task_id=task.task_id, input=task.input, additional_input=task.additional_input) for task in tasks] async def list_steps(self, task_id: str) -> List[Step]: - """List all steps for a given task""" - cursor = self.conn.cursor() - cursor.execute("SELECT * FROM steps WHERE task_id=?", (task_id,)) - steps = cursor.fetchall() - return [ - Step(task_id=task_id, step_id=step[0], name=step[2], status=step[3]) - for step in steps - ] + session = self.Session() + steps = session.query(StepModel).filter_by(task_id=task_id).all() + return [Step(task_id=task_id, 
step_id=step.step_id, name=step.name, status=step.status) for step in steps] diff --git a/autogpt/db_test.py b/autogpt/db_test.py index 3ed988ce..b965cee9 100644 --- a/autogpt/db_test.py +++ b/autogpt/db_test.py @@ -7,7 +7,7 @@ from autogpt.db import AgentDB, DataNotFoundError def test_table_creation(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" agent_db = AgentDB(db_name) conn = sqlite3.connect("test_db.sqlite3") @@ -27,59 +27,54 @@ def test_table_creation(): ) assert cursor.fetchone() is not None - agent_db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_create_task(): # Having issues with pytest fixture so added setup and teardown in each test as a rapid workaround # TODO: Fix this! - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" agent_db = AgentDB(db_name) task = await agent_db.create_task("task_input") assert task.input == "task_input" - agent_db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_create_and_get_task(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" agent_db = AgentDB(db_name) await agent_db.create_task("task_input") task = await agent_db.get_task(1) assert task.input == "task_input" - agent_db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_get_task_not_found(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" agent_db = AgentDB(db_name) with pytest.raises(DataNotFoundError): await agent_db.get_task(9999) - agent_db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_create_and_get_step(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" agent_db = AgentDB(db_name) await agent_db.create_task("task_input") await agent_db.create_step(1, "step_name") step = await agent_db.get_step(1, 1) assert step.name == 
"step_name" - agent_db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_updating_step(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" agent_db = AgentDB(db_name) await agent_db.create_task("task_input") await agent_db.create_step(1, "step_name") @@ -87,23 +82,21 @@ async def test_updating_step(): step = await agent_db.get_step(1, 1) assert step.status.value == "completed" - agent_db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_get_step_not_found(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" agent_db = AgentDB(db_name) with pytest.raises(DataNotFoundError): await agent_db.get_step(9999, 9999) - agent_db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_get_artifact(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" db = AgentDB(db_name) # Given: A task and its corresponding artifact @@ -120,13 +113,12 @@ async def test_get_artifact(): assert fetched_artifact.artifact_id == artifact.artifact_id assert fetched_artifact.file_name == "sample_file.txt" assert fetched_artifact.relative_path == "/path/to/sample_file.txt" - db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_get_artifact_file(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" db = AgentDB(db_name) sample_data = b"sample data" # Given: A task and its corresponding artifact @@ -145,13 +137,12 @@ async def test_get_artifact_file(): # Then: The fetched artifact matches the original assert fetched_artifact == sample_data - db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_list_tasks(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" db = AgentDB(db_name) # Given: Multiple tasks in the database @@ -165,13 
+156,12 @@ async def test_list_tasks(): task_ids = [task.task_id for task in fetched_tasks] assert task1.task_id in task_ids assert task2.task_id in task_ids - db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) @pytest.mark.asyncio async def test_list_steps(): - db_name = "test_db.sqlite3" + db_name = "sqlite:///test_db.sqlite3" db = AgentDB(db_name) # Given: A task and multiple steps for that task @@ -186,5 +176,4 @@ async def test_list_steps(): step_ids = [step.step_id for step in fetched_steps] assert step1.step_id in step_ids assert step2.step_id in step_ids - db.conn.close() - os.remove(db_name) + os.remove(db_name.split('///')[1]) diff --git a/poetry.lock b/poetry.lock index fdffcf0f..230a25c3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -722,6 +722,24 @@ dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"] diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"] testing = ["beautifulsoup4 (>=4.8.2)", "cryptography (<3.4)", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"] +[[package]] +name = "dash-bootstrap-components" +version = "1.4.2" +description = "Bootstrap themed components for use in Plotly Dash" +category = "dev" +optional = false +python-versions = ">=3.7, <4" +files = [ + {file = "dash-bootstrap-components-1.4.2.tar.gz", hash = "sha256:b7514be30e229a1701db5010a47d275882a94d1efff4c803ac42a9d222ed86e0"}, + {file = "dash_bootstrap_components-1.4.2-py3-none-any.whl", hash = "sha256:4f59352a2f81cb0c41ae75dd3e0814f64049a4520f935397298e9a093ace727c"}, +] + +[package.dependencies] +dash = ">=2.0.0" + +[package.extras] +pandas = ["numpy", "pandas"] + [[package]] name = "dash-core-components" version = "2.0.0" @@ -1159,6 +1177,80 @@ requests = ">=2.0,<3.0" typing-extensions = ">=4.0,<5.0" websockets = ">=10.0,<12.0" 
+[[package]] +name = "greenlet" +version = "2.0.2" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = 
"sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = 
"greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = 
"sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +] + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["objgraph", "psutil"] + [[package]] name = "h11" version = "0.14.0" @@ -3107,6 +3199,85 @@ files = [ {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] +[[package]] +name = "sqlalchemy" +version = "2.0.19" +description = "Database Abstraction Library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:9deaae357edc2091a9ed5d25e9ee8bba98bcfae454b3911adeaf159c2e9ca9e3"}, + {file = "SQLAlchemy-2.0.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0bf0fd65b50a330261ec7fe3d091dfc1c577483c96a9fa1e4323e932961aa1b5"}, + {file = "SQLAlchemy-2.0.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d90ccc15ba1baa345796a8fb1965223ca7ded2d235ccbef80a47b85cea2d71a"}, + {file = "SQLAlchemy-2.0.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4e688f6784427e5f9479d1a13617f573de8f7d4aa713ba82813bcd16e259d1"}, + {file = "SQLAlchemy-2.0.19-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:584f66e5e1979a7a00f4935015840be627e31ca29ad13f49a6e51e97a3fb8cae"}, + {file = "SQLAlchemy-2.0.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c69ce70047b801d2aba3e5ff3cba32014558966109fecab0c39d16c18510f15"}, + {file = "SQLAlchemy-2.0.19-cp310-cp310-win32.whl", hash = "sha256:96f0463573469579d32ad0c91929548d78314ef95c210a8115346271beeeaaa2"}, + {file = "SQLAlchemy-2.0.19-cp310-cp310-win_amd64.whl", hash = "sha256:22bafb1da60c24514c141a7ff852b52f9f573fb933b1e6b5263f0daa28ce6db9"}, + {file = "SQLAlchemy-2.0.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d6894708eeb81f6d8193e996257223b6bb4041cb05a17cd5cf373ed836ef87a2"}, + {file = "SQLAlchemy-2.0.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8f2afd1aafded7362b397581772c670f20ea84d0a780b93a1a1529da7c3d369"}, + {file = "SQLAlchemy-2.0.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15afbf5aa76f2241184c1d3b61af1a72ba31ce4161013d7cb5c4c2fca04fd6e"}, + {file = "SQLAlchemy-2.0.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc05b59142445a4efb9c1fd75c334b431d35c304b0e33f4fa0ff1ea4890f92e"}, + {file = "SQLAlchemy-2.0.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5831138f0cc06b43edf5f99541c64adf0ab0d41f9a4471fd63b54ae18399e4de"}, + {file = 
"SQLAlchemy-2.0.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3afa8a21a9046917b3a12ffe016ba7ebe7a55a6fc0c7d950beb303c735c3c3ad"}, + {file = "SQLAlchemy-2.0.19-cp311-cp311-win32.whl", hash = "sha256:c896d4e6ab2eba2afa1d56be3d0b936c56d4666e789bfc59d6ae76e9fcf46145"}, + {file = "SQLAlchemy-2.0.19-cp311-cp311-win_amd64.whl", hash = "sha256:024d2f67fb3ec697555e48caeb7147cfe2c08065a4f1a52d93c3d44fc8e6ad1c"}, + {file = "SQLAlchemy-2.0.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:89bc2b374ebee1a02fd2eae6fd0570b5ad897ee514e0f84c5c137c942772aa0c"}, + {file = "SQLAlchemy-2.0.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd4d410a76c3762511ae075d50f379ae09551d92525aa5bb307f8343bf7c2c12"}, + {file = "SQLAlchemy-2.0.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f469f15068cd8351826df4080ffe4cc6377c5bf7d29b5a07b0e717dddb4c7ea2"}, + {file = "SQLAlchemy-2.0.19-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cda283700c984e699e8ef0fcc5c61f00c9d14b6f65a4f2767c97242513fcdd84"}, + {file = "SQLAlchemy-2.0.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:43699eb3f80920cc39a380c159ae21c8a8924fe071bccb68fc509e099420b148"}, + {file = "SQLAlchemy-2.0.19-cp37-cp37m-win32.whl", hash = "sha256:61ada5831db36d897e28eb95f0f81814525e0d7927fb51145526c4e63174920b"}, + {file = "SQLAlchemy-2.0.19-cp37-cp37m-win_amd64.whl", hash = "sha256:57d100a421d9ab4874f51285c059003292433c648df6abe6c9c904e5bd5b0828"}, + {file = "SQLAlchemy-2.0.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:16a310f5bc75a5b2ce7cb656d0e76eb13440b8354f927ff15cbaddd2523ee2d1"}, + {file = "SQLAlchemy-2.0.19-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf7b5e3856cbf1876da4e9d9715546fa26b6e0ba1a682d5ed2fc3ca4c7c3ec5b"}, + {file = "SQLAlchemy-2.0.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e7b69d9ced4b53310a87117824b23c509c6fc1f692aa7272d47561347e133b6"}, + {file = 
"SQLAlchemy-2.0.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9eb4575bfa5afc4b066528302bf12083da3175f71b64a43a7c0badda2be365"}, + {file = "SQLAlchemy-2.0.19-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6b54d1ad7a162857bb7c8ef689049c7cd9eae2f38864fc096d62ae10bc100c7d"}, + {file = "SQLAlchemy-2.0.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5d6afc41ca0ecf373366fd8e10aee2797128d3ae45eb8467b19da4899bcd1ee0"}, + {file = "SQLAlchemy-2.0.19-cp38-cp38-win32.whl", hash = "sha256:430614f18443b58ceb9dedec323ecddc0abb2b34e79d03503b5a7579cd73a531"}, + {file = "SQLAlchemy-2.0.19-cp38-cp38-win_amd64.whl", hash = "sha256:eb60699de43ba1a1f77363f563bb2c652f7748127ba3a774f7cf2c7804aa0d3d"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a752b7a9aceb0ba173955d4f780c64ee15a1a991f1c52d307d6215c6c73b3a4c"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7351c05db355da112e056a7b731253cbeffab9dfdb3be1e895368513c7d70106"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa51ce4aea583b0c6b426f4b0563d3535c1c75986c4373a0987d84d22376585b"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae7473a67cd82a41decfea58c0eac581209a0aa30f8bc9190926fbf628bb17f7"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851a37898a8a39783aab603c7348eb5b20d83c76a14766a43f56e6ad422d1ec8"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539010665c90e60c4a1650afe4ab49ca100c74e6aef882466f1de6471d414be7"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-win32.whl", hash = "sha256:f82c310ddf97b04e1392c33cf9a70909e0ae10a7e2ddc1d64495e3abdc5d19fb"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-win_amd64.whl", hash = "sha256:8e712cfd2e07b801bc6b60fdf64853bc2bd0af33ca8fa46166a23fe11ce0dbb0"}, + {file = "SQLAlchemy-2.0.19-py3-none-any.whl", hash = 
"sha256:314145c1389b021a9ad5aa3a18bac6f5d939f9087d7fc5443be28cba19d2c972"}, + {file = "SQLAlchemy-2.0.19.tar.gz", hash = "sha256:77a14fa20264af73ddcdb1e2b9c5a829b8cc6b8304d0f093271980e36c200a3f"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.2.0" + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3-binary"] + [[package]] name = "stack-data" version = "0.6.2" @@ -3603,4 +3774,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "85135cf2401b54937e36cedc2ed9fb17305b146dfc649f80c9c8fd02e1fd6569" +content-hash = "45bffade3b12c3921fe2909254cc5afded25eab86a273943bfb672ba7c6f26d2" diff --git a/pyproject.toml b/pyproject.toml index 3f75314f..63f038a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,6 +13,7 @@ openai = "^0.27.8" agent-protocol = "^0.2.2" 
helicone = "^1.0.6" tenacity = "^8.2.2" +sqlalchemy = "^2.0.19" [tool.poetry.group.dev.dependencies] @@ -32,6 +33,7 @@ gradio = "^3.40.1" plotly = "^5.16.0" dash = "^2.11.1" pandas = "^2.0.3" +dash-bootstrap-components = "^1.4.2" [build-system] requires = ["poetry-core"] diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index a447748f..00000000 --- a/pytest.ini +++ /dev/null @@ -1,2 +0,0 @@ -[pytest] -python_paths = ./autogpt \ No newline at end of file