Mirror of https://github.com/aljazceru/Auto-GPT.git, synced 2025-12-25 18:04:28 +01:00.
fix: more modular approach for embedding dimension
This commit is contained in:
fix: more modular approach for embedding dimension
This commit is contained in:
@@ -1,7 +1,7 @@
 import pinecone
-from memory.base import MemoryProviderSingleton, get_embedding
+from memory.base import MemoryProviderSingleton, get_embedding, EMBED_DIM
 from logger import logger
 from colorama import Fore, Style

@@ -10,8 +10,6 @@ class PineconeMemory(MemoryProviderSingleton):
         pinecone_api_key = cfg.pinecone_api_key
         pinecone_region = cfg.pinecone_region
         pinecone.init(api_key=pinecone_api_key, environment=pinecone_region)
-        # set the embedding dimension based on the embeder
-        dimension = 1536 if cfg.memory_embeder == "ada" else 768
         metric = "cosine"
         pod_type = "p1"
         table_name = "auto-gpt"
@@ -29,7 +27,7 @@ class PineconeMemory(MemoryProviderSingleton):
             exit(1)

         if table_name not in pinecone.list_indexes():
-            pinecone.create_index(table_name, dimension=dimension, metric=metric, pod_type=pod_type)
+            pinecone.create_index(table_name, dimension=EMBED_DIM, metric=metric, pod_type=pod_type)
         self.index = pinecone.Index(table_name)

     def add(self, data):
Reference in New Issue
Block a user