Mirror of https://github.com/aljazceru/Auto-GPT.git
(synced 2025-12-25 09:54:23 +01:00)
Commit: fix: use a more modular approach for the embedding dimension
fix: more modular approach for embedding dimension
This commit is contained in:
@@ -15,6 +15,12 @@ except ImportError:
|
||||
|
||||
|
||||
cfg = Config()

# Dimension of embeddings produced by each supported embedding model.
# Falls back to 1536 (the ada dimension) for unrecognized embedder names.
# NOTE: dict.get() takes its default as a positional argument; the previous
# keyword form `default=1536` raised `TypeError: get() takes no keyword
# arguments` at import time.
EMBED_DIM = {
    "ada": 1536,
    "sbert": 768,
}.get(cfg.memory_embeder, 1536)
|
||||
|
||||
|
||||
def get_embedding(text):
|
||||
text = text.replace("\n", " ")
|
||||
|
||||
Reference in New Issue
Block a user