fix: more modular approach for embedding dimension

Tymec
2023-04-14 17:17:10 +02:00
parent 653904a359
commit 121f4e606c
4 changed files with 10 additions and 18 deletions


@@ -15,6 +15,12 @@ except ImportError:
cfg = Config()
# Dimension of the embedding vectors produced by each supported model
EMBED_DIM = {
    "ada": 1536,
    "sbert": 768,
}.get(cfg.memory_embeder, 1536)  # dict.get takes its fallback positionally, not as default=

def get_embedding(text):
    text = text.replace("\n", " ")
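
For reference, a minimal standalone sketch of the same config-driven dimension lookup. The Config dataclass below is only a stand-in for the project's own config object (its memory_embeder field is kept with the project's spelling); the key point is that dict.get takes its fallback as a positional argument, so passing it as default= would raise a TypeError.

# Minimal sketch of the dimension lookup, assuming a stub Config in place
# of the project's real config module.
from dataclasses import dataclass

@dataclass
class Config:
    # Which embedding backend is configured: "ada" (OpenAI) or "sbert"
    # (sentence-transformers).
    memory_embeder: str = "ada"

cfg = Config()

# Map each supported embedder to its output vector dimension, falling back
# to 1536 (ada's dimension) for any unrecognised value. Note: dict.get takes
# the fallback positionally; default=1536 is not accepted as a keyword.
EMBED_DIM = {
    "ada": 1536,
    "sbert": 768,
}.get(cfg.memory_embeder, 1536)

print(EMBED_DIM)  # -> 1536 for the default "ada" setting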