Auto-GPT/autogpt/llm/modelsinfo.py
kinance 4767fe63d3 Fix the maximum context length issue by chunking (#3222)
Co-authored-by: Reinier van der Leer <github@pwuts.nl>
2023-05-01 20:13:24 +02:00

# Cost per 1000 tokens, in USD, split into prompt and completion pricing.
COSTS = {
    "gpt-3.5-turbo": {"prompt": 0.002, "completion": 0.002},
    "gpt-3.5-turbo-0301": {"prompt": 0.002, "completion": 0.002},
    "gpt-4": {"prompt": 0.03, "completion": 0.06},
    "gpt-4-0314": {"prompt": 0.03, "completion": 0.06},
    "gpt-4-32k": {"prompt": 0.06, "completion": 0.12},
    "gpt-4-32k-0314": {"prompt": 0.06, "completion": 0.12},
    "text-embedding-ada-002": {"prompt": 0.0004, "completion": 0.0},
}
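
For context, a minimal sketch of how a per-1000-token cost table like this can be consumed. The estimate_cost helper below is illustrative only, not part of modelsinfo.py, and assumes the COSTS dict defined above.

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    # Illustrative helper (not from modelsinfo.py): look up the model's
    # per-1000-token prices in COSTS and scale by the actual token counts.
    model_costs = COSTS[model]
    return (
        prompt_tokens * model_costs["prompt"]
        + completion_tokens * model_costs["completion"]
    ) / 1000

# Example: a gpt-3.5-turbo call with 1200 prompt and 300 completion tokens
# costs (1200 * 0.002 + 300 * 0.002) / 1000 = 0.003 USD.
print(estimate_cost("gpt-3.5-turbo", 1200, 300))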