Mirror of https://github.com/aljazceru/Auto-GPT.git, synced 2025-12-26 10:24:30 +01:00
Merge pull request #2324 from itaihochman/iss1211
* Use BROWSE_MAX_CHUNK_LENGTH for chunking text
* Fix Issue #1211: GPT-3.5 token limit is lower than the default
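The change below reads the chunk size from configuration rather than hard-coding it. The following is a minimal sketch of how such a setting might be wired up, assuming the value comes from an environment variable named after the commit title; the 3000-character default and the Config class shape are illustrative, and only the CFG.browse_chunk_max_length attribute appears in the diff itself.

import os


class Config:
    # Sketch of a config object exposing the chunk length used when summarizing text.
    def __init__(self):
        # Assumption: the limit is read from the BROWSE_MAX_CHUNK_LENGTH environment
        # variable so it can be lowered for models with smaller context windows,
        # such as GPT-3.5; the 3000 default is only an example value.
        self.browse_chunk_max_length = int(os.getenv("BROWSE_MAX_CHUNK_LENGTH", "3000"))


CFG = Config()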
@@ -62,7 +62,7 @@ def summarize_text(
     print(f"Text length: {text_length} characters")

     summaries = []
-    chunks = list(split_text(text))
+    chunks = list(split_text(text, CFG.browse_chunk_max_length))
     scroll_ratio = 1 / len(chunks)

     for i, chunk in enumerate(chunks):
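For this call to work, split_text has to accept the maximum chunk length as a parameter. The generator below is a rough sketch of such a splitter, assuming a simple newline-based greedy packing strategy; it mirrors the call site chunks = list(split_text(text, CFG.browse_chunk_max_length)) but is not the repository's actual implementation.

def split_text(text: str, max_length: int = 3000):
    # Yield successive chunks of text, each at most max_length characters
    # (a single oversized paragraph is still yielded on its own).
    paragraphs = text.split("\n")
    current_chunk = []
    current_length = 0

    for paragraph in paragraphs:
        if current_length + len(paragraph) + 1 <= max_length:
            current_chunk.append(paragraph)
            current_length += len(paragraph) + 1
        else:
            if current_chunk:
                yield "\n".join(current_chunk)
            current_chunk = [paragraph]
            current_length = len(paragraph)

    if current_chunk:
        yield "\n".join(current_chunk)

With a splitter like this, lowering BROWSE_MAX_CHUNK_LENGTH shrinks each chunk handed to the summarizer, which is how the change keeps requests under GPT-3.5's smaller token limit.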