Improvements and added an initial Spotify plugin

This commit is contained in:
ned
2023-06-19 19:53:34 +02:00
parent d86cd38527
commit c1bfda47b5
13 changed files with 473 additions and 127 deletions

View File

@@ -388,85 +388,85 @@ class ChatGPTTelegramBot:
total_tokens = 0
if self.config['stream']:
async def _reply():
nonlocal total_tokens
await update.effective_message.reply_chat_action(
action=constants.ChatAction.TYPING,
message_thread_id=get_thread_id(update)
)
stream_response = self.openai.get_chat_response_stream(chat_id=chat_id, query=prompt)
i = 0
prev = ''
sent_message = None
backoff = 0
stream_chunk = 0
stream_response = self.openai.get_chat_response_stream(chat_id=chat_id, query=prompt)
i = 0
prev = ''
sent_message = None
backoff = 0
stream_chunk = 0
async for content, tokens in stream_response:
if len(content.strip()) == 0:
continue
async for content, tokens in stream_response:
if len(content.strip()) == 0:
continue
stream_chunks = split_into_chunks(content)
if len(stream_chunks) > 1:
content = stream_chunks[-1]
if stream_chunk != len(stream_chunks) - 1:
stream_chunk += 1
try:
await edit_message_with_retry(context, chat_id, str(sent_message.message_id),
stream_chunks[-2])
except:
pass
try:
sent_message = await update.effective_message.reply_text(
message_thread_id=get_thread_id(update),
text=content if len(content) > 0 else "..."
)
except:
pass
continue
cutoff = get_stream_cutoff_values(update, content)
cutoff += backoff
if i == 0:
stream_chunks = split_into_chunks(content)
if len(stream_chunks) > 1:
content = stream_chunks[-1]
if stream_chunk != len(stream_chunks) - 1:
stream_chunk += 1
try:
await edit_message_with_retry(context, chat_id, str(sent_message.message_id),
stream_chunks[-2])
except:
pass
try:
if sent_message is not None:
await context.bot.delete_message(chat_id=sent_message.chat_id,
message_id=sent_message.message_id)
sent_message = await update.effective_message.reply_text(
message_thread_id=get_thread_id(update),
reply_to_message_id=get_reply_to_message_id(self.config, update),
text=content,
disable_web_page_preview=True
text=content if len(content) > 0 else "..."
)
except:
continue
pass
continue
elif abs(len(content) - len(prev)) > cutoff or tokens != 'not_finished':
prev = content
cutoff = get_stream_cutoff_values(update, content)
cutoff += backoff
try:
use_markdown = tokens != 'not_finished'
await edit_message_with_retry(context, chat_id, str(sent_message.message_id),
text=content, markdown=use_markdown)
if i == 0:
try:
if sent_message is not None:
await context.bot.delete_message(chat_id=sent_message.chat_id,
message_id=sent_message.message_id)
sent_message = await update.effective_message.reply_text(
message_thread_id=get_thread_id(update),
reply_to_message_id=get_reply_to_message_id(self.config, update),
text=content,
disable_web_page_preview=True
)
except:
continue
except RetryAfter as e:
backoff += 5
await asyncio.sleep(e.retry_after)
continue
elif abs(len(content) - len(prev)) > cutoff or tokens != 'not_finished':
prev = content
except TimedOut:
backoff += 5
await asyncio.sleep(0.5)
continue
try:
use_markdown = tokens != 'not_finished'
await edit_message_with_retry(context, chat_id, str(sent_message.message_id),
text=content, markdown=use_markdown)
except Exception:
backoff += 5
continue
except RetryAfter as e:
backoff += 5
await asyncio.sleep(e.retry_after)
continue
await asyncio.sleep(0.01)
except TimedOut:
backoff += 5
await asyncio.sleep(0.5)
continue
i += 1
if tokens != 'not_finished':
total_tokens = int(tokens)
except Exception:
backoff += 5
continue
await wrap_with_indicator(update, context, _reply, constants.ChatAction.TYPING)
await asyncio.sleep(0.01)
i += 1
if tokens != 'not_finished':
total_tokens = int(tokens)
else:
async def _reply():