Mirror of https://github.com/aljazceru/chatgpt-telegram-bot.git
added translations for messages from openai_helper
@@ -42,6 +42,7 @@ def main():
         'model': model,
         'presence_penalty': float(os.environ.get('PRESENCE_PENALTY', 0.0)),
         'frequency_penalty': float(os.environ.get('FREQUENCY_PENALTY', 0.0)),
+        'bot_language': os.environ.get('BOT_LANGUAGE', 'en'),
     }

     # log deprecation warning for old budget variable names
@@ -72,7 +73,7 @@ def main():
         'token_price': float(os.environ.get('TOKEN_PRICE', 0.002)),
         'image_prices': [float(i) for i in os.environ.get('IMAGE_PRICES',"0.016,0.018,0.02").split(",")],
         'transcription_price': float(os.environ.get('TOKEN_PRICE', 0.006)),
-        'bot_language': os.environ.get('BOT_LANGUAGE', 'en')
+        'bot_language': os.environ.get('BOT_LANGUAGE', 'en'),
     }

     # Setup and run ChatGPT and Telegram bot
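Both config dicts above are built straight from environment variables, with BOT_LANGUAGE now defaulting to 'en'. A minimal sketch of how those variables are parsed (the values below are made up for illustration):

```python
import os

# Illustrative values only; in the real bot these come from the environment/.env file.
os.environ.setdefault('BOT_LANGUAGE', 'de')
os.environ.setdefault('IMAGE_PRICES', '0.016,0.018,0.02')

bot_language = os.environ.get('BOT_LANGUAGE', 'en')                       # 'de'
image_prices = [float(i) for i in os.environ['IMAGE_PRICES'].split(',')]  # [0.016, 0.018, 0.02]
presence_penalty = float(os.environ.get('PRESENCE_PENALTY', 0.0))         # falls back to 0.0

print(bot_language, image_prices, presence_penalty)
```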
@@ -26,6 +26,25 @@ def default_max_tokens(model: str) -> int:
     """
     return 1200 if model in GPT_3_MODELS else 2400

+with open('translations.json', 'r', encoding='utf-8') as f:
+    translations = json.load(f)
+
+def localized_text(key, bot_language):
+    """
+    Return translated text for a key in specified bot_language.
+    Keys and translations can be found in the translations.json.
+    """
+    try:
+        return translations[bot_language][key]
+    except KeyError:
+        logging.warning(f"No translation available for bot_language code '{bot_language}' and key '{key}'")
+        # Fallback to English if the translation is not available
+        if key in translations['en']:
+            return translations['en'][key]
+        else:
+            logging.warning(f"No english definition found for key '{key}' in translations.json")
+            # return key as text
+            return key

 class OpenAIHelper:
     """
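The lookup order of the new helper is: requested language, then English, then the raw key. A self-contained sketch of that fallback chain, using an illustrative in-memory stand-in for translations.json (the actual file in the repo defines its own keys and languages):

```python
import logging

# Illustrative stand-in for translations.json; keys and languages are made up.
translations = {
    'en': {'stats_tokens': 'tokens used', 'prompt': 'prompt', 'completion': 'completion'},
    'de': {'stats_tokens': 'verbrauchte Tokens'},
}

def localized_text(key, bot_language):
    """Condensed version of the same fallback chain: language -> English -> raw key."""
    try:
        return translations[bot_language][key]
    except KeyError:
        logging.warning(f"No translation available for bot_language code '{bot_language}' and key '{key}'")
        return translations['en'].get(key, key)

print(localized_text('stats_tokens', 'de'))  # 'verbrauchte Tokens'
print(localized_text('prompt', 'de'))        # falls back to English: 'prompt'
print(localized_text('no_such_key', 'de'))   # falls back to the key itself
```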
@@ -75,11 +94,12 @@ class OpenAIHelper:
         answer = response.choices[0]['message']['content'].strip()
         self.__add_to_history(chat_id, role="assistant", content=answer)

+        bot_language = self.config['bot_language']
         if self.config['show_usage']:
             answer += "\n\n---\n" \
-                      f"💰 Tokens used: {str(response.usage['total_tokens'])}" \
-                      f" ({str(response.usage['prompt_tokens'])} prompt," \
-                      f" {str(response.usage['completion_tokens'])} completion)"
+                      f"💰 {str(response.usage['total_tokens'])} {localized_text('stats_tokens', bot_language)}" \
+                      f" ({str(response.usage['prompt_tokens'])} {localized_text('prompt', bot_language)}," \
+                      f" {str(response.usage['completion_tokens'])} {localized_text('completion', bot_language)})"

         return answer, response.usage['total_tokens']

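When show_usage is enabled, the footer is now assembled from localized fragments instead of hard-coded English labels. A sketch of the resulting string, continuing from the localized_text sketch above and using a made-up dict shaped like response.usage:

```python
# Made-up numbers; response.usage exposes the same three fields.
usage = {'total_tokens': 42, 'prompt_tokens': 30, 'completion_tokens': 12}
bot_language = 'en'

footer = "\n\n---\n" \
         f"💰 {usage['total_tokens']} {localized_text('stats_tokens', bot_language)}" \
         f" ({usage['prompt_tokens']} {localized_text('prompt', bot_language)}," \
         f" {usage['completion_tokens']} {localized_text('completion', bot_language)})"

print(footer)  # ...\n💰 42 tokens used (30 prompt, 12 completion)
```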
@@ -105,7 +125,7 @@ class OpenAIHelper:
         tokens_used = str(self.__count_tokens(self.conversations[chat_id]))

         if self.config['show_usage']:
-            answer += f"\n\n---\n💰 Tokens used: {tokens_used}"
+            answer += f"\n\n---\n💰 {tokens_used} {localized_text('stats_tokens', self.config['bot_language'])}"

         yield answer, tokens_used

@@ -116,6 +136,7 @@ class OpenAIHelper:
         :param query: The query to send to the model
         :return: The answer from the model and the number of tokens used
         """
+        bot_language = self.config['bot_language']
         try:
             if chat_id not in self.conversations or self.__max_age_reached(chat_id):
                 self.reset_chat_history(chat_id)
@@ -153,13 +174,13 @@ class OpenAIHelper:
             )

         except openai.error.RateLimitError as e:
-            raise Exception(f'⚠️ _OpenAI Rate Limit exceeded_ ⚠️\n{str(e)}') from e
+            raise Exception(f"⚠️ _{localized_text('openai_rate_limit', bot_language)}._ ⚠️\n{str(e)}") from e

         except openai.error.InvalidRequestError as e:
-            raise Exception(f'⚠️ _OpenAI Invalid request_ ⚠️\n{str(e)}') from e
+            raise Exception(f"⚠️ _{localized_text('openai_invalid', bot_language)}._ ⚠️\n{str(e)}") from e

         except Exception as e:
-            raise Exception(f'⚠️ _An error has occurred_ ⚠️\n{str(e)}') from e
+            raise Exception(f"⚠️ _{localized_text('error', bot_language)}._ ⚠️\n{str(e)}") from e

     async def generate_image(self, prompt: str) -> tuple[str, str]:
         """
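Every handler in the class follows the same wrapping pattern: catch the OpenAI error, build a user-facing message from a localized label plus the original error text, and chain the cause with `from e`. A minimal sketch, reusing the localized_text sketch above (the RuntimeError stands in for an openai.error exception, and the unknown key simply falls back to itself):

```python
bot_language = 'en'
try:
    try:
        raise RuntimeError('rate limit reached')  # stand-in for openai.error.RateLimitError
    except RuntimeError as e:
        # Localized label for the user, original error text appended, cause chained.
        raise Exception(f"⚠️ _{localized_text('openai_rate_limit', bot_language)}._ ⚠️\n{str(e)}") from e
except Exception as wrapped:
    print(wrapped)            # user-facing, localized message
    print(wrapped.__cause__)  # original exception kept for logs
```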
@@ -167,6 +188,7 @@ class OpenAIHelper:
         :param prompt: The prompt to send to the model
         :return: The image URL and the image size
         """
+        bot_language = self.config['bot_language']
         try:
             response = await openai.Image.acreate(
                 prompt=prompt,
@@ -176,11 +198,11 @@ class OpenAIHelper:

             if 'data' not in response or len(response['data']) == 0:
                 logging.error(f'No response from GPT: {str(response)}')
-                raise Exception('⚠️ _An error has occurred_ ⚠️\nPlease try again in a while.')
+                raise Exception(f"⚠️ _{localized_text('error', bot_language)}._ ⚠️\n{localized_text('try_again', bot_language)}.")

             return response['data'][0]['url'], self.config['image_size']
         except Exception as e:
-            raise Exception(f'⚠️ _An error has occurred_ ⚠️\n{str(e)}') from e
+            raise Exception(f"⚠️ _{localized_text('error', bot_language)}._ ⚠️\n{str(e)}") from e

     async def transcribe(self, filename):
         """
@@ -192,7 +214,7 @@ class OpenAIHelper:
             return result.text
         except Exception as e:
             logging.exception(e)
-            raise Exception(f'⚠️ _An error has occurred_ ⚠️\n{str(e)}') from e
+            raise Exception(f"⚠️ _{localized_text('error', self.config['bot_language'])}._ ⚠️\n{str(e)}") from e

     def reset_chat_history(self, chat_id, content=''):
         """
@@ -14,29 +14,9 @@ from telegram.ext import ApplicationBuilder, ContextTypes, CommandHandler, Messa
     filters, InlineQueryHandler, Application, CallbackContext

 from pydub import AudioSegment
-from openai_helper import OpenAIHelper
+from openai_helper import OpenAIHelper, localized_text
 from usage_tracker import UsageTracker

-with open('translations.json', 'r', encoding='utf-8') as f:
-    translations = json.load(f)
-
-def localized_text(key, bot_language):
-    """
-    Return translated text for a key in specified bot_language.
-    Keys and translations can be found in the translations.json.
-    """
-    try:
-        return translations[bot_language][key]
-    except KeyError:
-        logging.warning(f"No translation available for bot_language code '{bot_language}' and key '{key}'")
-        # Fallback to English if the translation is not available
-        if key in translations['en']:
-            return translations['en'][key]
-        else:
-            logging.warning(f"No english definition found for key '{key}' in translations.json")
-            # return key as text
-            return key
-
 def message_text(message: Message) -> str:
     """
     Returns the text of a message, excluding any bot commands.
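With the duplicated helper removed, telegram_bot.py now imports localized_text from openai_helper instead of defining its own copy. A hypothetical handler snippet showing the imported function in use (the handler name, the 'help_text' key, and the hard-coded language are illustrative; the real handlers read the language from the bot's config dict):

```python
from openai_helper import OpenAIHelper, localized_text

async def help_command(update, context):
    # Illustrative: the real bot pulls bot_language out of self.config.
    bot_language = 'en'
    await update.message.reply_text(localized_text('help_text', bot_language))
```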