From 93e39bdb5345c4c10308a3637b6e956acfa0549d Mon Sep 17 00:00:00 2001
From: none
Date: Thu, 7 Mar 2024 21:06:16 +0300
Subject: [PATCH] Remove ChatGPT, add RWKV, edit messages instead of deleting them
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 api.py     | 58 +++++++++++++++++++++++++++++---------------------
 minigpt.py | 62 +++++++++++++++++++++++++++++++++++-------------------
 2 files changed, 74 insertions(+), 46 deletions(-)

diff --git a/api.py b/api.py
index f9fbc41..a05dde7 100644
--- a/api.py
+++ b/api.py
@@ -1,8 +1,7 @@
 from gradio_client import Client
-from chat_api import *
-
 from deep_translator import GoogleTranslator
 
+# Handle a generation request
 def predict(prompt, client, model = "0.1"):
     global iddb
     if model == "0.1":
@@ -14,23 +13,38 @@ def predict(prompt, client, model = "0.1"):
     elif model == "0.2":
         result = client.predict(
             prompt,
-        0.5,              # 'Temperature'
-        128,              # 'Max new tokens'
+        0.5,              # 'Temperature'
+        128,              # 'Length'
         0.8,              # 'Top-p (nucleus sampling)'
         1.8,              # 'Repetition penalty'
         api_name="/chat"
         )
+    elif model == "RWKV":
+        result = client.predict(
+            prompt,
+            333,
+            0.6,
+            0,  # int | float representing numeric value between 0.0 and 1.0 in 'Top P' Slider component
+            0,  # int | float representing numeric value between 0.0 and 1.0 in 'Presence Penalty' Slider component
+            0,  # int | float representing numeric value between 0.0 and 1.0 in 'Count Penalty' Slider component
+            fn_index=0
+        )
+    else:
+        print("INCORRECT MODEL: ", model)
+        print(type(model))
     return result
 
-# Detect code
+# Determine whether the text is code
 def iscode(text):
+    # Language tags
     langs = ['sql','php','js','java','c','cpp','python','go']
     is_code = False
     for i in langs:
         if i + r'\n' in text:
             is_code = True
             break
+    # Assembler marker
     spec = ['section .']
     if not is_code:
         for i in spec:
@@ -46,7 +60,7 @@ def translate(text, source):
     elif source == "en":
         target = "ru"
 
-    # Fix code translate
+    # Fix translation of code blocks
     if '```' in text:
         out = ''
         for i in text.split('```'):
@@ -59,39 +73,35 @@ def translate(text, source):
 
     return out
 
+# Maps user id to client session
 iddb = {}
 
-def gen(text, id, model):
+def gen(prompt, id, model):
     global iddb
+    # If the user has no session yet
     if str(id) not in iddb:
         if model == "0.1":
             client = Client("https://afischer1985-ai-interface.hf.space/")
         elif model == "0.2":
-#            client = Client("https://skier8402-mistral-super-fast.hf.space/")
             client = Client("https://nonamed33-minigpt-api.hf.space/")
-            # Changed to private space
-        if not model == "3.5":
-            iddb[str(id)] = client
+        elif model == "RWKV":
+            client = Client("https://blinkdl-rwkv-gradio-1.hf.space/")
+        iddb[str(id)] = client
     else:
-        if not model == "3.5":
-            client = iddb[str(id)]
+        client = iddb[str(id)]
+
     try:
-#    if True:
         if model == "0.1" or model == "0.2":
-            prompt = translate(text, "ru")
+            prompt = translate(prompt, "ru")
             predicted = predict(prompt, client, model).replace("", "")
             predicted = translate(predicted, "en")
-        elif model == "3.5":
-            # ChatGPT
-            global setted_models
-            prompt = text
-            try:
-                inst = setted_models[id]
-            except:
-                inst = None
-            predicted = chat_predict(prompt, id, inst)
+        elif model == "RWKV":
+            predicted = predict(prompt, client, model)
+        else:
print("INCORRECT MODEL: ", model) + print(type(model)) except: pass diff --git a/minigpt.py b/minigpt.py index ebcb02f..b677fc5 100755 --- a/minigpt.py +++ b/minigpt.py @@ -27,7 +27,6 @@ def send_welcome(message): __ Есть 3 версии: 0.1 - Простейшая, быстрейшая, краткая, не помнит что вы говорили. 0.2 - Умнее, относительно быстрая, помнит что вы говорили. -3.5 - Самая умная, есть цензура, помнит что вы говорили. /m - Выбор модели __ Список команд: @@ -77,33 +76,35 @@ System-prompt: {telebot.formatting.hcode(prompt)} @bot.message_handler(commands=['m']) def set_model(message): - bot.send_message(message.chat.id, "Выберите новую модель:", reply_markup=gen_markup(str(message.chat.id))) + mm = bot.send_message(message.chat.id, "Выберите новую модель:") + m_id = mm.id + bot.edit_message_text("Выберите новую модель:", chat_id = message.chat.id, message_id = m_id + , reply_markup=gen_markup(str(message.chat.id), m_id, message.chat.id)) -def gen_markup(id): +def gen_markup(id, m_id, c_id): markup = InlineKeyboardMarkup() markup.row_width = 3 - markup.add(InlineKeyboardButton("0.1", callback_data=id+"_0.1"), - InlineKeyboardButton("0.2", callback_data=id+"_0.2"), - InlineKeyboardButton("3.5", callback_data=id+"_3.5") ) + markup.add(InlineKeyboardButton("0.1", callback_data=id+"_0.1_"+str(m_id)+"_"+str(c_id)), + InlineKeyboardButton("0.2", callback_data=id+"_0.2_"+str(m_id)+"_"+str(c_id)), + InlineKeyboardButton("RWKV", callback_data=id+"_RWKV_"+str(m_id)+"_"+str(c_id)), + ) + # InlineKeyboardButton("VER", callback_data=id+"_VER_"+str(m_id)+"_"+str(c_id)), return markup @bot.callback_query_handler(func=lambda call: True) def callback_query(call): global setted_models, iddb - id = call.data.split("_")[0] ; m = call.data.split("_")[1] + id, m, m_id, c_id = call.data.split("_") + m_id = int(m_id) + c_id = int(c_id) try: iddb.pop(id) except: pass - if m == "0.1": - setted_models[id] = "0.1" - elif m == "0.2": - setted_models[id] = "0.2" - elif m == "3.5": - setted_models[id] = "3.5" - bot.send_message(int(id), "Успешно установлена новая модель 🤖") + setted_models[id] = m + bot.edit_message_text(f"Успешно установлена модель {m} 🤖", chat_id = c_id, message_id = m_id) ########################################## @@ -138,9 +139,11 @@ def clear_context(message): @bot.message_handler(func=lambda message: True) def echo_message(message): + # Отвечаем в ЛС, либо по команде if bot.get_chat(message.chat.id).type == "private" or message.text[:2] == "/a": global setted_models, system_prompts + # Текст генерации при команде if message.text[:2] == "/a": text = message.text[3:] else: @@ -150,27 +153,42 @@ def echo_message(message): if id not in setted_models: setted_models[id] = "0.1" + + mm = bot.send_message(message.chat.id, "Печатает...") + m_id = mm.id + + + # Если задана инструкция if id in system_prompts: - if setted_models[id] != 3.5: + if setted_models[id] == "0.1" or setted_models[id] == "0.2": prompt = '[INST]' + system_prompts[id] + '[/INST]\n\n' + text + elif setted_models[id] == "RWKV": + prompt = f''' + Instruction: {system_prompts[id]} + \nInput:{text} + \nResponse:\n''' + # Если инструкция не задана else: - prompt = text + if setted_models[id] == "0.1" or setted_models[id] == "0.2": + prompt = text + elif setted_models[id] == "RWKV": + prompt = f''' + Input: {text} + \nResponse:\n''' - st = bot.send_message(message.chat.id, "Печатает...") try: + #if 1: predicted = gen(prompt, message.chat.id, setted_models[id]) except: bot.send_message(message.chat.id, "Извините, возникла непредвиденная ошибка") try: - 
-            bot.reply_to(message, predicted, parse_mode="Markdown")
+            bot.edit_message_text(predicted, chat_id=message.chat.id, message_id=m_id, parse_mode="Markdown")
         except Exception as e:
-            bot.reply_to(message, predicted, parse_mode="HTML")
+            bot.edit_message_text(predicted, chat_id=message.chat.id, message_id=m_id, parse_mode="HTML")
             logging.error(traceback.format_exc())
-        print(predicted)
-
-        bot.delete_message(message.chat.id, st.id)
+        #bot.delete_message(message.chat.id, st.id)
 
 ############
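For reference, here is a minimal sketch of the gradio_client call that predict() now issues for model == "RWKV". It is not part of the diff above: the Space URL, fn_index=0 and the positional argument order are taken from the patch, while the labels in the comments (token count, temperature and so on) are an interpretation of the RWKV demo sliders and may not match the Space exactly.

```python
# Sketch: querying the RWKV Gradio Space the same way predict() does for "RWKV".
from gradio_client import Client

client = Client("https://blinkdl-rwkv-gradio-1.hf.space/")

result = client.predict(
    "Input: What is 2+2?\nResponse:\n",  # prompt, already in the RWKV template
    333,   # assumed: max new tokens (333 in the patch)
    0.6,   # assumed: temperature
    0,     # 'Top P' slider
    0,     # 'Presence Penalty' slider
    0,     # 'Count Penalty' slider
    fn_index=0,
)
print(result)
```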
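The translate() helper keeps ``` fences intact while translating with deep_translator, but the hunk only shows the start of that branch. The sketch below fills in one plausible completion, translating only the even indexed segments (the prose between fences); treat that rule as an assumption rather than the author's exact code.

```python
# Sketch of a fence-aware translation helper in the spirit of translate() in api.py.
from deep_translator import GoogleTranslator

def translate_keeping_code(text, source):
    target = "en" if source == "ru" else "ru"
    tr = GoogleTranslator(source=source, target=target)
    if '```' not in text:
        return tr.translate(text)
    parts = text.split('```')
    out = []
    for idx, part in enumerate(parts):
        # Assumption: even indexes are prose between fences, odd indexes are code blocks.
        out.append(tr.translate(part) if idx % 2 == 0 and part.strip() else part)
    return '```'.join(out)

print(translate_keeping_code("Привет, вот код:\n```python\nprint('hi')\n```", "ru"))
```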
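echo_message() now builds a different prompt per model: the Mistral-based models ("0.1" and "0.2") wrap an optional system prompt in [INST]...[/INST], while "RWKV" gets an Instruction/Input/Response template. A small sketch of that logic, with an illustrative helper name and simplified whitespace:

```python
# Sketch of the per-model prompt formats used in echo_message(); the patch builds
# these strings inline rather than through a helper.
def build_prompt(model, text, system_prompt=None):
    if model in ("0.1", "0.2"):
        if system_prompt:
            return '[INST]' + system_prompt + '[/INST]\n\n' + text
        return text
    if model == "RWKV":
        if system_prompt:
            return f"Instruction: {system_prompt}\nInput: {text}\nResponse:\n"
        return f"Input: {text}\nResponse:\n"
    raise ValueError(f"unknown model: {model}")

print(build_prompt("RWKV", "Сколько будет 2+2?", "Отвечай кратко."))
```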
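The model picker packs four underscore-separated fields into callback_data ("<user id>_<model>_<menu message id>_<chat id>") and unpacks them in callback_query(). A sketch of that convention with hypothetical helper names; Telegram limits callback_data to 64 bytes, which numeric ids and short model names fit comfortably:

```python
# Sketch of the callback_data convention from gen_markup()/callback_query().
# The helper names are illustrative, not part of the patch.
def pack_callback(user_id, model, menu_msg_id, chat_id):
    return f"{user_id}_{model}_{menu_msg_id}_{chat_id}"

def unpack_callback(data):
    user_id, model, menu_msg_id, chat_id = data.split("_")
    return user_id, model, int(menu_msg_id), int(chat_id)

# Example round trip
data = pack_callback(123456789, "RWKV", 42, 123456789)
assert unpack_callback(data) == ("123456789", "RWKV", 42, 123456789)
```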
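Instead of replying and then deleting the "Печатает..." placeholder, the handler now edits the placeholder in place, falling back from Markdown to HTML parsing. A self-contained sketch of that flow with pyTelegramBotAPI, where BOT_TOKEN and generate_reply() are placeholders for the real token and for gen() from api.py:

```python
# Sketch of the "edit instead of delete" flow used in echo_message().
import telebot

bot = telebot.TeleBot("BOT_TOKEN")

def generate_reply(text):
    return "echo: " + text  # stand-in for gen(prompt, id, model)

@bot.message_handler(func=lambda message: True)
def answer(message):
    # Send a temporary placeholder and remember its message id.
    placeholder = bot.send_message(message.chat.id, "Печатает...")
    predicted = generate_reply(message.text)
    try:
        # Replace the placeholder in place rather than replying and deleting it.
        bot.edit_message_text(predicted, chat_id=message.chat.id,
                              message_id=placeholder.message_id,
                              parse_mode="Markdown")
    except Exception:
        # Fall back to HTML parsing if the Markdown markup in the reply is invalid.
        bot.edit_message_text(predicted, chat_id=message.chat.id,
                              message_id=placeholder.message_id,
                              parse_mode="HTML")

bot.infinity_polling()
```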