import g4f

# Per-conversation message history, keyed by conversation id.
history = {}


def chat_predict(prompt, id, sys_inst=None):
    """Send a prompt for the given conversation and return the model's reply."""
    global history
    # Start a new conversation; seed it with the system instruction, if any,
    # so the instruction is not lost and does not wipe out later turns.
    if id not in history:
        history[id] = []
        if sys_inst:
            history[id].append({"role": "system", "content": sys_inst})
    # Record the user's turn, then ask the model to continue the conversation.
    history[id].append({"role": "user", "content": prompt})
    predicted = g4f.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=history[id],
    )
    # Store the assistant's reply so the next call keeps the full context.
    history[id].append({"role": "assistant", "content": predicted})
    return predicted
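

# Minimal usage sketch (assumption: the module is run directly and a g4f
# backend is reachable). The conversation id is an arbitrary key chosen by
# the caller; reusing it continues the same conversation.
if __name__ == "__main__":
    # First turn opens conversation 1 with an optional system instruction.
    print(chat_predict("What is the capital of France?", 1, sys_inst="Answer concisely."))
    # A second call with the same id carries the earlier turns along.
    print(chat_predict("And what is its population?", 1))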