diff --git a/vibe_bot/main.py b/vibe_bot/main.py
index 25be8e0..296235b 100644
--- a/vibe_bot/main.py
+++ b/vibe_bot/main.py
@@ -354,47 +354,52 @@ async def talkforme(ctx, *, message: str):
     message_counter = 0
     bot_counter = 0
-    current_user = "user"
     current_bot = bot_list[bot_counter]
-    prompt_history = [{"role": current_user, "content": topic}]
+    prompt_histories = [
+        [{"role": "user", "content": topic}],
+        [{"role": "assistant", "content": topic}],
+    ]
 
     first_bot_response = llama_wrapper.chat_completion_with_history(
         system_prompt=current_bot[1]
-        + f"\nKeep your responses under 2-3 sentences. You are talking to {current_bot[flip_counter(bot_counter)]}",
-        prompts=prompt_history,  # type: ignore
+        + f"\nKeep your responses under 2-3 sentences. You are talking to {current_bot[flip_counter(bot_counter)][0]}",
+        prompts=prompt_histories[bot_counter],  # type: ignore
         openai_url=CHAT_ENDPOINT,
         openai_api_key=CHAT_ENDPOINT_KEY,
         model=CHAT_MODEL,
         max_tokens=MAX_COMPLETION_TOKENS,
     )
     await ctx.send(f"## {current_bot[0]}\n{first_bot_response}")
-    prompt_history.append({"role": current_user, "content": first_bot_response})
+    prompt_histories[0].append({"role": "assistant", "content": first_bot_response})
+    prompt_histories[1].append({"role": "user", "content": first_bot_response})
 
-    current_user = flip_user(current_user)
     bot_counter = flip_counter(counter=bot_counter)
 
     while message_counter < min(message_limit, TALK_LIMIT):
         current_bot = bot_list[bot_counter]
-        logger.info(f"Current user is {current_user}")
         logger.info(f"Current bot is {current_bot}")
         bot_response = llama_wrapper.chat_completion_with_history(
             system_prompt=current_bot[1] + f"\nKeep your responses under 2-3 sentences. {current_bot[flip_counter(bot_counter)]}",
-            prompts=prompt_history,  # type: ignore
+            prompts=prompt_histories[bot_counter],  # type: ignore
             openai_url=CHAT_ENDPOINT,
             openai_api_key=CHAT_ENDPOINT_KEY,
             model=CHAT_MODEL,
             max_tokens=MAX_COMPLETION_TOKENS,
         )
         message_counter += 1
+        prompt_histories[bot_counter].append(
+            {"role": "assistant", "content": bot_response}
+        )
+        prompt_histories[flip_counter(bot_counter)].append(
+            {"role": "user", "content": bot_response}
+        )
 
         await ctx.send(f"## {current_bot[0]}")
         while bot_response:
             send_chunk = bot_response[:1000]
             bot_response = bot_response[1000:]
             await ctx.send(send_chunk)
 
-        prompt_history.append({"role": current_user, "content": bot_response})
         bot_counter = flip_counter(counter=bot_counter)
-        current_user = flip_user(current_user)
         logger.info(f"Message counter is {message_counter}/{limit}")