bots can talk to each other

This commit is contained in:
2026-03-10 12:48:33 -04:00
parent 2a55c412d2
commit d063519c04
2 changed files with 132 additions and 0 deletions

View File

@@ -44,6 +44,41 @@ def chat_completion(
return ""
def chat_completion_with_history(
    system_prompt: str,
    prompts: Iterable[ChatCompletionMessageParam],
    openai_url: str,
    openai_api_key: str,
    model: str,
    max_tokens: int = 1000,
) -> str:
    """Run a chat completion against an OpenAI-compatible endpoint with prior history.

    Args:
        system_prompt: System message prepended to the conversation.
        prompts: Prior conversation messages (any iterable; consumed once).
        openai_url: Base URL of the OpenAI-compatible server.
        openai_api_key: API key for that server.
        model: Model name to request.
        max_tokens: Cap on completion tokens.

    Returns:
        The stripped completion text, or "" when the model returned no content.
    """
    client = openai.OpenAI(base_url=openai_url, api_key=openai_api_key)
    # Star-unpacking accepts any iterable; the original `[...] + prompts`
    # raised TypeError whenever `prompts` was not a concrete list (e.g. a
    # generator or tuple), despite the Iterable annotation.
    messages: list[ChatCompletionMessageParam] = [
        {
            "role": "system",
            "content": system_prompt,
        },
        *prompts,
    ]
    response = client.chat.completions.create(
        model=model,
        messages=messages,
        max_tokens=max_tokens,
        extra_body={
            # Thinking/reasoning is explicitly disabled for this request.
            "chat_template_kwargs": {"enable_thinking": False},
        },
    )
    # NOTE(review): the original comment said "Assert that thinking was used",
    # but enable_thinking is False above, so a truthy reasoning_content looks
    # contradictory. Behavior kept as-is pending confirmation of intent; also
    # note `assert` is stripped under `python -O`.
    if response.choices[0].message.model_extra:
        assert response.choices[0].message.model_extra.get("reasoning_content")
    content = response.choices[0].message.content
    # Guard against a None/empty completion so callers always get a str.
    return content.strip() if content else ""
def chat_completion_instruct(
system_prompt: str,
user_prompt: str,

View File

@@ -297,6 +297,103 @@ async def retcon(ctx, *, message: str):
await ctx.send(file=send_img)
@bot.command(name="talkforme")
async def talkforme(ctx, *, message: str):
    """Have two bots talk to each other about a topic

    Usage: !talkforme bot1 bot2 4 some conversation topic
    """
    parts = message.split(" ")
    # Guard malformed invocations; the original raised IndexError on fewer
    # than four space-separated tokens.
    if len(parts) < 4:
        await ctx.send("Usage: !talkforme <bot1> <bot2> <replies> <topic>")
        return
    bot1_name, bot2_name, limit = parts[0], parts[1], parts[2]
    topic = " ".join(parts[3:])
    # Validate the reply count up front; the original announced the
    # conversation and then crashed on int(limit) for non-numeric input.
    try:
        message_limit = int(limit)
    except ValueError:
        await ctx.send(f"{limit} is not a number of replies...")
        return
    custom_bot_manager = CustomBotManager()
    bot1 = custom_bot_manager.get_custom_bot(bot1_name)
    if not bot1:
        await ctx.send(f"{bot1_name} is not a real bot...")
        return
    _, bot1_prompt, _, _ = bot1
    bot2 = custom_bot_manager.get_custom_bot(bot2_name)
    if not bot2:
        await ctx.send(f"{bot2_name} is not a real bot...")
        return
    _, bot2_prompt, _, _ = bot2
    await ctx.send(
        f'{bot1_name} is going to talk to {bot2_name} about "{topic[:50]}" for {limit} replies.'
    )
    bot_list = [(bot1_name, bot1_prompt), (bot2_name, bot2_prompt)]
    message_counter = 0
    bot_counter = 0
    current_user = "user"
    current_bot = bot_list[bot_counter]
    prompt_history = [{"role": current_user, "content": topic}]
    # Seed the exchange with the first bot's opening reply to the topic.
    first_bot_response = llama_wrapper.chat_completion_with_history(
        system_prompt=current_bot[1] + "\nKeep your responses under 2-3 sentences.",
        prompts=prompt_history,  # type: ignore
        openai_url=CHAT_ENDPOINT,
        openai_api_key=CHAT_ENDPOINT_KEY,
        model=CHAT_MODEL,
        max_tokens=MAX_COMPLETION_TOKENS,
    )
    await ctx.send(f"## {current_bot[0]}\n{first_bot_response}")
    prompt_history.append({"role": current_user, "content": first_bot_response})
    # Alternate the role label and the speaking bot each turn.
    current_user = "assistant" if current_user == "user" else "user"
    bot_counter = 1 - bot_counter
    while message_counter < message_limit:
        current_bot = bot_list[bot_counter]
        logger.info(f"Current user is {current_user}")
        logger.info(f"Current bot is {current_bot}")
        bot_response = llama_wrapper.chat_completion_with_history(
            system_prompt=current_bot[1] + "\nKeep your responses under 2-3 sentences.",
            prompts=prompt_history,  # type: ignore
            openai_url=CHAT_ENDPOINT,
            openai_api_key=CHAT_ENDPOINT_KEY,
            model=CHAT_MODEL,
            max_tokens=MAX_COMPLETION_TOKENS,
        )
        message_counter += 1
        await ctx.send(f"## {current_bot[0]}")
        # BUGFIX: record the FULL reply before chunking. The original send
        # loop consumed bot_response down to "" and only then appended it,
        # so the history filled with empty messages and later turns never
        # saw earlier replies.
        prompt_history.append({"role": current_user, "content": bot_response})
        remaining = bot_response
        # Send in <=1000-char chunks to stay under Discord's message limit.
        while remaining:
            await ctx.send(remaining[:1000])
            remaining = remaining[1000:]
        bot_counter = 1 - bot_counter
        current_user = "assistant" if current_user == "user" else "user"
        logger.info(f"Message counter is {message_counter}/{limit}")
async def handle_chat(
ctx, *, bot_name: str, message: str, system_prompt: str, response_prefix: str
):