WIP: code cleanup
This commit is contained in:
0
vibe_bot/__init__.py
Normal file
0
vibe_bot/__init__.py
Normal file
532
vibe_bot/database.py
Normal file
532
vibe_bot/database.py
Normal file
@@ -0,0 +1,532 @@
|
||||
import sqlite3
|
||||
import os
|
||||
from typing import Optional, List, Tuple
|
||||
from datetime import datetime
|
||||
import numpy as np
|
||||
from openai import OpenAI
|
||||
import logging
|
||||
|
||||
# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# Database configuration (all overridable via environment variables)
# Location of the SQLite file holding chat history and embeddings.
DB_PATH = os.getenv("DB_PATH", "chat_history.db")
# Model name sent to the embeddings endpoint.
EMBEDDING_MODEL = os.getenv("EMBEDDING_MODEL", "qwen3-embed-4b")
EMBEDDING_DIMENSION = 2048  # Default for qwen3-embed-4b
# Hard cap on stored messages; oldest rows are trimmed once exceeded.
MAX_HISTORY_MESSAGES = int(os.getenv("MAX_HISTORY_MESSAGES", "1000"))
# Minimum cosine similarity for a message to count as "similar" in RAG search.
SIMILARITY_THRESHOLD = float(os.getenv("SIMILARITY_THRESHOLD", "0.7"))
# Maximum number of similar messages returned by a RAG search.
TOP_K_RESULTS = int(os.getenv("TOP_K_RESULTS", "5"))

# OpenAI configuration for the embeddings client.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "placeholder")
OPENAI_API_EMBED_ENDPOINT = os.getenv(
    "OPENAI_API_EMBED_ENDPOINT", "https://llama-embed.reeselink.com"
)
|
||||
|
||||
|
||||
class ChatDatabase:
    """SQLite database with RAG support for storing chat history using OpenAI embeddings.

    Two tables back this class:

    * ``chat_messages`` — one row per message.
    * ``message_embeddings`` — a packed float32 embedding BLOB per message,
      keyed by ``message_id``, used for cosine-similarity retrieval.
    """

    def __init__(self, db_path: str = DB_PATH):
        """Open the database at *db_path*, creating the schema if needed."""
        logger.info("Initializing ChatDatabase with path: %s", db_path)
        self.db_path = db_path
        # Dedicated client for the embeddings endpoint only.
        self.client = OpenAI(base_url=OPENAI_API_EMBED_ENDPOINT, api_key=OPENAI_API_KEY)
        self._initialize_database()

    def _initialize_database(self):
        """Create tables and indexes if they do not already exist."""
        logger.info("Initializing SQLite database at %s", self.db_path)
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS chat_messages (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    message_id TEXT UNIQUE,
                    user_id TEXT,
                    username TEXT,
                    content TEXT,
                    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    channel_id TEXT,
                    guild_id TEXT
                )
                """
            )
            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS message_embeddings (
                    message_id TEXT PRIMARY KEY,
                    embedding BLOB,
                    FOREIGN KEY (message_id) REFERENCES chat_messages(message_id)
                )
                """
            )
            # Indexes for the two hot query paths: recency ordering and
            # per-user history lookups.
            cursor.execute(
                "CREATE INDEX IF NOT EXISTS idx_timestamp ON chat_messages(timestamp)"
            )
            cursor.execute(
                "CREATE INDEX IF NOT EXISTS idx_user_id ON chat_messages(user_id)"
            )
            conn.commit()
            logger.info("Database initialization completed successfully")
        finally:
            # Close the connection even if schema creation raises.
            conn.close()

    def _generate_embedding(self, text: str) -> List[float]:
        """Return the embedding vector for *text*, or ``[]`` on failure.

        BUG FIX: the embedding lives at ``response.data[0].embedding`` — the
        original indexed ``response[0]``, which fails on the OpenAI response
        object. Also returns ``[]`` instead of ``None`` on error so the
        declared return type holds; callers only truth-test the result.
        """
        logger.debug("Generating embedding for text (length: %d)", len(text))
        try:
            response = self.client.embeddings.create(
                model=EMBEDDING_MODEL, input=text, encoding_format="float"
            )
            embedding_data = response.data[0].embedding
            if isinstance(embedding_data, list) and len(embedding_data) > 0:
                # Some servers return a nested list [[values]]; unwrap it.
                if isinstance(embedding_data[0], list):
                    logger.debug("Extracted embedding from nested structure [[values]]")
                    return embedding_data[0]
                logger.debug("Extracted embedding from direct structure [values]")
                return embedding_data
            logger.warning("Embedding data is empty or invalid")
            return []
        except Exception as e:
            logger.error("Error generating embedding: %s", e)
            return []

    def _vector_to_bytes(self, vector: List[float]) -> bytes:
        """Serialize *vector* as packed float32 bytes for BLOB storage."""
        return np.array(vector, dtype=np.float32).tobytes()

    def _bytes_to_vector(self, blob: bytes) -> np.ndarray:
        """Deserialize a float32 BLOB back into a numpy vector."""
        return np.frombuffer(blob, dtype=np.float32)

    def _calculate_similarity(self, vec1: np.ndarray, vec2: np.ndarray) -> float:
        """Cosine similarity of two vectors; 0.0 when either has zero norm.

        The zero-norm guard avoids a division-by-zero NaN for degenerate
        (all-zero) embeddings, which would otherwise poison the sort in
        search_similar_messages.
        """
        denom = np.linalg.norm(vec1) * np.linalg.norm(vec2)
        if denom == 0:
            return 0.0
        return float(np.dot(vec1, vec2) / denom)

    def add_message(
        self,
        message_id: str,
        user_id: str,
        username: str,
        content: str,
        channel_id: Optional[str] = None,
        guild_id: Optional[str] = None,
    ) -> bool:
        """Store a message and its embedding; returns True on success."""
        logger.info("Adding message %s from user %s", message_id, username)
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        try:
            # INSERT OR REPLACE keeps re-delivered messages idempotent.
            cursor.execute(
                """
                INSERT OR REPLACE INTO chat_messages
                (message_id, user_id, username, content, channel_id, guild_id)
                VALUES (?, ?, ?, ?, ?, ?)
                """,
                (message_id, user_id, username, content, channel_id, guild_id),
            )

            # Embedding generation is best-effort: on failure the message is
            # still stored, it just won't be RAG-searchable.
            embedding = self._generate_embedding(content)
            if embedding:
                cursor.execute(
                    """
                    INSERT OR REPLACE INTO message_embeddings
                    (message_id, embedding)
                    VALUES (?, ?)
                    """,
                    (message_id, self._vector_to_bytes(embedding)),
                )
            else:
                logger.warning(
                    "Failed to generate embedding for message %s, skipping embedding storage",
                    message_id,
                )

            # Trim history so the table stays bounded.
            self._cleanup_old_messages(cursor)

            conn.commit()
            logger.info("Successfully added message %s to database", message_id)
            return True
        except Exception as e:
            logger.error("Error adding message %s: %s", message_id, e)
            conn.rollback()
            return False
        finally:
            conn.close()

    def _cleanup_old_messages(self, cursor):
        """Trim the oldest rows so at most MAX_HISTORY_MESSAGES remain.

        BUG FIX: embeddings must be deleted *before* their messages. The
        original deleted messages first, so the embeddings subquery then
        selected the oldest *surviving* messages — orphaning the embeddings
        of the deleted rows and dropping embeddings of rows that were kept.
        """
        cursor.execute("SELECT COUNT(*) FROM chat_messages")
        count = cursor.fetchone()[0]
        excess = count - MAX_HISTORY_MESSAGES
        if excess <= 0:
            return

        # Delete embeddings of the oldest messages while those rows still
        # exist to be selected.
        cursor.execute(
            """
            DELETE FROM message_embeddings
            WHERE message_id IN (
                SELECT message_id FROM chat_messages
                ORDER BY timestamp ASC
                LIMIT ?
            )
            """,
            (excess,),
        )
        cursor.execute(
            """
            DELETE FROM chat_messages
            WHERE id IN (
                SELECT id FROM chat_messages
                ORDER BY timestamp ASC
                LIMIT ?
            )
            """,
            (excess,),
        )

    def get_recent_messages(
        self, limit: int = 10
    ) -> List[Tuple[str, str, str, datetime]]:
        """Return the *limit* most recent (message_id, username, content, timestamp) rows."""
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute(
                """
                SELECT message_id, username, content, timestamp
                FROM chat_messages
                ORDER BY timestamp DESC
                LIMIT ?
                """,
                (limit,),
            )
            return cursor.fetchall()
        finally:
            conn.close()

    def search_similar_messages(
        self,
        query: str,
        top_k: int = TOP_K_RESULTS,
        min_similarity: float = SIMILARITY_THRESHOLD,
    ) -> List[Tuple[str, str, float]]:
        """Return up to *top_k* (message_id, content, similarity) tuples.

        Only messages whose cosine similarity to *query* reaches
        *min_similarity* are returned, most similar first. (FIX: the
        original return annotation claimed 4-tuples; the rows are 3-tuples.)
        """
        query_embedding = self._generate_embedding(query)
        if not query_embedding:
            return []

        query_vector = np.array(query_embedding, dtype=np.float32)

        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            # Join so we only score messages that actually have embeddings.
            cursor.execute(
                """
                SELECT cm.message_id, cm.content, me.embedding
                FROM chat_messages cm
                JOIN message_embeddings me ON cm.message_id = me.message_id
                """
            )
            rows = cursor.fetchall()
        finally:
            conn.close()

        results = []
        for message_id, content, embedding_blob in rows:
            similarity = self._calculate_similarity(
                query_vector, self._bytes_to_vector(embedding_blob)
            )
            if similarity >= min_similarity:
                # Truncate content so one huge message can't bloat the
                # downstream prompt context.
                results.append((message_id, content[:500], similarity))

        # Sort by similarity descending and keep the best top_k.
        results.sort(key=lambda x: x[2], reverse=True)
        return results[:top_k]

    def get_user_history(
        self, user_id: str, limit: int = 20
    ) -> List[Tuple[str, str, datetime]]:
        """Return the *limit* most recent (message_id, content, timestamp) rows for *user_id*."""
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute(
                """
                SELECT message_id, content, timestamp
                FROM chat_messages
                WHERE user_id = ?
                ORDER BY timestamp DESC
                LIMIT ?
                """,
                (user_id, limit),
            )
            return cursor.fetchall()
        finally:
            conn.close()

    def get_conversation_context(
        self, user_id: str, current_message: str, max_context: int = 5
    ) -> str:
        """Build a text context block for RAG from recency plus similarity.

        Combines the user's recent messages with globally similar messages,
        deduplicated, capped at ``2 * max_context`` lines.
        """
        recent_messages = self.get_user_history(user_id, limit=max_context * 2)
        similar_messages = self.search_similar_messages(
            current_message, top_k=max_context
        )

        context_parts = []
        for message_id, content, timestamp in recent_messages:
            context_parts.append(f"[{timestamp}] User: {content}")

        # BUG FIX: the original dedup test searched for "[{content}" in the
        # joined context, which never matches the "[{timestamp}] User: ..."
        # lines above, so duplicates were never filtered. Compare on the
        # content itself instead.
        for message_id, content, similarity in similar_messages:
            if content not in "\n".join(context_parts):
                context_parts.append(f"[Similar] {content}")

        # Keep only the tail so the context stays bounded.
        return "\n".join(context_parts[-max_context * 2 :])

    def clear_all_messages(self):
        """Delete every message and embedding (irreversible)."""
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            # Embeddings first to respect the foreign-key relationship.
            cursor.execute("DELETE FROM message_embeddings")
            cursor.execute("DELETE FROM chat_messages")
            conn.commit()
        finally:
            conn.close()
|
||||
|
||||
|
||||
# Process-wide database singleton, created lazily by get_database().
_chat_db: Optional[ChatDatabase] = None


def get_database() -> ChatDatabase:
    """Return the shared ChatDatabase, constructing it on first use."""
    global _chat_db
    if _chat_db is not None:
        return _chat_db
    _chat_db = ChatDatabase()
    return _chat_db
|
||||
|
||||
|
||||
class CustomBotManager:
    """Manages custom bot configurations stored in SQLite database.

    Bot names are lowercased on every read and write, so lookups are
    case-insensitive. Bots can be hard-deleted or soft-deactivated via the
    ``is_active`` flag.
    """

    def __init__(self, db_path: str = DB_PATH):
        """Open the database at *db_path* and ensure the table exists."""
        self.db_path = db_path
        self._initialize_custom_bots_table()

    def _initialize_custom_bots_table(self):
        """Initialize the custom bots table in SQLite."""
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS custom_bots (
                    bot_name TEXT PRIMARY KEY,
                    system_prompt TEXT NOT NULL,
                    created_by TEXT NOT NULL,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    is_active INTEGER DEFAULT 1
                )
                """
            )
            conn.commit()
        finally:
            conn.close()

    def create_custom_bot(
        self, bot_name: str, system_prompt: str, created_by: str
    ) -> bool:
        """Create (or overwrite) a custom bot; returns True on success."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        try:
            # INSERT OR REPLACE: re-creating an existing bot updates it and
            # reactivates it.
            cursor.execute(
                """
                INSERT OR REPLACE INTO custom_bots
                (bot_name, system_prompt, created_by, is_active)
                VALUES (?, ?, ?, 1)
                """,
                (bot_name.lower(), system_prompt, created_by),
            )
            conn.commit()
            return True
        except Exception as e:
            # Use the module logger (not print) so failures land in the same
            # log stream as the rest of the module.
            logger.error("Error creating custom bot: %s", e)
            conn.rollback()
            return False
        finally:
            conn.close()

    def get_custom_bot(self, bot_name: str) -> Optional[Tuple[str, str, str, datetime]]:
        """Return (bot_name, system_prompt, created_by, created_at) for an
        active bot, or None when not found."""
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute(
                """
                SELECT bot_name, system_prompt, created_by, created_at
                FROM custom_bots
                WHERE bot_name = ? AND is_active = 1
                """,
                (bot_name.lower(),),
            )
            return cursor.fetchone()
        finally:
            conn.close()

    def list_custom_bots(
        self, user_id: Optional[str] = None
    ) -> List[Tuple[str, str, str]]:
        """List active custom bots, optionally filtered by creator.

        BUG FIX: the original executed the identical unfiltered query in
        both branches, so the *user_id* filter was silently ignored; the
        filtered branch now constrains on ``created_by``.
        """
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            if user_id:
                cursor.execute(
                    """
                    SELECT bot_name, system_prompt, created_by
                    FROM custom_bots
                    WHERE is_active = 1 AND created_by = ?
                    ORDER BY created_at DESC
                    """,
                    (user_id,),
                )
            else:
                cursor.execute(
                    """
                    SELECT bot_name, system_prompt, created_by
                    FROM custom_bots
                    WHERE is_active = 1
                    ORDER BY created_at DESC
                    """
                )
            return cursor.fetchall()
        finally:
            conn.close()

    def delete_custom_bot(self, bot_name: str) -> bool:
        """Hard-delete a custom bot; returns True if a row was removed."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        try:
            cursor.execute(
                """
                DELETE FROM custom_bots
                WHERE bot_name = ?
                """,
                (bot_name.lower(),),
            )
            conn.commit()
            # rowcount == 0 means the name didn't exist.
            return cursor.rowcount > 0
        except Exception as e:
            logger.error("Error deleting custom bot: %s", e)
            conn.rollback()
            return False
        finally:
            conn.close()

    def deactivate_custom_bot(self, bot_name: str) -> bool:
        """Deactivate a custom bot (soft delete); returns True if a row changed."""
        conn = sqlite3.connect(self.db_path)
        cursor = conn.cursor()
        try:
            cursor.execute(
                """
                UPDATE custom_bots
                SET is_active = 0
                WHERE bot_name = ?
                """,
                (bot_name.lower(),),
            )
            conn.commit()
            return cursor.rowcount > 0
        except Exception as e:
            logger.error("Error deactivating custom bot: %s", e)
            conn.rollback()
            return False
        finally:
            conn.close()
|
||||
114
vibe_bot/llama_wrapper.py
Normal file
114
vibe_bot/llama_wrapper.py
Normal file
@@ -0,0 +1,114 @@
|
||||
# Wraps the openai calls in generic functions
|
||||
# Supports chat, image, edit, and embeddings
|
||||
# Allows custom endpoints for each of the above supported functions
|
||||
|
||||
import openai
|
||||
from typing import Iterable
|
||||
from openai.types.chat import ChatCompletionMessageParam
|
||||
|
||||
|
||||
def chat_completion_think(
    system_prompt: str,
    user_prompt: str,
    openai_url: str,
    openai_api_key: str,
    model: str,
    max_tokens: int = 1000,
) -> str:
    """Run a chat completion with reasoning ("thinking") enabled.

    Sends *system_prompt* and *user_prompt* to *model* at *openai_url* and
    returns the stripped assistant reply, or "" when no content came back.
    Asserts that the server actually produced ``reasoning_content``.
    """
    client = openai.OpenAI(base_url=openai_url, api_key=openai_api_key)
    chat_messages: Iterable[ChatCompletionMessageParam] = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]
    response = client.chat.completions.create(
        model=model, messages=chat_messages, max_tokens=max_tokens
    )

    # Sanity check: the thinking variant must report reasoning output.
    reply_message = response.choices[0].message
    if reply_message.model_extra:
        assert reply_message.model_extra.get("reasoning_content")

    reply = reply_message.content
    return reply.strip() if reply else ""
|
||||
|
||||
|
||||
def chat_completion_instruct(
    system_prompt: str,
    user_prompt: str,
    openai_url: str,
    openai_api_key: str,
    model: str,
    max_tokens: int = 1000,
) -> str:
    """Run a chat completion with reasoning ("thinking") disabled.

    Mirrors chat_completion_think, but passes ``chat_template_kwargs`` to
    turn thinking off and verifies the server honored it. Returns the
    stripped assistant reply, or "" when no content came back.
    """
    client = openai.OpenAI(base_url=openai_url, api_key=openai_api_key)
    messages: Iterable[ChatCompletionMessageParam] = [
        {
            "role": "system",
            "content": system_prompt,
        },
        {
            "role": "user",
            "content": user_prompt,
        },
    ]
    response = client.chat.completions.create(
        model=model,
        messages=messages,
        max_tokens=max_tokens,
        extra_body={
            "chat_template_kwargs": {"enable_thinking": False},
        },
    )

    # Assert that thinking wasn't used.
    # BUG FIX: the original asserted that reasoning_content WAS present —
    # the opposite of the stated intent and of the enable_thinking=False
    # setting above.
    if response.choices[0].message.model_extra:
        assert not response.choices[0].message.model_extra.get("reasoning_content")

    content = response.choices[0].message.content
    if content:
        return content.strip()
    else:
        return ""
|
||||
|
||||
|
||||
def image_generation(prompt: str, n=1) -> str:
    """Generate *n* image(s) from *prompt*; return the first image URL or "".

    BUG FIX: the original referenced the global ``OPENAI_API_IMAGE_ENDPOINT``,
    which is never defined in this module, so every call raised NameError.
    The endpoint is now read from the environment variable of the same name.
    """
    import os  # local import: this module does not import os at top level

    client = openai.OpenAI(
        base_url=os.getenv("OPENAI_API_IMAGE_ENDPOINT", ""), api_key="placeholder"
    )
    response = client.images.generate(
        prompt=prompt,
        n=n,
        size="1024x1024",
    )
    if response.data:
        return response.data[0].url
    else:
        return ""
|
||||
|
||||
|
||||
def image_edit(image, mask, prompt, n=1, size="1024x1024"):
    """Edit *image* (with *mask*) per *prompt*; return the first result URL.

    BUG FIX: the original referenced the undefined global
    ``OPENAI_API_EDIT_ENDPOINT`` (NameError on every call); it is now read
    from the environment. Also returns "" instead of raising IndexError when
    the server returns no data, matching image_generation's behavior.
    """
    import os  # local import: this module does not import os at top level

    client = openai.OpenAI(
        base_url=os.getenv("OPENAI_API_EDIT_ENDPOINT", ""), api_key="placeholder"
    )
    response = client.images.edit(
        image=image,
        mask=mask,
        prompt=prompt,
        n=n,
        size=size,
    )
    if response.data:
        return response.data[0].url
    else:
        return ""
|
||||
|
||||
|
||||
def embeddings(text, model="text-embedding-3-small"):
    """Return the embedding vector for *text* from the embeddings endpoint.

    BUG FIX: the original referenced the undefined global
    ``OPENAI_API_EMBED_ENDPOINT`` (it is defined in database.py, not here),
    raising NameError on every call; it is now read from the environment.
    """
    import os  # local import: this module does not import os at top level

    client = openai.OpenAI(
        base_url=os.getenv("OPENAI_API_EMBED_ENDPOINT", ""), api_key="placeholder"
    )
    response = client.embeddings.create(
        input=text,
        model=model,
    )
    return response.data[0].embedding
|
||||
461
vibe_bot/main.py
Normal file
461
vibe_bot/main.py
Normal file
@@ -0,0 +1,461 @@
|
||||
import discord
|
||||
from discord.ext import commands
|
||||
import os
|
||||
import base64
|
||||
from io import BytesIO
|
||||
from openai import OpenAI
|
||||
import logging
|
||||
from database import get_database, CustomBotManager
|
||||
|
||||
# Configure logging
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger(__name__)

# Discord bot token; the placeholder default will fail at login time.
DISCORD_TOKEN = os.getenv("DISCORD_TOKEN", "placeholder")

# Service endpoints: chat completions plus image generation/editing.
OPENAI_API_ENDPOINT = os.getenv("OPENAI_API_ENDPOINT")
IMAGE_GEN_ENDPOINT = os.getenv("IMAGE_GEN_ENDPOINT")
IMAGE_EDIT_ENDPOINT = os.getenv("IMAGE_EDIT_ENDPOINT")
# Cap on tokens per LLM response.
MAX_COMPLETION_TOKENS = int(os.getenv("MAX_COMPLETION_TOKENS", "1000"))

# Fail fast at import time when required endpoints are missing.
if not OPENAI_API_ENDPOINT:
    raise Exception("OPENAI_API_ENDPOINT required.")

if not IMAGE_GEN_ENDPOINT:
    raise Exception("IMAGE_GEN_ENDPOINT required.")

# Set your OpenAI API key as an environment variable
# You can also pass it directly but environment variables are safer
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "placeholder")

# Initialize the bot; the message_content intent is required so on_message
# can read message text for custom-bot command matching.
intents = discord.Intents.default()
intents.message_content = True
bot = commands.Bot(command_prefix="!", intents=intents)

# OpenAI Completions API endpoint
# NOTE(review): appears unused in this view — requests go through the OpenAI
# client built from OPENAI_API_ENDPOINT directly; confirm before removing.
OPENAI_COMPLETIONS_URL = f"{OPENAI_API_ENDPOINT}/chat/completions"
|
||||
|
||||
|
||||
@bot.event
async def on_ready():
    """Announce a successful gateway login to stdout and the log."""
    logger.info("Bot is starting up...")
    login_line = f"Bot logged in as {bot.user}"
    print(login_line)
    logger.info(login_line)
|
||||
|
||||
|
||||
@bot.command(name="custom-bot")
async def custom_bot(ctx, bot_name: str, *, personality: str):
    """Create a custom bot with a name and personality.

    Usage: !custom-bot <bot_name> <personality_description>
    Example: !custom-bot alfred you are a proper british butler

    The name becomes a command prefix handled in on_message; the manager
    stores it lowercased, so matching is case-insensitive.
    """
    logger.info(
        f"Custom bot command initiated by {ctx.author.name}: name='{bot_name}', personality length={len(personality)}"
    )

    # Validate bot name: non-empty and 2-50 characters.
    if not bot_name or len(bot_name) < 2 or len(bot_name) > 50:
        logger.warning(
            f"Invalid bot name from {ctx.author.name}: '{bot_name}' (length: {len(bot_name) if bot_name else 0})"
        )
        await ctx.send("❌ Invalid bot name. Name must be between 2 and 50 characters.")
        return

    logger.info(f"Bot name validation passed for '{bot_name}'")

    # Validate personality: at least 10 characters so the system prompt is useful.
    if not personality or len(personality) < 10:
        logger.warning(
            f"Invalid personality from {ctx.author.name}: length={len(personality) if personality else 0}"
        )
        await ctx.send(
            "❌ Invalid personality. Description must be at least 10 characters."
        )
        return

    logger.info(f"Personality validation passed for bot '{bot_name}'")

    # Create custom bot manager (opens the SQLite DB and ensures the table).
    logger.info(f"Initializing CustomBotManager for user {ctx.author.name}")
    custom_bot_manager = CustomBotManager()

    # Create the custom bot; the personality becomes its system prompt.
    logger.info(
        f"Attempting to create custom bot '{bot_name}' for user {ctx.author.name}"
    )
    success = custom_bot_manager.create_custom_bot(
        bot_name=bot_name, system_prompt=personality, created_by=str(ctx.author.id)
    )

    if success:
        logger.info(
            f"Successfully created custom bot '{bot_name}' for user {ctx.author.name}"
        )
        await ctx.send(
            f"✅ Custom bot **'{bot_name}'** has been created with personality: *{personality}*"
        )
        await ctx.send(f"\nYou can now use this bot with: `!{bot_name} <your message>`")
    else:
        logger.warning(
            f"Failed to create custom bot '{bot_name}' for user {ctx.author.name}"
        )
        # NOTE(review): create_custom_bot uses INSERT OR REPLACE, so "already
        # exists" is not actually a failure mode — False means a DB error.
        await ctx.send("❌ Failed to create custom bot. It may already exist.")
|
||||
|
||||
|
||||
@bot.command(name="list-custom-bots")
async def list_custom_bots(ctx):
    """List all custom bots available in the server (up to the first 10)."""
    logger.info(f"Listing custom bots requested by {ctx.author.name}")

    # Create custom bot manager (opens the SQLite DB and ensures the table).
    logger.info("Initializing CustomBotManager to list custom bots")
    custom_bot_manager = CustomBotManager()

    logger.info("Fetching list of custom bots from database")
    bots = custom_bot_manager.list_custom_bots()

    if not bots:
        logger.info(f"No custom bots found for user {ctx.author.name}")
        await ctx.send(
            "No custom bots have been created yet. Use `!custom-bot <name> <personality>` to create one."
        )
        return

    logger.info(
        f"Found {len(bots)} custom bots, displaying top 10 for {ctx.author.name}"
    )
    # Rows are (bot_name, system_prompt, created_by); the prompt is not shown.
    bot_list = "🤖 **Available Custom Bots**:\n\n"
    for name, prompt, creator in bots[:10]:  # Limit to 10 bots
        bot_list += f"• **{name}** (created by {creator})\n"

    logger.info(f"Sending bot list response to {ctx.author.name}")
    await ctx.send(bot_list)
|
||||
|
||||
|
||||
@bot.command(name="delete-custom-bot")
async def delete_custom_bot(ctx, bot_name: str):
    """Delete a custom bot (only the creator can delete).

    Usage: !delete-custom-bot <bot_name>
    """
    logger.info(
        f"Delete custom bot command initiated by {ctx.author.name}: bot_name='{bot_name}'"
    )

    # Create custom bot manager (opens the SQLite DB and ensures the table).
    logger.info("Initializing CustomBotManager for delete operation")
    custom_bot_manager = CustomBotManager()

    # Get bot info; returns (bot_name, system_prompt, created_by, created_at)
    # or None when the bot does not exist / is inactive.
    logger.info(f"Looking up custom bot '{bot_name}' in database")
    bot_info = custom_bot_manager.get_custom_bot(bot_name)

    if not bot_info:
        logger.warning(f"Custom bot '{bot_name}' not found by user {ctx.author.name}")
        await ctx.send(f"❌ Custom bot '{bot_name}' not found.")
        return

    logger.info(f"Custom bot '{bot_name}' found, owned by user {bot_info[2]}")

    # Check ownership: bot_info[2] is the created_by user id.
    if bot_info[2] != str(ctx.author.id):
        logger.warning(
            f"User {ctx.author.name} attempted to delete bot '{bot_name}' they don't own"
        )
        await ctx.send("❌ You can only delete your own custom bots.")
        return

    logger.info(f"User {ctx.author.name} is authorized to delete bot '{bot_name}'")

    # Delete the bot (hard delete; deactivate_custom_bot exists for soft delete).
    logger.info(f"Deleting custom bot '{bot_name}' from database")
    success = custom_bot_manager.delete_custom_bot(bot_name)

    if success:
        logger.info(
            f"Successfully deleted custom bot '{bot_name}' by user {ctx.author.name}"
        )
        await ctx.send(f"✅ Custom bot '{bot_name}' has been deleted.")
    else:
        logger.warning(
            f"Failed to delete custom bot '{bot_name}' by user {ctx.author.name}"
        )
        await ctx.send("❌ Failed to delete custom bot.")
|
||||
|
||||
|
||||
# Handle custom bot commands: intercepts every message, routes "!<botname> ..."
# invocations to the matching custom personality, and otherwise falls through
# to the regular command processor.
@bot.event
async def on_message(message):
    """Dispatch custom-bot invocations before normal command processing."""
    # Skip bot messages (including our own) to avoid reply loops.
    if message.author == bot.user:
        return

    logger.debug(
        f"Processing message from {message.author.name}: '{message.content[:50]}...'"
    )

    ctx = await bot.get_context(message)

    # Check if the message starts with a custom bot command.
    # Lowercased because bot names are stored lowercased by CustomBotManager.
    content = message.content.lower()

    # NOTE(review): a new manager (and DB connection) is created on every
    # message; consider reusing one instance if this becomes a hot path.
    logger.info(f"Initializing CustomBotManager to check for custom bot commands")
    custom_bot_manager = CustomBotManager()

    logger.info("Fetching list of custom bots to check for matching commands")
    custom_bots = custom_bot_manager.list_custom_bots()

    logger.info(f"Checking {len(custom_bots)} custom bots for command match")
    for bot_name, system_prompt, _ in custom_bots:
        # Check if message starts with the custom bot name followed by a space
        if content.startswith(f"!{bot_name} "):
            logger.info(
                f"Custom bot command detected: '{bot_name}' triggered by {message.author.name}"
            )

            # Extract the actual message (remove the bot name prefix)
            user_message = message.content[len(f"!{bot_name} ") :]
            logger.debug(
                f"Extracted user message for bot '{bot_name}': '{user_message[:50]}...'"
            )

            # Prepare the payload with the custom bot's personality as the
            # system prompt; handle_chat fills in the user content and RAG
            # context before sending.
            payload = {
                "model": "qwen3-vl-30b-a3b-instruct",
                "messages": [
                    {
                        "role": "system",
                        "content": system_prompt,
                    },
                    {"role": "user", "content": user_message},
                ],
                "max_completion_tokens": MAX_COMPLETION_TOKENS,
            }

            response_prefix = f"**{bot_name} response**"

            logger.info(f"Sending request to OpenAI API for bot '{bot_name}'")
            await handle_chat(
                ctx=ctx,
                message=user_message,
                payload=payload,
                response_prefix=response_prefix,
            )
            # Only the first matching bot handles the message.
            return

    # If no custom bot matched, call the default event handler so regular
    # !commands still work (required when overriding on_message).
    await bot.process_commands(message)
|
||||
|
||||
|
||||
@bot.command(name="doodlebob")
async def doodlebob(ctx, *, message: str):
    """Generate an image from *message*.

    Two-stage pipeline: an LLM first rewrites the user's message into an
    image-generation prompt (self-references are redirected to "a rat"),
    then the image endpoint renders it and the result is posted back.

    NOTE(review): uses ``requests``, which does not appear in the imports at
    the top of this file — verify it is imported, otherwise this raises
    NameError at runtime. ``call_llm`` is presumably defined later in this
    file; confirm.
    """
    logger.info(f"Doodlebob command triggered by {ctx.author.name}: {message[:100]}")
    await ctx.send(f"**Doodlebob erasing {message[:100]}...**")

    # Stage 1: ask the LLM to turn the message into an image prompt.
    image_prompt_payload = {
        "model": "qwen3-vl-30b-a3b-instruct",
        "messages": [
            {
                "role": "system",
                "content": (
                    "Given the following message, convert it to a detailed image generation prompt that will be passed directly into an image generation model."
                    "If told to generate an image of yourself, generate a picture of a rat. If told to generate a picture of 'me', 'myself', or some other self"
                    " reference, generate a picture of a rat. Only respond with a valid image generation prompt, do not affirm the user or respond to the user's"
                    " questions."
                ),
            },
            {"role": "user", "content": message},
        ],
    }

    # Wait for the generated image prompt
    image_prompt = await call_llm(ctx, image_prompt_payload)

    # If the string is empty we had an error
    if image_prompt == "":
        print("No image prompt supplied. Check for errors.")
        return

    # Alert the user we're generating the image
    await ctx.send(f"**Doodlebob calling drone strike on {image_prompt[:100]}...**")

    # Stage 2: build the image request payload.
    image_payload = {
        "model": "default",
        "prompt": image_prompt,
        "n": 1,
        "size": "1024x1024",
    }

    # Call the image generation endpoint (long timeout: generation is slow).
    response = requests.post(
        f"{IMAGE_GEN_ENDPOINT}/images/generations",
        json=image_payload,
        timeout=120,
    )

    if response.status_code == 200:
        result = response.json()
        # Decode the base64 image payload and send it as a Discord attachment.
        image_data = BytesIO(base64.b64decode(result["data"][0]["b64_json"]))
        send_img = discord.File(image_data, filename="image.png")
        await ctx.send(file=send_img)

    else:
        # NOTE(review): errors go to stdout only — the user gets no feedback.
        print(f"❌ Error: {response.status_code}")
        print(response.text)
        return None
|
||||
|
||||
|
||||
@bot.command(name="retcon")
|
||||
async def retcon(ctx, *, message: str):
|
||||
image_url = ctx.message.attachments[0].url
|
||||
image_data = requests.get(image_url).content
|
||||
image_bytestream = BytesIO(image_data)
|
||||
|
||||
await ctx.send(f"**Rewriting history to match {message[:100]}...**")
|
||||
|
||||
client = OpenAI(base_url=IMAGE_EDIT_ENDPOINT, api_key=OPENAI_API_KEY)
|
||||
|
||||
result = client.images.edit(
|
||||
model="placeholder",
|
||||
image=[image_bytestream],
|
||||
prompt=message,
|
||||
size="1024x1024",
|
||||
)
|
||||
|
||||
image_base64 = result.data[0].b64_json
|
||||
image_bytes = base64.b64decode(image_base64)
|
||||
|
||||
# Save the image to a file
|
||||
edited_image_data = BytesIO(image_bytes)
|
||||
send_img = discord.File(edited_image_data, filename="image.png")
|
||||
await ctx.send(file=send_img)
|
||||
|
||||
|
||||
async def handle_chat(ctx, *, message: str, payload: dict, response_prefix: str):
    """Run a RAG-augmented chat completion and relay the reply to the channel.

    Mutates `payload` in place: appends retrieved conversation history to the
    system message (``messages[0]``) and sets the user message
    (``messages[1]``). Stores both the user message and the bot reply in the
    chat database, then sends ``response_prefix`` followed by the reply in
    chunks.
    """
    # Check if API key is set
    if not OPENAI_API_KEY:
        await ctx.send(
            "Error: OpenAI API key is not configured. Please set the OPENAI_API_KEY environment variable."
        )
        return

    # Get database instance
    db = get_database()

    # Retrieve prior conversation context for this user via RAG lookup.
    context = db.get_conversation_context(
        user_id=str(ctx.author.id), current_message=message, max_context=5
    )

    if context:
        payload["messages"][0][
            "content"
        ] += f"\n\nRelevant conversation history:\n{context}"

    payload["messages"][1]["content"] = message

    try:
        # Initialize OpenAI client
        client = OpenAI(api_key=OPENAI_API_KEY, base_url=OPENAI_API_ENDPOINT)

        # Call OpenAI API. seed=-1 requests a random seed (llama.cpp
        # convention) — TODO confirm the backend accepts negative seeds.
        response = client.chat.completions.create(
            model=payload["model"],
            messages=payload["messages"],
            max_completion_tokens=MAX_COMPLETION_TOKENS,
            frequency_penalty=1.5,
            presence_penalty=1.5,
            temperature=1,
            seed=-1,
        )

        # Extract the generated text
        generated_text = response.choices[0].message.content.strip()

        # Store both user message and bot response in the database
        db.add_message(
            message_id=f"{ctx.message.id}",
            user_id=str(ctx.author.id),
            username=ctx.author.name,
            content=f"User: {message}",
            channel_id=str(ctx.channel.id),
            guild_id=str(ctx.guild.id) if ctx.guild else None,
        )

        db.add_message(
            message_id=f"{ctx.message.id}_response",
            user_id=str(bot.user.id),
            username=bot.user.name,
            content=f"Bot: {generated_text}",
            channel_id=str(ctx.channel.id),
            guild_id=str(ctx.guild.id) if ctx.guild else None,
        )

        # Send the response in <=1000-char chunks (Discord caps messages
        # at 2000 characters; 1000 leaves comfortable headroom).
        await ctx.send(response_prefix)
        while generated_text:
            send_chunk = generated_text[:1000]
            generated_text = generated_text[1000:]
            await ctx.send(send_chunk)

    except Exception as e:
        # The OpenAI client raises its own exception types; the previous
        # requests.exceptions handlers here were unreachable (no requests
        # call occurs inside the try block) and have been removed. The
        # debug `print(payload)` — which dumped the full prompt and
        # retrieved history to stdout — has also been removed.
        await ctx.send(f"Error: {str(e)}")
|
||||
|
||||
|
||||
async def call_llm(ctx, payload: dict) -> str:
    """Send `payload` to the chat-completion endpoint and return the reply text.

    Returns "" on any failure (missing API key, or an API error — which is
    also reported to the channel), so callers can test the result for
    truthiness, as `doodlebob` does.
    """
    # Check if API key is set
    if not OPENAI_API_KEY:
        await ctx.send(
            "Error: OpenAI API key is not configured. Please set the OPENAI_API_KEY environment variable."
        )
        return ""

    try:
        # Initialize OpenAI client
        client = OpenAI(api_key=OPENAI_API_KEY, base_url=OPENAI_API_ENDPOINT)

        # Call OpenAI API
        response = client.chat.completions.create(
            model=payload["model"],
            messages=payload["messages"],
            max_tokens=MAX_COMPLETION_TOKENS,
        )

        # Extract the generated text
        generated_text = response.choices[0].message.content.strip()
        return generated_text

    except Exception as e:
        # Fixes from review: the unused `headers` dict was removed; the
        # unreachable requests.exceptions handlers (no requests call in the
        # try block) fell through WITHOUT returning, so error paths could
        # yield None instead of the documented str — every path now
        # returns "". The debug print of the reply was also removed.
        await ctx.send(f"Error: {str(e)}")
        return ""
|
||||
|
||||
|
||||
# Run the bot
# Blocking entry point: bot.run() owns the asyncio event loop until the
# process is interrupted. Guarded so importing this module has no side effect.
if __name__ == "__main__":
    bot.run(DISCORD_TOKEN)
|
||||
0
vibe_bot/tests/__init__.py
Normal file
0
vibe_bot/tests/__init__.py
Normal file
30
vibe_bot/tests/conftest.py
Normal file
30
vibe_bot/tests/conftest.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import os

import pytest
from dotenv import load_dotenv


# Prefer an isolated test environment file; fall back to the shared .env.
_package_root = os.path.dirname(os.path.dirname(__file__))
env_test_path = os.path.join(_package_root, '.env.test')
env_path = os.path.join(_package_root, '.env')

if os.path.exists(env_test_path):
    load_dotenv(env_test_path)
    print("✓ Loaded environment variables from .env.test")
elif os.path.exists(env_path):
    load_dotenv(env_path)
    print("✓ Loaded environment variables from .env")
|
||||
|
||||
@pytest.fixture(autouse=True, scope="session")
def verify_env_loaded():
    """Verify critical environment variables are loaded before tests run"""
    # Every variable the bot's endpoints and auth require at import time.
    required_vars = (
        "DISCORD_TOKEN",
        "OPENAI_API_ENDPOINT",
        "IMAGE_GEN_ENDPOINT",
        "IMAGE_EDIT_ENDPOINT",
    )

    missing_vars = []
    for name in required_vars:
        if name not in os.environ:
            missing_vars.append(name)

    if missing_vars:
        pytest.fail(f"Missing required environment variables: {', '.join(missing_vars)}")

    yield
|
||||
71
vibe_bot/tests/test_llama_wrapper.py
Normal file
71
vibe_bot/tests/test_llama_wrapper.py
Normal file
@@ -0,0 +1,71 @@
|
||||
# Tests all functions in the llama-wrapper.py file
# Run with: python -m pytest test_llama_wrapper.py -v

from discord import message  # NOTE(review): unused in this module — candidate for removal
import pytest
from ..llama_wrapper import (
    chat_completion_think,
    chat_completion_instruct,
    image_generation,
    image_edit,
    embeddings,
)
from dotenv import load_dotenv
import os

# Endpoint configuration from the environment (loaded by conftest.py).
# Only the chat endpoint has a fallback default; the others are None
# unless explicitly set.
OPENAI_API_CHAT_ENDPOINT = os.getenv(
    "OPENAI_API_CHAT_ENDPOINT", "https://llama-cpp.reeselink.com"
)
OPENAI_API_IMAGE_ENDPOINT = os.getenv("OPENAI_API_IMAGE_ENDPOINT")
OPENAI_API_EDIT_ENDPOINT = os.getenv("OPENAI_API_EDIT_ENDPOINT")
OPENAI_API_EMBED_ENDPOINT = os.getenv("OPENAI_API_EMBED_ENDPOINT")

# Default models
DEFAULT_CHAT_MODEL = os.getenv("DEFAULT_CHAT_MODEL", "qwen3.5-35b-a3b")
DEFAULT_EMBED_MODEL = os.getenv("DEFAULT_EMBED_MODEL", "text-embedding-3-small")
DEFAULT_IMAGE_MODEL = os.getenv("DEFAULT_IMAGE_MODEL", "dall-e-3")
DEFAULT_EDIT_MODEL = os.getenv("DEFAULT_EDIT_MODEL", "dall-e-2")
|
||||
|
||||
|
||||
def test_chat_completion_think():
    """Smoke test for chat_completion_think against a live endpoint.

    NOTE(review): asserts nothing — this only verifies the call does not
    raise, and it requires a reachable OPENAI_API_CHAT_ENDPOINT. Consider
    asserting on the returned value once its shape is pinned down.
    """
    # This test will fail without an actual API endpoint
    # But it's here to show the structure
    chat_completion_think(
        system_prompt="You are a helpful assistant.",
        user_prompt="Tell me about Everquest",
        openai_url=OPENAI_API_CHAT_ENDPOINT,
        openai_api_key="placeholder",
        model=DEFAULT_CHAT_MODEL,
        max_tokens=100,
    )
|
||||
|
||||
|
||||
def test_chat_completion_instruct():
    """Smoke test for chat_completion_instruct against a live endpoint.

    NOTE(review): asserts nothing — this only verifies the call does not
    raise, and it requires a reachable OPENAI_API_CHAT_ENDPOINT.
    """
    # This test will fail without an actual API endpoint
    # But it's here to show the structure
    chat_completion_instruct(
        system_prompt="You are a helpful assistant.",
        user_prompt="Tell me about Everquest",
        openai_url=OPENAI_API_CHAT_ENDPOINT,
        openai_api_key="placeholder",
        model=DEFAULT_CHAT_MODEL,
        max_tokens=100,
    )
|
||||
|
||||
|
||||
def test_image_generation():
    """Structural placeholder for image_generation; a real test would need
    a live API endpoint, so this intentionally does nothing."""
|
||||
|
||||
|
||||
def test_image_edit():
    """Structural placeholder for image_edit; a real test would need a live
    API endpoint, so this intentionally does nothing."""
|
||||
|
||||
|
||||
def test_embeddings():
    """Structural placeholder for embeddings; a real test would need a live
    API endpoint, so this intentionally does nothing."""
|
||||
Reference in New Issue
Block a user