feat(*): add RAG support
This commit is contained in:
@@ -1,9 +1,14 @@
|
||||
from aiogram import Router
|
||||
|
||||
from . import apikey, chat, initialize, message, start
|
||||
from . import apikey, chat, initialize, message, rag, start
|
||||
|
||||
router = Router()
|
||||
|
||||
router.include_routers(
|
||||
start.router, initialize.router, apikey.router, chat.router, message.router
|
||||
start.router,
|
||||
initialize.router,
|
||||
apikey.router,
|
||||
chat.router,
|
||||
rag.router,
|
||||
message.router,
|
||||
)
|
||||
|
||||
@@ -20,6 +20,7 @@ from convex import ConvexInt64
|
||||
|
||||
from bot.modules.ai import (
|
||||
SUMMARIZE_PROMPT,
|
||||
AgentDeps,
|
||||
ImageData,
|
||||
create_follow_up_agent,
|
||||
create_text_agent,
|
||||
@@ -218,6 +219,18 @@ async def process_message_from_web( # noqa: C901, PLR0912, PLR0913, PLR0915
|
||||
api_key = user["geminiApiKey"]
|
||||
model_name = user.get("model", "gemini-3-pro-preview")
|
||||
|
||||
rag_connections = await convex.query(
|
||||
"ragConnections:getActiveForUser", {"userId": convex_user_id}
|
||||
)
|
||||
rag_db_names: list[str] = []
|
||||
if rag_connections:
|
||||
for conn in rag_connections:
|
||||
db = await convex.query(
|
||||
"rag:getDatabaseById", {"ragDatabaseId": conn["ragDatabaseId"]}
|
||||
)
|
||||
if db:
|
||||
rag_db_names.append(db["name"])
|
||||
|
||||
assistant_message_id = await convex.mutation(
|
||||
"messages:create",
|
||||
{
|
||||
@@ -235,7 +248,16 @@ async def process_message_from_web( # noqa: C901, PLR0912, PLR0913, PLR0915
|
||||
|
||||
system_prompt = SUMMARIZE_PROMPT if is_summarize else user.get("systemPrompt")
|
||||
text_agent = create_text_agent(
|
||||
api_key=api_key, model_name=model_name, system_prompt=system_prompt
|
||||
api_key=api_key,
|
||||
model_name=model_name,
|
||||
system_prompt=system_prompt,
|
||||
rag_db_names=rag_db_names if rag_db_names else None,
|
||||
)
|
||||
|
||||
agent_deps = (
|
||||
AgentDeps(user_id=convex_user_id, api_key=api_key, rag_db_names=rag_db_names)
|
||||
if rag_db_names
|
||||
else None
|
||||
)
|
||||
|
||||
processing_msg = None
|
||||
@@ -266,7 +288,7 @@ async def process_message_from_web( # noqa: C901, PLR0912, PLR0913, PLR0915
|
||||
chat_images = await fetch_chat_images(convex_chat_id)
|
||||
|
||||
final_answer = await stream_response(
|
||||
text_agent, prompt_text, hist, on_chunk, images=chat_images
|
||||
text_agent, prompt_text, hist, on_chunk, images=chat_images, deps=agent_deps
|
||||
)
|
||||
|
||||
if state:
|
||||
@@ -354,6 +376,19 @@ async def process_message(
|
||||
active_chat_id = user["activeChatId"]
|
||||
api_key = user["geminiApiKey"]
|
||||
model_name = user.get("model", "gemini-3-pro-preview")
|
||||
convex_user_id = user["_id"]
|
||||
|
||||
rag_connections = await convex.query(
|
||||
"ragConnections:getActiveForUser", {"userId": convex_user_id}
|
||||
)
|
||||
rag_db_names: list[str] = []
|
||||
if rag_connections:
|
||||
for conn in rag_connections:
|
||||
db = await convex.query(
|
||||
"rag:getDatabaseById", {"ragDatabaseId": conn["ragDatabaseId"]}
|
||||
)
|
||||
if db:
|
||||
rag_db_names.append(db["name"])
|
||||
|
||||
if not skip_user_message:
|
||||
await convex.mutation(
|
||||
@@ -382,7 +417,16 @@ async def process_message(
|
||||
)
|
||||
|
||||
text_agent = create_text_agent(
|
||||
api_key=api_key, model_name=model_name, system_prompt=user.get("systemPrompt")
|
||||
api_key=api_key,
|
||||
model_name=model_name,
|
||||
system_prompt=user.get("systemPrompt"),
|
||||
rag_db_names=rag_db_names if rag_db_names else None,
|
||||
)
|
||||
|
||||
agent_deps = (
|
||||
AgentDeps(user_id=convex_user_id, api_key=api_key, rag_db_names=rag_db_names)
|
||||
if rag_db_names
|
||||
else None
|
||||
)
|
||||
|
||||
processing_msg = await bot.send_message(chat_id, "...")
|
||||
@@ -401,7 +445,12 @@ async def process_message(
|
||||
chat_images = await fetch_chat_images(active_chat_id)
|
||||
|
||||
final_answer = await stream_response(
|
||||
text_agent, text, history[:-2], on_chunk, images=chat_images
|
||||
text_agent,
|
||||
text,
|
||||
history[:-2],
|
||||
on_chunk,
|
||||
images=chat_images,
|
||||
deps=agent_deps,
|
||||
)
|
||||
|
||||
await state.flush()
|
||||
|
||||
10
backend/src/bot/handlers/rag/__init__.py
Normal file
10
backend/src/bot/handlers/rag/__init__.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Aggregate router for the RAG feature handlers."""

from aiogram import Router

from .collection import router as collection_router
from .handler import router as command_router

# Command handlers are registered before collection-mode handlers so that
# explicit /rag commands always take precedence over content collection.
router = Router()
router.include_routers(command_router, collection_router)

__all__ = ["router"]
|
||||
94
backend/src/bot/handlers/rag/collection.py
Normal file
94
backend/src/bot/handlers/rag/collection.py
Normal file
@@ -0,0 +1,94 @@
|
||||
import io
|
||||
from uuid import uuid4
|
||||
|
||||
from aiogram import Bot, F, Router, types
|
||||
from aiogram.filters import Filter
|
||||
from convex import ConvexInt64
|
||||
|
||||
from utils import env
|
||||
from utils.convex import ConvexClient
|
||||
|
||||
router = Router()
|
||||
convex = ConvexClient(env.convex_url)
|
||||
|
||||
|
||||
class InRagCollectionMode(Filter):
    """Filter that matches only while the sender is in RAG collection mode.

    On a match, injects ``rag_user`` (the Convex user document) and
    ``rag_collection_mode`` (the active collection-mode record) into the
    handler's keyword arguments.
    """

    async def __call__(self, message: types.Message) -> bool | dict:
        sender = message.from_user
        if sender is None:
            return False

        user = await convex.query(
            "users:getByTelegramId", {"telegramId": ConvexInt64(sender.id)}
        )
        if not user:
            return False

        mode = user.get("ragCollectionMode")
        if not mode:
            return False

        return {"rag_user": user, "rag_collection_mode": mode}
|
||||
|
||||
|
||||
# Single shared filter instance reused by both collection-mode handlers below.
in_collection_mode = InRagCollectionMode()
|
||||
|
||||
|
||||
@router.message(in_collection_mode, F.text & ~F.text.startswith("/"))
async def on_text_in_collection_mode(
    message: types.Message, rag_user: dict, rag_collection_mode: dict
) -> None:
    """Store a plain (non-command) text message in the active RAG database."""
    content = message.text
    if not content:
        return

    api_key = rag_user.get("geminiApiKey")
    if not api_key:
        # Without an API key the Convex embedding action cannot run.
        return

    payload = {
        "userId": rag_user["_id"],
        "ragDatabaseId": rag_collection_mode["ragDatabaseId"],
        "apiKey": api_key,
        "text": content,
        # Ad-hoc text snippets get a random key; files use their name instead.
        "key": str(uuid4()),
    }
    await convex.action("rag:addContent", payload)

    await message.answer("✓ Text added to knowledge base.")
|
||||
|
||||
|
||||
@router.message(in_collection_mode, F.document)
async def on_document_in_collection_mode(
    message: types.Message, bot: Bot, rag_user: dict, rag_collection_mode: dict
) -> None:
    """Store the contents of an uploaded .txt document in the active RAG database.

    Silently ignores messages without a document or users without an API key;
    replies with an error for unsupported, undownloadable, or non-UTF-8 files.
    """
    if not message.document:
        return

    api_key = rag_user.get("geminiApiKey")
    if not api_key:
        return

    doc = message.document
    if not doc.file_name or not doc.file_name.endswith(".txt"):
        await message.answer("Only .txt files are supported for RAG.")
        return

    file = await bot.get_file(doc.file_id)
    if not file.file_path:
        await message.answer("Failed to download file.")
        return

    buffer = io.BytesIO()
    await bot.download_file(file.file_path, buffer)
    try:
        # .txt uploads are expected to be UTF-8; reject anything else instead
        # of crashing the handler with an unhandled UnicodeDecodeError.
        text = buffer.getvalue().decode("utf-8")
    except UnicodeDecodeError:
        await message.answer("File must be UTF-8 encoded text.")
        return

    await convex.action(
        "rag:addContent",
        {
            "userId": rag_user["_id"],
            "ragDatabaseId": rag_collection_mode["ragDatabaseId"],
            "apiKey": api_key,
            "text": text,
            # Files are keyed by name so re-uploading replaces the same entry.
            "key": doc.file_name,
        },
    )

    await message.answer(f"✓ File '{doc.file_name}' added to knowledge base.")
|
||||
217
backend/src/bot/handlers/rag/handler.py
Normal file
217
backend/src/bot/handlers/rag/handler.py
Normal file
@@ -0,0 +1,217 @@
|
||||
from aiogram import Router, types
|
||||
from aiogram.filters import Command
|
||||
from convex import ConvexInt64
|
||||
|
||||
from utils import env
|
||||
from utils.convex import ConvexClient
|
||||
|
||||
router = Router()
|
||||
convex = ConvexClient(env.convex_url)
|
||||
|
||||
|
||||
@router.message(Command("rag"))
|
||||
async def on_rag(message: types.Message) -> None: # noqa: C901, PLR0911
|
||||
if not message.from_user or not message.text:
|
||||
return
|
||||
|
||||
args = message.text.split()[1:]
|
||||
|
||||
if not args:
|
||||
await show_usage(message)
|
||||
return
|
||||
|
||||
user = await convex.query(
|
||||
"users:getByTelegramId", {"telegramId": ConvexInt64(message.from_user.id)}
|
||||
)
|
||||
|
||||
if not user:
|
||||
await message.answer("Use /apikey first to set your Gemini API key.")
|
||||
return
|
||||
|
||||
if not user.get("geminiApiKey"):
|
||||
await message.answer("Use /apikey first to set your Gemini API key.")
|
||||
return
|
||||
|
||||
user_id = user["_id"]
|
||||
|
||||
if args[0] == "list":
|
||||
await list_databases(message, user_id)
|
||||
return
|
||||
|
||||
if args[0] == "save":
|
||||
await save_collection(message, user_id)
|
||||
return
|
||||
|
||||
db_name = args[0]
|
||||
|
||||
if len(args) < 2: # noqa: PLR2004
|
||||
await show_db_usage(message, db_name)
|
||||
return
|
||||
|
||||
command = args[1]
|
||||
|
||||
if command == "add":
|
||||
await start_collection(message, user_id, db_name)
|
||||
elif command == "connect":
|
||||
await connect_database(message, user_id, db_name)
|
||||
elif command == "disconnect":
|
||||
await disconnect_database(message, user_id, db_name)
|
||||
elif command == "clear":
|
||||
await clear_database(message, user_id, user["geminiApiKey"], db_name)
|
||||
else:
|
||||
await show_db_usage(message, db_name)
|
||||
|
||||
|
||||
async def show_usage(message: types.Message) -> None:
    """Reply with the top-level /rag command reference.

    The ``<name>`` placeholder must be HTML-escaped (``&lt;name&gt;``): the
    message is sent with ``parse_mode="HTML"``, and Telegram rejects messages
    containing unsupported tags, so a literal ``<name>`` made the entire
    reply fail with a parse error.
    """
    await message.answer(
        "<b>RAG Commands:</b>\n\n"
        "<code>/rag list</code> - List your RAG databases\n"
        "<code>/rag save</code> - Exit collection mode\n\n"
        "<code>/rag &lt;name&gt; add</code> - Start adding content\n"
        "<code>/rag &lt;name&gt; connect</code> - Connect to all chats\n"
        "<code>/rag &lt;name&gt; disconnect</code> - Disconnect\n"
        "<code>/rag &lt;name&gt; clear</code> - Delete database",
        parse_mode="HTML",
    )
|
||||
|
||||
|
||||
async def show_db_usage(message: types.Message, db_name: str) -> None:
    """Reply with the per-database /rag sub-command reference for *db_name*."""
    lines = [
        f"<b>Commands for '{db_name}':</b>\n",
        f"<code>/rag {db_name} add</code> - Start adding content",
        f"<code>/rag {db_name} connect</code> - Connect to all chats",
        f"<code>/rag {db_name} disconnect</code> - Disconnect",
        f"<code>/rag {db_name} clear</code> - Delete database",
    ]
    await message.answer("\n".join(lines), parse_mode="HTML")
|
||||
|
||||
|
||||
async def list_databases(message: types.Message, user_id: str) -> None:
    """Reply with the user's RAG databases, marking the connected ones."""
    databases = await convex.query("rag:listDatabases", {"userId": user_id})
    connections = await convex.query(
        "ragConnections:getActiveForUser", {"userId": user_id}
    )

    if not databases:
        await message.answer(
            "No RAG databases found.\n\nCreate one with: <code>/rag mydb add</code>",
            parse_mode="HTML",
        )
        return

    active_ids = {connection["ragDatabaseId"] for connection in connections}
    entries = [
        f"• {db['name']}{' (connected)' if db['_id'] in active_ids else ''}"
        for db in databases
    ]
    text = "\n".join(["<b>Your RAG databases:</b>\n", *entries])
    await message.answer(text, parse_mode="HTML")
|
||||
|
||||
|
||||
async def start_collection(message: types.Message, user_id: str, db_name: str) -> None:
    """Create *db_name* and switch the user into content-collection mode."""
    active_mode = await convex.query(
        "users:getRagCollectionMode", {"userId": user_id}
    )
    if active_mode:
        # Only one collection session at a time; the user must /rag save first.
        await message.answer(
            "Already in collection mode. Use <code>/rag save</code> to exit first.",
            parse_mode="HTML",
        )
        return

    database_id = await convex.mutation(
        "rag:createDatabase", {"userId": user_id, "name": db_name}
    )
    await convex.mutation(
        "users:startRagCollectionMode",
        {"userId": user_id, "ragDatabaseId": database_id},
    )

    confirmation = (
        f"📚 <b>Collection mode started for '{db_name}'</b>\n\n"
        "Send text messages or .txt files to add content.\n"
        "Use <code>/rag save</code> when done."
    )
    await message.answer(confirmation, parse_mode="HTML")
|
||||
|
||||
|
||||
async def save_collection(message: types.Message, user_id: str) -> None:
    """Exit collection mode and tell the user how to connect the database."""
    mode = await convex.query(
        "users:getRagCollectionMode", {"userId": user_id}
    )
    if not mode:
        await message.answer("Not in collection mode.")
        return

    db = await convex.query(
        "rag:getDatabaseById", {"ragDatabaseId": mode["ragDatabaseId"]}
    )
    if db:
        db_name = db["name"]
    else:
        # Fall back to a generic label if the database row has vanished.
        db_name = "database"

    await convex.mutation("users:stopRagCollectionMode", {"userId": user_id})

    await message.answer(
        f"✓ Collection mode ended for '{db_name}'.\n\n"
        f"Connect it with: <code>/rag {db_name} connect</code>",
        parse_mode="HTML",
    )
|
||||
|
||||
|
||||
async def connect_database(message: types.Message, user_id: str, db_name: str) -> None:
    """Attach *db_name* globally so it is used in all of the user's chats."""
    db = await convex.query("rag:getDatabase", {"userId": user_id, "name": db_name})
    if not db:
        await message.answer(
            f"Database '{db_name}' not found.\n"
            f"Create it with: <code>/rag {db_name} add</code>",
            parse_mode="HTML",
        )
        return

    connection_args = {"userId": user_id, "ragDatabaseId": db["_id"], "isGlobal": True}
    await convex.mutation("ragConnections:connect", connection_args)

    await message.answer(f"✓ '{db_name}' connected to all your chats.")
|
||||
|
||||
|
||||
async def disconnect_database(
    message: types.Message, user_id: str, db_name: str
) -> None:
    """Remove the active connection for *db_name*, if one exists."""
    db = await convex.query("rag:getDatabase", {"userId": user_id, "name": db_name})
    if not db:
        await message.answer(f"Database '{db_name}' not found.")
        return

    removed = await convex.mutation(
        "ragConnections:disconnect", {"userId": user_id, "ragDatabaseId": db["_id"]}
    )

    reply = (
        f"✓ '{db_name}' disconnected."
        if removed
        else f"'{db_name}' was not connected."
    )
    await message.answer(reply)
|
||||
|
||||
|
||||
async def clear_database(
    message: types.Message, user_id: str, api_key: str, db_name: str
) -> None:
    """Delete *db_name* and all of its stored content."""
    db = await convex.query("rag:getDatabase", {"userId": user_id, "name": db_name})
    if not db:
        await message.answer(f"Database '{db_name}' not found.")
        return

    # Deletion runs as a Convex action because it must also remove the
    # embeddings stored under the user's API key.
    deleted = await convex.action(
        "rag:deleteDatabase", {"userId": user_id, "name": db_name, "apiKey": api_key}
    )

    outcome = (
        f"✓ '{db_name}' deleted." if deleted else f"Failed to delete '{db_name}'."
    )
    await message.answer(outcome)
|
||||
@@ -1,4 +1,5 @@
|
||||
from .agent import (
|
||||
AgentDeps,
|
||||
ImageData,
|
||||
StreamCallback,
|
||||
create_follow_up_agent,
|
||||
@@ -12,6 +13,7 @@ __all__ = [
|
||||
"DEFAULT_FOLLOW_UP",
|
||||
"PRESETS",
|
||||
"SUMMARIZE_PROMPT",
|
||||
"AgentDeps",
|
||||
"ImageData",
|
||||
"StreamCallback",
|
||||
"create_follow_up_agent",
|
||||
|
||||
@@ -7,16 +7,22 @@ from pydantic_ai import (
|
||||
ModelMessage,
|
||||
ModelRequest,
|
||||
ModelResponse,
|
||||
RunContext,
|
||||
TextPart,
|
||||
UserPromptPart,
|
||||
)
|
||||
from pydantic_ai.models.google import GoogleModel
|
||||
from pydantic_ai.providers.google import GoogleProvider
|
||||
|
||||
from utils import env
|
||||
from utils.convex import ConvexClient
|
||||
from utils.logging import logger
|
||||
|
||||
from .models import FollowUpOptions
|
||||
from .prompts import DEFAULT_FOLLOW_UP
|
||||
|
||||
StreamCallback = Callable[[str], Awaitable[None]]
|
||||
convex = ConvexClient(env.convex_url)
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -25,21 +31,70 @@ class ImageData:
|
||||
media_type: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class AgentDeps:
|
||||
user_id: str
|
||||
api_key: str
|
||||
rag_db_names: list[str]
|
||||
|
||||
|
||||
LATEX_INSTRUCTION = "For math, use LaTeX: $...$ inline, $$...$$ display."
|
||||
|
||||
DEFAULT_SYSTEM_PROMPT = (
|
||||
"You are a helpful AI assistant. Provide clear, concise answers."
|
||||
)
|
||||
|
||||
RAG_SYSTEM_ADDITION = (
|
||||
" You have access to a knowledge base. Use the search_knowledge_base tool "
|
||||
"to find relevant information when the user asks about topics that might "
|
||||
"be covered in the knowledge base."
|
||||
)
|
||||
|
||||
|
||||
def create_text_agent(
|
||||
api_key: str,
|
||||
model_name: str = "gemini-3-pro-preview",
|
||||
system_prompt: str | None = None,
|
||||
) -> Agent[None, str]:
|
||||
rag_db_names: list[str] | None = None,
|
||||
) -> Agent[AgentDeps, str] | Agent[None, str]:
|
||||
provider = GoogleProvider(api_key=api_key)
|
||||
model = GoogleModel(model_name, provider=provider)
|
||||
base_prompt = system_prompt or DEFAULT_SYSTEM_PROMPT
|
||||
|
||||
if rag_db_names:
|
||||
full_prompt = f"{base_prompt}{RAG_SYSTEM_ADDITION} {LATEX_INSTRUCTION}"
|
||||
agent: Agent[None, str] = Agent(
|
||||
model, instructions=full_prompt, deps_type=AgentDeps
|
||||
)
|
||||
|
||||
@agent.tool
|
||||
async def search_knowledge_base(ctx: RunContext[AgentDeps], query: str) -> str:
|
||||
"""Search the user's knowledge base for relevant information.
|
||||
|
||||
Args:
|
||||
ctx: The run context containing user dependencies.
|
||||
query: The search query to find relevant information.
|
||||
|
||||
Returns:
|
||||
Relevant text from the knowledge base.
|
||||
"""
|
||||
logger.info(f"Searching knowledge base for {query}")
|
||||
result = await convex.action(
|
||||
"rag:searchMultiple",
|
||||
{
|
||||
"userId": ctx.deps.user_id,
|
||||
"dbNames": ctx.deps.rag_db_names,
|
||||
"apiKey": ctx.deps.api_key,
|
||||
"query": query,
|
||||
"limit": 5,
|
||||
},
|
||||
)
|
||||
if result and result.get("text"):
|
||||
return f"Knowledge base results:\n\n{result['text']}"
|
||||
return "No relevant information found in the knowledge base."
|
||||
|
||||
return agent
|
||||
|
||||
full_prompt = f"{base_prompt} {LATEX_INSTRUCTION}"
|
||||
return Agent(model, instructions=full_prompt)
|
||||
|
||||
@@ -68,12 +123,13 @@ def build_message_history(history: list[dict[str, str]]) -> list[ModelMessage]:
|
||||
|
||||
|
||||
async def stream_response( # noqa: PLR0913
|
||||
text_agent: Agent[None, str],
|
||||
text_agent: Agent[AgentDeps, str] | Agent[None, str],
|
||||
message: str,
|
||||
history: list[dict[str, str]] | None = None,
|
||||
on_chunk: StreamCallback | None = None,
|
||||
image: ImageData | None = None,
|
||||
images: list[ImageData] | None = None,
|
||||
deps: AgentDeps | None = None,
|
||||
) -> str:
|
||||
message_history = build_message_history(history) if history else None
|
||||
|
||||
@@ -88,7 +144,7 @@ async def stream_response( # noqa: PLR0913
|
||||
else:
|
||||
prompt = message # type: ignore[assignment]
|
||||
|
||||
stream = text_agent.run_stream(prompt, message_history=message_history)
|
||||
stream = text_agent.run_stream(prompt, message_history=message_history, deps=deps)
|
||||
async with stream as result:
|
||||
async for text in result.stream_text():
|
||||
if on_chunk:
|
||||
|
||||
@@ -22,6 +22,39 @@ for example Group A: 1, Group A: 2a, Group B: 2b, etc.
|
||||
Or, Theory: 1, Theory: 2a, Practice: 1, etc.
|
||||
Only output identifiers that exist in the image."""
|
||||
|
||||
|
||||
RAGTHEORY_SYSTEM = """You help answer theoretical exam questions.
|
||||
|
||||
When you receive an IMAGE with exam questions:
|
||||
1. Identify ALL questions/blanks to fill
|
||||
2. For EACH question, use search_knowledge_base to find relevant info
|
||||
3. Provide exam-ready answers
|
||||
|
||||
OUTPUT FORMAT:
|
||||
- Number each answer matching the question number
|
||||
- Answer length: match what the question expects
|
||||
(1 sentence, 1-2 sentences, fill blank, list items, etc.)
|
||||
- Write answers EXACTLY as they should appear on the exam sheet - ready to copy 1:1
|
||||
- Use precise terminology from the course
|
||||
- No explanations, no "because", no fluff - just the answer itself
|
||||
- For multi-part questions (a, b, c), answer each part separately
|
||||
|
||||
LANGUAGE: Match the exam language (usually English for technical terms)
|
||||
|
||||
STYLE: Academic, precise, minimal - as if you're writing on paper with limited space
|
||||
|
||||
Example input:
|
||||
"Stigmergy is ............"
|
||||
Example output:
|
||||
"1. Stigmergy is indirect communication through environment\
|
||||
modification, e.g. by leaving some marks."
|
||||
|
||||
Example input:
|
||||
"How is crossing over performed in genetic programming? (one precise sentence)"
|
||||
Example output:
|
||||
"3. Usually implemented as swapping randomly selected subtrees of parent trees"
|
||||
"""
|
||||
|
||||
DEFAULT_FOLLOW_UP = (
|
||||
"Based on the conversation, suggest 3 short follow-up questions "
|
||||
"the user might want to ask. Each option should be under 50 characters."
|
||||
@@ -38,4 +71,7 @@ Summarize VERY briefly:
|
||||
|
||||
Max 2-3 sentences. This is for Apple Watch display."""
|
||||
|
||||
PRESETS: dict[str, tuple[str, str]] = {"exam": (EXAM_SYSTEM, EXAM_FOLLOW_UP)}
|
||||
PRESETS: dict[str, tuple[str, str]] = {
|
||||
"exam": (EXAM_SYSTEM, EXAM_FOLLOW_UP),
|
||||
"ragtheory": (RAGTHEORY_SYSTEM, EXAM_FOLLOW_UP),
|
||||
}
|
||||
|
||||
@@ -23,10 +23,19 @@ class Settings(BaseSettings):
|
||||
log: LogSettings
|
||||
|
||||
convex_url: str = Field(validation_alias=AliasChoices("CONVEX_SELF_HOSTED_URL"))
|
||||
convex_http_url: str = Field(
|
||||
default="", validation_alias=AliasChoices("CONVEX_HTTP_URL")
|
||||
)
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
case_sensitive=False, env_file=".env", env_nested_delimiter="__", extra="ignore"
|
||||
)
|
||||
|
||||
@property
|
||||
def convex_http_base_url(self) -> str:
|
||||
if self.convex_http_url:
|
||||
return self.convex_http_url
|
||||
return self.convex_url.replace("/convex", "/convex-http")
|
||||
|
||||
|
||||
env = Settings() # ty:ignore[missing-argument]
|
||||
|
||||
Reference in New Issue
Block a user