Compare commits


5 Commits

28 changed files with 1620 additions and 66 deletions

View File

@@ -1,9 +1,14 @@
 from aiogram import Router
 
-from . import apikey, chat, initialize, message, start
+from . import apikey, chat, initialize, message, rag, start
 
 router = Router()
 router.include_routers(
-    start.router, initialize.router, apikey.router, chat.router, message.router
+    start.router,
+    initialize.router,
+    apikey.router,
+    chat.router,
+    rag.router,
+    message.router,
 )

View File

@@ -20,6 +20,7 @@ from convex import ConvexInt64
 from bot.modules.ai import (
     SUMMARIZE_PROMPT,
+    AgentDeps,
     ImageData,
     create_follow_up_agent,
     create_text_agent,
@@ -218,6 +219,18 @@ async def process_message_from_web( # noqa: C901, PLR0912, PLR0913, PLR0915
     api_key = user["geminiApiKey"]
     model_name = user.get("model", "gemini-3-pro-preview")
+
+    rag_connections = await convex.query(
+        "ragConnections:getActiveForUser", {"userId": convex_user_id}
+    )
+    rag_db_names: list[str] = []
+    if rag_connections:
+        for conn in rag_connections:
+            db = await convex.query(
+                "rag:getDatabaseById", {"ragDatabaseId": conn["ragDatabaseId"]}
+            )
+            if db:
+                rag_db_names.append(db["name"])
 
     assistant_message_id = await convex.mutation(
         "messages:create",
         {
@@ -235,7 +248,16 @@ async def process_message_from_web( # noqa: C901, PLR0912, PLR0913, PLR0915
     system_prompt = SUMMARIZE_PROMPT if is_summarize else user.get("systemPrompt")
     text_agent = create_text_agent(
-        api_key=api_key, model_name=model_name, system_prompt=system_prompt
+        api_key=api_key,
+        model_name=model_name,
+        system_prompt=system_prompt,
+        rag_db_names=rag_db_names if rag_db_names else None,
+    )
+    agent_deps = (
+        AgentDeps(user_id=convex_user_id, api_key=api_key, rag_db_names=rag_db_names)
+        if rag_db_names
+        else None
     )
 
     processing_msg = None
@@ -266,7 +288,7 @@ async def process_message_from_web( # noqa: C901, PLR0912, PLR0913, PLR0915
     chat_images = await fetch_chat_images(convex_chat_id)
     final_answer = await stream_response(
-        text_agent, prompt_text, hist, on_chunk, images=chat_images
+        text_agent, prompt_text, hist, on_chunk, images=chat_images, deps=agent_deps
     )
 
     if state:
@@ -354,6 +376,19 @@ async def process_message(
     active_chat_id = user["activeChatId"]
     api_key = user["geminiApiKey"]
     model_name = user.get("model", "gemini-3-pro-preview")
+    convex_user_id = user["_id"]
+
+    rag_connections = await convex.query(
+        "ragConnections:getActiveForUser", {"userId": convex_user_id}
+    )
+    rag_db_names: list[str] = []
+    if rag_connections:
+        for conn in rag_connections:
+            db = await convex.query(
+                "rag:getDatabaseById", {"ragDatabaseId": conn["ragDatabaseId"]}
+            )
+            if db:
+                rag_db_names.append(db["name"])
 
     if not skip_user_message:
         await convex.mutation(
@@ -382,7 +417,16 @@ async def process_message(
         )
 
     text_agent = create_text_agent(
-        api_key=api_key, model_name=model_name, system_prompt=user.get("systemPrompt")
+        api_key=api_key,
+        model_name=model_name,
+        system_prompt=user.get("systemPrompt"),
+        rag_db_names=rag_db_names if rag_db_names else None,
+    )
+    agent_deps = (
+        AgentDeps(user_id=convex_user_id, api_key=api_key, rag_db_names=rag_db_names)
+        if rag_db_names
+        else None
     )
 
     processing_msg = await bot.send_message(chat_id, "...")
@@ -401,7 +445,12 @@ async def process_message(
     chat_images = await fetch_chat_images(active_chat_id)
     final_answer = await stream_response(
-        text_agent, text, history[:-2], on_chunk, images=chat_images
+        text_agent,
+        text,
+        history[:-2],
+        on_chunk,
+        images=chat_images,
+        deps=agent_deps,
     )
 
     await state.flush()

View File

@@ -0,0 +1,10 @@
from aiogram import Router
from .collection import router as collection_router
from .handler import router as command_router
router = Router()
router.include_routers(command_router, collection_router)
__all__ = ["router"]

View File

@@ -0,0 +1,94 @@
import io
from uuid import uuid4

from aiogram import Bot, F, Router, types
from aiogram.filters import Filter
from convex import ConvexInt64

from utils import env
from utils.convex import ConvexClient

router = Router()
convex = ConvexClient(env.convex_url)


class InRagCollectionMode(Filter):
    async def __call__(self, message: types.Message) -> bool | dict:
        if not message.from_user:
            return False
        user = await convex.query(
            "users:getByTelegramId", {"telegramId": ConvexInt64(message.from_user.id)}
        )
        if not user or not user.get("ragCollectionMode"):
            return False
        return {"rag_user": user, "rag_collection_mode": user["ragCollectionMode"]}


in_collection_mode = InRagCollectionMode()


@router.message(in_collection_mode, F.text & ~F.text.startswith("/"))
async def on_text_in_collection_mode(
    message: types.Message, rag_user: dict, rag_collection_mode: dict
) -> None:
    if not message.text:
        return
    api_key = rag_user.get("geminiApiKey")
    if not api_key:
        return
    await convex.action(
        "rag:addContent",
        {
            "userId": rag_user["_id"],
            "ragDatabaseId": rag_collection_mode["ragDatabaseId"],
            "apiKey": api_key,
            "text": message.text,
            "key": str(uuid4()),
        },
    )
    await message.answer("✓ Text added to knowledge base.")


@router.message(in_collection_mode, F.document)
async def on_document_in_collection_mode(
    message: types.Message, bot: Bot, rag_user: dict, rag_collection_mode: dict
) -> None:
    if not message.document:
        return
    api_key = rag_user.get("geminiApiKey")
    if not api_key:
        return
    doc = message.document
    if not doc.file_name or not doc.file_name.endswith(".txt"):
        await message.answer("Only .txt files are supported for RAG.")
        return
    file = await bot.get_file(doc.file_id)
    if not file.file_path:
        await message.answer("Failed to download file.")
        return
    buffer = io.BytesIO()
    await bot.download_file(file.file_path, buffer)
    text = buffer.getvalue().decode("utf-8")
    await convex.action(
        "rag:addContent",
        {
            "userId": rag_user["_id"],
            "ragDatabaseId": rag_collection_mode["ragDatabaseId"],
            "apiKey": api_key,
            "text": text,
            "key": doc.file_name,
        },
    )
    await message.answer(f"✓ File '{doc.file_name}' added to knowledge base.")

View File

@@ -0,0 +1,217 @@
from aiogram import Router, types
from aiogram.filters import Command
from convex import ConvexInt64

from utils import env
from utils.convex import ConvexClient

router = Router()
convex = ConvexClient(env.convex_url)


@router.message(Command("rag"))
async def on_rag(message: types.Message) -> None:  # noqa: C901, PLR0911
    if not message.from_user or not message.text:
        return

    args = message.text.split()[1:]
    if not args:
        await show_usage(message)
        return

    user = await convex.query(
        "users:getByTelegramId", {"telegramId": ConvexInt64(message.from_user.id)}
    )
    if not user:
        await message.answer("Use /apikey first to set your Gemini API key.")
        return
    if not user.get("geminiApiKey"):
        await message.answer("Use /apikey first to set your Gemini API key.")
        return

    user_id = user["_id"]

    if args[0] == "list":
        await list_databases(message, user_id)
        return
    if args[0] == "save":
        await save_collection(message, user_id)
        return

    db_name = args[0]
    if len(args) < 2:  # noqa: PLR2004
        await show_db_usage(message, db_name)
        return

    command = args[1]
    if command == "add":
        await start_collection(message, user_id, db_name)
    elif command == "connect":
        await connect_database(message, user_id, db_name)
    elif command == "disconnect":
        await disconnect_database(message, user_id, db_name)
    elif command == "clear":
        await clear_database(message, user_id, user["geminiApiKey"], db_name)
    else:
        await show_db_usage(message, db_name)


async def show_usage(message: types.Message) -> None:
    await message.answer(
        "<b>RAG Commands:</b>\n\n"
        "<code>/rag list</code> - List your RAG databases\n"
        "<code>/rag save</code> - Exit collection mode\n\n"
        "<code>/rag &lt;name&gt; add</code> - Start adding content\n"
        "<code>/rag &lt;name&gt; connect</code> - Connect to all chats\n"
        "<code>/rag &lt;name&gt; disconnect</code> - Disconnect\n"
        "<code>/rag &lt;name&gt; clear</code> - Delete database",
        parse_mode="HTML",
    )


async def show_db_usage(message: types.Message, db_name: str) -> None:
    await message.answer(
        f"<b>Commands for '{db_name}':</b>\n\n"
        f"<code>/rag {db_name} add</code> - Start adding content\n"
        f"<code>/rag {db_name} connect</code> - Connect to all chats\n"
        f"<code>/rag {db_name} disconnect</code> - Disconnect\n"
        f"<code>/rag {db_name} clear</code> - Delete database",
        parse_mode="HTML",
    )


async def list_databases(message: types.Message, user_id: str) -> None:
    databases = await convex.query("rag:listDatabases", {"userId": user_id})
    connections = await convex.query(
        "ragConnections:getActiveForUser", {"userId": user_id}
    )
    connected_db_ids = {conn["ragDatabaseId"] for conn in connections}

    if not databases:
        await message.answer(
            "No RAG databases found.\n\nCreate one with: <code>/rag mydb add</code>",
            parse_mode="HTML",
        )
        return

    lines = ["<b>Your RAG databases:</b>\n"]
    for db in databases:
        status = " (connected)" if db["_id"] in connected_db_ids else ""
        lines.append(f"{db['name']}{status}")
    await message.answer("\n".join(lines), parse_mode="HTML")


async def start_collection(message: types.Message, user_id: str, db_name: str) -> None:
    collection_mode = await convex.query(
        "users:getRagCollectionMode", {"userId": user_id}
    )
    if collection_mode:
        await message.answer(
            "Already in collection mode. Use <code>/rag save</code> to exit first.",
            parse_mode="HTML",
        )
        return

    db_id = await convex.mutation(
        "rag:createDatabase", {"userId": user_id, "name": db_name}
    )
    await convex.mutation(
        "users:startRagCollectionMode", {"userId": user_id, "ragDatabaseId": db_id}
    )
    await message.answer(
        f"📚 <b>Collection mode started for '{db_name}'</b>\n\n"
        "Send text messages or .txt files to add content.\n"
        "Use <code>/rag save</code> when done.",
        parse_mode="HTML",
    )


async def save_collection(message: types.Message, user_id: str) -> None:
    collection_mode = await convex.query(
        "users:getRagCollectionMode", {"userId": user_id}
    )
    if not collection_mode:
        await message.answer("Not in collection mode.")
        return

    db = await convex.query(
        "rag:getDatabaseById", {"ragDatabaseId": collection_mode["ragDatabaseId"]}
    )
    db_name = db["name"] if db else "database"

    await convex.mutation("users:stopRagCollectionMode", {"userId": user_id})
    await message.answer(
        f"✓ Collection mode ended for '{db_name}'.\n\n"
        f"Connect it with: <code>/rag {db_name} connect</code>",
        parse_mode="HTML",
    )


async def connect_database(message: types.Message, user_id: str, db_name: str) -> None:
    db = await convex.query("rag:getDatabase", {"userId": user_id, "name": db_name})
    if not db:
        await message.answer(
            f"Database '{db_name}' not found.\n"
            f"Create it with: <code>/rag {db_name} add</code>",
            parse_mode="HTML",
        )
        return

    await convex.mutation(
        "ragConnections:connect",
        {"userId": user_id, "ragDatabaseId": db["_id"], "isGlobal": True},
    )
    await message.answer(f"'{db_name}' connected to all your chats.")


async def disconnect_database(
    message: types.Message, user_id: str, db_name: str
) -> None:
    db = await convex.query("rag:getDatabase", {"userId": user_id, "name": db_name})
    if not db:
        await message.answer(f"Database '{db_name}' not found.")
        return

    result = await convex.mutation(
        "ragConnections:disconnect", {"userId": user_id, "ragDatabaseId": db["_id"]}
    )
    if result:
        await message.answer(f"'{db_name}' disconnected.")
    else:
        await message.answer(f"'{db_name}' was not connected.")


async def clear_database(
    message: types.Message, user_id: str, api_key: str, db_name: str
) -> None:
    db = await convex.query("rag:getDatabase", {"userId": user_id, "name": db_name})
    if not db:
        await message.answer(f"Database '{db_name}' not found.")
        return

    result = await convex.action(
        "rag:deleteDatabase", {"userId": user_id, "name": db_name, "apiKey": api_key}
    )
    if result:
        await message.answer(f"'{db_name}' deleted.")
    else:
        await message.answer(f"Failed to delete '{db_name}'.")

View File

@@ -1,4 +1,5 @@
 from .agent import (
+    AgentDeps,
     ImageData,
     StreamCallback,
     create_follow_up_agent,
@@ -12,6 +13,7 @@ __all__ = [
     "DEFAULT_FOLLOW_UP",
     "PRESETS",
     "SUMMARIZE_PROMPT",
+    "AgentDeps",
     "ImageData",
     "StreamCallback",
     "create_follow_up_agent",

View File

@@ -7,16 +7,22 @@ from pydantic_ai import (
     ModelMessage,
     ModelRequest,
     ModelResponse,
+    RunContext,
     TextPart,
     UserPromptPart,
 )
 from pydantic_ai.models.google import GoogleModel
 from pydantic_ai.providers.google import GoogleProvider
 
+from utils import env
+from utils.convex import ConvexClient
+from utils.logging import logger
+
 from .models import FollowUpOptions
 from .prompts import DEFAULT_FOLLOW_UP
 
 StreamCallback = Callable[[str], Awaitable[None]]
+convex = ConvexClient(env.convex_url)
 
 
 @dataclass
@@ -25,21 +31,70 @@ class ImageData:
     media_type: str
 
 
+@dataclass
+class AgentDeps:
+    user_id: str
+    api_key: str
+    rag_db_names: list[str]
+
+
 LATEX_INSTRUCTION = "For math, use LaTeX: $...$ inline, $$...$$ display."
 DEFAULT_SYSTEM_PROMPT = (
     "You are a helpful AI assistant. Provide clear, concise answers."
 )
+RAG_SYSTEM_ADDITION = (
+    " You have access to a knowledge base. Use the search_knowledge_base tool "
+    "to find relevant information when the user asks about topics that might "
+    "be covered in the knowledge base."
+)
 
 
 def create_text_agent(
     api_key: str,
     model_name: str = "gemini-3-pro-preview",
     system_prompt: str | None = None,
-) -> Agent[None, str]:
+    rag_db_names: list[str] | None = None,
+) -> Agent[AgentDeps, str] | Agent[None, str]:
     provider = GoogleProvider(api_key=api_key)
     model = GoogleModel(model_name, provider=provider)
     base_prompt = system_prompt or DEFAULT_SYSTEM_PROMPT
+
+    if rag_db_names:
+        full_prompt = f"{base_prompt}{RAG_SYSTEM_ADDITION} {LATEX_INSTRUCTION}"
+        agent: Agent[None, str] = Agent(
+            model, instructions=full_prompt, deps_type=AgentDeps
+        )
+
+        @agent.tool
+        async def search_knowledge_base(ctx: RunContext[AgentDeps], query: str) -> str:
+            """Search the user's knowledge base for relevant information.
+
+            Args:
+                ctx: The run context containing user dependencies.
+                query: The search query to find relevant information.
+
+            Returns:
+                Relevant text from the knowledge base.
+            """
+            logger.info(f"Searching knowledge base for {query}")
+            result = await convex.action(
+                "rag:searchMultiple",
+                {
+                    "userId": ctx.deps.user_id,
+                    "dbNames": ctx.deps.rag_db_names,
+                    "apiKey": ctx.deps.api_key,
+                    "query": query,
+                    "limit": 5,
+                },
+            )
+            if result and result.get("text"):
+                return f"Knowledge base results:\n\n{result['text']}"
+            return "No relevant information found in the knowledge base."
+
+        return agent
+
     full_prompt = f"{base_prompt} {LATEX_INSTRUCTION}"
     return Agent(model, instructions=full_prompt)
@@ -68,12 +123,13 @@ def build_message_history(history: list[dict[str, str]]) -> list[ModelMessage]:
 async def stream_response(  # noqa: PLR0913
-    text_agent: Agent[None, str],
+    text_agent: Agent[AgentDeps, str] | Agent[None, str],
     message: str,
     history: list[dict[str, str]] | None = None,
     on_chunk: StreamCallback | None = None,
     image: ImageData | None = None,
     images: list[ImageData] | None = None,
+    deps: AgentDeps | None = None,
 ) -> str:
     message_history = build_message_history(history) if history else None
@@ -88,7 +144,7 @@ async def stream_response( # noqa: PLR0913
     else:
         prompt = message  # type: ignore[assignment]
 
-    stream = text_agent.run_stream(prompt, message_history=message_history)
+    stream = text_agent.run_stream(prompt, message_history=message_history, deps=deps)
     async with stream as result:
         async for text in result.stream_text():
             if on_chunk:
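
For orientation, a minimal usage sketch of the pieces added above, based only on the signatures in this file; the API key, user id, and database name are placeholders (in the bot they come from Convex, as the message handlers earlier in this diff show):

    import asyncio

    from bot.modules.ai.agent import AgentDeps, create_text_agent, stream_response


    async def demo() -> None:
        api_key = "GEMINI_API_KEY"   # placeholder
        user_id = "users_abc123"     # placeholder Convex user id
        rag_db_names = ["notes"]     # names of the user's connected RAG databases

        # Passing rag_db_names switches on the search_knowledge_base tool.
        agent = create_text_agent(api_key=api_key, rag_db_names=rag_db_names)
        deps = AgentDeps(user_id=user_id, api_key=api_key, rag_db_names=rag_db_names)

        # deps is forwarded to run_stream so the tool can call rag:searchMultiple.
        answer = await stream_response(agent, "What is stigmergy?", deps=deps)
        print(answer)


    asyncio.run(demo())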

View File

@@ -22,6 +22,39 @@ for example Group A: 1, Group A: 2a, Group B: 2b, etc.
 Or, Theory: 1, Theory: 2a, Practice: 1, etc.
 Only output identifiers that exist in the image."""
 
+RAGTHEORY_SYSTEM = """You help answer theoretical exam questions.
+
+When you receive an IMAGE with exam questions:
+1. Identify ALL questions/blanks to fill
+2. For EACH question, use search_knowledge_base to find relevant info
+3. Provide exam-ready answers
+
+OUTPUT FORMAT:
+- Number each answer matching the question number
+- Answer length: match what the question expects
+(1 sentence, 1-2 sentences, fill blank, list items, etc.)
+- Write answers EXACTLY as they should appear on the exam sheet - ready to copy 1:1
+- Use precise terminology from the course
+- No explanations, no "because", no fluff - just the answer itself
+- For multi-part questions (a, b, c), answer each part separately
+
+LANGUAGE: Match the exam language (usually English for technical terms)
+STYLE: Academic, precise, minimal - as if you're writing on paper with limited space
+
+Example input:
+"Stigmergy is ............"
+Example output:
+"1. Stigmergy is indirect communication through environment\
+modification, e.g. by leaving some marks."
+
+Example input:
+"How is crossing over performed in genetic programming? (one precise sentence)"
+Example output:
+"3. Usually implemented as swapping randomly selected subtrees of parent trees"
+"""
+
 DEFAULT_FOLLOW_UP = (
     "Based on the conversation, suggest 3 short follow-up questions "
     "the user might want to ask. Each option should be under 50 characters."
@@ -38,4 +71,7 @@ Summarize VERY briefly:
 
 Max 2-3 sentences. This is for Apple Watch display."""
 
-PRESETS: dict[str, tuple[str, str]] = {"exam": (EXAM_SYSTEM, EXAM_FOLLOW_UP)}
+PRESETS: dict[str, tuple[str, str]] = {
+    "exam": (EXAM_SYSTEM, EXAM_FOLLOW_UP),
+    "ragtheory": (RAGTHEORY_SYSTEM, EXAM_FOLLOW_UP),
+}

View File

@@ -7,14 +7,18 @@ from convex import ConvexClient as SyncConvexClient
 class ConvexClient:
     def __init__(self, url: str) -> None:
         self._client = SyncConvexClient(url)
+        self._lock = asyncio.Lock()
 
     async def query(self, name: str, args: dict[str, Any] | None = None) -> Any:  # noqa: ANN401
-        return await asyncio.to_thread(self._client.query, name, args or {})
+        async with self._lock:
+            return await asyncio.to_thread(self._client.query, name, args or {})
 
     async def mutation(self, name: str, args: dict[str, Any] | None = None) -> Any:  # noqa: ANN401
-        return await asyncio.to_thread(self._client.mutation, name, args or {})
+        async with self._lock:
+            return await asyncio.to_thread(self._client.mutation, name, args or {})
 
     async def action(self, name: str, args: dict[str, Any] | None = None) -> Any:  # noqa: ANN401
-        return await asyncio.to_thread(self._client.action, name, args or {})
+        async with self._lock:
+            return await asyncio.to_thread(self._client.action, name, args or {})
 
     def subscribe(self, name: str, args: dict[str, Any] | None = None) -> Any:  # noqa: ANN401

View File

@@ -23,10 +23,19 @@ class Settings(BaseSettings):
     log: LogSettings
 
     convex_url: str = Field(validation_alias=AliasChoices("CONVEX_SELF_HOSTED_URL"))
+    convex_http_url: str = Field(
+        default="", validation_alias=AliasChoices("CONVEX_HTTP_URL")
+    )
 
     model_config = SettingsConfigDict(
         case_sensitive=False, env_file=".env", env_nested_delimiter="__", extra="ignore"
     )
 
+    @property
+    def convex_http_base_url(self) -> str:
+        if self.convex_http_url:
+            return self.convex_http_url
+        return self.convex_url.replace("/convex", "/convex-http")
+
 
 env = Settings()  # ty:ignore[missing-argument]
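
In other words, with illustrative URLs, the derived base resolves as:

    CONVEX_SELF_HOSTED_URL=https://example.com/convex   (no CONVEX_HTTP_URL)  ->  convex_http_base_url == "https://example.com/convex-http"
    CONVEX_HTTP_URL=https://example.com/custom-http     (set explicitly)      ->  convex_http_base_url == "https://example.com/custom-http"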

View File

@@ -5,6 +5,8 @@
"": { "": {
"name": "frontend", "name": "frontend",
"dependencies": { "dependencies": {
"@ai-sdk/google": "^3.0.13",
"@convex-dev/rag": "^0.7.0",
"convex": "^1.31.5", "convex": "^1.31.5",
"convex-svelte": "^0.0.12", "convex-svelte": "^0.0.12",
"marked": "^17.0.1", "marked": "^17.0.1",
@@ -35,6 +37,18 @@
}, },
}, },
"packages": { "packages": {
"@ai-sdk/gateway": ["@ai-sdk/gateway@3.0.22", "", { "dependencies": { "@ai-sdk/provider": "3.0.5", "@ai-sdk/provider-utils": "4.0.9", "@vercel/oidc": "3.1.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-NgnlY73JNuooACHqUIz5uMOEWvqR1MMVbb2soGLMozLY1fgwEIF5iJFDAGa5/YArlzw2ATVU7zQu7HkR/FUjgA=="],
"@ai-sdk/google": ["@ai-sdk/google@3.0.13", "", { "dependencies": { "@ai-sdk/provider": "3.0.5", "@ai-sdk/provider-utils": "4.0.9" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-HYCh8miS4FLxOIpjo/BmoFVMO5BuxNpHVVDQkoJotoH8ZSFftkJJGGayIxQT/Lwx9GGvVVCOQ+lCdBBAnkl1sA=="],
"@ai-sdk/provider": ["@ai-sdk/provider@3.0.5", "", { "dependencies": { "json-schema": "^0.4.0" } }, "sha512-2Xmoq6DBJqmSl80U6V9z5jJSJP7ehaJJQMy2iFUqTay06wdCqTnPVBBQbtEL8RCChenL+q5DC5H5WzU3vV3v8w=="],
"@ai-sdk/provider-utils": ["@ai-sdk/provider-utils@4.0.9", "", { "dependencies": { "@ai-sdk/provider": "3.0.5", "@standard-schema/spec": "^1.1.0", "eventsource-parser": "^3.0.6" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-bB4r6nfhBOpmoS9mePxjRoCy+LnzP3AfhyMGCkGL4Mn9clVNlqEeKj26zEKEtB6yoSVcT1IQ0Zh9fytwMCDnow=="],
"@convex-dev/rag": ["@convex-dev/rag@0.7.0", "", { "dependencies": { "ai": "^6.0.0" }, "peerDependencies": { "@convex-dev/workpool": "^0.3.0", "convex": "^1.24.8", "convex-helpers": "^0.1.94" } }, "sha512-hs/py/0SZ+wcKzP8LtN89HQEI2Ts0AXMUb9N3hIr70nQ/T+wBiEOG+3WI91x1JvbkV0ChWYlaiqB1KzoQHYF1A=="],
"@convex-dev/workpool": ["@convex-dev/workpool@0.3.1", "", { "peerDependencies": { "convex": "^1.24.8", "convex-helpers": "^0.1.94" } }, "sha512-uw4Mi+irhhoYA/KwaMo5wXyYJ7BbxqeaLcCZbst3t1SxPN5488rpnR0OwBcPDCmwcdQjBVHOx+q8S4GUjq0Csg=="],
"@emnapi/core": ["@emnapi/core@1.8.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" } }, "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg=="], "@emnapi/core": ["@emnapi/core@1.8.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" } }, "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg=="],
"@emnapi/runtime": ["@emnapi/runtime@1.8.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg=="], "@emnapi/runtime": ["@emnapi/runtime@1.8.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg=="],
@@ -133,6 +147,8 @@
"@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@0.2.12", "", { "dependencies": { "@emnapi/core": "^1.4.3", "@emnapi/runtime": "^1.4.3", "@tybys/wasm-util": "^0.10.0" } }, "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ=="], "@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@0.2.12", "", { "dependencies": { "@emnapi/core": "^1.4.3", "@emnapi/runtime": "^1.4.3", "@tybys/wasm-util": "^0.10.0" } }, "sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ=="],
"@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="],
"@oxc-project/runtime": ["@oxc-project/runtime@0.71.0", "", {}, "sha512-QwoF5WUXIGFQ+hSxWEib4U/aeLoiDN9JlP18MnBgx9LLPRDfn1iICtcow7Jgey6HLH4XFceWXQD5WBJ39dyJcw=="], "@oxc-project/runtime": ["@oxc-project/runtime@0.71.0", "", {}, "sha512-QwoF5WUXIGFQ+hSxWEib4U/aeLoiDN9JlP18MnBgx9LLPRDfn1iICtcow7Jgey6HLH4XFceWXQD5WBJ39dyJcw=="],
"@oxc-project/types": ["@oxc-project/types@0.71.0", "", {}, "sha512-5CwQ4MI+P4MQbjLWXgNurA+igGwu/opNetIE13LBs9+V93R64MLvDKOOLZIXSzEfovU3Zef3q3GjPnMTgJTn2w=="], "@oxc-project/types": ["@oxc-project/types@0.71.0", "", {}, "sha512-5CwQ4MI+P4MQbjLWXgNurA+igGwu/opNetIE13LBs9+V93R64MLvDKOOLZIXSzEfovU3Zef3q3GjPnMTgJTn2w=="],
@@ -285,12 +301,16 @@
"@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.53.1", "", { "dependencies": { "@typescript-eslint/types": "8.53.1", "eslint-visitor-keys": "^4.2.1" } }, "sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg=="], "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.53.1", "", { "dependencies": { "@typescript-eslint/types": "8.53.1", "eslint-visitor-keys": "^4.2.1" } }, "sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg=="],
"@vercel/oidc": ["@vercel/oidc@3.1.0", "", {}, "sha512-Fw28YZpRnA3cAHHDlkt7xQHiJ0fcL+NRcIqsocZQUSmbzeIKRpwttJjik5ZGanXP+vlA4SbTg+AbA3bP363l+w=="],
"@xmldom/xmldom": ["@xmldom/xmldom@0.9.8", "", {}, "sha512-p96FSY54r+WJ50FIOsCOjyj/wavs8921hG5+kVMmZgKcvIKxMXHTrjNJvRgWa/zuX3B6t2lijLNFaOyuxUH+2A=="], "@xmldom/xmldom": ["@xmldom/xmldom@0.9.8", "", {}, "sha512-p96FSY54r+WJ50FIOsCOjyj/wavs8921hG5+kVMmZgKcvIKxMXHTrjNJvRgWa/zuX3B6t2lijLNFaOyuxUH+2A=="],
"acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="], "acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="],
"acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="],
"ai": ["ai@6.0.49", "", { "dependencies": { "@ai-sdk/gateway": "3.0.22", "@ai-sdk/provider": "3.0.5", "@ai-sdk/provider-utils": "4.0.9", "@opentelemetry/api": "1.9.0" }, "peerDependencies": { "zod": "^3.25.76 || ^4.1.8" } }, "sha512-LABniBX/0R6Tv+iUK5keUZhZLaZUe4YjP5M2rZ4wAdZ8iKV3EfTAoJxuL1aaWTSJKIilKa9QUEkCgnp89/32bw=="],
"ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="], "ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="],
"ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],
@@ -325,6 +345,8 @@
"convex": ["convex@1.31.5", "", { "dependencies": { "esbuild": "0.27.0", "prettier": "^3.0.0" }, "peerDependencies": { "@auth0/auth0-react": "^2.0.1", "@clerk/clerk-react": "^4.12.8 || ^5.0.0", "react": "^18.0.0 || ^19.0.0-0 || ^19.0.0" }, "optionalPeers": ["@auth0/auth0-react", "@clerk/clerk-react", "react"], "bin": { "convex": "bin/main.js" } }, "sha512-E1IuJKFwMCHDToNGukBPs6c7RFaarR3t8chLF9n98TM5/Tgmj8lM6l7sKM1aJ3VwqGaB4wbeUAPY8osbCOXBhQ=="], "convex": ["convex@1.31.5", "", { "dependencies": { "esbuild": "0.27.0", "prettier": "^3.0.0" }, "peerDependencies": { "@auth0/auth0-react": "^2.0.1", "@clerk/clerk-react": "^4.12.8 || ^5.0.0", "react": "^18.0.0 || ^19.0.0-0 || ^19.0.0" }, "optionalPeers": ["@auth0/auth0-react", "@clerk/clerk-react", "react"], "bin": { "convex": "bin/main.js" } }, "sha512-E1IuJKFwMCHDToNGukBPs6c7RFaarR3t8chLF9n98TM5/Tgmj8lM6l7sKM1aJ3VwqGaB4wbeUAPY8osbCOXBhQ=="],
"convex-helpers": ["convex-helpers@0.1.111", "", { "peerDependencies": { "@standard-schema/spec": "^1.0.0", "convex": "^1.25.4", "hono": "^4.0.5", "react": "^17.0.2 || ^18.0.0 || ^19.0.0", "typescript": "^5.5", "zod": "^3.25.0 || ^4.0.0" }, "optionalPeers": ["@standard-schema/spec", "hono", "react", "typescript", "zod"], "bin": { "convex-helpers": "bin.cjs" } }, "sha512-0O59Ohi8HVc3+KULxSC6JHsw8cQJyc8gZ7OAfNRVX7T5Wy6LhPx3l8veYN9avKg7UiPlO7m1eBiQMHKclIyXyQ=="],
"convex-svelte": ["convex-svelte@0.0.12", "", { "dependencies": { "esm-env": "^1.2.2", "runed": "^0.31.1" }, "peerDependencies": { "convex": "^1.10.0", "svelte": "^5.0.0" } }, "sha512-sUZoYp4ZsokyvKlbbg1dWYB7MkAjZn4nNG9DnADEt9L6KTKuhhnEIt6fdLj+3GnVBUGDTssm17+7ppzFc4y7Gg=="], "convex-svelte": ["convex-svelte@0.0.12", "", { "dependencies": { "esm-env": "^1.2.2", "runed": "^0.31.1" }, "peerDependencies": { "convex": "^1.10.0", "svelte": "^5.0.0" } }, "sha512-sUZoYp4ZsokyvKlbbg1dWYB7MkAjZn4nNG9DnADEt9L6KTKuhhnEIt6fdLj+3GnVBUGDTssm17+7ppzFc4y7Gg=="],
"cookie": ["cookie@0.6.0", "", {}, "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw=="], "cookie": ["cookie@0.6.0", "", {}, "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw=="],
@@ -375,6 +397,8 @@
"esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="], "esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="],
"eventsource-parser": ["eventsource-parser@3.0.6", "", {}, "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg=="],
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
"fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="], "fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="],
@@ -421,6 +445,8 @@
"json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="], "json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="],
"json-schema": ["json-schema@0.4.0", "", {}, "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="],
"json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], "json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="],
"json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="], "json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="],
@@ -603,6 +629,8 @@
"zimmerframe": ["zimmerframe@1.1.4", "", {}, "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ=="], "zimmerframe": ["zimmerframe@1.1.4", "", {}, "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ=="],
"zod": ["zod@4.3.6", "", {}, "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg=="],
"@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], "@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
"@eslint/eslintrc/globals": ["globals@14.0.0", "", {}, "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="], "@eslint/eslintrc/globals": ["globals@14.0.0", "", {}, "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="],

View File

@@ -36,6 +36,8 @@
"vite": "^7.2.6" "vite": "^7.2.6"
}, },
"dependencies": { "dependencies": {
"@ai-sdk/google": "^3.0.13",
"@convex-dev/rag": "^0.7.0",
"convex": "^1.31.5", "convex": "^1.31.5",
"convex-svelte": "^0.0.12", "convex-svelte": "^0.0.12",
"marked": "^17.0.1", "marked": "^17.0.1",

View File

@@ -1,6 +1,8 @@
<script lang="ts"> <script lang="ts">
import type { Id } from '$lib/convex/_generated/dataModel';
interface Photo { interface Photo {
base64: string; _id: Id<'photoDrafts'>;
mediaType: string; mediaType: string;
} }

View File

@@ -3,9 +3,10 @@
   interface Props {
     oncapture: (base64: string, mediaType: string, thumbnailBase64: string) => void;
+    onunpair?: () => void;
   }
 
-  let { oncapture }: Props = $props();
+  let { oncapture, onunpair }: Props = $props();
 
   let videoElement: HTMLVideoElement | null = $state(null);
   let stream: MediaStream | null = $state(null);
@@ -123,4 +124,12 @@
 <div class="fixed inset-0 z-40 bg-black">
   <video bind:this={videoElement} autoplay playsinline muted class="h-full w-full object-cover"
   ></video>
+  {#if onunpair}
+    <button
+      onclick={onunpair}
+      class="absolute top-4 left-4 z-10 rounded-full bg-red-600/80 px-3 py-1.5 text-xs text-white"
+    >
+      unpair
+    </button>
+  {/if}
 </div>

View File

@@ -3,7 +3,7 @@ import { getContext, setContext } from 'svelte';
 import type { FunctionReference, FunctionArgs, FunctionReturnType } from 'convex/server';
 
 const POLLING_CONTEXT_KEY = 'convex-polling';
-const POLL_INTERVAL = 500;
+const POLL_INTERVAL = 1000;
 
 type PollingContext = {
   client: ConvexHttpClient;
@@ -47,7 +47,11 @@ export function usePollingMutation<Mutation extends FunctionReference<'mutation'
 export function usePollingQuery<Query extends FunctionReference<'query'>>(
   query: Query,
   argsGetter: () => FunctionArgs<Query> | 'skip'
-): { data: FunctionReturnType<Query> | undefined; error: Error | null; isLoading: boolean } {
+): {
+  data: FunctionReturnType<Query> | undefined;
+  error: Error | null;
+  isLoading: boolean;
+} {
   const client = usePollingClient();
 
   // eslint-disable-next-line prefer-const

View File

@@ -10,11 +10,14 @@
 import type * as chats from "../chats.js";
 import type * as devicePairings from "../devicePairings.js";
+import type * as http from "../http.js";
 import type * as messages from "../messages.js";
 import type * as pairingRequests from "../pairingRequests.js";
 import type * as pendingGenerations from "../pendingGenerations.js";
 import type * as photoDrafts from "../photoDrafts.js";
 import type * as photoRequests from "../photoRequests.js";
+import type * as rag from "../rag.js";
+import type * as ragConnections from "../ragConnections.js";
 import type * as users from "../users.js";
 
 import type {
@@ -26,11 +29,14 @@ import type {
 declare const fullApi: ApiFromModules<{
   chats: typeof chats;
   devicePairings: typeof devicePairings;
+  http: typeof http;
   messages: typeof messages;
   pairingRequests: typeof pairingRequests;
   pendingGenerations: typeof pendingGenerations;
   photoDrafts: typeof photoDrafts;
   photoRequests: typeof photoRequests;
+  rag: typeof rag;
+  ragConnections: typeof ragConnections;
   users: typeof users;
 }>;
@@ -60,4 +66,401 @@ export declare const internal: FilterApi<
   FunctionReference<any, "internal">
 >;
 
-export declare const components: {};
+export declare const components: {
rag: {
chunks: {
insert: FunctionReference<
"mutation",
"internal",
{
chunks: Array<{
content: { metadata?: Record<string, any>; text: string };
embedding: Array<number>;
searchableText?: string;
}>;
entryId: string;
startOrder: number;
},
{ status: "pending" | "ready" | "replaced" }
>;
list: FunctionReference<
"query",
"internal",
{
entryId: string;
order: "desc" | "asc";
paginationOpts: {
cursor: string | null;
endCursor?: string | null;
id?: number;
maximumBytesRead?: number;
maximumRowsRead?: number;
numItems: number;
};
},
{
continueCursor: string;
isDone: boolean;
page: Array<{
metadata?: Record<string, any>;
order: number;
state: "pending" | "ready" | "replaced";
text: string;
}>;
pageStatus?: "SplitRecommended" | "SplitRequired" | null;
splitCursor?: string | null;
}
>;
replaceChunksPage: FunctionReference<
"mutation",
"internal",
{ entryId: string; startOrder: number },
{ nextStartOrder: number; status: "pending" | "ready" | "replaced" }
>;
};
entries: {
add: FunctionReference<
"mutation",
"internal",
{
allChunks?: Array<{
content: { metadata?: Record<string, any>; text: string };
embedding: Array<number>;
searchableText?: string;
}>;
entry: {
contentHash?: string;
filterValues: Array<{ name: string; value: any }>;
importance: number;
key?: string;
metadata?: Record<string, any>;
namespaceId: string;
title?: string;
};
onComplete?: string;
},
{
created: boolean;
entryId: string;
status: "pending" | "ready" | "replaced";
}
>;
addAsync: FunctionReference<
"mutation",
"internal",
{
chunker: string;
entry: {
contentHash?: string;
filterValues: Array<{ name: string; value: any }>;
importance: number;
key?: string;
metadata?: Record<string, any>;
namespaceId: string;
title?: string;
};
onComplete?: string;
},
{ created: boolean; entryId: string; status: "pending" | "ready" }
>;
deleteAsync: FunctionReference<
"mutation",
"internal",
{ entryId: string; startOrder: number },
null
>;
deleteByKeyAsync: FunctionReference<
"mutation",
"internal",
{ beforeVersion?: number; key: string; namespaceId: string },
null
>;
deleteByKeySync: FunctionReference<
"action",
"internal",
{ key: string; namespaceId: string },
null
>;
deleteSync: FunctionReference<
"action",
"internal",
{ entryId: string },
null
>;
findByContentHash: FunctionReference<
"query",
"internal",
{
contentHash: string;
dimension: number;
filterNames: Array<string>;
key: string;
modelId: string;
namespace: string;
},
{
contentHash?: string;
entryId: string;
filterValues: Array<{ name: string; value: any }>;
importance: number;
key?: string;
metadata?: Record<string, any>;
replacedAt?: number;
status: "pending" | "ready" | "replaced";
title?: string;
} | null
>;
get: FunctionReference<
"query",
"internal",
{ entryId: string },
{
contentHash?: string;
entryId: string;
filterValues: Array<{ name: string; value: any }>;
importance: number;
key?: string;
metadata?: Record<string, any>;
replacedAt?: number;
status: "pending" | "ready" | "replaced";
title?: string;
} | null
>;
list: FunctionReference<
"query",
"internal",
{
namespaceId?: string;
order?: "desc" | "asc";
paginationOpts: {
cursor: string | null;
endCursor?: string | null;
id?: number;
maximumBytesRead?: number;
maximumRowsRead?: number;
numItems: number;
};
status: "pending" | "ready" | "replaced";
},
{
continueCursor: string;
isDone: boolean;
page: Array<{
contentHash?: string;
entryId: string;
filterValues: Array<{ name: string; value: any }>;
importance: number;
key?: string;
metadata?: Record<string, any>;
replacedAt?: number;
status: "pending" | "ready" | "replaced";
title?: string;
}>;
pageStatus?: "SplitRecommended" | "SplitRequired" | null;
splitCursor?: string | null;
}
>;
promoteToReady: FunctionReference<
"mutation",
"internal",
{ entryId: string },
{
replacedEntry: {
contentHash?: string;
entryId: string;
filterValues: Array<{ name: string; value: any }>;
importance: number;
key?: string;
metadata?: Record<string, any>;
replacedAt?: number;
status: "pending" | "ready" | "replaced";
title?: string;
} | null;
}
>;
};
namespaces: {
deleteNamespace: FunctionReference<
"mutation",
"internal",
{ namespaceId: string },
{
deletedNamespace: null | {
createdAt: number;
dimension: number;
filterNames: Array<string>;
modelId: string;
namespace: string;
namespaceId: string;
status: "pending" | "ready" | "replaced";
version: number;
};
}
>;
deleteNamespaceSync: FunctionReference<
"action",
"internal",
{ namespaceId: string },
null
>;
get: FunctionReference<
"query",
"internal",
{
dimension: number;
filterNames: Array<string>;
modelId: string;
namespace: string;
},
null | {
createdAt: number;
dimension: number;
filterNames: Array<string>;
modelId: string;
namespace: string;
namespaceId: string;
status: "pending" | "ready" | "replaced";
version: number;
}
>;
getOrCreate: FunctionReference<
"mutation",
"internal",
{
dimension: number;
filterNames: Array<string>;
modelId: string;
namespace: string;
onComplete?: string;
status: "pending" | "ready";
},
{ namespaceId: string; status: "pending" | "ready" }
>;
list: FunctionReference<
"query",
"internal",
{
paginationOpts: {
cursor: string | null;
endCursor?: string | null;
id?: number;
maximumBytesRead?: number;
maximumRowsRead?: number;
numItems: number;
};
status: "pending" | "ready" | "replaced";
},
{
continueCursor: string;
isDone: boolean;
page: Array<{
createdAt: number;
dimension: number;
filterNames: Array<string>;
modelId: string;
namespace: string;
namespaceId: string;
status: "pending" | "ready" | "replaced";
version: number;
}>;
pageStatus?: "SplitRecommended" | "SplitRequired" | null;
splitCursor?: string | null;
}
>;
listNamespaceVersions: FunctionReference<
"query",
"internal",
{
namespace: string;
paginationOpts: {
cursor: string | null;
endCursor?: string | null;
id?: number;
maximumBytesRead?: number;
maximumRowsRead?: number;
numItems: number;
};
},
{
continueCursor: string;
isDone: boolean;
page: Array<{
createdAt: number;
dimension: number;
filterNames: Array<string>;
modelId: string;
namespace: string;
namespaceId: string;
status: "pending" | "ready" | "replaced";
version: number;
}>;
pageStatus?: "SplitRecommended" | "SplitRequired" | null;
splitCursor?: string | null;
}
>;
lookup: FunctionReference<
"query",
"internal",
{
dimension: number;
filterNames: Array<string>;
modelId: string;
namespace: string;
},
null | string
>;
promoteToReady: FunctionReference<
"mutation",
"internal",
{ namespaceId: string },
{
replacedNamespace: null | {
createdAt: number;
dimension: number;
filterNames: Array<string>;
modelId: string;
namespace: string;
namespaceId: string;
status: "pending" | "ready" | "replaced";
version: number;
};
}
>;
};
search: {
search: FunctionReference<
"action",
"internal",
{
chunkContext?: { after: number; before: number };
embedding: Array<number>;
filters: Array<{ name: string; value: any }>;
limit: number;
modelId: string;
namespace: string;
vectorScoreThreshold?: number;
},
{
entries: Array<{
contentHash?: string;
entryId: string;
filterValues: Array<{ name: string; value: any }>;
importance: number;
key?: string;
metadata?: Record<string, any>;
replacedAt?: number;
status: "pending" | "ready" | "replaced";
title?: string;
}>;
results: Array<{
content: Array<{ metadata?: Record<string, any>; text: string }>;
entryId: string;
order: number;
score: number;
startOrder: number;
}>;
}
>;
};
};
};

View File

@@ -80,7 +80,13 @@ export const getWithUser = query({
       followUpPrompt: v.optional(v.string()),
       model: v.string(),
       followUpModel: v.optional(v.string()),
-      activeChatId: v.optional(v.id('chats'))
+      activeChatId: v.optional(v.id('chats')),
+      ragCollectionMode: v.optional(
+        v.object({
+          ragDatabaseId: v.id('ragDatabases'),
+          activeSince: v.number()
+        })
+      )
     })
   }),
   v.null()

View File

@@ -0,0 +1,7 @@
import { defineApp } from 'convex/server';
import rag from '@convex-dev/rag/convex.config';
const app = defineApp();
app.use(rag);
export default app;

View File

@@ -0,0 +1,43 @@
import { httpRouter } from 'convex/server';
import { httpAction } from './_generated/server';
import { api } from './_generated/api';
import type { Id } from './_generated/dataModel';

const http = httpRouter();

http.route({
  path: '/rag/search',
  method: 'POST',
  handler: httpAction(async (ctx, req) => {
    const body = await req.json();
    const { userId, dbNames, query, apiKey, limit } = body as {
      userId: string;
      dbNames: string[];
      query: string;
      apiKey: string;
      limit?: number;
    };

    if (!userId || !dbNames || !query || !apiKey) {
      return new Response(JSON.stringify({ error: 'Missing required fields' }), {
        status: 400,
        headers: { 'Content-Type': 'application/json' }
      });
    }

    const result = await ctx.runAction(api.rag.searchMultiple, {
      userId: userId as Id<'users'>,
      dbNames,
      apiKey,
      query,
      limit
    });

    return new Response(JSON.stringify(result), {
      status: 200,
      headers: { 'Content-Type': 'application/json' }
    });
  })
});

export default http;
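
For reference, a sketch of calling this route from the bot side with only the standard library; the field names mirror the handler above, env.convex_http_base_url comes from the settings change earlier in this diff, and the ids and key are placeholders (this assumes the deployment exposes the Convex HTTP actions at that base URL):

    import json
    import urllib.request

    from utils import env

    payload = {
        "userId": "users_abc123",        # placeholder Convex user id
        "dbNames": ["notes"],
        "query": "What is stigmergy?",
        "apiKey": "GEMINI_API_KEY",      # placeholder Gemini key
        "limit": 5,
    }
    req = urllib.request.Request(
        f"{env.convex_http_base_url}/rag/search",
        data=json.dumps(payload).encode("utf-8"),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with urllib.request.urlopen(req) as resp:
        print(json.loads(resp.read().decode("utf-8")))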

View File

@@ -11,10 +11,6 @@ export const listByChat = query({
       chatId: v.id('chats'),
       role: v.union(v.literal('user'), v.literal('assistant')),
       content: v.string(),
-      imageBase64: v.optional(v.string()),
-      imageMediaType: v.optional(v.string()),
-      imagesBase64: v.optional(v.array(v.string())),
-      imagesMediaTypes: v.optional(v.array(v.string())),
       followUpOptions: v.optional(v.array(v.string())),
       source: v.union(v.literal('telegram'), v.literal('web')),
       createdAt: v.number(),
@@ -22,11 +18,23 @@
     })
   ),
   handler: async (ctx, args) => {
-    return await ctx.db
+    const messages = await ctx.db
       .query('messages')
       .withIndex('by_chat_id_and_created_at', (q) => q.eq('chatId', args.chatId))
       .order('asc')
       .collect();
+
+    return messages.map((m) => ({
+      _id: m._id,
+      _creationTime: m._creationTime,
+      chatId: m.chatId,
+      role: m.role,
+      content: m.content,
+      followUpOptions: m.followUpOptions,
+      source: m.source,
+      createdAt: m.createdAt,
+      isStreaming: m.isStreaming
+    }));
   }
 });

View File

@@ -12,7 +12,6 @@ export const get = query({
     photos: v.array(
       v.object({
         _id: v.id('photoDrafts'),
-        base64: v.string(),
         mediaType: v.string()
       })
     )
@@ -28,7 +27,6 @@
     return {
       photos: drafts.map((d) => ({
         _id: d._id,
-        base64: d.base64,
         mediaType: d.mediaType
       }))
     };

View File

@@ -21,6 +21,20 @@ const photoRequestValidator = v.object({
createdAt: v.number() createdAt: v.number()
}); });
const photoRequestLightValidator = v.object({
_id: v.id('photoRequests'),
status: v.union(
v.literal('pending'),
v.literal('countdown'),
v.literal('capture_now'),
v.literal('captured'),
v.literal('accepted'),
v.literal('rejected')
),
photoMediaType: v.optional(v.string()),
thumbnailBase64: v.optional(v.string())
});
export const create = mutation({ export const create = mutation({
args: { args: {
chatId: v.id('chats'), chatId: v.id('chats'),
@@ -29,6 +43,17 @@ export const create = mutation({
}, },
returns: v.id('photoRequests'), returns: v.id('photoRequests'),
handler: async (ctx, args) => { handler: async (ctx, args) => {
const oldRequests = await ctx.db
.query('photoRequests')
.withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
.take(20);
for (const req of oldRequests) {
if (req.status === 'pending' || req.status === 'countdown' || req.status === 'capture_now') {
await ctx.db.patch(req._id, { status: 'rejected' });
}
}
return await ctx.db.insert('photoRequests', { return await ctx.db.insert('photoRequests', {
chatId: args.chatId, chatId: args.chatId,
requesterId: args.requesterId, requesterId: args.requesterId,
@@ -55,15 +80,20 @@ export const submitPhoto = mutation({
photoMediaType: v.string(), photoMediaType: v.string(),
thumbnailBase64: v.optional(v.string()) thumbnailBase64: v.optional(v.string())
}, },
returns: v.null(), returns: v.boolean(),
handler: async (ctx, args) => { handler: async (ctx, args) => {
const req = await ctx.db.get(args.requestId);
if (!req || req.status !== 'capture_now') {
return false;
}
await ctx.db.patch(args.requestId, { await ctx.db.patch(args.requestId, {
status: 'captured', status: 'captured',
photoBase64: args.photoBase64, photoBase64: args.photoBase64,
photoMediaType: args.photoMediaType, photoMediaType: args.photoMediaType,
thumbnailBase64: args.thumbnailBase64 thumbnailBase64: args.thumbnailBase64
}); });
return null; return true;
} }
}); });
@@ -78,24 +108,39 @@ export const markAccepted = mutation({
export const markRejected = mutation({ export const markRejected = mutation({
args: { requestId: v.id('photoRequests') }, args: { requestId: v.id('photoRequests') },
returns: v.null(), returns: v.boolean(),
handler: async (ctx, args) => { handler: async (ctx, args) => {
await ctx.db.patch(args.requestId, { status: 'rejected' }); const req = await ctx.db.get(args.requestId);
return null; if (!req || req.status === 'accepted' || req.status === 'rejected') {
return false;
} }
await ctx.db.patch(req._id, { status: 'rejected' });
return true;
}
});
const captureNowLightValidator = v.object({
_id: v.id('photoRequests'),
status: v.literal('capture_now')
}); });
export const getCaptureNowRequest = query({ export const getCaptureNowRequest = query({
args: { chatId: v.id('chats'), deviceId: v.optional(v.string()) }, args: { chatId: v.id('chats'), deviceId: v.optional(v.string()) },
returns: v.union(photoRequestValidator, v.null()), returns: v.union(captureNowLightValidator, v.null()),
handler: async (ctx, args) => { handler: async (ctx, args) => {
const now = Date.now();
const maxAge = 60 * 1000;
const requests = await ctx.db const requests = await ctx.db
.query('photoRequests') .query('photoRequests')
.withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId)) .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
.order('desc') .order('desc')
.take(50); .take(50);
return requests.find((r) => r.status === 'capture_now') ?? null; const found = requests.find((r) => r.status === 'capture_now' && now - r.createdAt < maxAge);
if (!found) return null;
return { _id: found._id, status: 'capture_now' as const };
} }
}); });
@@ -115,22 +160,99 @@ export const getActiveForCapture = query({
export const getMyActiveRequest = query({ export const getMyActiveRequest = query({
args: { chatId: v.id('chats'), deviceId: v.optional(v.string()) }, args: { chatId: v.id('chats'), deviceId: v.optional(v.string()) },
returns: v.union(photoRequestValidator, v.null()), returns: v.union(photoRequestLightValidator, v.null()),
handler: async (ctx, args) => { handler: async (ctx, args) => {
const requests = await ctx.db const requests = await ctx.db
.query('photoRequests') .query('photoRequests')
.withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId)) .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
.order('desc')
.take(100); .take(100);
if (!args.deviceId) return null; if (!args.deviceId) return null;
return ( const found = requests.find(
requests.find(
(r) => (r) =>
r.requesterId === args.deviceId && r.requesterId === args.deviceId &&
(r.status === 'countdown' || r.status === 'capture_now' || r.status === 'captured') (r.status === 'countdown' || r.status === 'capture_now' || r.status === 'captured')
) ?? null
); );
if (!found) return null;
return {
_id: found._id,
status: found.status,
photoMediaType: found.photoMediaType,
thumbnailBase64: found.thumbnailBase64
};
}
});
export const getPhotoData = query({
args: { requestId: v.id('photoRequests') },
returns: v.union(
v.object({
photoBase64: v.string(),
photoMediaType: v.string(),
thumbnailBase64: v.optional(v.string())
}),
v.null()
),
handler: async (ctx, args) => {
const req = await ctx.db.get(args.requestId);
if (!req || !req.photoBase64 || !req.photoMediaType) return null;
return {
photoBase64: req.photoBase64,
photoMediaType: req.photoMediaType,
thumbnailBase64: req.thumbnailBase64
};
}
});
export const getPhotoPreview = query({
args: { requestId: v.id('photoRequests') },
returns: v.union(
v.object({
thumbnailBase64: v.string(),
photoMediaType: v.string()
}),
v.null()
),
handler: async (ctx, args) => {
const req = await ctx.db.get(args.requestId);
if (!req || !req.photoMediaType) return null;
return {
thumbnailBase64: req.thumbnailBase64 || req.photoBase64 || '',
photoMediaType: req.photoMediaType
};
}
});
export const acceptPhotoToDraft = mutation({
args: {
requestId: v.id('photoRequests'),
chatId: v.id('chats'),
deviceId: v.string()
},
returns: v.id('photoDrafts'),
handler: async (ctx, args) => {
const req = await ctx.db.get(args.requestId);
if (!req || !req.photoBase64 || !req.photoMediaType) {
throw new Error('Photo request not found or has no photo');
}
const draftId = await ctx.db.insert('photoDrafts', {
chatId: args.chatId,
deviceId: args.deviceId,
base64: req.photoBase64,
mediaType: req.photoMediaType,
createdAt: Date.now()
});
await ctx.db.patch(args.requestId, { status: 'accepted' });
return draftId;
} }
}); });
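
A compact sketch of the new accept path from a plain Convex client (the Svelte page further down drives the same functions through its polling wrappers); the client setup and identifiers here are illustrative assumptions.

import { ConvexHttpClient } from 'convex/browser';
import { api } from './convex/_generated/api'; // import path assumed
import type { Id } from './convex/_generated/dataModel';

const client = new ConvexHttpClient(process.env.PUBLIC_CONVEX_URL!); // env var name assumed

async function reviewAndAccept(requestId: Id<'photoRequests'>, chatId: Id<'chats'>, deviceId: string) {
  // 1. Fetch only the thumbnail for the on-screen preview.
  const preview = await client.query(api.photoRequests.getPhotoPreview, { requestId });
  if (!preview) return null;

  // 2. On accept, copy the full photo into a draft entirely server-side,
  //    so the large base64 payload never round-trips through this client.
  return await client.mutation(api.photoRequests.acceptPhotoToDraft, { requestId, chatId, deviceId });
}
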

View File

@@ -0,0 +1,252 @@
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { RAG } from '@convex-dev/rag';
import { v } from 'convex/values';
import { api, components } from './_generated/api';
import { action, mutation, query } from './_generated/server';
function createRagInstance(apiKey: string) {
const google = createGoogleGenerativeAI({ apiKey });
return new RAG(components.rag, {
textEmbeddingModel: google.embedding('text-embedding-004'),
embeddingDimension: 768
});
}
function buildNamespace(userId: string, dbName: string): string {
return `user_${userId}/${dbName}`;
}
export const createDatabase = mutation({
args: { userId: v.id('users'), name: v.string() },
returns: v.id('ragDatabases'),
handler: async (ctx, args) => {
const existing = await ctx.db
.query('ragDatabases')
.withIndex('by_user_id_and_name', (q) => q.eq('userId', args.userId).eq('name', args.name))
.unique();
if (existing) {
return existing._id;
}
return await ctx.db.insert('ragDatabases', {
userId: args.userId,
name: args.name,
createdAt: Date.now()
});
}
});
export const getDatabase = query({
args: { userId: v.id('users'), name: v.string() },
returns: v.union(
v.object({
_id: v.id('ragDatabases'),
_creationTime: v.number(),
userId: v.id('users'),
name: v.string(),
createdAt: v.number()
}),
v.null()
),
handler: async (ctx, args) => {
return await ctx.db
.query('ragDatabases')
.withIndex('by_user_id_and_name', (q) => q.eq('userId', args.userId).eq('name', args.name))
.unique();
}
});
export const getDatabaseById = query({
args: { ragDatabaseId: v.id('ragDatabases') },
returns: v.union(
v.object({
_id: v.id('ragDatabases'),
_creationTime: v.number(),
userId: v.id('users'),
name: v.string(),
createdAt: v.number()
}),
v.null()
),
handler: async (ctx, args) => {
return await ctx.db.get(args.ragDatabaseId);
}
});
export const listDatabases = query({
args: { userId: v.id('users') },
returns: v.array(
v.object({
_id: v.id('ragDatabases'),
_creationTime: v.number(),
userId: v.id('users'),
name: v.string(),
createdAt: v.number()
})
),
handler: async (ctx, args) => {
return await ctx.db
.query('ragDatabases')
.withIndex('by_user_id', (q) => q.eq('userId', args.userId))
.collect();
}
});
export const deleteDatabase = action({
args: { userId: v.id('users'), name: v.string(), apiKey: v.string() },
returns: v.boolean(),
handler: async (ctx, args) => {
const db = await ctx.runQuery(api.rag.getDatabase, {
userId: args.userId,
name: args.name
});
if (!db) {
return false;
}
const connections = await ctx.runQuery(api.ragConnections.getByRagDatabaseId, {
ragDatabaseId: db._id
});
for (const conn of connections) {
await ctx.runMutation(api.ragConnections.deleteConnection, {
connectionId: conn._id
});
}
await ctx.runMutation(api.rag.deleteDatabaseRecord, {
ragDatabaseId: db._id
});
return true;
}
});
export const deleteDatabaseRecord = mutation({
args: { ragDatabaseId: v.id('ragDatabases') },
returns: v.null(),
handler: async (ctx, args) => {
await ctx.db.delete(args.ragDatabaseId);
return null;
}
});
export const addContent = action({
args: {
userId: v.id('users'),
ragDatabaseId: v.id('ragDatabases'),
apiKey: v.string(),
text: v.string(),
key: v.optional(v.string())
},
returns: v.null(),
handler: async (ctx, args) => {
const db = await ctx.runQuery(api.rag.getDatabaseById, {
ragDatabaseId: args.ragDatabaseId
});
if (!db) {
throw new Error('RAG database not found');
}
const rag = createRagInstance(args.apiKey);
const namespace = buildNamespace(args.userId, db.name);
await rag.add(ctx, {
namespace,
text: args.text,
key: args.key
});
return null;
}
});
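
A small ingestion sketch, assuming a server-side caller that already holds the user's Gemini API key; the database name 'notes' and the client setup are illustrative.

import { ConvexHttpClient } from 'convex/browser';
import { api } from './convex/_generated/api'; // import path assumed
import type { Id } from './convex/_generated/dataModel';

const client = new ConvexHttpClient(process.env.CONVEX_URL!); // env var name assumed

async function ingestNote(userId: Id<'users'>, apiKey: string, text: string) {
  // createDatabase is idempotent: it returns the existing id if 'notes' already exists.
  const ragDatabaseId = await client.mutation(api.rag.createDatabase, { userId, name: 'notes' });
  // Embeds the text with text-embedding-004 and stores it under the user_<id>/notes namespace.
  await client.action(api.rag.addContent, { userId, ragDatabaseId, apiKey, text });
}
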
export const search = action({
args: {
userId: v.id('users'),
dbName: v.string(),
apiKey: v.string(),
query: v.string(),
limit: v.optional(v.number())
},
returns: v.object({
text: v.string(),
results: v.array(
v.object({
text: v.string(),
score: v.number()
})
)
}),
handler: async (ctx, args) => {
const rag = createRagInstance(args.apiKey);
const namespace = buildNamespace(args.userId, args.dbName);
const { results, text } = await rag.search(ctx, {
namespace,
query: args.query,
limit: args.limit ?? 5
});
return {
text: text ?? '',
results: results.map((r) => ({
text: r.content.map((c) => c.text).join('\n'),
score: r.score
}))
};
}
});
export const searchMultiple = action({
args: {
userId: v.id('users'),
dbNames: v.array(v.string()),
apiKey: v.string(),
query: v.string(),
limit: v.optional(v.number())
},
returns: v.object({
text: v.string(),
results: v.array(
v.object({
text: v.string(),
score: v.number(),
dbName: v.string()
})
)
}),
handler: async (ctx, args) => {
const rag = createRagInstance(args.apiKey);
const allResults: Array<{ text: string; score: number; dbName: string }> = [];
for (const dbName of args.dbNames) {
const namespace = buildNamespace(args.userId, dbName);
const { results } = await rag.search(ctx, {
namespace,
query: args.query,
limit: args.limit ?? 5
});
for (const r of results) {
allResults.push({
text: r.content.map((c) => c.text).join('\n'),
score: r.score,
dbName
});
}
}
allResults.sort((a, b) => b.score - a.score);
const topResults = allResults.slice(0, args.limit ?? 5);
const combinedText = topResults.map((r) => r.text).join('\n\n---\n\n');
return {
text: combinedText,
results: topResults
};
}
});
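
A usage sketch for the multi-database search, e.g. to assemble context before a model call; the prompt wrapping is an assumption about how the combined text might be consumed, not part of this change.

import { ConvexHttpClient } from 'convex/browser';
import { api } from './convex/_generated/api'; // import path assumed
import type { Id } from './convex/_generated/dataModel';

const client = new ConvexHttpClient(process.env.CONVEX_URL!); // env var name assumed

async function buildRagContext(userId: Id<'users'>, apiKey: string, question: string, dbNames: string[]) {
  const { text, results } = await client.action(api.rag.searchMultiple, {
    userId,
    dbNames,
    apiKey,
    query: question,
    limit: 5
  });
  // results are already merged across databases and sorted by score, highest first.
  for (const r of results) console.log(`${r.dbName}: ${r.score.toFixed(3)}`);
  return text ? `Context:\n${text}\n\nQuestion: ${question}` : question;
}
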

View File

@@ -0,0 +1,102 @@
import { v } from 'convex/values';
import { mutation, query } from './_generated/server';
export const connect = mutation({
args: {
userId: v.id('users'),
ragDatabaseId: v.id('ragDatabases'),
isGlobal: v.optional(v.boolean())
},
returns: v.id('ragConnections'),
handler: async (ctx, args) => {
const existing = await ctx.db
.query('ragConnections')
.withIndex('by_user_id_and_rag_database_id', (q) =>
q.eq('userId', args.userId).eq('ragDatabaseId', args.ragDatabaseId)
)
.unique();
if (existing) {
return existing._id;
}
return await ctx.db.insert('ragConnections', {
userId: args.userId,
ragDatabaseId: args.ragDatabaseId,
isGlobal: args.isGlobal ?? true,
createdAt: Date.now()
});
}
});
export const disconnect = mutation({
args: {
userId: v.id('users'),
ragDatabaseId: v.id('ragDatabases')
},
returns: v.boolean(),
handler: async (ctx, args) => {
const existing = await ctx.db
.query('ragConnections')
.withIndex('by_user_id_and_rag_database_id', (q) =>
q.eq('userId', args.userId).eq('ragDatabaseId', args.ragDatabaseId)
)
.unique();
if (!existing) {
return false;
}
await ctx.db.delete(existing._id);
return true;
}
});
export const getActiveForUser = query({
args: { userId: v.id('users') },
returns: v.array(
v.object({
_id: v.id('ragConnections'),
_creationTime: v.number(),
userId: v.id('users'),
ragDatabaseId: v.id('ragDatabases'),
isGlobal: v.boolean(),
createdAt: v.number()
})
),
handler: async (ctx, args) => {
return await ctx.db
.query('ragConnections')
.withIndex('by_user_id', (q) => q.eq('userId', args.userId))
.collect();
}
});
export const getByRagDatabaseId = query({
args: { ragDatabaseId: v.id('ragDatabases') },
returns: v.array(
v.object({
_id: v.id('ragConnections'),
_creationTime: v.number(),
userId: v.id('users'),
ragDatabaseId: v.id('ragDatabases'),
isGlobal: v.boolean(),
createdAt: v.number()
})
),
handler: async (ctx, args) => {
return await ctx.db
.query('ragConnections')
.withIndex('by_rag_database_id', (q) => q.eq('ragDatabaseId', args.ragDatabaseId))
.collect();
}
});
export const deleteConnection = mutation({
args: { connectionId: v.id('ragConnections') },
returns: v.null(),
handler: async (ctx, args) => {
await ctx.db.delete(args.connectionId);
return null;
}
});
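
A sketch of resolving a user's active connections to database names with these functions; the client setup is an assumption.

import { ConvexHttpClient } from 'convex/browser';
import { api } from './convex/_generated/api'; // import path assumed
import type { Id } from './convex/_generated/dataModel';

const client = new ConvexHttpClient(process.env.CONVEX_URL!); // env var name assumed

async function listConnectedDbNames(userId: Id<'users'>): Promise<string[]> {
  // Two-step lookup: active connections -> database records -> names.
  const connections = await client.query(api.ragConnections.getActiveForUser, { userId });
  const names: string[] = [];
  for (const conn of connections) {
    const db = await client.query(api.rag.getDatabaseById, { ragDatabaseId: conn.ragDatabaseId });
    if (db) names.push(db.name);
  }
  return names;
}
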

View File

@@ -10,7 +10,13 @@ export default defineSchema({
followUpPrompt: v.optional(v.string()), followUpPrompt: v.optional(v.string()),
model: v.string(), model: v.string(),
followUpModel: v.optional(v.string()), followUpModel: v.optional(v.string()),
activeChatId: v.optional(v.id('chats')) activeChatId: v.optional(v.id('chats')),
ragCollectionMode: v.optional(
v.object({
ragDatabaseId: v.id('ragDatabases'),
activeSince: v.number()
})
)
}).index('by_telegram_id', ['telegramId']), }).index('by_telegram_id', ['telegramId']),
chats: defineTable({ chats: defineTable({
@@ -98,5 +104,23 @@ export default defineSchema({
base64: v.string(), base64: v.string(),
mediaType: v.string(), mediaType: v.string(),
createdAt: v.number() createdAt: v.number()
}).index('by_chat_id_and_device_id', ['chatId', 'deviceId']) }).index('by_chat_id_and_device_id', ['chatId', 'deviceId']),
ragDatabases: defineTable({
userId: v.id('users'),
name: v.string(),
createdAt: v.number()
})
.index('by_user_id', ['userId'])
.index('by_user_id_and_name', ['userId', 'name']),
ragConnections: defineTable({
userId: v.id('users'),
ragDatabaseId: v.id('ragDatabases'),
isGlobal: v.boolean(),
createdAt: v.number()
})
.index('by_user_id', ['userId'])
.index('by_user_id_and_rag_database_id', ['userId', 'ragDatabaseId'])
.index('by_rag_database_id', ['ragDatabaseId'])
}); });

View File

@@ -16,7 +16,13 @@ export const getById = query({
followUpPrompt: v.optional(v.string()), followUpPrompt: v.optional(v.string()),
model: v.string(), model: v.string(),
followUpModel: v.optional(v.string()), followUpModel: v.optional(v.string()),
activeChatId: v.optional(v.id('chats')) activeChatId: v.optional(v.id('chats')),
ragCollectionMode: v.optional(
v.object({
ragDatabaseId: v.id('ragDatabases'),
activeSince: v.number()
})
)
}), }),
v.null() v.null()
), ),
@@ -38,7 +44,13 @@ export const getByTelegramId = query({
followUpPrompt: v.optional(v.string()), followUpPrompt: v.optional(v.string()),
model: v.string(), model: v.string(),
followUpModel: v.optional(v.string()), followUpModel: v.optional(v.string()),
activeChatId: v.optional(v.id('chats')) activeChatId: v.optional(v.id('chats')),
ragCollectionMode: v.optional(
v.object({
ragDatabaseId: v.id('ragDatabases'),
activeSince: v.number()
})
)
}), }),
v.null() v.null()
), ),
@@ -127,3 +139,41 @@ export const setActiveChat = mutation({
return null; return null;
} }
}); });
export const startRagCollectionMode = mutation({
args: { userId: v.id('users'), ragDatabaseId: v.id('ragDatabases') },
returns: v.null(),
handler: async (ctx, args) => {
await ctx.db.patch(args.userId, {
ragCollectionMode: {
ragDatabaseId: args.ragDatabaseId,
activeSince: Date.now()
}
});
return null;
}
});
export const stopRagCollectionMode = mutation({
args: { userId: v.id('users') },
returns: v.null(),
handler: async (ctx, args) => {
await ctx.db.patch(args.userId, { ragCollectionMode: undefined });
return null;
}
});
export const getRagCollectionMode = query({
args: { userId: v.id('users') },
returns: v.union(
v.object({
ragDatabaseId: v.id('ragDatabases'),
activeSince: v.number()
}),
v.null()
),
handler: async (ctx, args) => {
const user = await ctx.db.get(args.userId);
return user?.ragCollectionMode ?? null;
}
});
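
A sketch of how collection mode could gate an incoming message into the selected database; the routing details here are assumptions for illustration.

import { ConvexHttpClient } from 'convex/browser';
import { api } from './convex/_generated/api'; // import path assumed
import type { Id } from './convex/_generated/dataModel';

const client = new ConvexHttpClient(process.env.CONVEX_URL!); // env var name assumed

async function maybeCollect(userId: Id<'users'>, apiKey: string, messageText: string): Promise<boolean> {
  const mode = await client.query(api.users.getRagCollectionMode, { userId });
  if (!mode) return false; // collection mode off: handle the message normally
  // Collection mode on: store the message in the chosen database instead.
  await client.action(api.rag.addContent, {
    userId,
    apiKey,
    ragDatabaseId: mode.ragDatabaseId,
    text: messageText
  });
  return true;
}
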

View File

@@ -4,7 +4,11 @@
import { getContext, onMount } from 'svelte'; import { getContext, onMount } from 'svelte';
import { SvelteSet } from 'svelte/reactivity'; import { SvelteSet } from 'svelte/reactivity';
import { useQuery, useConvexClient } from 'convex-svelte'; import { useQuery, useConvexClient } from 'convex-svelte';
import { usePollingQuery, usePollingMutation } from '$lib/convex-polling.svelte'; import {
usePollingQuery,
usePollingMutation,
usePollingClient
} from '$lib/convex-polling.svelte';
import { api } from '$lib/convex/_generated/api'; import { api } from '$lib/convex/_generated/api';
import type { Id } from '$lib/convex/_generated/dataModel'; import type { Id } from '$lib/convex/_generated/dataModel';
import ChatMessage from '$lib/components/ChatMessage.svelte'; import ChatMessage from '$lib/components/ChatMessage.svelte';
@@ -30,7 +34,6 @@
let activeRequestId: Id<'photoRequests'> | null = $state(null); let activeRequestId: Id<'photoRequests'> | null = $state(null);
let previewPhoto: { let previewPhoto: {
thumbnail: string; thumbnail: string;
fullBase64: string;
mediaType: string; mediaType: string;
requestId: Id<'photoRequests'>; requestId: Id<'photoRequests'>;
} | null = $state(null); } | null = $state(null);
@@ -206,17 +209,24 @@
$effect(() => { $effect(() => {
const req = myActiveRequest.data; const req = myActiveRequest.data;
if (req?.status === 'captured' && req.photoBase64 && req.photoMediaType) { if (req?.status === 'captured' && req.photoMediaType) {
if (shownPreviewIds.has(req._id)) return; if (shownPreviewIds.has(req._id)) return;
shownPreviewIds.add(req._id); shownPreviewIds.add(req._id);
const client = pollingClient ?? clientWs;
if (client) {
client.query(api.photoRequests.getPhotoPreview, { requestId: req._id }).then((data) => {
if (data) {
previewPhoto = { previewPhoto = {
thumbnail: req.thumbnailBase64 || req.photoBase64, thumbnail: data.thumbnailBase64,
fullBase64: req.photoBase64, mediaType: data.photoMediaType,
mediaType: req.photoMediaType,
requestId: req._id requestId: req._id
}; };
} }
}); });
}
}
});
let prevMessageCount = 0; let prevMessageCount = 0;
let prevLastMessageId: string | undefined; let prevLastMessageId: string | undefined;
@@ -232,6 +242,7 @@
}); });
const clientWs = usePolling ? null : useConvexClient(); const clientWs = usePolling ? null : useConvexClient();
const pollingClient = usePolling ? usePollingClient() : null;
const createMessagePoll = usePolling ? usePollingMutation(api.messages.create) : null; const createMessagePoll = usePolling ? usePollingMutation(api.messages.create) : null;
const registerDevicePoll = usePolling ? usePollingMutation(api.devicePairings.register) : null; const registerDevicePoll = usePolling ? usePollingMutation(api.devicePairings.register) : null;
const heartbeatPoll = usePolling ? usePollingMutation(api.devicePairings.heartbeat) : null; const heartbeatPoll = usePolling ? usePollingMutation(api.devicePairings.heartbeat) : null;
@@ -246,8 +257,10 @@
? usePollingMutation(api.photoRequests.markCaptureNow) ? usePollingMutation(api.photoRequests.markCaptureNow)
: null; : null;
const submitPhotoPoll = usePolling ? usePollingMutation(api.photoRequests.submitPhoto) : null; const submitPhotoPoll = usePolling ? usePollingMutation(api.photoRequests.submitPhoto) : null;
const markAcceptedPoll = usePolling ? usePollingMutation(api.photoRequests.markAccepted) : null;
const markRejectedPoll = usePolling ? usePollingMutation(api.photoRequests.markRejected) : null; const markRejectedPoll = usePolling ? usePollingMutation(api.photoRequests.markRejected) : null;
const acceptPhotoToDraftPoll = usePolling
? usePollingMutation(api.photoRequests.acceptPhotoToDraft)
: null;
$effect(() => { $effect(() => {
if (!chatId || !deviceId) return; if (!chatId || !deviceId) return;
@@ -467,21 +480,16 @@
function handlePreviewAccept() { function handlePreviewAccept() {
if (!previewPhoto || !chatId) return; if (!previewPhoto || !chatId) return;
const photo = { base64: previewPhoto.fullBase64, mediaType: previewPhoto.mediaType };
const reqId = previewPhoto.requestId; const reqId = previewPhoto.requestId;
previewPhoto = null; previewPhoto = null;
if (usePolling && addPhotoPoll && markAcceptedPoll) { if (usePolling && acceptPhotoToDraftPoll) {
addPhotoPoll({ chatId, deviceId, photo }); acceptPhotoToDraftPoll({ requestId: reqId, chatId, deviceId });
markAcceptedPoll({ requestId: reqId });
} else if (clientWs) { } else if (clientWs) {
clientWs.mutation(api.photoDrafts.addPhoto, { clientWs.mutation(api.photoRequests.acceptPhotoToDraft, {
requestId: reqId,
chatId, chatId,
deviceId, deviceId
photo
});
clientWs.mutation(api.photoRequests.markAccepted, {
requestId: reqId
}); });
} }
} }
@@ -661,6 +669,10 @@
{/if} {/if}
{#if hasCamera && isPaired} {#if hasCamera && isPaired}
<SilentCapture bind:this={silentCaptureRef} oncapture={handleSilentCapture} /> <SilentCapture
bind:this={silentCaptureRef}
oncapture={handleSilentCapture}
onunpair={handleUnpair}
/>
{/if} {/if}
</div> </div>

View File

@@ -4,5 +4,5 @@ import { defineConfig } from 'vite';
export default defineConfig({ export default defineConfig({
plugins: [tailwindcss(), sveltekit()], plugins: [tailwindcss(), sveltekit()],
server: { allowedHosts: ['porter-davidson-fibre-handhelds.trycloudflare.com'] } server: { allowedHosts: ['parameters-detection-adware-christ.trycloudflare.com'] }
}); });