feat(*): first mvp
@@ -15,3 +15,24 @@ repos:
        entry: uvx ty check
        language: python
        types_or: [ python, pyi ]

      - id: frontend-format
        name: frontend format
        entry: bash -c 'cd frontend && bun format'
        language: system
        files: ^frontend/
        pass_filenames: false

      - id: frontend-lint
        name: frontend lint
        entry: bash -c 'cd frontend && bun lint'
        language: system
        files: ^frontend/
        pass_filenames: false

      - id: frontend-check
        name: frontend check
        entry: bash -c 'cd frontend && bun check'
        language: system
        files: ^frontend/
        pass_filenames: false

@@ -1,5 +1,7 @@
BOT__TOKEN=<BOT__TOKEN>

SITE__URL=<SITE__URL>

LOG__LEVEL=INFO
LOG__LEVEL_EXTERNAL=WARNING
LOG__SHOW_TIME=false

@@ -12,6 +12,7 @@ dependencies = [
    "pydantic-ai-slim[google]>=1.44.0",
    "pydantic-settings>=2.12.0",
    "rich>=14.2.0",
    "xkcdpass>=1.19.0",
]

[build-system]

@@ -11,11 +11,20 @@ setup_logging()
async def runner() -> None:
    from . import handlers  # noqa: PLC0415
    from .common import bot, dp  # noqa: PLC0415
    from .sync import start_sync_listener  # noqa: PLC0415

    dp.include_routers(handlers.router)

    sync_task = asyncio.create_task(start_sync_listener(bot))

    await bot.delete_webhook(drop_pending_updates=True)
    await dp.start_polling(bot)

    try:
        await dp.start_polling(bot)
    finally:
        sync_task.cancel()
        with contextlib.suppress(asyncio.CancelledError):
            await sync_task


def plugins() -> None:

@@ -1,7 +1,9 @@
from aiogram import Router

from . import initialize, start
from . import apikey, chat, initialize, message, start

router = Router()

router.include_routers(start.router, initialize.router)
router.include_routers(
    start.router, initialize.router, apikey.router, chat.router, message.router
)

3 backend/src/bot/handlers/apikey/__init__.py Normal file
@@ -0,0 +1,3 @@
from .handler import router

__all__ = ["router"]

33 backend/src/bot/handlers/apikey/handler.py Normal file
@@ -0,0 +1,33 @@
from aiogram import Router, types
from aiogram.filters import Command
from convex import ConvexInt64

from utils import env
from utils.convex import ConvexClient

router = Router()
convex = ConvexClient(env.convex_url)


@router.message(Command("apikey"))
async def on_apikey(message: types.Message) -> None:
    if not message.from_user:
        return

    args = message.text.split(maxsplit=1) if message.text else []
    if len(args) < 2:  # noqa: PLR2004
        await message.answer(
            "Usage: /apikey YOUR_GEMINI_API_KEY\n\n"
            "Get your API key at https://aistudio.google.com/apikey"
        )
        return

    api_key = args[1].strip()

    user_id = await convex.mutation(
        "users:getOrCreate", {"telegramId": ConvexInt64(message.from_user.id)}
    )
    await convex.mutation("users:setApiKey", {"userId": user_id, "apiKey": api_key})

    await message.delete()
    await message.answer("✓ API key saved. Use /new to create a chat.")

3 backend/src/bot/handlers/chat/__init__.py Normal file
@@ -0,0 +1,3 @@
from .handlers import router

__all__ = ["router"]

155 backend/src/bot/handlers/chat/handlers.py Normal file
@@ -0,0 +1,155 @@
from aiogram import Router, types
from aiogram.filters import Command
from convex import ConvexInt64

from bot.modules.ai import PRESETS
from bot.modules.mnemonic import generate_mnemonic
from utils import env
from utils.convex import ConvexClient

router = Router()
convex = ConvexClient(env.convex_url)


@router.message(Command("new"))
async def on_new(message: types.Message) -> None:
    if not message.from_user:
        return

    user = await convex.query(
        "users:getByTelegramId", {"telegramId": ConvexInt64(message.from_user.id)}
    )

    if not user:
        await message.answer("Use /apikey first to set your Gemini API key.")
        return

    if not user.get("geminiApiKey"):
        await message.answer("Use /apikey first to set your Gemini API key.")
        return

    mnemonic = generate_mnemonic()
    chat_id = await convex.mutation(
        "chats:create", {"userId": user["_id"], "mnemonic": mnemonic}
    )
    await convex.mutation(
        "users:setActiveChat", {"userId": user["_id"], "chatId": chat_id}
    )

    url = f"{env.site.url}/{mnemonic}"
    await message.answer(f"New chat created!\n\n<code>{url}</code>", parse_mode="HTML")


@router.message(Command("clear"))
async def on_clear(message: types.Message) -> None:
    if not message.from_user:
        return

    user = await convex.query(
        "users:getByTelegramId", {"telegramId": ConvexInt64(message.from_user.id)}
    )

    if not user or not user.get("activeChatId"):
        await message.answer("No active chat. Use /new to create one.")
        return

    await convex.mutation("chats:clear", {"chatId": user["activeChatId"]})
    await message.answer("✓ Chat history cleared.")


@router.message(Command("prompt"))
async def on_prompt(message: types.Message) -> None:
    if not message.from_user:
        return

    args = message.text.split(maxsplit=1) if message.text else []
    if len(args) < 2:  # noqa: PLR2004
        await message.answer(
            "Usage: /prompt YOUR_SYSTEM_PROMPT\n\n"
            "Example: /prompt You are a helpful math tutor."
        )
        return

    prompt = args[1].strip()

    user_id = await convex.mutation(
        "users:getOrCreate", {"telegramId": ConvexInt64(message.from_user.id)}
    )
    await convex.mutation(
        "users:setSystemPrompt", {"userId": user_id, "prompt": prompt}
    )

    await message.answer("✓ System prompt updated.")


@router.message(Command("model"))
async def on_model(message: types.Message) -> None:
    if not message.from_user:
        return

    args = message.text.split(maxsplit=1) if message.text else []
    if len(args) < 2:  # noqa: PLR2004
        await message.answer(
            "Usage: /model MODEL_NAME\n\n"
            "Available models:\n"
            "• gemini-2.5-pro-preview-05-06 (default)\n"
            "• gemini-2.5-flash-preview-05-20\n"
            "• gemini-2.0-flash"
        )
        return

    model = args[1].strip()

    user_id = await convex.mutation(
        "users:getOrCreate", {"telegramId": ConvexInt64(message.from_user.id)}
    )
    await convex.mutation("users:setModel", {"userId": user_id, "model": model})

    await message.answer(f"✓ Model set to {model}")


@router.message(Command("presets"))
async def on_presets(message: types.Message) -> None:
    if not message.from_user:
        return

    lines = ["<b>Available presets:</b>\n"]
    lines.extend(f"• <code>/preset {name}</code>" for name in PRESETS)
    lines.append("\nUse /preset NAME to apply a preset.")
    await message.answer("\n".join(lines), parse_mode="HTML")


@router.message(Command("preset"))
async def on_preset(message: types.Message) -> None:
    if not message.from_user:
        return

    args = message.text.split(maxsplit=1) if message.text else []
    if len(args) < 2:  # noqa: PLR2004
        await message.answer(
            "Usage: /preset NAME\n\nUse /presets to see available presets."
        )
        return

    preset_name = args[1].strip().lower()
    preset = PRESETS.get(preset_name)

    if not preset:
        await message.answer(
            f"Unknown preset: {preset_name}\n\nUse /presets to see available presets."
        )
        return

    system_prompt, follow_up_prompt = preset

    user_id = await convex.mutation(
        "users:getOrCreate", {"telegramId": ConvexInt64(message.from_user.id)}
    )
    await convex.mutation(
        "users:setSystemPrompt", {"userId": user_id, "prompt": system_prompt}
    )
    await convex.mutation(
        "users:setFollowUpPrompt", {"userId": user_id, "prompt": follow_up_prompt}
    )

    await message.answer(f"✓ Preset '{preset_name}' applied.")

@@ -8,7 +8,16 @@ router = Router()
@router.startup()
async def startup(bot: Bot) -> None:
    await bot.set_my_commands(
        [types.BotCommand(command="/start", description="Start bot")]
        [
            types.BotCommand(command="/start", description="Start bot"),
            types.BotCommand(command="/apikey", description="Set Gemini API key"),
            types.BotCommand(command="/new", description="Create new chat"),
            types.BotCommand(command="/clear", description="Clear chat history"),
            types.BotCommand(command="/prompt", description="Set system prompt"),
            types.BotCommand(command="/model", description="Change AI model"),
            types.BotCommand(command="/presets", description="Show prompt presets"),
            types.BotCommand(command="/preset", description="Apply a preset"),
        ]
    )
    logger.info(f"[green]Started as[/] @{(await bot.me()).username}")

3 backend/src/bot/handlers/message/__init__.py Normal file
@@ -0,0 +1,3 @@
from .handler import router

__all__ = ["router"]

401 backend/src/bot/handlers/message/handler.py Normal file
@@ -0,0 +1,401 @@
import asyncio
import contextlib
import io
import time

from aiogram import Bot, F, Router, html, types
from aiogram.enums import ChatAction
from aiogram.types import KeyboardButton, ReplyKeyboardMarkup, ReplyKeyboardRemove
from convex import ConvexInt64

from bot.modules.ai import (
    SUMMARIZE_PROMPT,
    ImageData,
    create_follow_up_agent,
    create_text_agent,
    get_follow_ups,
    stream_response,
)
from utils import env
from utils.convex import ConvexClient

router = Router()
convex = ConvexClient(env.convex_url)

EDIT_THROTTLE_SECONDS = 1.0
TELEGRAM_MAX_LENGTH = 4096


def make_follow_up_keyboard(options: list[str]) -> ReplyKeyboardMarkup:
    buttons = [[KeyboardButton(text=opt)] for opt in options]
    return ReplyKeyboardMarkup(
        keyboard=buttons, resize_keyboard=True, one_time_keyboard=True
    )


def split_message(text: str, max_length: int = TELEGRAM_MAX_LENGTH) -> list[str]:
    if len(text) <= max_length:
        return [text]

    parts: list[str] = []
    while text:
        if len(text) <= max_length:
            parts.append(text)
            break

        split_pos = text.rfind("\n", 0, max_length)
        if split_pos == -1:
            split_pos = text.rfind(" ", 0, max_length)
        if split_pos == -1:
            split_pos = max_length

        parts.append(text[:split_pos])
        text = text[split_pos:].lstrip()

    return parts


class StreamingState:
    def __init__(self, bot: Bot, chat_id: int, message: types.Message) -> None:
        self.bot = bot
        self.chat_id = chat_id
        self.message = message
        self.last_edit_time = 0.0
        self.last_content = ""
        self.pending_content: str | None = None
        self._typing_task: asyncio.Task[None] | None = None

    async def start_typing(self) -> None:
        async def typing_loop() -> None:
            while True:
                await self.bot.send_chat_action(self.chat_id, ChatAction.TYPING)
                await asyncio.sleep(4)

        self._typing_task = asyncio.create_task(typing_loop())

    async def stop_typing(self) -> None:
        if self._typing_task:
            self._typing_task.cancel()
            with contextlib.suppress(asyncio.CancelledError):
                await self._typing_task

    async def update_message(self, content: str, *, force: bool = False) -> None:
        if content == self.last_content:
            return

        if len(content) > TELEGRAM_MAX_LENGTH:
            display_content = content[: TELEGRAM_MAX_LENGTH - 3] + "..."
        else:
            display_content = content

        now = time.monotonic()
        if force or (now - self.last_edit_time) >= EDIT_THROTTLE_SECONDS:
            with contextlib.suppress(Exception):
                await self.message.edit_text(html.quote(display_content))
                self.last_edit_time = now
                self.last_content = content
                self.pending_content = None
        else:
            self.pending_content = content

    async def flush(self) -> None:
        if self.pending_content and self.pending_content != self.last_content:
            await self.update_message(self.pending_content, force=True)


async def send_long_message(
    bot: Bot, chat_id: int, text: str, reply_markup: ReplyKeyboardMarkup | None = None
) -> None:
    parts = split_message(text)
    for i, part in enumerate(parts):
        is_last = i == len(parts) - 1
        await bot.send_message(
            chat_id, html.quote(part), reply_markup=reply_markup if is_last else None
        )


async def process_message_from_web(  # noqa: C901, PLR0915
    convex_user_id: str, text: str, bot: Bot, convex_chat_id: str
) -> None:
    user = await convex.query("users:getById", {"userId": convex_user_id})

    if not user or not user.get("geminiApiKey"):
        return

    tg_chat_id = user["telegramChatId"].value if user.get("telegramChatId") else None
    is_summarize = text == "/summarize"

    if tg_chat_id and not is_summarize:
        await bot.send_message(
            tg_chat_id, f"📱 {html.quote(text)}", reply_markup=ReplyKeyboardRemove()
        )

    api_key = user["geminiApiKey"]
    model_name = user.get("model", "gemini-3-pro-preview")

    assistant_message_id = await convex.mutation(
        "messages:create",
        {
            "chatId": convex_chat_id,
            "role": "assistant",
            "content": "",
            "source": "web",
            "isStreaming": True,
        },
    )

    history = await convex.query(
        "messages:getHistoryForAI", {"chatId": convex_chat_id, "limit": 50}
    )

    system_prompt = SUMMARIZE_PROMPT if is_summarize else user.get("systemPrompt")
    text_agent = create_text_agent(
        api_key=api_key, model_name=model_name, system_prompt=system_prompt
    )

    processing_msg = None
    state = None
    if tg_chat_id:
        processing_msg = await bot.send_message(tg_chat_id, "...")
        state = StreamingState(bot, tg_chat_id, processing_msg)

    try:
        if state:
            await state.start_typing()

        async def on_chunk(content: str) -> None:
            if state:
                await state.update_message(content)
            await convex.mutation(
                "messages:update",
                {"messageId": assistant_message_id, "content": content},
            )

        if is_summarize:
            prompt_text = "Summarize what was done in this conversation."
            hist = history[:-2]
        else:
            prompt_text = text
            hist = history[:-1]

        final_answer = await stream_response(text_agent, prompt_text, hist, on_chunk)

        if state:
            await state.flush()

        full_history = [*history, {"role": "assistant", "content": final_answer}]
        follow_up_model = user.get("followUpModel", "gemini-2.5-flash-lite")
        follow_up_prompt = user.get("followUpPrompt")
        follow_up_agent = create_follow_up_agent(
            api_key=api_key, model_name=follow_up_model, system_prompt=follow_up_prompt
        )
        follow_ups = await get_follow_ups(follow_up_agent, full_history)

        if state:
            await state.stop_typing()

        await convex.mutation(
            "messages:update",
            {
                "messageId": assistant_message_id,
                "content": final_answer,
                "followUpOptions": follow_ups,
                "isStreaming": False,
            },
        )

        if tg_chat_id and processing_msg:
            with contextlib.suppress(Exception):
                await processing_msg.delete()
            keyboard = make_follow_up_keyboard(follow_ups)
            await send_long_message(bot, tg_chat_id, final_answer, keyboard)

    except Exception as e:  # noqa: BLE001
        if state:
            await state.stop_typing()
        error_msg = f"Error: {e}"
        await convex.mutation(
            "messages:update",
            {
                "messageId": assistant_message_id,
                "content": error_msg,
                "isStreaming": False,
            },
        )
        if tg_chat_id and processing_msg:
            with contextlib.suppress(Exception):
                truncated = html.quote(error_msg[:TELEGRAM_MAX_LENGTH])
                await processing_msg.edit_text(truncated)


async def process_message(
    user_id: int, text: str, bot: Bot, chat_id: int, image: ImageData | None = None
) -> None:
    user = await convex.query(
        "users:getByTelegramId", {"telegramId": ConvexInt64(user_id)}
    )

    if not user:
        await bot.send_message(chat_id, "Use /apikey first to set your Gemini API key.")
        return

    if not user.get("geminiApiKey"):
        await bot.send_message(chat_id, "Use /apikey first to set your Gemini API key.")
        return

    if not user.get("activeChatId"):
        await bot.send_message(chat_id, "Use /new first to create a chat.")
        return

    active_chat_id = user["activeChatId"]
    api_key = user["geminiApiKey"]
    model_name = user.get("model", "gemini-3-pro-preview")

    await convex.mutation(
        "messages:create",
        {
            "chatId": active_chat_id,
            "role": "user",
            "content": text,
            "source": "telegram",
        },
    )

    assistant_message_id = await convex.mutation(
        "messages:create",
        {
            "chatId": active_chat_id,
            "role": "assistant",
            "content": "",
            "source": "telegram",
            "isStreaming": True,
        },
    )

    history = await convex.query(
        "messages:getHistoryForAI", {"chatId": active_chat_id, "limit": 50}
    )

    text_agent = create_text_agent(
        api_key=api_key, model_name=model_name, system_prompt=user.get("systemPrompt")
    )

    processing_msg = await bot.send_message(chat_id, "...")
    state = StreamingState(bot, chat_id, processing_msg)

    try:
        await state.start_typing()

        async def on_chunk(content: str) -> None:
            await state.update_message(content)
            await convex.mutation(
                "messages:update",
                {"messageId": assistant_message_id, "content": content},
            )

        final_answer = await stream_response(
            text_agent, text, history[:-2], on_chunk, image=image
        )

        await state.flush()

        full_history = [*history[:-1], {"role": "assistant", "content": final_answer}]
        follow_up_model = user.get("followUpModel", "gemini-2.5-flash-lite")
        follow_up_prompt = user.get("followUpPrompt")
        follow_up_agent = create_follow_up_agent(
            api_key=api_key, model_name=follow_up_model, system_prompt=follow_up_prompt
        )
        follow_ups = await get_follow_ups(follow_up_agent, full_history, image=image)

        await state.stop_typing()

        await convex.mutation(
            "messages:update",
            {
                "messageId": assistant_message_id,
                "content": final_answer,
                "followUpOptions": follow_ups,
                "isStreaming": False,
            },
        )

        with contextlib.suppress(Exception):
            await processing_msg.delete()

        keyboard = make_follow_up_keyboard(follow_ups)
        await send_long_message(bot, chat_id, final_answer, keyboard)

    except Exception as e:  # noqa: BLE001
        await state.stop_typing()
        error_msg = f"Error: {e}"
        await convex.mutation(
            "messages:update",
            {
                "messageId": assistant_message_id,
                "content": error_msg,
                "isStreaming": False,
            },
        )
        with contextlib.suppress(Exception):
            await processing_msg.edit_text(html.quote(error_msg[:TELEGRAM_MAX_LENGTH]))


async def send_to_telegram(user_id: int, text: str, bot: Bot) -> None:
    user = await convex.query(
        "users:getByTelegramId", {"telegramId": ConvexInt64(user_id)}
    )
    if not user or not user.get("telegramChatId"):
        return

    tg_chat_id = user["telegramChatId"]
    await bot.send_message(
        tg_chat_id, f"📱 {html.quote(text)}", reply_markup=ReplyKeyboardRemove()
    )


@router.message(F.text & ~F.text.startswith("/"))
async def on_text_message(message: types.Message, bot: Bot) -> None:
    if not message.from_user or not message.text:
        return
    await convex.mutation(
        "users:getOrCreate",
        {
            "telegramId": ConvexInt64(message.from_user.id),
            "telegramChatId": ConvexInt64(message.chat.id),
        },
    )
    await process_message(message.from_user.id, message.text, bot, message.chat.id)


@router.message(F.photo)
async def on_photo_message(message: types.Message, bot: Bot) -> None:
    if not message.from_user or not message.photo:
        return

    await convex.mutation(
        "users:getOrCreate",
        {
            "telegramId": ConvexInt64(message.from_user.id),
            "telegramChatId": ConvexInt64(message.chat.id),
        },
    )

    caption = message.caption or "Process the image according to your task"
    photo = message.photo[-1]

    file = await bot.get_file(photo.file_id)
    if not file.file_path:
        await message.answer("Failed to get photo.")
        return

    buffer = io.BytesIO()
    await bot.download_file(file.file_path, buffer)
    image_bytes = buffer.getvalue()

    ext = file.file_path.rsplit(".", 1)[-1].lower()
    media_type = f"image/{ext}" if ext in ("png", "gif", "webp") else "image/jpeg"
    image = ImageData(data=image_bytes, media_type=media_type)

    await process_message(
        message.from_user.id, caption, bot, message.chat.id, image=image
    )

@@ -3,7 +3,23 @@ from aiogram.filters import CommandStart

router = Router()

WELCOME_MESSAGE = """
<b>Welcome to AI Chat!</b>

Get started:
1. /apikey YOUR_KEY — Set your Gemini API key
2. /new — Create a new chat and get your Watch URL

Commands:
• /clear — Clear chat history
• /prompt — Set custom system prompt
• /model — Change AI model
• /presets — Show available presets

Get your API key at https://aistudio.google.com/apikey
""".strip()


@router.message(CommandStart())
async def on_start(message: types.Message) -> None:
    await message.answer("hi")
    await message.answer(WELCOME_MESSAGE, parse_mode="HTML")

0 backend/src/bot/modules/__init__.py Normal file

21 backend/src/bot/modules/ai/__init__.py Normal file
@@ -0,0 +1,21 @@
from .agent import (
    ImageData,
    StreamCallback,
    create_follow_up_agent,
    create_text_agent,
    get_follow_ups,
    stream_response,
)
from .prompts import DEFAULT_FOLLOW_UP, PRESETS, SUMMARIZE_PROMPT

__all__ = [
    "DEFAULT_FOLLOW_UP",
    "PRESETS",
    "SUMMARIZE_PROMPT",
    "ImageData",
    "StreamCallback",
    "create_follow_up_agent",
    "create_text_agent",
    "get_follow_ups",
    "stream_response",
]

115 backend/src/bot/modules/ai/agent.py Normal file
@@ -0,0 +1,115 @@
from collections.abc import Awaitable, Callable
from dataclasses import dataclass

from pydantic_ai import (
    Agent,
    BinaryContent,
    ModelMessage,
    ModelRequest,
    ModelResponse,
    TextPart,
    UserPromptPart,
)
from pydantic_ai.models.google import GoogleModel
from pydantic_ai.providers.google import GoogleProvider

from .models import FollowUpOptions
from .prompts import DEFAULT_FOLLOW_UP

StreamCallback = Callable[[str], Awaitable[None]]


@dataclass
class ImageData:
    data: bytes
    media_type: str


LATEX_INSTRUCTION = "For math, use LaTeX: $...$ inline, $$...$$ display."

DEFAULT_SYSTEM_PROMPT = (
    "You are a helpful AI assistant. Provide clear, concise answers."
)


def create_text_agent(
    api_key: str,
    model_name: str = "gemini-3-pro-preview",
    system_prompt: str | None = None,
) -> Agent[None, str]:
    provider = GoogleProvider(api_key=api_key)
    model = GoogleModel(model_name, provider=provider)
    base_prompt = system_prompt or DEFAULT_SYSTEM_PROMPT
    full_prompt = f"{base_prompt} {LATEX_INSTRUCTION}"
    return Agent(model, system_prompt=full_prompt)


def create_follow_up_agent(
    api_key: str,
    model_name: str = "gemini-2.5-flash-lite",
    system_prompt: str | None = None,
) -> Agent[None, FollowUpOptions]:
    provider = GoogleProvider(api_key=api_key)
    model = GoogleModel(model_name, provider=provider)
    prompt = system_prompt or DEFAULT_FOLLOW_UP
    return Agent(model, output_type=FollowUpOptions, system_prompt=prompt)


def build_message_history(history: list[dict[str, str]]) -> list[ModelMessage]:
    messages: list[ModelMessage] = []
    for msg in history:
        if msg["role"] == "user":
            messages.append(
                ModelRequest(parts=[UserPromptPart(content=msg["content"])])
            )
        else:
            messages.append(ModelResponse(parts=[TextPart(content=msg["content"])]))
    return messages


async def stream_response(  # noqa: PLR0913
    text_agent: Agent[None, str],
    message: str,
    history: list[dict[str, str]] | None = None,
    on_chunk: StreamCallback | None = None,
    image: ImageData | None = None,
    images: list[ImageData] | None = None,
) -> str:
    message_history = build_message_history(history) if history else None

    all_images = images or ([image] if image else [])

    if all_images:
        prompt: list[str | BinaryContent] = [message]
        prompt.extend(
            BinaryContent(data=img.data, media_type=img.media_type)
            for img in all_images
        )
    else:
        prompt = message  # type: ignore[assignment]

    stream = text_agent.run_stream(prompt, message_history=message_history)
    async with stream as result:
        async for text in result.stream_text():
            if on_chunk:
                await on_chunk(text)
        return await result.get_output()


async def get_follow_ups(
    follow_up_agent: Agent[None, FollowUpOptions],
    history: list[dict[str, str]],
    image: ImageData | None = None,
) -> list[str]:
    message_history = build_message_history(history) if history else None

    if image:
        prompt: list[str | BinaryContent] = [
            "Suggest follow-up options based on this conversation and image.",
            BinaryContent(data=image.data, media_type=image.media_type),
        ]
    else:
        prompt = "Suggest follow-up questions based on this conversation."  # type: ignore[assignment]

    result = await follow_up_agent.run(prompt, message_history=message_history)
    return result.output["options"]

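For reference, a minimal sketch of how these helpers are meant to be wired together. This is illustrative only (not part of the commit); the API key is a placeholder and the chunk callback simply prints the accumulated text.

# Illustrative only - not part of this commit.
import asyncio

from bot.modules.ai import create_text_agent, stream_response


async def demo() -> None:
    agent = create_text_agent(api_key="YOUR_GEMINI_API_KEY")  # hypothetical key

    async def on_chunk(partial: str) -> None:
        # Called with the accumulated answer text after each streamed chunk.
        print(partial)

    answer = await stream_response(agent, "Explain streaming in one sentence.", None, on_chunk)
    print(answer)


asyncio.run(demo())
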
10 backend/src/bot/modules/ai/models.py Normal file
@@ -0,0 +1,10 @@
from typing import TypedDict


class AIResponse(TypedDict):
    answer: str
    follow_up_options: list[str]


class FollowUpOptions(TypedDict):
    options: list[str]

37 backend/src/bot/modules/ai/prompts.py Normal file
@@ -0,0 +1,37 @@
EXAM_SYSTEM = """You help solve problem sets and exams.

When you receive an IMAGE with problems:
- Give HINTS in Russian for each problem
- Focus on key insights and potential difficulties,
  give all formulas that will be helpful
- Be quite concise, but include all needed hints - this will be viewed on Apple Watch
- Format: info needed to solve each problem or "unstuck" while solving

When asked for DETAILS on a specific problem (or a problem number):
- Provide a full, structured solution in English
- Academic style, as it would be written in a notebook
- Step by step, clean, no fluff"""

EXAM_FOLLOW_UP = """You see a problem set image. List available problem numbers.
Output only the numbers that exist in the image, like: 1, 2, 3, 4, 5
If problems have letters (a, b, c), list them as: 1a, 1b, 2a, etc.
Keep it minimal - just the identifiers.
Then, if applicable, suggest some possible follow-ups for the conversation."""

DEFAULT_FOLLOW_UP = (
    "Based on the conversation, suggest 3 short follow-up questions "
    "the user might want to ask. Be concise, each under 50 chars."
)

SUMMARIZE_PROMPT = """You are a summarization agent. You may receive:
1. Images
2. Conversation history showing what was discussed/solved

Summarize VERY briefly:
- Which problems were solved
- Key results or answers found
- What's left to do

Max 2-3 sentences. This is for Apple Watch display."""

PRESETS: dict[str, tuple[str, str]] = {"exam": (EXAM_SYSTEM, EXAM_FOLLOW_UP)}

3 backend/src/bot/modules/mnemonic/__init__.py Normal file
@@ -0,0 +1,3 @@
from .generator import generate_mnemonic

__all__ = ["generate_mnemonic"]

8 backend/src/bot/modules/mnemonic/generator.py Normal file
@@ -0,0 +1,8 @@
from xkcdpass import xkcd_password as xp

_wordfile = xp.locate_wordfile()
_wordlist = xp.generate_wordlist(wordfile=_wordfile, min_length=4, max_length=6)


def generate_mnemonic(word_count: int = 3, separator: str = "-") -> str:
    return xp.generate_xkcdpassword(_wordlist, numwords=word_count, delimiter=separator)

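A quick sketch of the expected output shape. Illustrative only (not part of the commit); the actual words depend on the local xkcdpass wordlist.

# Illustrative only - not part of this commit.
from bot.modules.mnemonic import generate_mnemonic

slug = generate_mnemonic()
# Three short dictionary words joined by "-", e.g. something like "lunar-brick-opera".
# The slug doubles as the public chat URL path used by /new.
print(slug)
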
58 backend/src/bot/sync.py Normal file
@@ -0,0 +1,58 @@
import asyncio

from aiogram import Bot

from bot.handlers.message.handler import process_message_from_web
from utils import env
from utils.convex import ConvexClient
from utils.logging import logger

convex = ConvexClient(env.convex_url)


background_tasks = set()


async def start_sync_listener(bot: Bot) -> None:
    logger.info("Starting Convex sync listener...")
    processed_ids: set[str] = set()

    sub = convex.subscribe("pendingGenerations:list", {})

    try:
        async for pending_list in sub:
            for item in pending_list:
                item_id = item["_id"]
                if item_id in processed_ids:
                    continue

                processed_ids.add(item_id)
                logger.info(f"Processing pending generation: {item_id}")

                task = asyncio.create_task(
                    handle_pending_generation(bot, item, item_id)
                )
                background_tasks.add(task)
                task.add_done_callback(background_tasks.discard)

    except asyncio.CancelledError:
        logger.info("Sync listener cancelled")
        raise
    except Exception as e:  # noqa: BLE001
        logger.error(f"Sync listener error: {e}")
    finally:
        sub.unsubscribe()


async def handle_pending_generation(bot: Bot, item: dict, item_id: str) -> None:
    try:
        await process_message_from_web(
            convex_user_id=item["userId"],
            text=item["userMessage"],
            bot=bot,
            convex_chat_id=item["chatId"],
        )
    except Exception as e:  # noqa: BLE001
        logger.error(f"Error processing {item_id}: {e}")
    finally:
        await convex.mutation("pendingGenerations:remove", {"id": item_id})

3 backend/src/utils/convex/__init__.py Normal file
@@ -0,0 +1,3 @@
from .client import ConvexClient

__all__ = ["ConvexClient"]

21 backend/src/utils/convex/client.py Normal file
@@ -0,0 +1,21 @@
import asyncio
from typing import Any

from convex import ConvexClient as SyncConvexClient


class ConvexClient:
    def __init__(self, url: str) -> None:
        self._client = SyncConvexClient(url)

    async def query(self, name: str, args: dict[str, Any] | None = None) -> Any:  # noqa: ANN401
        return await asyncio.to_thread(self._client.query, name, args or {})

    async def mutation(self, name: str, args: dict[str, Any] | None = None) -> Any:  # noqa: ANN401
        return await asyncio.to_thread(self._client.mutation, name, args or {})

    async def action(self, name: str, args: dict[str, Any] | None = None) -> Any:  # noqa: ANN401
        return await asyncio.to_thread(self._client.action, name, args or {})

    def subscribe(self, name: str, args: dict[str, Any] | None = None) -> Any:  # noqa: ANN401
        return self._client.subscribe(name, args or {})

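A minimal sketch of how this wrapper is used from async handler code. Illustrative only (not part of the commit); the deployment URL, function name, and arguments are placeholders.

# Illustrative only - not part of this commit.
import asyncio

from utils.convex import ConvexClient


async def main() -> None:
    convex = ConvexClient("https://convex.example.com")  # hypothetical deployment URL
    # The blocking convex call runs in a worker thread via asyncio.to_thread,
    # so the aiogram event loop is never blocked while waiting on the backend.
    user = await convex.query("users:getById", {"userId": "some-user-id"})
    print(user)


asyncio.run(main())
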
@@ -6,6 +6,10 @@ class BotSettings(BaseSettings):
    token: SecretStr


class SiteSettings(BaseSettings):
    url: str = Field(default="https://localhost")


class LogSettings(BaseSettings):
    level: str = "INFO"
    level_external: str = "WARNING"
@@ -15,6 +19,7 @@ class LogSettings(BaseSettings):

class Settings(BaseSettings):
    bot: BotSettings
    site: SiteSettings
    log: LogSettings

    convex_url: str = Field(validation_alias=AliasChoices("CONVEX_SELF_HOSTED_URL"))

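For context, a sketch of how the `.env` names from the earlier hunk map onto these nested settings, assuming the project's BaseSettings are configured with a `__` nested-environment delimiter (that configuration is not shown in this hunk, so treat it as an assumption).

# Illustrative only - not part of this commit; assumes env_nested_delimiter="__".
import os

from utils.config import Settings  # hypothetical module path for the Settings class above

os.environ["BOT__TOKEN"] = "123:abc"                          # hypothetical token
os.environ["SITE__URL"] = "https://example.com"
os.environ["CONVEX_SELF_HOSTED_URL"] = "https://convex.example.com"

settings = Settings()
print(settings.bot.token.get_secret_value())  # "123:abc"
print(settings.site.url)                      # "https://example.com"
print(settings.convex_url)                    # "https://convex.example.com"
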
11 backend/uv.lock generated
@@ -157,6 +157,7 @@ dependencies = [
|
||||
{ name = "pydantic-ai-slim", extra = ["google"] },
|
||||
{ name = "pydantic-settings" },
|
||||
{ name = "rich" },
|
||||
{ name = "xkcdpass" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
@@ -166,6 +167,7 @@ requires-dist = [
|
||||
{ name = "pydantic-ai-slim", extras = ["google"], specifier = ">=1.44.0" },
|
||||
{ name = "pydantic-settings", specifier = ">=2.12.0" },
|
||||
{ name = "rich", specifier = ">=14.2.0" },
|
||||
{ name = "xkcdpass", specifier = ">=1.19.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -934,6 +936,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xkcdpass"
|
||||
version = "1.30.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/18/98/bdd7df66d995eab38887a8eb0afb023750b0c590eb7d8545a7b722f683ef/xkcdpass-1.30.0.tar.gz", hash = "sha256:8a3a6b60255da40d0e5c812458280278c82d2c1cb90e48afbd6777dbbf8795c3", size = 2763380, upload-time = "2026-01-11T16:09:15.567Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/be/ea93adc1b4597b62c236d61dc6cf0e26ca8a729cb5afae4dc5acc5b33fa8/xkcdpass-1.30.0-py3-none-any.whl", hash = "sha256:3653a4a1e13de230808bcaf11f8c04207a5d3df8e2f7e1de698e11c262b5b797", size = 2746372, upload-time = "2026-01-12T14:48:30.627Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yarl"
|
||||
version = "1.22.0"
|
||||
|
||||
@@ -13,19 +13,16 @@
}

<DOMAIN> {
    handle /convex* {
        uri strip_prefix /convex
    handle /api/check_admin_key {
        reverse_proxy stealth-ai-relay-convex:3210
    }

    handle /convex-http* {
        uri strip_prefix /convex-http
        reverse_proxy stealth-ai-relay-convex:3211
    handle_path /convex/* {
        reverse_proxy stealth-ai-relay-convex:3210
    }

    handle /convex-dashboard* {
        uri strip_prefix /convex-dashboard
        reverse_proxy stealth-ai-relay-convex-dashboard:6791
    handle_path /convex-http/* {
        reverse_proxy stealth-ai-relay-convex:3211
    }

    handle {

2 frontend/.gitignore vendored
@@ -23,4 +23,4 @@ vite.config.js.timestamp-*
vite.config.ts.timestamp-*

# Convex
src/convex/_generated
src/lib/convex/_generated

@@ -7,6 +7,8 @@
    "dependencies": {
      "convex": "^1.31.5",
      "convex-svelte": "^0.0.12",
      "marked": "^17.0.1",
      "mathjax-full": "^3.2.2",
    },
    "devDependencies": {
      "@eslint/compat": "^1.4.0",
@@ -283,6 +285,8 @@
|
||||
|
||||
"@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.53.1", "", { "dependencies": { "@typescript-eslint/types": "8.53.1", "eslint-visitor-keys": "^4.2.1" } }, "sha512-oy+wV7xDKFPRyNggmXuZQSBzvoLnpmJs+GhzRhPjrxl2b/jIlyjVokzm47CZCDUdXKr2zd7ZLodPfOBpOPyPlg=="],
|
||||
|
||||
"@xmldom/xmldom": ["@xmldom/xmldom@0.9.8", "", {}, "sha512-p96FSY54r+WJ50FIOsCOjyj/wavs8921hG5+kVMmZgKcvIKxMXHTrjNJvRgWa/zuX3B6t2lijLNFaOyuxUH+2A=="],
|
||||
|
||||
"acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="],
|
||||
|
||||
"acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="],
|
||||
@@ -315,6 +319,8 @@
|
||||
|
||||
"color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
|
||||
|
||||
"commander": ["commander@13.1.0", "", {}, "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw=="],
|
||||
|
||||
"concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="],
|
||||
|
||||
"convex": ["convex@1.31.5", "", { "dependencies": { "esbuild": "0.27.0", "prettier": "^3.0.0" }, "peerDependencies": { "@auth0/auth0-react": "^2.0.1", "@clerk/clerk-react": "^4.12.8 || ^5.0.0", "react": "^18.0.0 || ^19.0.0-0 || ^19.0.0" }, "optionalPeers": ["@auth0/auth0-react", "@clerk/clerk-react", "react"], "bin": { "convex": "bin/main.js" } }, "sha512-E1IuJKFwMCHDToNGukBPs6c7RFaarR3t8chLF9n98TM5/Tgmj8lM6l7sKM1aJ3VwqGaB4wbeUAPY8osbCOXBhQ=="],
|
||||
@@ -353,6 +359,8 @@
|
||||
|
||||
"eslint-visitor-keys": ["eslint-visitor-keys@4.2.1", "", {}, "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ=="],
|
||||
|
||||
"esm": ["esm@3.2.25", "", {}, "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA=="],
|
||||
|
||||
"esm-env": ["esm-env@1.2.2", "", {}, "sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA=="],
|
||||
|
||||
"espree": ["espree@10.4.0", "", { "dependencies": { "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^4.2.1" } }, "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ=="],
|
||||
@@ -459,8 +467,16 @@
|
||||
|
||||
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
|
||||
|
||||
"marked": ["marked@17.0.1", "", { "bin": { "marked": "bin/marked.js" } }, "sha512-boeBdiS0ghpWcSwoNm/jJBwdpFaMnZWRzjA6SkUMYb40SVaN1x7mmfGKp0jvexGcx+7y2La5zRZsYFZI6Qpypg=="],
|
||||
|
||||
"mathjax-full": ["mathjax-full@3.2.2", "", { "dependencies": { "esm": "^3.2.25", "mhchemparser": "^4.1.0", "mj-context-menu": "^0.6.1", "speech-rule-engine": "^4.0.6" } }, "sha512-+LfG9Fik+OuI8SLwsiR02IVdjcnRCy5MufYLi0C3TdMT56L/pjB0alMVGgoWJF8pN9Rc7FESycZB9BMNWIid5w=="],
|
||||
|
||||
"mhchemparser": ["mhchemparser@4.2.1", "", {}, "sha512-kYmyrCirqJf3zZ9t/0wGgRZ4/ZJw//VwaRVGA75C4nhE60vtnIzhl9J9ndkX/h6hxSN7pjg/cE0VxbnNM+bnDQ=="],
|
||||
|
||||
"minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="],
|
||||
|
||||
"mj-context-menu": ["mj-context-menu@0.6.1", "", {}, "sha512-7NO5s6n10TIV96d4g2uDpG7ZDpIhMh0QNfGdJw/W47JswFcosz457wqz/b5sAKvl12sxINGFCn80NZHKwxQEXA=="],
|
||||
|
||||
"mri": ["mri@1.2.0", "", {}, "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA=="],
|
||||
|
||||
"mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="],
|
||||
@@ -533,6 +549,8 @@
|
||||
|
||||
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||
|
||||
"speech-rule-engine": ["speech-rule-engine@4.1.2", "", { "dependencies": { "@xmldom/xmldom": "0.9.8", "commander": "13.1.0", "wicked-good-xpath": "1.3.0" }, "bin": { "sre": "bin/sre" } }, "sha512-S6ji+flMEga+1QU79NDbwZ8Ivf0S/MpupQQiIC0rTpU/ZTKgcajijJJb1OcByBQDjrXCN1/DJtGz4ZJeBMPGJw=="],
|
||||
|
||||
"strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="],
|
||||
|
||||
"supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="],
|
||||
@@ -575,6 +593,8 @@
|
||||
|
||||
"which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
|
||||
|
||||
"wicked-good-xpath": ["wicked-good-xpath@1.3.0", "", {}, "sha512-Gd9+TUn5nXdwj/hFsPVx5cuHHiF5Bwuc30jZ4+ronF1qHK5O7HD0sgmXWSEgwKquT3ClLoKPVbO6qGwVwLzvAw=="],
|
||||
|
||||
"word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="],
|
||||
|
||||
"yaml": ["yaml@1.10.2", "", {}, "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg=="],
|
||||
|
||||
@@ -1,3 +1,3 @@
{
  "functions": "src/convex/"
  "functions": "src/lib/convex/"
}

@@ -12,6 +12,7 @@ const gitignorePath = fileURLToPath(new URL('./.gitignore', import.meta.url));

export default defineConfig(
  includeIgnoreFile(gitignorePath),
  { ignores: ['**/_generated/**'] },
  js.configs.recommended,
  ...ts.configs.recommended,
  ...svelte.configs.recommended,

@@ -37,6 +37,8 @@
  },
  "dependencies": {
    "convex": "^1.31.5",
    "convex-svelte": "^0.0.12"
    "convex-svelte": "^0.0.12",
    "marked": "^17.0.1",
    "mathjax-full": "^3.2.2"
  }
}

35 frontend/src/convex/_generated/api.d.ts vendored Normal file
@@ -0,0 +1,35 @@
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* Generated `api` utility.
|
||||
*
|
||||
* THIS CODE IS AUTOMATICALLY GENERATED.
|
||||
*
|
||||
* To regenerate, run `npx convex dev`.
|
||||
* @module
|
||||
*/
|
||||
|
||||
import type { ApiFromModules, FilterApi, FunctionReference } from 'convex/server';
|
||||
|
||||
declare const fullApi: ApiFromModules<{}>;
|
||||
|
||||
/**
|
||||
* A utility for referencing Convex functions in your app's public API.
|
||||
*
|
||||
* Usage:
|
||||
* ```js
|
||||
* const myFunctionReference = api.myModule.myFunction;
|
||||
* ```
|
||||
*/
|
||||
export declare const api: FilterApi<typeof fullApi, FunctionReference<any, 'public'>>;
|
||||
|
||||
/**
|
||||
* A utility for referencing Convex functions in your app's internal API.
|
||||
*
|
||||
* Usage:
|
||||
* ```js
|
||||
* const myFunctionReference = internal.myModule.myFunction;
|
||||
* ```
|
||||
*/
|
||||
export declare const internal: FilterApi<typeof fullApi, FunctionReference<any, 'internal'>>;
|
||||
|
||||
export declare const components: {};
|
||||
23 frontend/src/convex/_generated/api.js Normal file
@@ -0,0 +1,23 @@
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* Generated `api` utility.
|
||||
*
|
||||
* THIS CODE IS AUTOMATICALLY GENERATED.
|
||||
*
|
||||
* To regenerate, run `npx convex dev`.
|
||||
* @module
|
||||
*/
|
||||
|
||||
import { anyApi, componentsGeneric } from 'convex/server';
|
||||
|
||||
/**
|
||||
* A utility for referencing Convex functions in your app's API.
|
||||
*
|
||||
* Usage:
|
||||
* ```js
|
||||
* const myFunctionReference = api.myModule.myFunction;
|
||||
* ```
|
||||
*/
|
||||
export const api = anyApi;
|
||||
export const internal = anyApi;
|
||||
export const components = componentsGeneric();
|
||||
57 frontend/src/convex/_generated/dataModel.d.ts vendored Normal file
@@ -0,0 +1,57 @@
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* Generated data model types.
|
||||
*
|
||||
* THIS CODE IS AUTOMATICALLY GENERATED.
|
||||
*
|
||||
* To regenerate, run `npx convex dev`.
|
||||
* @module
|
||||
*/
|
||||
|
||||
import { AnyDataModel } from 'convex/server';
|
||||
import type { GenericId } from 'convex/values';
|
||||
|
||||
/**
|
||||
* No `schema.ts` file found!
|
||||
*
|
||||
* This generated code has permissive types like `Doc = any` because
|
||||
* Convex doesn't know your schema. If you'd like more type safety, see
|
||||
* https://docs.convex.dev/using/schemas for instructions on how to add a
|
||||
* schema file.
|
||||
*
|
||||
* After you change a schema, rerun codegen with `npx convex dev`.
|
||||
*/
|
||||
|
||||
/**
|
||||
* The names of all of your Convex tables.
|
||||
*/
|
||||
export type TableNames = string;
|
||||
|
||||
/**
|
||||
* The type of a document stored in Convex.
|
||||
*/
|
||||
export type Doc = any;
|
||||
|
||||
/**
|
||||
* An identifier for a document in Convex.
|
||||
*
|
||||
* Convex documents are uniquely identified by their `Id`, which is accessible
|
||||
* on the `_id` field. To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids).
|
||||
*
|
||||
* Documents can be loaded using `db.get(tableName, id)` in query and mutation functions.
|
||||
*
|
||||
* IDs are just strings at runtime, but this type can be used to distinguish them from other
|
||||
* strings when type checking.
|
||||
*/
|
||||
export type Id<TableName extends TableNames = TableNames> = GenericId<TableName>;
|
||||
|
||||
/**
|
||||
* A type describing your Convex data model.
|
||||
*
|
||||
* This type includes information about what tables you have, the type of
|
||||
* documents stored in those tables, and the indexes defined on them.
|
||||
*
|
||||
* This type is used to parameterize methods like `queryGeneric` and
|
||||
* `mutationGeneric` to make them type-safe.
|
||||
*/
|
||||
export type DataModel = AnyDataModel;
|
||||
143 frontend/src/convex/_generated/server.d.ts vendored Normal file
@@ -0,0 +1,143 @@
|
||||
/* eslint-disable */
|
||||
/**
|
||||
* Generated utilities for implementing server-side Convex query and mutation functions.
|
||||
*
|
||||
* THIS CODE IS AUTOMATICALLY GENERATED.
|
||||
*
|
||||
* To regenerate, run `npx convex dev`.
|
||||
* @module
|
||||
*/
|
||||
|
||||
import {
|
||||
ActionBuilder,
|
||||
HttpActionBuilder,
|
||||
MutationBuilder,
|
||||
QueryBuilder,
|
||||
GenericActionCtx,
|
||||
GenericMutationCtx,
|
||||
GenericQueryCtx,
|
||||
GenericDatabaseReader,
|
||||
GenericDatabaseWriter
|
||||
} from 'convex/server';
|
||||
import type { DataModel } from './dataModel.js';
|
||||
|
||||
/**
|
||||
* Define a query in this Convex app's public API.
|
||||
*
|
||||
* This function will be allowed to read your Convex database and will be accessible from the client.
|
||||
*
|
||||
* @param func - The query function. It receives a {@link QueryCtx} as its first argument.
|
||||
* @returns The wrapped query. Include this as an `export` to name it and make it accessible.
|
||||
*/
|
||||
export declare const query: QueryBuilder<DataModel, 'public'>;
|
||||
|
||||
/**
|
||||
* Define a query that is only accessible from other Convex functions (but not from the client).
|
||||
*
|
||||
* This function will be allowed to read from your Convex database. It will not be accessible from the client.
|
||||
*
|
||||
* @param func - The query function. It receives a {@link QueryCtx} as its first argument.
|
||||
* @returns The wrapped query. Include this as an `export` to name it and make it accessible.
|
||||
*/
|
||||
export declare const internalQuery: QueryBuilder<DataModel, 'internal'>;
|
||||
|
||||
/**
|
||||
* Define a mutation in this Convex app's public API.
|
||||
*
|
||||
* This function will be allowed to modify your Convex database and will be accessible from the client.
|
||||
*
|
||||
* @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
|
||||
* @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
|
||||
*/
|
||||
export declare const mutation: MutationBuilder<DataModel, 'public'>;
|
||||
|
||||
/**
|
||||
* Define a mutation that is only accessible from other Convex functions (but not from the client).
|
||||
*
|
||||
* This function will be allowed to modify your Convex database. It will not be accessible from the client.
|
||||
*
|
||||
* @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
|
||||
* @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
|
||||
*/
|
||||
export declare const internalMutation: MutationBuilder<DataModel, 'internal'>;
|
||||
|
||||
/**
|
||||
* Define an action in this Convex app's public API.
|
||||
*
|
||||
* An action is a function which can execute any JavaScript code, including non-deterministic
|
||||
* code and code with side-effects, like calling third-party services.
|
||||
* They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive.
|
||||
* They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}.
|
||||
*
|
||||
* @param func - The action. It receives an {@link ActionCtx} as its first argument.
|
||||
* @returns The wrapped action. Include this as an `export` to name it and make it accessible.
|
||||
*/
|
||||
export declare const action: ActionBuilder<DataModel, 'public'>;
|
||||
|
||||
/**
|
||||
* Define an action that is only accessible from other Convex functions (but not from the client).
|
||||
*
|
||||
* @param func - The function. It receives an {@link ActionCtx} as its first argument.
|
||||
* @returns The wrapped function. Include this as an `export` to name it and make it accessible.
|
||||
*/
|
||||
export declare const internalAction: ActionBuilder<DataModel, 'internal'>;
|
||||
|
||||
/**
|
||||
* Define an HTTP action.
|
||||
*
|
||||
* The wrapped function will be used to respond to HTTP requests received
|
||||
* by a Convex deployment if the requests matches the path and method where
|
||||
* this action is routed. Be sure to route your httpAction in `convex/http.js`.
|
||||
*
|
||||
* @param func - The function. It receives an {@link ActionCtx} as its first argument
|
||||
* and a Fetch API `Request` object as its second.
|
||||
* @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
|
||||
*/
|
||||
export declare const httpAction: HttpActionBuilder;
|
||||
|
||||
/**
|
||||
* A set of services for use within Convex query functions.
|
||||
*
|
||||
* The query context is passed as the first argument to any Convex query
|
||||
* function run on the server.
|
||||
*
|
||||
* This differs from the {@link MutationCtx} because all of the services are
|
||||
* read-only.
|
||||
*/
|
||||
export type QueryCtx = GenericQueryCtx<DataModel>;
|
||||
|
||||
/**
|
||||
* A set of services for use within Convex mutation functions.
|
||||
*
|
||||
* The mutation context is passed as the first argument to any Convex mutation
|
||||
* function run on the server.
|
||||
*/
|
||||
export type MutationCtx = GenericMutationCtx<DataModel>;
|
||||
|
||||
/**
|
||||
* A set of services for use within Convex action functions.
|
||||
*
|
||||
* The action context is passed as the first argument to any Convex action
|
||||
* function run on the server.
|
||||
*/
|
||||
export type ActionCtx = GenericActionCtx<DataModel>;
|
||||
|
||||
/**
|
||||
* An interface to read from the database within Convex query functions.
|
||||
*
|
||||
* The two entry points are {@link DatabaseReader.get}, which fetches a single
|
||||
* document by its {@link Id}, or {@link DatabaseReader.query}, which starts
|
||||
* building a query.
|
||||
*/
|
||||
export type DatabaseReader = GenericDatabaseReader<DataModel>;
|
||||
|
||||
/**
|
||||
* An interface to read from and write to the database within Convex mutation
|
||||
* functions.
|
||||
*
|
||||
* Convex guarantees that all writes within a single mutation are
|
||||
* executed atomically, so you never have to worry about partial writes leaving
|
||||
* your data in an inconsistent state. See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control)
|
||||
* for the guarantees Convex provides your functions.
|
||||
*/
|
||||
export type DatabaseWriter = GenericDatabaseWriter<DataModel>;
|
||||
93 frontend/src/convex/_generated/server.js Normal file
@@ -0,0 +1,93 @@
/* eslint-disable */
/**
 * Generated utilities for implementing server-side Convex query and mutation functions.
 *
 * THIS CODE IS AUTOMATICALLY GENERATED.
 *
 * To regenerate, run `npx convex dev`.
 * @module
 */

import {
  actionGeneric,
  httpActionGeneric,
  queryGeneric,
  mutationGeneric,
  internalActionGeneric,
  internalMutationGeneric,
  internalQueryGeneric
} from 'convex/server';

/**
 * Define a query in this Convex app's public API.
 *
 * This function will be allowed to read your Convex database and will be accessible from the client.
 *
 * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
 * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
 */
export const query = queryGeneric;

/**
 * Define a query that is only accessible from other Convex functions (but not from the client).
 *
 * This function will be allowed to read from your Convex database. It will not be accessible from the client.
 *
 * @param func - The query function. It receives a {@link QueryCtx} as its first argument.
 * @returns The wrapped query. Include this as an `export` to name it and make it accessible.
 */
export const internalQuery = internalQueryGeneric;

/**
 * Define a mutation in this Convex app's public API.
 *
 * This function will be allowed to modify your Convex database and will be accessible from the client.
 *
 * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
 * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
 */
export const mutation = mutationGeneric;

/**
 * Define a mutation that is only accessible from other Convex functions (but not from the client).
 *
 * This function will be allowed to modify your Convex database. It will not be accessible from the client.
 *
 * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument.
 * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible.
 */
export const internalMutation = internalMutationGeneric;

/**
 * Define an action in this Convex app's public API.
 *
 * An action is a function which can execute any JavaScript code, including non-deterministic
 * code and code with side-effects, like calling third-party services.
 * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive.
 * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}.
 *
 * @param func - The action. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped action. Include this as an `export` to name it and make it accessible.
 */
export const action = actionGeneric;

/**
 * Define an action that is only accessible from other Convex functions (but not from the client).
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument.
 * @returns The wrapped function. Include this as an `export` to name it and make it accessible.
 */
export const internalAction = internalActionGeneric;

/**
 * Define an HTTP action.
 *
 * The wrapped function will be used to respond to HTTP requests received
 * by a Convex deployment if the request matches the path and method where
 * this action is routed. Be sure to route your httpAction in `convex/http.js`.
 *
 * @param func - The function. It receives an {@link ActionCtx} as its first argument
 * and a Fetch API `Request` object as its second.
 * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up.
 */
export const httpAction = httpActionGeneric;
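These generated wrappers are what the hand-written Convex modules later in this commit (chats.ts, messages.ts, users.ts, pendingGenerations.ts) import from ./_generated/server. As a minimal sketch of the pattern, assuming a hypothetical extra module that is not part of this commit:

// convex/messageCount.ts -- hypothetical module, not in this commit.
import { v } from 'convex/values';
import { query } from './_generated/server';

// A public query: read-only database access, callable from the client.
export const countMessages = query({
  args: { chatId: v.id('chats') },
  returns: v.number(),
  handler: async (ctx, args) => {
    // Uses the real 'messages' table and 'by_chat_id' index defined in this commit's schema.
    const msgs = await ctx.db
      .query('messages')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .collect();
    return msgs.length;
  }
});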
35
frontend/src/lib/components/ChatInput.svelte
Normal file
@@ -0,0 +1,35 @@
<script lang="ts">
  interface Props {
    onsubmit: (message: string) => void;
    disabled?: boolean;
  }

  let { onsubmit, disabled = false }: Props = $props();
  let value = $state('');

  function handleSubmit(e: Event) {
    e.preventDefault();
    const trimmed = value.trim();
    if (trimmed && !disabled) {
      onsubmit(trimmed);
      value = '';
    }
  }
</script>

<form onsubmit={handleSubmit} class="flex gap-2">
  <input
    type="text"
    bind:value
    {disabled}
    placeholder="Message..."
    class="flex-1 rounded-lg bg-neutral-800 px-3 py-2 text-[11px] text-white placeholder-neutral-500 outline-none focus:ring-1 focus:ring-neutral-600"
  />
  <button
    type="submit"
    {disabled}
    class="rounded-lg bg-blue-600 px-3 py-2 text-[11px] text-white transition-colors hover:bg-blue-500 disabled:opacity-50"
  >
    Send
  </button>
</form>
47
frontend/src/lib/components/ChatMessage.svelte
Normal file
@@ -0,0 +1,47 @@
<script lang="ts">
  import { Marked } from 'marked';
  import LoadingDots from './LoadingDots.svelte';

  interface Props {
    role: 'user' | 'assistant';
    content: string;
    isStreaming?: boolean;
  }

  let { role, content, isStreaming = false }: Props = $props();

  const marked = new Marked({
    breaks: true,
    gfm: true
  });

  function processLatex(text: string): string {
    return text
      .replace(/\$\$(.*?)\$\$/gs, (_, tex) => {
        const encoded = encodeURIComponent(tex.trim());
        return `<img src="/service/latex?tex=${encoded}&display=1" alt="LaTeX" class="block my-1 max-h-12" />`;
      })
      .replace(/\$(.+?)\$/g, (_, tex) => {
        const encoded = encodeURIComponent(tex.trim());
        return `<img src="/service/latex?tex=${encoded}" alt="LaTeX" class="inline-block align-middle max-h-4" />`;
      });
  }

  function processContent(text: string): string {
    const withLatex = processLatex(text);
    return marked.parse(withLatex) as string;
  }
</script>

<div
  class="prose-mini w-full rounded-lg px-2.5 py-1.5 text-[11px] leading-relaxed {role === 'user'
    ? 'bg-blue-600 text-white'
    : 'bg-neutral-800 text-neutral-100'}"
>
  {#if isStreaming && !content}
    <LoadingDots />
  {:else}
    <!-- eslint-disable-next-line svelte/no-at-html-tags -->
    {@html processContent(content)}
  {/if}
</div>
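A concrete trace of the processLatex transformation above may help when reading the rest of the diff; the sample sentence is invented, but the encoding and the resulting markup follow directly from the code:

// Illustration only: what processLatex produces for an inline formula.
const sample = 'The area is $\\pi r^2$.';
// encodeURIComponent('\\pi r^2') === '%5Cpi%20r%5E2', so the $...$ span is rewritten to:
// <img src="/service/latex?tex=%5Cpi%20r%5E2" alt="LaTeX" class="inline-block align-middle max-h-4" />
// and only then is the whole string handed to marked.parse().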
28
frontend/src/lib/components/FollowUpButtons.svelte
Normal file
@@ -0,0 +1,28 @@
<script lang="ts">
  interface Props {
    options: string[];
    onselect: (option: string) => void;
  }

  let { options, onselect }: Props = $props();

  function processLatex(text: string): string {
    return text.replace(/\$(.+?)\$/g, (_, tex) => {
      const encoded = encodeURIComponent(tex.trim());
      return `<img src="/service/latex?tex=${encoded}" alt="LaTeX" class="inline-block align-middle max-h-3" />`;
    });
  }
</script>

<div class="flex flex-wrap gap-1.5">
  {#each options as option (option)}
    <button
      type="button"
      onclick={() => onselect(option)}
      class="rounded-full bg-neutral-800 px-2.5 py-1 text-[10px] text-neutral-200 transition-colors hover:bg-neutral-700 active:bg-neutral-600"
    >
      <!-- eslint-disable-next-line svelte/no-at-html-tags -->
      {@html processLatex(option)}
    </button>
  {/each}
</div>
8
frontend/src/lib/components/LoadingDots.svelte
Normal file
@@ -0,0 +1,8 @@
<script lang="ts">
</script>

<span class="inline-flex gap-1">
  <span class="h-1.5 w-1.5 animate-bounce rounded-full bg-current [animation-delay:-0.3s]"></span>
  <span class="h-1.5 w-1.5 animate-bounce rounded-full bg-current [animation-delay:-0.15s]"></span>
  <span class="h-1.5 w-1.5 animate-bounce rounded-full bg-current"></span>
</span>
98
frontend/src/lib/convex/chats.ts
Normal file
@@ -0,0 +1,98 @@
import { v } from 'convex/values';
import { mutation, query } from './_generated/server';

export const getByMnemonic = query({
  args: { mnemonic: v.string() },
  returns: v.union(
    v.object({
      _id: v.id('chats'),
      _creationTime: v.number(),
      userId: v.id('users'),
      mnemonic: v.string(),
      createdAt: v.number()
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    return await ctx.db
      .query('chats')
      .withIndex('by_mnemonic', (q) => q.eq('mnemonic', args.mnemonic))
      .unique();
  }
});

export const create = mutation({
  args: { userId: v.id('users'), mnemonic: v.string() },
  returns: v.id('chats'),
  handler: async (ctx, args) => {
    return await ctx.db.insert('chats', {
      userId: args.userId,
      mnemonic: args.mnemonic,
      createdAt: Date.now()
    });
  }
});

export const clear = mutation({
  args: { chatId: v.id('chats'), preserveImages: v.optional(v.boolean()) },
  returns: v.null(),
  handler: async (ctx, args) => {
    const messages = await ctx.db
      .query('messages')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .collect();

    for (const message of messages) {
      if (args.preserveImages && message.imageStorageId) {
        continue;
      }
      await ctx.db.delete(message._id);
    }
    return null;
  }
});

export const getWithUser = query({
  args: { mnemonic: v.string() },
  returns: v.union(
    v.object({
      chat: v.object({
        _id: v.id('chats'),
        _creationTime: v.number(),
        userId: v.id('users'),
        mnemonic: v.string(),
        createdAt: v.number()
      }),
      user: v.object({
        _id: v.id('users'),
        _creationTime: v.number(),
        telegramId: v.int64(),
        telegramChatId: v.optional(v.int64()),
        geminiApiKey: v.optional(v.string()),
        systemPrompt: v.optional(v.string()),
        followUpPrompt: v.optional(v.string()),
        model: v.string(),
        followUpModel: v.optional(v.string()),
        activeChatId: v.optional(v.id('chats'))
      })
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    const chat = await ctx.db
      .query('chats')
      .withIndex('by_mnemonic', (q) => q.eq('mnemonic', args.mnemonic))
      .unique();

    if (!chat) {
      return null;
    }

    const user = await ctx.db.get(chat.userId);
    if (!user) {
      return null;
    }

    return { chat, user };
  }
});
215
frontend/src/lib/convex/messages.ts
Normal file
@@ -0,0 +1,215 @@
import { v } from 'convex/values';
import { internalMutation, mutation, query } from './_generated/server';

export const listByChat = query({
  args: { chatId: v.id('chats') },
  returns: v.array(
    v.object({
      _id: v.id('messages'),
      _creationTime: v.number(),
      chatId: v.id('chats'),
      role: v.union(v.literal('user'), v.literal('assistant')),
      content: v.string(),
      imageStorageId: v.optional(v.id('_storage')),
      imageMediaType: v.optional(v.string()),
      followUpOptions: v.optional(v.array(v.string())),
      source: v.union(v.literal('telegram'), v.literal('web')),
      createdAt: v.number(),
      isStreaming: v.optional(v.boolean())
    })
  ),
  handler: async (ctx, args) => {
    return await ctx.db
      .query('messages')
      .withIndex('by_chat_id_and_created_at', (q) => q.eq('chatId', args.chatId))
      .order('asc')
      .collect();
  }
});

export const create = mutation({
  args: {
    chatId: v.id('chats'),
    role: v.union(v.literal('user'), v.literal('assistant')),
    content: v.string(),
    source: v.union(v.literal('telegram'), v.literal('web')),
    imageStorageId: v.optional(v.id('_storage')),
    imageMediaType: v.optional(v.string()),
    followUpOptions: v.optional(v.array(v.string())),
    isStreaming: v.optional(v.boolean())
  },
  returns: v.id('messages'),
  handler: async (ctx, args) => {
    const messageId = await ctx.db.insert('messages', {
      chatId: args.chatId,
      role: args.role,
      content: args.content,
      source: args.source,
      imageStorageId: args.imageStorageId,
      imageMediaType: args.imageMediaType,
      followUpOptions: args.followUpOptions,
      createdAt: Date.now(),
      isStreaming: args.isStreaming
    });

    if (args.source === 'web' && args.role === 'user') {
      const chat = await ctx.db.get(args.chatId);
      if (chat) {
        await ctx.db.insert('pendingGenerations', {
          userId: chat.userId,
          chatId: args.chatId,
          userMessage: args.content,
          createdAt: Date.now()
        });
      }
    }

    return messageId;
  }
});

export const update = mutation({
  args: {
    messageId: v.id('messages'),
    content: v.optional(v.string()),
    followUpOptions: v.optional(v.array(v.string())),
    isStreaming: v.optional(v.boolean())
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const updates: {
      content?: string;
      followUpOptions?: string[];
      isStreaming?: boolean;
    } = {};

    if (args.content !== undefined) {
      updates.content = args.content;
    }
    if (args.followUpOptions !== undefined) {
      updates.followUpOptions = args.followUpOptions;
    }
    if (args.isStreaming !== undefined) {
      updates.isStreaming = args.isStreaming;
    }

    await ctx.db.patch(args.messageId, updates);
    return null;
  }
});

export const getHistoryForAI = query({
  args: { chatId: v.id('chats'), limit: v.optional(v.number()) },
  returns: v.array(
    v.object({
      role: v.union(v.literal('user'), v.literal('assistant')),
      content: v.string()
    })
  ),
  handler: async (ctx, args) => {
    const messages = await ctx.db
      .query('messages')
      .withIndex('by_chat_id_and_created_at', (q) => q.eq('chatId', args.chatId))
      .order('asc')
      .collect();

    const limit = args.limit ?? 50;
    const limited = messages.slice(-limit);

    return limited.map((m) => ({
      role: m.role,
      content: m.content
    }));
  }
});

export const getLastAssistantMessage = query({
  args: { chatId: v.id('chats') },
  returns: v.union(
    v.object({
      _id: v.id('messages'),
      _creationTime: v.number(),
      chatId: v.id('chats'),
      role: v.union(v.literal('user'), v.literal('assistant')),
      content: v.string(),
      imageStorageId: v.optional(v.id('_storage')),
      imageMediaType: v.optional(v.string()),
      followUpOptions: v.optional(v.array(v.string())),
      source: v.union(v.literal('telegram'), v.literal('web')),
      createdAt: v.number(),
      isStreaming: v.optional(v.boolean())
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    const messages = await ctx.db
      .query('messages')
      .withIndex('by_chat_id_and_created_at', (q) => q.eq('chatId', args.chatId))
      .order('desc')
      .collect();

    return messages.find((m) => m.role === 'assistant') ?? null;
  }
});

export const generateUploadUrl = mutation({
  args: {},
  returns: v.string(),
  handler: async (ctx) => {
    return await ctx.storage.generateUploadUrl();
  }
});

export const getImageUrls = query({
  args: { chatId: v.id('chats') },
  returns: v.array(
    v.object({
      storageId: v.id('_storage'),
      mediaType: v.string(),
      url: v.union(v.string(), v.null())
    })
  ),
  handler: async (ctx, args) => {
    const messages = await ctx.db
      .query('messages')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .collect();

    const imageMessages = messages.filter((m) => m.imageStorageId && m.imageMediaType);
    const results = [];

    for (const msg of imageMessages) {
      if (msg.imageStorageId && msg.imageMediaType) {
        const url = await ctx.storage.getUrl(msg.imageStorageId);
        results.push({
          storageId: msg.imageStorageId,
          mediaType: msg.imageMediaType,
          url
        });
      }
    }

    return results;
  }
});

export const createWithImage = internalMutation({
  args: {
    chatId: v.id('chats'),
    content: v.string(),
    imageStorageId: v.id('_storage'),
    imageMediaType: v.string()
  },
  returns: v.id('messages'),
  handler: async (ctx, args) => {
    return await ctx.db.insert('messages', {
      chatId: args.chatId,
      role: 'user' as const,
      content: args.content,
      source: 'telegram' as const,
      imageStorageId: args.imageStorageId,
      imageMediaType: args.imageMediaType,
      createdAt: Date.now()
    });
  }
});
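One detail of create above worth calling out: a web-originated user message also enqueues a pendingGenerations row, which the bot side presumably drains in order to produce and store the assistant reply. A hedged sketch of such a consumer using the public Convex client API (the polling function and the CONVEX_URL environment variable are assumptions, not code from this commit):

// Hypothetical consumer of the pendingGenerations queue (sketch only).
import { ConvexHttpClient } from 'convex/browser';
import { api } from './_generated/api';

const client = new ConvexHttpClient(process.env.CONVEX_URL ?? '');

async function drainOnce(): Promise<void> {
  // pendingGenerations.list and .remove are the real functions defined below.
  const pending = await client.query(api.pendingGenerations.list, {});
  for (const job of pending) {
    // ...generate an assistant reply for job.userMessage here, then dequeue:
    await client.mutation(api.pendingGenerations.remove, { id: job._id });
  }
}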
45
frontend/src/lib/convex/pendingGenerations.ts
Normal file
@@ -0,0 +1,45 @@
import { v } from 'convex/values';
import { mutation, query } from './_generated/server';

export const list = query({
  args: {},
  returns: v.array(
    v.object({
      _id: v.id('pendingGenerations'),
      _creationTime: v.number(),
      userId: v.id('users'),
      chatId: v.id('chats'),
      userMessage: v.string(),
      createdAt: v.number()
    })
  ),
  handler: async (ctx) => {
    return await ctx.db.query('pendingGenerations').collect();
  }
});

export const create = mutation({
  args: {
    userId: v.id('users'),
    chatId: v.id('chats'),
    userMessage: v.string()
  },
  returns: v.id('pendingGenerations'),
  handler: async (ctx, args) => {
    return await ctx.db.insert('pendingGenerations', {
      userId: args.userId,
      chatId: args.chatId,
      userMessage: args.userMessage,
      createdAt: Date.now()
    });
  }
});

export const remove = mutation({
  args: { id: v.id('pendingGenerations') },
  returns: v.null(),
  handler: async (ctx, args) => {
    await ctx.db.delete(args.id);
    return null;
  }
});
42
frontend/src/lib/convex/schema.ts
Normal file
@@ -0,0 +1,42 @@
import { defineSchema, defineTable } from 'convex/server';
import { v } from 'convex/values';

export default defineSchema({
  users: defineTable({
    telegramId: v.int64(),
    telegramChatId: v.optional(v.int64()),
    geminiApiKey: v.optional(v.string()),
    systemPrompt: v.optional(v.string()),
    followUpPrompt: v.optional(v.string()),
    model: v.string(),
    followUpModel: v.optional(v.string()),
    activeChatId: v.optional(v.id('chats'))
  }).index('by_telegram_id', ['telegramId']),

  chats: defineTable({
    userId: v.id('users'),
    mnemonic: v.string(),
    createdAt: v.number()
  }).index('by_mnemonic', ['mnemonic']),

  messages: defineTable({
    chatId: v.id('chats'),
    role: v.union(v.literal('user'), v.literal('assistant')),
    content: v.string(),
    imageStorageId: v.optional(v.id('_storage')),
    imageMediaType: v.optional(v.string()),
    followUpOptions: v.optional(v.array(v.string())),
    source: v.union(v.literal('telegram'), v.literal('web')),
    createdAt: v.number(),
    isStreaming: v.optional(v.boolean())
  })
    .index('by_chat_id', ['chatId'])
    .index('by_chat_id_and_created_at', ['chatId', 'createdAt']),

  pendingGenerations: defineTable({
    userId: v.id('users'),
    chatId: v.id('chats'),
    userMessage: v.string(),
    createdAt: v.number()
  })
});
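To make the messages table definition concrete, an illustrative document under this schema could look like the following; all field values are invented for the example:

import type { Id } from './_generated/dataModel';

// Illustrative 'messages' document; values are made up, only the shape follows the schema.
const exampleMessage = {
  chatId: 'c123' as Id<'chats'>, // reference into the 'chats' table
  role: 'assistant' as const,
  content: 'The derivative of $x^2$ is $2x$.',
  followUpOptions: ['Why?', 'Show another example'],
  source: 'web' as const, // or 'telegram'
  createdAt: Date.now(), // stored alongside the system _creationTime
  isStreaming: false
};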
129
frontend/src/lib/convex/users.ts
Normal file
@@ -0,0 +1,129 @@
import { v } from 'convex/values';
import { mutation, query } from './_generated/server';

const DEFAULT_MODEL = 'gemini-3-pro-preview';

export const getById = query({
  args: { userId: v.id('users') },
  returns: v.union(
    v.object({
      _id: v.id('users'),
      _creationTime: v.number(),
      telegramId: v.int64(),
      telegramChatId: v.optional(v.int64()),
      geminiApiKey: v.optional(v.string()),
      systemPrompt: v.optional(v.string()),
      followUpPrompt: v.optional(v.string()),
      model: v.string(),
      followUpModel: v.optional(v.string()),
      activeChatId: v.optional(v.id('chats'))
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    return await ctx.db.get(args.userId);
  }
});

export const getByTelegramId = query({
  args: { telegramId: v.int64() },
  returns: v.union(
    v.object({
      _id: v.id('users'),
      _creationTime: v.number(),
      telegramId: v.int64(),
      telegramChatId: v.optional(v.int64()),
      geminiApiKey: v.optional(v.string()),
      systemPrompt: v.optional(v.string()),
      followUpPrompt: v.optional(v.string()),
      model: v.string(),
      followUpModel: v.optional(v.string()),
      activeChatId: v.optional(v.id('chats'))
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    return await ctx.db
      .query('users')
      .withIndex('by_telegram_id', (q) => q.eq('telegramId', args.telegramId))
      .unique();
  }
});

export const getOrCreate = mutation({
  args: { telegramId: v.int64(), telegramChatId: v.optional(v.int64()) },
  returns: v.id('users'),
  handler: async (ctx, args) => {
    const existing = await ctx.db
      .query('users')
      .withIndex('by_telegram_id', (q) => q.eq('telegramId', args.telegramId))
      .unique();

    if (existing) {
      if (args.telegramChatId && existing.telegramChatId !== args.telegramChatId) {
        await ctx.db.patch(existing._id, { telegramChatId: args.telegramChatId });
      }
      return existing._id;
    }

    return await ctx.db.insert('users', {
      telegramId: args.telegramId,
      telegramChatId: args.telegramChatId,
      model: DEFAULT_MODEL
    });
  }
});

export const setApiKey = mutation({
  args: { userId: v.id('users'), apiKey: v.string() },
  returns: v.null(),
  handler: async (ctx, args) => {
    await ctx.db.patch(args.userId, { geminiApiKey: args.apiKey });
    return null;
  }
});

export const setSystemPrompt = mutation({
  args: { userId: v.id('users'), prompt: v.string() },
  returns: v.null(),
  handler: async (ctx, args) => {
    await ctx.db.patch(args.userId, { systemPrompt: args.prompt });
    return null;
  }
});

export const setFollowUpPrompt = mutation({
  args: { userId: v.id('users'), prompt: v.string() },
  returns: v.null(),
  handler: async (ctx, args) => {
    await ctx.db.patch(args.userId, { followUpPrompt: args.prompt });
    return null;
  }
});

export const setModel = mutation({
  args: { userId: v.id('users'), model: v.string() },
  returns: v.null(),
  handler: async (ctx, args) => {
    await ctx.db.patch(args.userId, { model: args.model });
    return null;
  }
});

export const setFollowUpModel = mutation({
  args: { userId: v.id('users'), model: v.string() },
  returns: v.null(),
  handler: async (ctx, args) => {
    await ctx.db.patch(args.userId, { followUpModel: args.model });
    return null;
  }
});

export const setActiveChat = mutation({
  args: { userId: v.id('users'), chatId: v.id('chats') },
  returns: v.null(),
  handler: async (ctx, args) => {
    await ctx.db.patch(args.userId, { activeChatId: args.chatId });
    return null;
  }
});
@@ -1,8 +1,12 @@
<script lang="ts">
  import './layout.css';
  import favicon from '$lib/assets/favicon.svg';
  import { PUBLIC_CONVEX_URL } from '$env/static/public';
  import { setupConvex } from 'convex-svelte';

  let { children } = $props();

  setupConvex(PUBLIC_CONVEX_URL);
</script>

<svelte:head><link rel="icon" href={favicon} /></svelte:head>
@@ -1,2 +1 @@
-<h1>Welcome to SvelteKit</h1>
-<p>Visit <a href="https://svelte.dev/docs/kit">svelte.dev/docs/kit</a> to read the documentation</p>
+<h1>iykyk</h1>
100
frontend/src/routes/[mnemonic]/+page.svelte
Normal file
@@ -0,0 +1,100 @@
<script lang="ts">
  import { page } from '$app/state';
  import { useQuery, useConvexClient } from 'convex-svelte';
  import { api } from '$lib/convex/_generated/api';
  import ChatMessage from '$lib/components/ChatMessage.svelte';
  import ChatInput from '$lib/components/ChatInput.svelte';
  import FollowUpButtons from '$lib/components/FollowUpButtons.svelte';

  let mnemonic = $derived(page.params.mnemonic);
  const client = useConvexClient();

  const chatData = useQuery(api.chats.getWithUser, () => (mnemonic ? { mnemonic } : 'skip'));
  const messagesQuery = useQuery(api.messages.listByChat, () =>
    chatData.data?.chat?._id ? { chatId: chatData.data.chat._id } : 'skip'
  );

  let messages = $derived(messagesQuery.data ?? []);
  let lastMessage = $derived(messages[messages.length - 1]);
  let followUpOptions = $derived(
    lastMessage?.role === 'assistant' && lastMessage.followUpOptions
      ? lastMessage.followUpOptions
      : []
  );

  $effect(() => {
    if (messages.length) {
      window.scrollTo(0, document.body.scrollHeight);
    }
  });

  async function sendMessage(content: string) {
    const chat = chatData.data?.chat;
    if (!chat) return;

    await client.mutation(api.messages.create, {
      chatId: chat._id,
      role: 'user',
      content,
      source: 'web'
    });
  }

  async function summarize() {
    const chat = chatData.data?.chat;
    if (!chat) return;

    await client.mutation(api.messages.create, {
      chatId: chat._id,
      role: 'user',
      content: '/summarize',
      source: 'web'
    });
  }
</script>

<svelte:head>
  <title>Chat</title>
  <meta name="viewport" content="width=device-width, initial-scale=1" />
</svelte:head>

<div class="min-h-dvh bg-black text-white">
  {#if chatData.isLoading}
    <div class="flex min-h-dvh items-center justify-center text-neutral-500">Loading...</div>
  {:else if chatData.error}
    <div class="flex min-h-dvh items-center justify-center text-red-500">
      Error: {chatData.error.toString()}
    </div>
  {:else if !chatData.data}
    <div class="flex min-h-dvh items-center justify-center text-neutral-500">Chat not found</div>
  {:else}
    <div class="space-y-1.5 p-2">
      {#each messages as message (message._id)}
        <ChatMessage
          role={message.role}
          content={message.content}
          isStreaming={message.isStreaming}
        />
      {/each}
    </div>

    {#if followUpOptions.length > 0}
      <div class="border-t border-neutral-800 px-2 py-1.5">
        <FollowUpButtons options={followUpOptions} onselect={sendMessage} />
      </div>
    {/if}

    <div class="border-t border-neutral-800 px-2 pt-1.5">
      <button
        onclick={summarize}
        class="rounded bg-neutral-800 px-2 py-1 text-[10px] text-neutral-400"
      >
        /summarize
      </button>
    </div>

    <div class="p-2 pt-1">
      <ChatInput onsubmit={sendMessage} />
    </div>
  {/if}
</div>
@@ -1 +1,79 @@
@import 'tailwindcss';

.prose-mini h1,
.prose-mini h2,
.prose-mini h3,
.prose-mini h4 {
  font-size: 12px;
  font-weight: 600;
  margin: 0.5em 0 0.25em;
}

.prose-mini h1 {
  font-size: 13px;
}

.prose-mini p {
  margin: 0.4em 0;
}

.prose-mini p:first-child {
  margin-top: 0;
}

.prose-mini p:last-child {
  margin-bottom: 0;
}

.prose-mini ul,
.prose-mini ol {
  margin: 0.4em 0;
  padding-left: 1.2em;
}

.prose-mini li {
  margin: 0.15em 0;
}

.prose-mini code {
  font-size: 10px;
  background: rgba(0, 0, 0, 0.3);
  padding: 0.1em 0.3em;
  border-radius: 3px;
}

.prose-mini pre {
  font-size: 10px;
  background: rgba(0, 0, 0, 0.3);
  padding: 0.5em;
  border-radius: 4px;
  overflow-x: auto;
  margin: 0.4em 0;
}

.prose-mini pre code {
  background: none;
  padding: 0;
}

.prose-mini blockquote {
  border-left: 2px solid rgba(255, 255, 255, 0.3);
  padding-left: 0.5em;
  margin: 0.4em 0;
  opacity: 0.9;
}

.prose-mini a {
  text-decoration: underline;
  text-underline-offset: 2px;
}

.prose-mini strong {
  font-weight: 600;
}

.prose-mini hr {
  border: none;
  border-top: 1px solid rgba(255, 255, 255, 0.2);
  margin: 0.5em 0;
}
54
frontend/src/routes/service/latex/+server.ts
Normal file
@@ -0,0 +1,54 @@
import type { RequestHandler } from './$types';
import { mathjax } from 'mathjax-full/js/mathjax.js';
import { TeX } from 'mathjax-full/js/input/tex.js';
import { SVG } from 'mathjax-full/js/output/svg.js';
import { liteAdaptor } from 'mathjax-full/js/adaptors/liteAdaptor.js';
import { RegisterHTMLHandler } from 'mathjax-full/js/handlers/html.js';
import { AllPackages } from 'mathjax-full/js/input/tex/AllPackages.js';

const adaptor = liteAdaptor();
RegisterHTMLHandler(adaptor);

const tex = new TeX({ packages: AllPackages });
const svg = new SVG({ fontCache: 'none' });
const html = mathjax.document('', { InputJax: tex, OutputJax: svg });

const cache = new Map<string, string>();

export const GET: RequestHandler = async ({ url }) => {
  const texInput = url.searchParams.get('tex');

  if (!texInput) {
    return new Response('Missing tex parameter', { status: 400 });
  }

  const cached = cache.get(texInput);
  if (cached) {
    return new Response(cached, {
      headers: {
        'Content-Type': 'image/svg+xml',
        'Cache-Control': 'public, max-age=31536000, immutable'
      }
    });
  }

  try {
    const node = html.convert(texInput, { display: false });
    const svgString = adaptor.innerHTML(node).replace('style="', 'style="color: white; ');

    if (cache.size > 1000) {
      const firstKey = cache.keys().next().value;
      if (firstKey) cache.delete(firstKey);
    }
    cache.set(texInput, svgString);

    return new Response(svgString, {
      headers: {
        'Content-Type': 'image/svg+xml',
        'Cache-Control': 'public, max-age=31536000, immutable'
      }
    });
  } catch (e) {
    return new Response(`Error rendering LaTeX: ${e}`, { status: 500 });
  }
};
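Usage from the components earlier in this commit is a plain GET with a URL-encoded tex parameter; a small sketch follows (the formula is arbitrary). One observable limitation: the handler always calls html.convert with display: false, so the display=1 flag that ChatMessage.svelte appends for $$...$$ blocks does not currently switch MathJax into display mode.

// Sketch: fetch a rendered formula as SVG markup from the endpoint above.
async function renderFormula(tex: string): Promise<string> {
  const res = await fetch(`/service/latex?tex=${encodeURIComponent(tex)}`);
  if (!res.ok) throw new Error(await res.text());
  return res.text(); // 'image/svg+xml', served with an immutable cache header
}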
@@ -2,4 +2,6 @@ import tailwindcss from '@tailwindcss/vite';
import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite';

-export default defineConfig({ plugins: [tailwindcss(), sveltekit()] });
+export default defineConfig({
+  plugins: [tailwindcss(), sveltekit()]
+});