Compare commits

..

16 Commits

29 changed files with 2300 additions and 79 deletions

View File

@@ -1,4 +1,4 @@
.PHONY: recreate down restart frontend deploy rebuild migrate convex-key script .PHONY: recreate down reset hard-reset restart frontend deploy rebuild migrate convex-key script
recreate: recreate:
docker compose --profile services up -d docker compose --profile services up -d
@@ -6,11 +6,20 @@ recreate:
down: down:
docker compose --profile services down docker compose --profile services down
reset:
$(MAKE) down
$(MAKE) recreate
hard-reset:
docker compose down
docker compose up -d
restart: restart:
docker compose --profile services restart docker compose --profile services restart
frontend: frontend:
docker compose build frontend docker compose build frontend
$(MAKE) migrate
docker compose up -d frontend docker compose up -d frontend
deploy: deploy:
@@ -25,7 +34,13 @@ migrate:
docker compose run --rm migrate docker compose run --rm migrate
convex-key: convex-key:
docker compose exec convex ./generate_admin_key.sh @output=$$(docker compose exec convex ./generate_admin_key.sh 2>&1); \
echo "$$output"; \
if echo "$$output" | grep -q "Admin key:"; then \
key=$$(echo "$$output" | tail -1); \
sed -i '' 's#^CONVEX_SELF_HOSTED_ADMIN_KEY=.*#CONVEX_SELF_HOSTED_ADMIN_KEY='"$$key"'#' frontend/.env; \
echo "Updated frontend/.env with new admin key"; \
fi
script: script:
@cd backend && docker compose --profile scripts run --rm script-runner scripts/$(subst .,/,$(word 2,$(MAKECMDGOALS))).py $(wordlist 3,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS)) @cd backend && docker compose --profile scripts run --rm script-runner scripts/$(subst .,/,$(word 2,$(MAKECMDGOALS))).py $(wordlist 3,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS))

View File

@@ -3,10 +3,19 @@ import base64
import contextlib import contextlib
import io import io
import time import time
from collections.abc import Awaitable, Callable
from typing import Any
from aiogram import Bot, F, Router, html, types from aiogram import BaseMiddleware, Bot, F, Router, html, types
from aiogram.enums import ChatAction from aiogram.enums import ChatAction
from aiogram.types import KeyboardButton, ReplyKeyboardMarkup, ReplyKeyboardRemove from aiogram.types import (
BufferedInputFile,
InputMediaPhoto,
KeyboardButton,
ReplyKeyboardMarkup,
ReplyKeyboardRemove,
TelegramObject,
)
from convex import ConvexInt64 from convex import ConvexInt64
from bot.modules.ai import ( from bot.modules.ai import (
@@ -23,6 +32,45 @@ from utils.convex import ConvexClient
router = Router() router = Router()
convex = ConvexClient(env.convex_url) convex = ConvexClient(env.convex_url)
ALBUM_COLLECT_DELAY = 0.5
class AlbumMiddleware(BaseMiddleware):
def __init__(self) -> None:
self.albums: dict[str, list[types.Message]] = {}
self.scheduled: set[str] = set()
async def __call__(
self,
handler: Callable[[TelegramObject, dict[str, Any]], Awaitable[Any]],
event: TelegramObject,
data: dict[str, Any],
) -> Any: # noqa: ANN401
if not isinstance(event, types.Message) or not event.media_group_id:
return await handler(event, data)
album_id = event.media_group_id
if album_id not in self.albums:
self.albums[album_id] = []
self.albums[album_id].append(event)
if album_id in self.scheduled:
return None
self.scheduled.add(album_id)
await asyncio.sleep(ALBUM_COLLECT_DELAY)
messages = self.albums.pop(album_id, [])
self.scheduled.discard(album_id)
if messages:
data["album"] = messages
return await handler(messages[0], data)
return None
router.message.middleware(AlbumMiddleware())
EDIT_THROTTLE_SECONDS = 1.0 EDIT_THROTTLE_SECONDS = 1.0
TELEGRAM_MAX_LENGTH = 4096 TELEGRAM_MAX_LENGTH = 4096
@@ -123,8 +171,13 @@ async def send_long_message(
) )
async def process_message_from_web( # noqa: C901, PLR0912, PLR0915 async def process_message_from_web( # noqa: C901, PLR0912, PLR0913, PLR0915
convex_user_id: str, text: str, bot: Bot, convex_chat_id: str convex_user_id: str,
text: str,
bot: Bot,
convex_chat_id: str,
images_base64: list[str] | None = None,
images_media_types: list[str] | None = None,
) -> None: ) -> None:
user = await convex.query("users:getById", {"userId": convex_user_id}) user = await convex.query("users:getById", {"userId": convex_user_id})
@@ -135,6 +188,29 @@ async def process_message_from_web( # noqa: C901, PLR0912, PLR0915
is_summarize = text == "/summarize" is_summarize = text == "/summarize"
if tg_chat_id and not is_summarize: if tg_chat_id and not is_summarize:
if images_base64 and images_media_types:
if len(images_base64) == 1:
photo_bytes = base64.b64decode(images_base64[0])
await bot.send_photo(
tg_chat_id,
BufferedInputFile(photo_bytes, "photo.jpg"),
caption=f"📱 {text}" if text else "📱",
reply_markup=ReplyKeyboardRemove(),
)
else:
media = []
img_pairs = zip(images_base64, images_media_types, strict=True)
for i, (img_b64, _) in enumerate(img_pairs):
photo_bytes = base64.b64decode(img_b64)
caption = f"📱 {text}" if i == 0 and text else None
media.append(
InputMediaPhoto(
media=BufferedInputFile(photo_bytes, f"photo_{i}.jpg"),
caption=caption,
)
)
await bot.send_media_group(tg_chat_id, media)
else:
await bot.send_message( await bot.send_message(
tg_chat_id, f"📱 {html.quote(text)}", reply_markup=ReplyKeyboardRemove() tg_chat_id, f"📱 {html.quote(text)}", reply_markup=ReplyKeyboardRemove()
) )
@@ -398,6 +474,74 @@ async def on_text_message(message: types.Message, bot: Bot) -> None:
await process_message(message.from_user.id, message.text, bot, message.chat.id) await process_message(message.from_user.id, message.text, bot, message.chat.id)
@router.message(F.media_group_id, F.photo)
async def on_album_message(
message: types.Message, bot: Bot, album: list[types.Message]
) -> None:
if not message.from_user:
return
await convex.mutation(
"users:getOrCreate",
{
"telegramId": ConvexInt64(message.from_user.id),
"telegramChatId": ConvexInt64(message.chat.id),
},
)
user = await convex.query(
"users:getByTelegramId", {"telegramId": ConvexInt64(message.from_user.id)}
)
if not user or not user.get("activeChatId"):
await message.answer("Use /new first to create a chat.")
return
caption = message.caption or "Process the images according to your task"
images_base64: list[str] = []
images_media_types: list[str] = []
for msg in album:
if not msg.photo:
continue
photo = msg.photo[-1]
file = await bot.get_file(photo.file_id)
if not file.file_path:
continue
buffer = io.BytesIO()
await bot.download_file(file.file_path, buffer)
image_bytes = buffer.getvalue()
images_base64.append(base64.b64encode(image_bytes).decode())
ext = file.file_path.rsplit(".", 1)[-1].lower()
media_type = f"image/{ext}" if ext in ("png", "gif", "webp") else "image/jpeg"
images_media_types.append(media_type)
if not images_base64:
await message.answer("Failed to get photos.")
return
active_chat_id = user["activeChatId"]
await convex.mutation(
"messages:create",
{
"chatId": active_chat_id,
"role": "user",
"content": caption,
"source": "telegram",
"imagesBase64": images_base64,
"imagesMediaTypes": images_media_types,
},
)
await process_message(
message.from_user.id, caption, bot, message.chat.id, skip_user_message=True
)
@router.message(F.photo) @router.message(F.photo)
async def on_photo_message(message: types.Message, bot: Bot) -> None: async def on_photo_message(message: types.Message, bot: Bot) -> None:
if not message.from_user or not message.photo: if not message.from_user or not message.photo:

View File

@@ -1,6 +1,6 @@
EXAM_SYSTEM = """You help solve problem sets and exams. EXAM_SYSTEM = """You help solve problem sets and exams.
When you receive an IMAGE with problems: When you receive just an IMAGE to process with problems:
- Give HINTS in Russian for each problem - Give HINTS in Russian for each problem
- Focus on key insights and potential difficulties, - Focus on key insights and potential difficulties,
give all formulas that will be helpful give all formulas that will be helpful
@@ -9,11 +9,17 @@ give all formulas that will be helpful
When asked for DETAILS on a specific problem (or a problem number): When asked for DETAILS on a specific problem (or a problem number):
- Provide full structured solution in English - Provide full structured solution in English
- Academic style, as it would be written in a notebook - Academic style, as it would be written in a notebook on real exam
- Step by step, clean, no fluff""" - Step by step, clean, no fluff, no overcompications, reuse thoughts inside
one task, as you would write it on an exam, be consistent
- This is also true if you get a summary, and then problem number is asked"""
EXAM_FOLLOW_UP = """Look at the problem set image and list problem numbers as options. EXAM_FOLLOW_UP = """Look at the problem set image and list ALL problem numbers as
If problems have sub-parts (a, b, c), list as: 1a, 1b, 2a, etc. options. Split by subparts ONLY if they are totally different tasks, not the steps of
one.
If there are multiple problem sets/sheets, break it down logically and specify set,
for example Group A: 1, Group A: 2a, Group B: 2b, etc.
Or, Theory: 1, Theory: 2a, Practice: 1, etc.
Only output identifiers that exist in the image.""" Only output identifiers that exist in the image."""
DEFAULT_FOLLOW_UP = ( DEFAULT_FOLLOW_UP = (

View File

@@ -51,6 +51,8 @@ async def handle_pending_generation(bot: Bot, item: dict, item_id: str) -> None:
text=item["userMessage"], text=item["userMessage"],
bot=bot, bot=bot,
convex_chat_id=item["chatId"], convex_chat_id=item["chatId"],
images_base64=item.get("imagesBase64"),
images_media_types=item.get("imagesMediaTypes"),
) )
except Exception as e: # noqa: BLE001 except Exception as e: # noqa: BLE001
logger.error(f"Error processing {item_id}: {e}") logger.error(f"Error processing {item_id}: {e}")

View File

@@ -137,8 +137,7 @@ services:
- migrate - migrate
networks: networks:
database: database:
entrypoint: bunx command: x convex deploy
command: convex dev
convex-dashboard: convex-dashboard:
image: ghcr.io/get-convex/convex-dashboard:latest image: ghcr.io/get-convex/convex-dashboard:latest

View File

@@ -5,5 +5,8 @@ yarn.lock
bun.lock bun.lock
bun.lockb bun.lockb
# Convex generated files
src/lib/convex/_generated/
# Miscellaneous # Miscellaneous
/static/ /static/

View File

@@ -22,9 +22,11 @@ export default defineConfig(
languageOptions: { globals: { ...globals.browser, ...globals.node } }, languageOptions: { globals: { ...globals.browser, ...globals.node } },
rules: { rules: {
// typescript-eslint strongly recommend that you do not use the no-undef lint rule on TypeScript projects. 'no-undef': 'off',
// see: https://typescript-eslint.io/troubleshooting/faqs/eslint/#i-get-errors-from-the-no-undef-rule-about-global-variables-not-being-defined-even-though-there-are-no-typescript-errors '@typescript-eslint/no-unused-vars': [
'no-undef': 'off' 'error',
{ argsIgnorePattern: '^_', varsIgnorePattern: '^_' }
]
} }
}, },
{ {

View File

@@ -0,0 +1,184 @@
<script lang="ts">
import { onMount } from 'svelte';
interface Props {
showPreview?: boolean;
oncapture: (base64: string, mediaType: string) => void;
onclose: () => void;
}
let { showPreview = true, oncapture, onclose }: Props = $props();
let videoElement: HTMLVideoElement | null = $state(null);
let stream: MediaStream | null = $state(null);
let capturedImage: { base64: string; mediaType: string } | null = $state(null);
let error: string | null = $state(null);
let closed = false;
async function findUltraWideCamera(): Promise<string | null> {
try {
const devices = await navigator.mediaDevices.enumerateDevices();
const videoDevices = devices.filter((d) => d.kind === 'videoinput');
const ultraWide = videoDevices.find(
(d) => d.label.toLowerCase().includes('ultra') && d.label.toLowerCase().includes('back')
);
return ultraWide?.deviceId ?? null;
} catch {
return null;
}
}
async function startCamera() {
if (closed) return;
if (!navigator.mediaDevices?.getUserMedia) {
error = 'Camera not supported (requires HTTPS)';
return;
}
try {
stream = await navigator.mediaDevices.getUserMedia({
video: { facingMode: { ideal: 'environment' } },
audio: false
});
const ultraWideId = await findUltraWideCamera();
if (ultraWideId) {
stream.getTracks().forEach((t) => t.stop());
stream = await navigator.mediaDevices.getUserMedia({
video: {
deviceId: { exact: ultraWideId },
width: { ideal: 4032 },
height: { ideal: 3024 }
},
audio: false
});
} else {
stream.getTracks().forEach((t) => t.stop());
stream = await navigator.mediaDevices.getUserMedia({
video: {
facingMode: { ideal: 'environment' },
width: { ideal: 4032 },
height: { ideal: 3024 }
},
audio: false
});
}
if (videoElement && !closed) {
videoElement.srcObject = stream;
}
} catch (e) {
error = e instanceof Error ? e.message : 'Camera access denied';
}
}
function stopCamera() {
if (stream) {
stream.getTracks().forEach((track) => track.stop());
stream = null;
}
}
function capture() {
if (!videoElement) return;
const canvas = document.createElement('canvas');
canvas.width = videoElement.videoWidth;
canvas.height = videoElement.videoHeight;
const ctx = canvas.getContext('2d');
if (!ctx) return;
ctx.drawImage(videoElement, 0, 0);
const maxSize = 1920;
const scale = Math.min(maxSize / canvas.width, maxSize / canvas.height, 1);
const outCanvas = document.createElement('canvas');
outCanvas.width = Math.round(canvas.width * scale);
outCanvas.height = Math.round(canvas.height * scale);
const outCtx = outCanvas.getContext('2d');
if (!outCtx) return;
outCtx.drawImage(canvas, 0, 0, outCanvas.width, outCanvas.height);
const base64 = outCanvas.toDataURL('image/jpeg', 0.65).split(',')[1];
const mediaType = 'image/jpeg';
stopCamera();
if (showPreview) {
capturedImage = { base64, mediaType };
} else {
oncapture(base64, mediaType);
}
}
function acceptCapture() {
if (capturedImage) {
oncapture(capturedImage.base64, capturedImage.mediaType);
}
}
function retake() {
capturedImage = null;
startCamera();
}
function close() {
closed = true;
stopCamera();
onclose();
}
onMount(() => {
startCamera();
return () => {
closed = true;
stopCamera();
};
});
</script>
<div class="fixed inset-0 z-50 flex flex-col bg-black" data-camera-ui>
{#if error}
<div class="flex flex-1 flex-col items-center justify-center p-4">
<p class="mb-4 text-center text-sm text-red-400">{error}</p>
<button onclick={close} class="rounded bg-neutral-700 px-4 py-2 text-sm text-white">
Close
</button>
</div>
{:else if capturedImage}
<div class="relative min-h-0 flex-1">
<button class="absolute inset-0" onclick={acceptCapture}>
<img
src="data:{capturedImage.mediaType};base64,{capturedImage.base64}"
alt="Captured"
class="h-full w-full object-contain"
/>
</button>
</div>
<div class="flex gap-4 p-4">
<button onclick={retake} class="flex-1 rounded bg-neutral-700 py-3 text-sm text-white">
Retake
</button>
<button onclick={acceptCapture} class="flex-1 rounded bg-blue-600 py-3 text-sm text-white">
Use
</button>
</div>
{:else}
<video bind:this={videoElement} autoplay playsinline muted class="h-full w-full object-cover"
></video>
<button
onclick={close}
class="absolute top-4 right-4 flex h-8 w-8 items-center justify-center rounded-full bg-black/50 text-white"
>
×
</button>
<button
onclick={capture}
aria-label="Capture photo"
class="absolute bottom-8 left-1/2 h-16 w-16 -translate-x-1/2 rounded-full border-4 border-white bg-white/20"
></button>
{/if}
</div>

View File

@@ -0,0 +1,144 @@
<script lang="ts">
import { onMount } from 'svelte';
interface Props {
oncomplete: (base64: string, mediaType: string, thumbnailBase64: string) => void;
oncancel: () => void;
}
let { oncomplete, oncancel }: Props = $props();
let count = $state(3);
let videoElement: HTMLVideoElement | null = $state(null);
let stream: MediaStream | null = $state(null);
let error: string | null = $state(null);
let cancelled = false;
let countdownInterval: ReturnType<typeof setInterval> | null = null;
async function startCamera() {
if (cancelled) return;
if (!navigator.mediaDevices?.getUserMedia) {
error = 'Camera not supported (requires HTTPS)';
return;
}
try {
const constraints: MediaStreamConstraints = {
video: {
facingMode: { ideal: 'environment' },
width: { ideal: 1920 },
height: { ideal: 1080 }
},
audio: false
};
stream = await navigator.mediaDevices.getUserMedia(constraints);
if (videoElement && !cancelled) {
videoElement.srcObject = stream;
startCountdown();
}
} catch (e) {
error = e instanceof Error ? e.message : 'Camera access denied';
}
}
function stopCamera() {
if (countdownInterval) {
clearInterval(countdownInterval);
countdownInterval = null;
}
if (stream) {
stream.getTracks().forEach((track) => track.stop());
stream = null;
}
}
function startCountdown() {
countdownInterval = setInterval(() => {
if (cancelled) {
stopCamera();
return;
}
count--;
if (count === 0) {
if (countdownInterval) clearInterval(countdownInterval);
capture();
}
}, 1000);
}
function capture() {
if (cancelled || !videoElement) {
stopCamera();
return;
}
const canvas = document.createElement('canvas');
canvas.width = videoElement.videoWidth;
canvas.height = videoElement.videoHeight;
const ctx = canvas.getContext('2d');
if (!ctx) {
stopCamera();
oncancel();
return;
}
ctx.drawImage(videoElement, 0, 0);
const base64 = canvas.toDataURL('image/jpeg', 0.85).split(',')[1];
const mediaType = 'image/jpeg';
const thumbMaxSize = 800;
const scale = Math.min(thumbMaxSize / canvas.width, thumbMaxSize / canvas.height, 1);
const thumbCanvas = document.createElement('canvas');
thumbCanvas.width = Math.round(canvas.width * scale);
thumbCanvas.height = Math.round(canvas.height * scale);
const thumbCtx = thumbCanvas.getContext('2d');
if (thumbCtx) {
thumbCtx.drawImage(canvas, 0, 0, thumbCanvas.width, thumbCanvas.height);
}
const thumbnailBase64 = thumbCanvas.toDataURL('image/jpeg', 0.7).split(',')[1];
stopCamera();
oncomplete(base64, mediaType, thumbnailBase64);
}
function handleCancel() {
cancelled = true;
stopCamera();
oncancel();
}
onMount(() => {
startCamera();
return () => {
cancelled = true;
stopCamera();
};
});
</script>
<div class="fixed inset-0 z-50 flex flex-col bg-black" data-camera-ui>
{#if error}
<div class="flex flex-1 flex-col items-center justify-center p-4">
<p class="mb-4 text-center text-sm text-red-400">{error}</p>
<button onclick={handleCancel} class="rounded bg-neutral-700 px-4 py-2 text-sm text-white">
Close
</button>
</div>
{:else}
<div class="relative flex-1">
<video bind:this={videoElement} autoplay playsinline muted class="h-full w-full object-cover"
></video>
<div class="absolute inset-0 flex items-center justify-center">
<span class="text-8xl font-bold text-white drop-shadow-lg">{count}</span>
</div>
</div>
<div class="p-4 text-center">
<button onclick={handleCancel} class="text-sm text-neutral-400">Cancel</button>
</div>
{/if}
</div>

View File

@@ -2,15 +2,16 @@
interface Props { interface Props {
onsubmit: (message: string) => void; onsubmit: (message: string) => void;
disabled?: boolean; disabled?: boolean;
allowEmpty?: boolean;
} }
let { onsubmit, disabled = false }: Props = $props(); let { onsubmit, disabled = false, allowEmpty = false }: Props = $props();
let value = $state(''); let value = $state('');
function handleSubmit(e: Event) { function handleSubmit(e: Event) {
e.preventDefault(); e.preventDefault();
const trimmed = value.trim(); const trimmed = value.trim();
if (trimmed && !disabled) { if ((trimmed || allowEmpty) && !disabled) {
onsubmit(trimmed); onsubmit(trimmed);
value = ''; value = '';
} }

View File

@@ -0,0 +1,29 @@
<script lang="ts">
import type { Id } from '$lib/convex/_generated/dataModel';
interface Photo {
_id: Id<'photoDrafts'>;
mediaType: string;
}
interface Props {
photos: Photo[];
onremove: (index: number) => void;
}
let { photos, onremove }: Props = $props();
</script>
{#if photos.length > 0}
<div class="flex flex-wrap gap-1">
{#each photos as _photo, i (i)}
<button
onclick={() => onremove(i)}
class="flex items-center gap-1 rounded bg-blue-600/30 px-1.5 py-0.5 text-[8px] text-blue-300"
>
<span>photo {i + 1}</span>
<span class="text-blue-400">&times;</span>
</button>
{/each}
</div>
{/if}

View File

@@ -0,0 +1,65 @@
<script lang="ts">
interface Props {
hasCamera: boolean;
hasOnlineDevices: boolean;
ontakephoto: () => void;
onrequestphoto: () => void;
}
let { hasCamera, hasOnlineDevices, ontakephoto, onrequestphoto }: Props = $props();
let menuOpen = $state(false);
function handleClick() {
if (hasCamera && hasOnlineDevices) {
menuOpen = !menuOpen;
} else if (hasOnlineDevices) {
onrequestphoto();
} else {
ontakephoto();
}
}
function handleTakePhoto() {
menuOpen = false;
ontakephoto();
}
function handleRequestPhoto() {
menuOpen = false;
onrequestphoto();
}
function handleBackdropClick() {
menuOpen = false;
}
</script>
<div class="relative">
<button
onclick={handleClick}
class="shrink-0 rounded bg-neutral-800 px-1.5 py-0.5 text-[8px] text-neutral-400"
>
+
</button>
{#if menuOpen}
<button class="fixed inset-0 z-40" onclick={handleBackdropClick} aria-label="Close menu"
></button>
<div
class="absolute bottom-full left-0 z-50 mb-1 overflow-hidden rounded bg-neutral-800 shadow-lg"
>
<button
onclick={handleTakePhoto}
class="block w-full px-3 py-2 text-left text-[10px] whitespace-nowrap text-white hover:bg-neutral-700"
>
Take photo
</button>
<button
onclick={handleRequestPhoto}
class="block w-full px-3 py-2 text-left text-[10px] whitespace-nowrap text-white hover:bg-neutral-700"
>
Request photo
</button>
</div>
{/if}
</div>

View File

@@ -0,0 +1,27 @@
<script lang="ts">
interface Props {
base64: string;
mediaType: string;
onaccept: () => void;
onreject: () => void;
}
let { base64, mediaType, onaccept, onreject }: Props = $props();
</script>
<div class="fixed inset-0 z-50 overflow-auto bg-black" data-camera-ui>
<button class="block min-h-full min-w-full" onclick={onaccept}>
<img
src="data:{mediaType};base64,{base64}"
alt="Preview"
class="min-h-dvh min-w-full object-cover"
/>
</button>
</div>
<button
onclick={onreject}
class="fixed top-4 right-4 z-[9999] flex h-10 w-10 items-center justify-center rounded-full bg-red-600 text-xl text-white shadow-lg"
data-camera-ui
>
×
</button>

View File

@@ -0,0 +1,22 @@
<script lang="ts">
interface Props {
onaccept: () => void;
ondecline: () => void;
}
let { onaccept, ondecline }: Props = $props();
</script>
<div class="fixed inset-0 z-50 flex items-center justify-center bg-black/80 p-4" data-camera-ui>
<div class="w-full max-w-xs rounded-lg bg-neutral-900 p-4">
<p class="mb-4 text-center text-sm text-white">Photo requested</p>
<div class="flex gap-3">
<button onclick={ondecline} class="flex-1 rounded bg-neutral-700 py-2 text-sm text-white">
Decline
</button>
<button onclick={onaccept} class="flex-1 rounded bg-blue-600 py-2 text-sm text-white">
Capture
</button>
</div>
</div>
</div>

View File

@@ -0,0 +1,135 @@
<script lang="ts">
import { onMount } from 'svelte';
interface Props {
oncapture: (base64: string, mediaType: string, thumbnailBase64: string) => void;
onunpair?: () => void;
}
let { oncapture, onunpair }: Props = $props();
let videoElement: HTMLVideoElement | null = $state(null);
let stream: MediaStream | null = $state(null);
let ready = $state(false);
async function findUltraWideCamera(): Promise<string | null> {
try {
const devices = await navigator.mediaDevices.enumerateDevices();
const videoDevices = devices.filter((d) => d.kind === 'videoinput');
const ultraWide = videoDevices.find(
(d) => d.label.toLowerCase().includes('ultra') && d.label.toLowerCase().includes('back')
);
return ultraWide?.deviceId ?? null;
} catch {
return null;
}
}
async function startCamera() {
if (!navigator.mediaDevices?.getUserMedia) return;
try {
stream = await navigator.mediaDevices.getUserMedia({
video: { facingMode: { ideal: 'environment' } },
audio: false
});
const ultraWideId = await findUltraWideCamera();
if (ultraWideId) {
stream.getTracks().forEach((t) => t.stop());
stream = await navigator.mediaDevices.getUserMedia({
video: {
deviceId: { exact: ultraWideId },
width: { ideal: 4032 },
height: { ideal: 3024 }
},
audio: false
});
} else {
stream.getTracks().forEach((t) => t.stop());
stream = await navigator.mediaDevices.getUserMedia({
video: {
facingMode: { ideal: 'environment' },
width: { ideal: 4032 },
height: { ideal: 3024 }
},
audio: false
});
}
if (videoElement) {
videoElement.srcObject = stream;
await new Promise<void>((resolve) => {
if (videoElement) {
videoElement.onloadedmetadata = () => resolve();
}
});
ready = true;
}
} catch {
ready = false;
}
}
export function capture() {
if (!ready || !videoElement) return false;
const canvas = document.createElement('canvas');
canvas.width = videoElement.videoWidth;
canvas.height = videoElement.videoHeight;
const ctx = canvas.getContext('2d');
if (!ctx) return false;
ctx.drawImage(videoElement, 0, 0);
const maxSize = 1920;
const scale = Math.min(maxSize / canvas.width, maxSize / canvas.height, 1);
const outCanvas = document.createElement('canvas');
outCanvas.width = Math.round(canvas.width * scale);
outCanvas.height = Math.round(canvas.height * scale);
const outCtx = outCanvas.getContext('2d');
if (!outCtx) return false;
outCtx.drawImage(canvas, 0, 0, outCanvas.width, outCanvas.height);
const base64 = outCanvas.toDataURL('image/jpeg', 0.65).split(',')[1];
const mediaType = 'image/jpeg';
const thumbMaxSize = 800;
const thumbScale = Math.min(thumbMaxSize / outCanvas.width, thumbMaxSize / outCanvas.height, 1);
const thumbCanvas = document.createElement('canvas');
thumbCanvas.width = Math.round(outCanvas.width * thumbScale);
thumbCanvas.height = Math.round(outCanvas.height * thumbScale);
const thumbCtx = thumbCanvas.getContext('2d');
if (thumbCtx) {
thumbCtx.drawImage(outCanvas, 0, 0, thumbCanvas.width, thumbCanvas.height);
}
const thumbnailBase64 = thumbCanvas.toDataURL('image/jpeg', 0.6).split(',')[1];
oncapture(base64, mediaType, thumbnailBase64);
return true;
}
onMount(() => {
startCamera();
return () => {
if (stream) {
stream.getTracks().forEach((track) => track.stop());
}
};
});
</script>
<div class="fixed inset-0 z-40 bg-black">
<video bind:this={videoElement} autoplay playsinline muted class="h-full w-full object-cover"
></video>
{#if onunpair}
<button
onclick={onunpair}
class="absolute top-4 left-4 z-10 rounded-full bg-red-600/80 px-3 py-1.5 text-xs text-white"
>
unpair
</button>
{/if}
</div>

View File

@@ -0,0 +1,64 @@
<script lang="ts">
import { onMount } from 'svelte';
let stealthMode = $state(false);
let lastTap = $state({ time: 0, x: 0, y: 0 });
onMount(() => {
document.body.style.touchAction = 'manipulation';
return () => {
document.body.style.touchAction = '';
};
});
function isInCenterZone(x: number, y: number): boolean {
const w = window.innerWidth;
const h = window.innerHeight;
return x > w * 0.3 && x < w * 0.7 && y > h * 0.3 && y < h * 0.7;
}
function handleTouchEnd(e: TouchEvent) {
if (e.touches.length > 0) return;
const target = e.target as HTMLElement;
if (target?.closest('[data-camera-ui]')) return;
const touch = e.changedTouches[0];
const now = Date.now();
const x = touch.clientX;
const y = touch.clientY;
if (!isInCenterZone(x, y)) {
lastTap = { time: 0, x: 0, y: 0 };
return;
}
const timeDiff = now - lastTap.time;
const distX = Math.abs(x - lastTap.x);
const distY = Math.abs(y - lastTap.y);
if (timeDiff < 500 && distX < 50 && distY < 50) {
stealthMode = !stealthMode;
lastTap = { time: 0, x: 0, y: 0 };
e.preventDefault();
} else {
lastTap = { time: now, x, y };
}
}
</script>
<svelte:document ontouchend={handleTouchEnd} />
{#if stealthMode}
<div class="stealth-overlay" ontouchend={handleTouchEnd}></div>
{/if}
<style>
.stealth-overlay {
position: fixed;
inset: 0;
z-index: 9999;
background: #000;
touch-action: manipulation;
}
</style>

View File

@@ -0,0 +1,29 @@
<script lang="ts">
import { onMount } from 'svelte';
interface Props {
oncomplete: () => void;
oncancel: () => void;
}
let { oncomplete, oncancel }: Props = $props();
let count = $state(3);
onMount(() => {
const interval = setInterval(() => {
count--;
if (count === 0) {
clearInterval(interval);
oncomplete();
}
}, 1000);
return () => clearInterval(interval);
});
</script>
<div class="fixed inset-0 z-50 flex flex-col items-center justify-center bg-black" data-camera-ui>
<span class="text-8xl font-bold text-white">{count}</span>
<button onclick={oncancel} class="mt-8 text-sm text-neutral-400">Cancel</button>
</div>

View File

@@ -47,7 +47,11 @@ export function usePollingMutation<Mutation extends FunctionReference<'mutation'
export function usePollingQuery<Query extends FunctionReference<'query'>>( export function usePollingQuery<Query extends FunctionReference<'query'>>(
query: Query, query: Query,
argsGetter: () => FunctionArgs<Query> | 'skip' argsGetter: () => FunctionArgs<Query> | 'skip'
): { data: FunctionReturnType<Query> | undefined; error: Error | null; isLoading: boolean } { ): {
data: FunctionReturnType<Query> | undefined;
error: Error | null;
isLoading: boolean;
} {
const client = usePollingClient(); const client = usePollingClient();
// eslint-disable-next-line prefer-const // eslint-disable-next-line prefer-const

View File

@@ -9,8 +9,12 @@
*/ */
import type * as chats from "../chats.js"; import type * as chats from "../chats.js";
import type * as devicePairings from "../devicePairings.js";
import type * as messages from "../messages.js"; import type * as messages from "../messages.js";
import type * as pairingRequests from "../pairingRequests.js";
import type * as pendingGenerations from "../pendingGenerations.js"; import type * as pendingGenerations from "../pendingGenerations.js";
import type * as photoDrafts from "../photoDrafts.js";
import type * as photoRequests from "../photoRequests.js";
import type * as users from "../users.js"; import type * as users from "../users.js";
import type { import type {
@@ -21,8 +25,12 @@ import type {
declare const fullApi: ApiFromModules<{ declare const fullApi: ApiFromModules<{
chats: typeof chats; chats: typeof chats;
devicePairings: typeof devicePairings;
messages: typeof messages; messages: typeof messages;
pairingRequests: typeof pairingRequests;
pendingGenerations: typeof pendingGenerations; pendingGenerations: typeof pendingGenerations;
photoDrafts: typeof photoDrafts;
photoRequests: typeof photoRequests;
users: typeof users; users: typeof users;
}>; }>;

View File

@@ -43,9 +43,16 @@ export const clear = mutation({
.collect(); .collect();
for (const message of messages) { for (const message of messages) {
if (args.preserveImages && message.imageBase64) { if (args.preserveImages) {
const hasLegacyImage = message.imageBase64 || message.imagesBase64?.length;
const messageImages = await ctx.db
.query('messageImages')
.withIndex('by_message_id', (q) => q.eq('messageId', message._id))
.first();
if (hasLegacyImage || messageImages) {
continue; continue;
} }
}
await ctx.db.delete(message._id); await ctx.db.delete(message._id);
} }
return null; return null;

View File

@@ -0,0 +1,109 @@
import { v } from 'convex/values';
import { mutation, query } from './_generated/server';
export const register = mutation({
args: {
chatId: v.id('chats'),
deviceId: v.string(),
hasCamera: v.boolean()
},
returns: v.id('devicePairings'),
handler: async (ctx, args) => {
const existing = await ctx.db
.query('devicePairings')
.withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
.collect();
const device = existing.find((d) => d.deviceId === args.deviceId);
if (device) {
await ctx.db.patch(device._id, {
hasCamera: args.hasCamera,
lastSeen: Date.now()
});
return device._id;
}
return await ctx.db.insert('devicePairings', {
chatId: args.chatId,
deviceId: args.deviceId,
hasCamera: args.hasCamera,
lastSeen: Date.now()
});
}
});
export const heartbeat = mutation({
args: {
chatId: v.id('chats'),
deviceId: v.string()
},
returns: v.null(),
handler: async (ctx, args) => {
const devices = await ctx.db
.query('devicePairings')
.withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
.collect();
const device = devices.find((d) => d.deviceId === args.deviceId);
if (device) {
await ctx.db.patch(device._id, { lastSeen: Date.now() });
}
return null;
}
});
export const getMyDevice = query({
  args: { chatId: v.id('chats'), deviceId: v.string() },
  returns: v.union(
    v.object({
      _id: v.id('devicePairings'),
      _creationTime: v.number(),
      chatId: v.id('chats'),
      deviceId: v.string(),
      hasCamera: v.boolean(),
      pairedWithDeviceId: v.optional(v.string()),
      lastSeen: v.number()
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    // Look up the pairing row belonging to the calling device, if any.
    const rows = await ctx.db
      .query('devicePairings')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .collect();
    for (const row of rows) {
      if (row.deviceId === args.deviceId) {
        return row;
      }
    }
    return null;
  }
});
export const getPairedDevice = query({
  args: { chatId: v.id('chats'), deviceId: v.string() },
  returns: v.union(
    v.object({
      _id: v.id('devicePairings'),
      _creationTime: v.number(),
      chatId: v.id('chats'),
      deviceId: v.string(),
      hasCamera: v.boolean(),
      pairedWithDeviceId: v.optional(v.string()),
      lastSeen: v.number()
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    // Resolve the device this caller is paired with, but only when that
    // device has heartbeated within the last 30 seconds (strictly newer).
    const rows = await ctx.db
      .query('devicePairings')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .collect();
    const mine = rows.find((r) => r.deviceId === args.deviceId);
    const partnerId = mine?.pairedWithDeviceId;
    if (!partnerId) return null;
    const liveCutoff = Date.now() - 30000;
    for (const row of rows) {
      if (row.deviceId === partnerId && row.lastSeen > liveCutoff) {
        return row;
      }
    }
    return null;
  }
});

View File

@@ -1,5 +1,6 @@
import { v } from 'convex/values'; import { v } from 'convex/values';
import { mutation, query } from './_generated/server'; import { mutation, query } from './_generated/server';
import type { Id } from './_generated/dataModel';
export const listByChat = query({ export const listByChat = query({
args: { chatId: v.id('chats') }, args: { chatId: v.id('chats') },
@@ -10,8 +11,6 @@ export const listByChat = query({
chatId: v.id('chats'), chatId: v.id('chats'),
role: v.union(v.literal('user'), v.literal('assistant')), role: v.union(v.literal('user'), v.literal('assistant')),
content: v.string(), content: v.string(),
imageBase64: v.optional(v.string()),
imageMediaType: v.optional(v.string()),
followUpOptions: v.optional(v.array(v.string())), followUpOptions: v.optional(v.array(v.string())),
source: v.union(v.literal('telegram'), v.literal('web')), source: v.union(v.literal('telegram'), v.literal('web')),
createdAt: v.number(), createdAt: v.number(),
@@ -19,11 +18,23 @@ export const listByChat = query({
}) })
), ),
handler: async (ctx, args) => { handler: async (ctx, args) => {
return await ctx.db const messages = await ctx.db
.query('messages') .query('messages')
.withIndex('by_chat_id_and_created_at', (q) => q.eq('chatId', args.chatId)) .withIndex('by_chat_id_and_created_at', (q) => q.eq('chatId', args.chatId))
.order('asc') .order('asc')
.collect(); .collect();
return messages.map((m) => ({
_id: m._id,
_creationTime: m._creationTime,
chatId: m.chatId,
role: m.role,
content: m.content,
followUpOptions: m.followUpOptions,
source: m.source,
createdAt: m.createdAt,
isStreaming: m.isStreaming
}));
} }
}); });
@@ -35,6 +46,9 @@ export const create = mutation({
source: v.union(v.literal('telegram'), v.literal('web')), source: v.union(v.literal('telegram'), v.literal('web')),
imageBase64: v.optional(v.string()), imageBase64: v.optional(v.string()),
imageMediaType: v.optional(v.string()), imageMediaType: v.optional(v.string()),
imagesBase64: v.optional(v.array(v.string())),
imagesMediaTypes: v.optional(v.array(v.string())),
photoDraftIds: v.optional(v.array(v.id('photoDrafts'))),
followUpOptions: v.optional(v.array(v.string())), followUpOptions: v.optional(v.array(v.string())),
isStreaming: v.optional(v.boolean()) isStreaming: v.optional(v.boolean())
}, },
@@ -47,22 +61,56 @@ export const create = mutation({
source: args.source, source: args.source,
imageBase64: args.imageBase64, imageBase64: args.imageBase64,
imageMediaType: args.imageMediaType, imageMediaType: args.imageMediaType,
imagesBase64: args.imagesBase64,
imagesMediaTypes: args.imagesMediaTypes,
followUpOptions: args.followUpOptions, followUpOptions: args.followUpOptions,
createdAt: Date.now(), createdAt: Date.now(),
isStreaming: args.isStreaming isStreaming: args.isStreaming
}); });
const drafts: Array<{ base64: string; mediaType: string; id: Id<'photoDrafts'> }> = [];
if (args.photoDraftIds && args.photoDraftIds.length > 0) {
for (const draftId of args.photoDraftIds) {
const draft = await ctx.db.get(draftId);
if (draft) {
drafts.push({ base64: draft.base64, mediaType: draft.mediaType, id: draft._id });
}
}
}
for (let i = 0; i < drafts.length; i++) {
await ctx.db.insert('messageImages', {
messageId,
base64: drafts[i].base64,
mediaType: drafts[i].mediaType,
order: i
});
}
if (args.source === 'web' && args.role === 'user') { if (args.source === 'web' && args.role === 'user') {
const chat = await ctx.db.get(args.chatId); const chat = await ctx.db.get(args.chatId);
if (chat) { if (chat) {
await ctx.db.insert('pendingGenerations', { const pendingGenId = await ctx.db.insert('pendingGenerations', {
userId: chat.userId, userId: chat.userId,
chatId: args.chatId, chatId: args.chatId,
userMessage: args.content, userMessage: args.content,
createdAt: Date.now() createdAt: Date.now()
}); });
for (let i = 0; i < drafts.length; i++) {
await ctx.db.insert('pendingGenerationImages', {
pendingGenerationId: pendingGenId,
base64: drafts[i].base64,
mediaType: drafts[i].mediaType,
order: i
});
} }
} }
}
for (const draft of drafts) {
await ctx.db.delete(draft.id);
}
return messageId; return messageId;
} }
@@ -134,6 +182,8 @@ export const getLastAssistantMessage = query({
content: v.string(), content: v.string(),
imageBase64: v.optional(v.string()), imageBase64: v.optional(v.string()),
imageMediaType: v.optional(v.string()), imageMediaType: v.optional(v.string()),
imagesBase64: v.optional(v.array(v.string())),
imagesMediaTypes: v.optional(v.array(v.string())),
followUpOptions: v.optional(v.array(v.string())), followUpOptions: v.optional(v.array(v.string())),
source: v.union(v.literal('telegram'), v.literal('web')), source: v.union(v.literal('telegram'), v.literal('web')),
createdAt: v.number(), createdAt: v.number(),
@@ -166,11 +216,33 @@ export const getChatImages = query({
.withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId)) .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
.collect(); .collect();
return messages const images: Array<{ base64: string; mediaType: string }> = [];
.filter((m) => m.imageBase64 && m.imageMediaType)
.map((m) => ({ for (const m of messages) {
base64: m.imageBase64!, const msgImages = await ctx.db
mediaType: m.imageMediaType! .query('messageImages')
})); .withIndex('by_message_id', (q) => q.eq('messageId', m._id))
.collect();
for (const img of msgImages.sort((a, b) => a.order - b.order)) {
images.push({ base64: img.base64, mediaType: img.mediaType });
}
if (m.imagesBase64 && m.imagesMediaTypes) {
for (let i = 0; i < m.imagesBase64.length; i++) {
images.push({
base64: m.imagesBase64[i],
mediaType: m.imagesMediaTypes[i]
});
}
} else if (m.imageBase64 && m.imageMediaType) {
images.push({
base64: m.imageBase64,
mediaType: m.imageMediaType
});
}
}
return images;
} }
}); });

View File

@@ -0,0 +1,122 @@
import { v } from 'convex/values';
import { mutation, query } from './_generated/server';
export const create = mutation({
  args: {
    chatId: v.id('chats'),
    fromDeviceId: v.string()
  },
  returns: v.id('pairingRequests'),
  handler: async (ctx, args) => {
    // Idempotent: reuse a still-pending request from the same device
    // instead of stacking duplicates.
    const requests = await ctx.db
      .query('pairingRequests')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .collect();
    for (const req of requests) {
      if (req.status === 'pending' && req.fromDeviceId === args.fromDeviceId) {
        return req._id;
      }
    }
    return await ctx.db.insert('pairingRequests', {
      chatId: args.chatId,
      fromDeviceId: args.fromDeviceId,
      status: 'pending',
      createdAt: Date.now()
    });
  }
});
export const accept = mutation({
  args: {
    requestId: v.id('pairingRequests'),
    acceptingDeviceId: v.string()
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Only a pending request can be accepted; anything else is a no-op.
    const request = await ctx.db.get(args.requestId);
    if (request === null || request.status !== 'pending') return null;
    await ctx.db.patch(args.requestId, { status: 'accepted' });
    // Cross-link both pairing rows (when they exist) so each device
    // records the other as its partner.
    const rows = await ctx.db
      .query('devicePairings')
      .withIndex('by_chat_id', (q) => q.eq('chatId', request.chatId))
      .collect();
    const initiator = rows.find((r) => r.deviceId === request.fromDeviceId);
    if (initiator) {
      await ctx.db.patch(initiator._id, { pairedWithDeviceId: args.acceptingDeviceId });
    }
    const acceptor = rows.find((r) => r.deviceId === args.acceptingDeviceId);
    if (acceptor) {
      await ctx.db.patch(acceptor._id, { pairedWithDeviceId: request.fromDeviceId });
    }
    return null;
  }
});
export const reject = mutation({
  args: { requestId: v.id('pairingRequests') },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Mark a pending request rejected; already-resolved requests stay untouched.
    const req = await ctx.db.get(args.requestId);
    if (req !== null && req.status === 'pending') {
      await ctx.db.patch(args.requestId, { status: 'rejected' });
    }
    return null;
  }
});
export const getPending = query({
  args: { chatId: v.id('chats'), excludeDeviceId: v.string() },
  returns: v.union(
    v.object({
      _id: v.id('pairingRequests'),
      _creationTime: v.number(),
      chatId: v.id('chats'),
      fromDeviceId: v.string(),
      status: v.union(v.literal('pending'), v.literal('accepted'), v.literal('rejected')),
      createdAt: v.number()
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    // First pending request raised by some *other* device in this chat.
    const requests = await ctx.db
      .query('pairingRequests')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .collect();
    for (const req of requests) {
      if (req.status === 'pending' && req.fromDeviceId !== args.excludeDeviceId) {
        return req;
      }
    }
    return null;
  }
});
export const unpair = mutation({
  args: {
    chatId: v.id('chats'),
    deviceId: v.string()
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Clear the pairing link on both ends; no-op when the caller is unpaired.
    const rows = await ctx.db
      .query('devicePairings')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .collect();
    let mine = null;
    for (const row of rows) {
      if (row.deviceId === args.deviceId) {
        mine = row;
        break;
      }
    }
    const partnerId = mine?.pairedWithDeviceId;
    if (!partnerId) return null;
    const partner = rows.find((r) => r.deviceId === partnerId);
    await ctx.db.patch(mine._id, { pairedWithDeviceId: undefined });
    if (partner !== undefined) {
      await ctx.db.patch(partner._id, { pairedWithDeviceId: undefined });
    }
    return null;
  }
});

View File

@@ -10,11 +10,33 @@ export const list = query({
userId: v.id('users'), userId: v.id('users'),
chatId: v.id('chats'), chatId: v.id('chats'),
userMessage: v.string(), userMessage: v.string(),
imagesBase64: v.optional(v.array(v.string())),
imagesMediaTypes: v.optional(v.array(v.string())),
createdAt: v.number() createdAt: v.number()
}) })
), ),
handler: async (ctx) => { handler: async (ctx) => {
return await ctx.db.query('pendingGenerations').collect(); const pending = await ctx.db.query('pendingGenerations').collect();
const result = [];
for (const p of pending) {
const images = await ctx.db
.query('pendingGenerationImages')
.withIndex('by_pending_generation_id', (q) => q.eq('pendingGenerationId', p._id))
.collect();
const sortedImages = images.sort((a, b) => a.order - b.order);
result.push({
...p,
imagesBase64:
sortedImages.length > 0 ? sortedImages.map((img) => img.base64) : p.imagesBase64,
imagesMediaTypes:
sortedImages.length > 0 ? sortedImages.map((img) => img.mediaType) : p.imagesMediaTypes
});
}
return result;
} }
}); });
@@ -39,7 +61,35 @@ export const remove = mutation({
args: { id: v.id('pendingGenerations') }, args: { id: v.id('pendingGenerations') },
returns: v.null(), returns: v.null(),
handler: async (ctx, args) => { handler: async (ctx, args) => {
const images = await ctx.db
.query('pendingGenerationImages')
.withIndex('by_pending_generation_id', (q) => q.eq('pendingGenerationId', args.id))
.collect();
for (const img of images) {
await ctx.db.delete(img._id);
}
await ctx.db.delete(args.id); await ctx.db.delete(args.id);
return null; return null;
} }
}); });
export const getImages = query({
args: { pendingGenerationId: v.id('pendingGenerations') },
returns: v.array(
v.object({
base64: v.string(),
mediaType: v.string()
})
),
handler: async (ctx, args) => {
const images = await ctx.db
.query('pendingGenerationImages')
.withIndex('by_pending_generation_id', (q) =>
q.eq('pendingGenerationId', args.pendingGenerationId)
)
.collect();
return images
.sort((a, b) => a.order - b.order)
.map((img) => ({ base64: img.base64, mediaType: img.mediaType }));
}
});

View File

@@ -0,0 +1,93 @@
import { v } from 'convex/values';
import { mutation, query } from './_generated/server';
// Shape of a single draft-photo payload: raw base64 image data plus its MIME type.
const photoValidator = v.object({
  base64: v.string(),
  mediaType: v.string()
});
export const get = query({
  args: { chatId: v.id('chats'), deviceId: v.string() },
  returns: v.object({
    photos: v.array(
      v.object({
        _id: v.id('photoDrafts'),
        mediaType: v.string()
      })
    )
  }),
  handler: async (ctx, args) => {
    // List this device's drafts without their base64 payloads (ids + media types only).
    const rows = await ctx.db
      .query('photoDrafts')
      .withIndex('by_chat_id_and_device_id', (q) =>
        q.eq('chatId', args.chatId).eq('deviceId', args.deviceId)
      )
      .collect();
    const photos = [];
    for (const row of rows) {
      photos.push({ _id: row._id, mediaType: row.mediaType });
    }
    return { photos };
  }
});
export const addPhoto = mutation({
  args: {
    chatId: v.id('chats'),
    deviceId: v.string(),
    photo: photoValidator
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Append one captured photo to this device's draft pile.
    const { base64, mediaType } = args.photo;
    await ctx.db.insert('photoDrafts', {
      chatId: args.chatId,
      deviceId: args.deviceId,
      base64,
      mediaType,
      createdAt: Date.now()
    });
    return null;
  }
});
export const removePhoto = mutation({
  args: {
    chatId: v.id('chats'),
    deviceId: v.string(),
    index: v.number()
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Delete the draft at a positional index within the index-ordered result.
    // NOTE(review): positional addressing assumes the client observes the same
    // ordering as this query — confirm, or consider addressing drafts by _id.
    const rows = await ctx.db
      .query('photoDrafts')
      .withIndex('by_chat_id_and_device_id', (q) =>
        q.eq('chatId', args.chatId).eq('deviceId', args.deviceId)
      )
      .collect();
    const target = rows[args.index];
    if (target !== undefined) {
      await ctx.db.delete(target._id);
    }
    return null;
  }
});
export const clear = mutation({
  args: { chatId: v.id('chats'), deviceId: v.string() },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Drop every draft this device has accumulated in the chat.
    const rows = await ctx.db
      .query('photoDrafts')
      .withIndex('by_chat_id_and_device_id', (q) =>
        q.eq('chatId', args.chatId).eq('deviceId', args.deviceId)
      )
      .collect();
    for (const { _id } of rows) {
      await ctx.db.delete(_id);
    }
    return null;
  }
});

View File

@@ -0,0 +1,293 @@
import { v } from 'convex/values';
import { mutation, query } from './_generated/server';
// Full photoRequests document shape, including the (potentially large) base64
// photo payload; used by queries that return whole rows.
const photoRequestValidator = v.object({
  _id: v.id('photoRequests'),
  _creationTime: v.number(),
  chatId: v.id('chats'),
  requesterId: v.string(),
  captureDeviceId: v.optional(v.string()),
  // Request lifecycle states as declared here; the transitions between them
  // are enforced by the mutations below, not by this validator.
  status: v.union(
    v.literal('pending'),
    v.literal('countdown'),
    v.literal('capture_now'),
    v.literal('captured'),
    v.literal('accepted'),
    v.literal('rejected')
  ),
  photoBase64: v.optional(v.string()),
  photoMediaType: v.optional(v.string()),
  thumbnailBase64: v.optional(v.string()),
  createdAt: v.number()
});
// Trimmed projection of a photo request — status plus thumbnail only — so
// polling clients avoid downloading the full-resolution photoBase64 payload.
const photoRequestLightValidator = v.object({
  _id: v.id('photoRequests'),
  status: v.union(
    v.literal('pending'),
    v.literal('countdown'),
    v.literal('capture_now'),
    v.literal('captured'),
    v.literal('accepted'),
    v.literal('rejected')
  ),
  photoMediaType: v.optional(v.string()),
  thumbnailBase64: v.optional(v.string())
});
export const create = mutation({
  args: {
    chatId: v.id('chats'),
    requesterId: v.string(),
    captureDeviceId: v.string()
  },
  returns: v.id('photoRequests'),
  handler: async (ctx, args) => {
    // Cancel any request still in flight so only one capture runs per chat.
    // Scan the most recent rows (descending): the previous code took the 20
    // *oldest* rows, which could miss a live request once the chat had
    // accumulated more than 20 requests total.
    const recent = await ctx.db
      .query('photoRequests')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .order('desc')
      .take(20);
    for (const req of recent) {
      if (req.status === 'pending' || req.status === 'countdown' || req.status === 'capture_now') {
        await ctx.db.patch(req._id, { status: 'rejected' });
      }
    }
    // New requests start in 'countdown'; the capture device acts on the
    // later transition to 'capture_now'.
    return await ctx.db.insert('photoRequests', {
      chatId: args.chatId,
      requesterId: args.requesterId,
      captureDeviceId: args.captureDeviceId,
      status: 'countdown',
      createdAt: Date.now()
    });
  }
});
export const markCaptureNow = mutation({
  args: { requestId: v.id('photoRequests') },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Advance a request to 'capture_now', signalling the camera device to shoot.
    // NOTE(review): patches unconditionally — a rejected/accepted request could
    // be resurrected; confirm callers only invoke this from 'countdown'.
    await ctx.db.patch(args.requestId, { status: 'capture_now' });
    return null;
  }
});
export const submitPhoto = mutation({
  args: {
    requestId: v.id('photoRequests'),
    photoBase64: v.string(),
    photoMediaType: v.string(),
    thumbnailBase64: v.optional(v.string())
  },
  returns: v.boolean(),
  handler: async (ctx, args) => {
    // Attach the captured image; returns false when the request is no longer
    // waiting for a capture (e.g. it was cancelled or already fulfilled).
    const request = await ctx.db.get(args.requestId);
    const accepting = request !== null && request.status === 'capture_now';
    if (!accepting) {
      return false;
    }
    await ctx.db.patch(args.requestId, {
      status: 'captured',
      photoBase64: args.photoBase64,
      photoMediaType: args.photoMediaType,
      thumbnailBase64: args.thumbnailBase64
    });
    return true;
  }
});
export const markAccepted = mutation({
  args: { requestId: v.id('photoRequests') },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Terminal state: the requester kept the photo.
    // NOTE(review): no existence/status guard here, unlike markRejected —
    // confirm callers only reach this from the 'captured' state.
    await ctx.db.patch(args.requestId, { status: 'accepted' });
    return null;
  }
});
export const markRejected = mutation({
  args: { requestId: v.id('photoRequests') },
  returns: v.boolean(),
  handler: async (ctx, args) => {
    // Reject an in-flight request. Terminal requests (accepted/rejected) are
    // left untouched and reported via the false return value.
    const request = await ctx.db.get(args.requestId);
    if (request === null) return false;
    if (request.status === 'accepted' || request.status === 'rejected') return false;
    await ctx.db.patch(request._id, { status: 'rejected' });
    return true;
  }
});
// Minimal projection for capture polling: just the request id and the fixed
// 'capture_now' status literal — no photo data travels with this shape.
const captureNowLightValidator = v.object({
  _id: v.id('photoRequests'),
  status: v.literal('capture_now')
});
export const getCaptureNowRequest = query({
  args: { chatId: v.id('chats'), deviceId: v.optional(v.string()) },
  returns: v.union(captureNowLightValidator, v.null()),
  handler: async (ctx, args) => {
    // Newest 'capture_now' request under a minute old, projected to its id.
    // (deviceId is accepted but currently unused here.)
    const staleBefore = Date.now() - 60 * 1000;
    const recent = await ctx.db
      .query('photoRequests')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .order('desc')
      .take(50);
    for (const req of recent) {
      if (req.status === 'capture_now' && req.createdAt > staleBefore) {
        return { _id: req._id, status: 'capture_now' as const };
      }
    }
    return null;
  }
});
export const getActiveForCapture = query({
  args: { chatId: v.id('chats'), deviceId: v.optional(v.string()) },
  returns: v.union(photoRequestValidator, v.null()),
  handler: async (ctx, args) => {
    // Full document of the newest request awaiting capture in this chat
    // ('countdown' or 'capture_now'); deviceId is accepted but unused.
    const recent = await ctx.db
      .query('photoRequests')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .order('desc')
      .take(50);
    for (const req of recent) {
      if (req.status === 'countdown' || req.status === 'capture_now') {
        return req;
      }
    }
    return null;
  }
});
export const getMyActiveRequest = query({
  args: { chatId: v.id('chats'), deviceId: v.optional(v.string()) },
  returns: v.union(photoRequestLightValidator, v.null()),
  handler: async (ctx, args) => {
    // Light projection of the caller's own in-flight request
    // ('countdown' | 'capture_now' | 'captured').
    // Guard first: without a deviceId there is nothing to match, so skip the
    // read entirely (the original fetched 100 rows before this check).
    if (!args.deviceId) return null;
    const recent = await ctx.db
      .query('photoRequests')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .order('desc')
      .take(100);
    const found = recent.find(
      (r) =>
        r.requesterId === args.deviceId &&
        (r.status === 'countdown' || r.status === 'capture_now' || r.status === 'captured')
    );
    if (!found) return null;
    // Strip the full-resolution payload; clients fetch it separately.
    return {
      _id: found._id,
      status: found.status,
      photoMediaType: found.photoMediaType,
      thumbnailBase64: found.thumbnailBase64
    };
  }
});
export const getPhotoData = query({
  args: { requestId: v.id('photoRequests') },
  returns: v.union(
    v.object({
      photoBase64: v.string(),
      photoMediaType: v.string(),
      thumbnailBase64: v.optional(v.string())
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    // Full-resolution payload for a fulfilled request; null until a photo
    // and its media type have been submitted.
    const request = await ctx.db.get(args.requestId);
    if (request === null) return null;
    const { photoBase64, photoMediaType, thumbnailBase64 } = request;
    if (!photoBase64 || !photoMediaType) return null;
    return { photoBase64, photoMediaType, thumbnailBase64 };
  }
});
export const getPhotoPreview = query({
  args: { requestId: v.id('photoRequests') },
  returns: v.union(
    v.object({
      thumbnailBase64: v.string(),
      photoMediaType: v.string()
    }),
    v.null()
  ),
  handler: async (ctx, args) => {
    // Preview for the requester: thumbnail when available, falling back to
    // the full image, then to an empty string.
    const request = await ctx.db.get(args.requestId);
    if (request === null || !request.photoMediaType) return null;
    const preview = request.thumbnailBase64 || request.photoBase64 || '';
    return {
      thumbnailBase64: preview,
      photoMediaType: request.photoMediaType
    };
  }
});
export const acceptPhotoToDraft = mutation({
  args: {
    requestId: v.id('photoRequests'),
    chatId: v.id('chats'),
    deviceId: v.string()
  },
  returns: v.id('photoDrafts'),
  handler: async (ctx, args) => {
    // Promote a captured photo into the accepting device's draft pile, then
    // mark the originating request accepted.
    const request = await ctx.db.get(args.requestId);
    if (!request || !request.photoBase64 || !request.photoMediaType) {
      throw new Error('Photo request not found or has no photo');
    }
    const draftId = await ctx.db.insert('photoDrafts', {
      chatId: args.chatId,
      deviceId: args.deviceId,
      base64: request.photoBase64,
      mediaType: request.photoMediaType,
      createdAt: Date.now()
    });
    await ctx.db.patch(args.requestId, { status: 'accepted' });
    return draftId;
  }
});
export const cleanup = mutation({
  args: { chatId: v.id('chats') },
  returns: v.number(),
  handler: async (ctx, args) => {
    // Delete up to 20 photo requests for this chat and report how many went.
    // (Repeated calls are needed to drain a larger backlog.)
    const batch = await ctx.db
      .query('photoRequests')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .take(20);
    for (const req of batch) {
      await ctx.db.delete(req._id);
    }
    return batch.length;
  }
});
export const getCapturedForPhone = query({
  args: { chatId: v.id('chats'), deviceId: v.optional(v.string()) },
  returns: v.union(photoRequestValidator, v.null()),
  handler: async (ctx, args) => {
    // Newest 'captured' request whose capture device is the caller, so the
    // phone can display what its camera just took.
    // Guard before the read: the original fetched 50 rows and only then
    // returned null for a missing deviceId.
    if (!args.deviceId) return null;
    const recent = await ctx.db
      .query('photoRequests')
      .withIndex('by_chat_id', (q) => q.eq('chatId', args.chatId))
      .order('desc')
      .take(50);
    return (
      recent.find((r) => r.captureDeviceId === args.deviceId && r.status === 'captured') ?? null
    );
  }
});

View File

@@ -26,6 +26,8 @@ export default defineSchema({
imageBase64: v.optional(v.string()), imageBase64: v.optional(v.string()),
imageMediaType: v.optional(v.string()), imageMediaType: v.optional(v.string()),
imageStorageId: v.optional(v.id('_storage')), imageStorageId: v.optional(v.id('_storage')),
imagesBase64: v.optional(v.array(v.string())),
imagesMediaTypes: v.optional(v.array(v.string())),
followUpOptions: v.optional(v.array(v.string())), followUpOptions: v.optional(v.array(v.string())),
source: v.union(v.literal('telegram'), v.literal('web')), source: v.union(v.literal('telegram'), v.literal('web')),
createdAt: v.number(), createdAt: v.number(),
@@ -38,6 +40,63 @@ export default defineSchema({
userId: v.id('users'), userId: v.id('users'),
chatId: v.id('chats'), chatId: v.id('chats'),
userMessage: v.string(), userMessage: v.string(),
imagesBase64: v.optional(v.array(v.string())),
imagesMediaTypes: v.optional(v.array(v.string())),
createdAt: v.number() createdAt: v.number()
}) }),
pendingGenerationImages: defineTable({
pendingGenerationId: v.id('pendingGenerations'),
base64: v.string(),
mediaType: v.string(),
order: v.number()
}).index('by_pending_generation_id', ['pendingGenerationId']),
messageImages: defineTable({
messageId: v.id('messages'),
base64: v.string(),
mediaType: v.string(),
order: v.number()
}).index('by_message_id', ['messageId']),
devicePairings: defineTable({
chatId: v.id('chats'),
deviceId: v.string(),
hasCamera: v.boolean(),
pairedWithDeviceId: v.optional(v.string()),
lastSeen: v.number()
}).index('by_chat_id', ['chatId']),
pairingRequests: defineTable({
chatId: v.id('chats'),
fromDeviceId: v.string(),
status: v.union(v.literal('pending'), v.literal('accepted'), v.literal('rejected')),
createdAt: v.number()
}).index('by_chat_id', ['chatId']),
photoRequests: defineTable({
chatId: v.id('chats'),
requesterId: v.string(),
captureDeviceId: v.optional(v.string()),
status: v.union(
v.literal('pending'),
v.literal('countdown'),
v.literal('capture_now'),
v.literal('captured'),
v.literal('accepted'),
v.literal('rejected')
),
photoBase64: v.optional(v.string()),
photoMediaType: v.optional(v.string()),
thumbnailBase64: v.optional(v.string()),
createdAt: v.number()
}).index('by_chat_id', ['chatId']),
photoDrafts: defineTable({
chatId: v.id('chats'),
deviceId: v.string(),
base64: v.string(),
mediaType: v.string(),
createdAt: v.number()
}).index('by_chat_id_and_device_id', ['chatId', 'deviceId'])
}); });

View File

@@ -1,16 +1,103 @@
<script lang="ts"> <script lang="ts">
import { page } from '$app/state'; import { page } from '$app/state';
import { getContext } from 'svelte'; import { browser } from '$app/environment';
import { getContext, onMount } from 'svelte';
import { SvelteSet } from 'svelte/reactivity';
import { useQuery, useConvexClient } from 'convex-svelte'; import { useQuery, useConvexClient } from 'convex-svelte';
import { usePollingQuery, usePollingMutation } from '$lib/convex-polling.svelte'; import {
usePollingQuery,
usePollingMutation,
usePollingClient
} from '$lib/convex-polling.svelte';
import { api } from '$lib/convex/_generated/api'; import { api } from '$lib/convex/_generated/api';
import type { Id } from '$lib/convex/_generated/dataModel';
import ChatMessage from '$lib/components/ChatMessage.svelte'; import ChatMessage from '$lib/components/ChatMessage.svelte';
import ChatInput from '$lib/components/ChatInput.svelte'; import ChatInput from '$lib/components/ChatInput.svelte';
import FollowUpButtons from '$lib/components/FollowUpButtons.svelte'; import FollowUpButtons from '$lib/components/FollowUpButtons.svelte';
import StealthOverlay from '$lib/components/StealthOverlay.svelte';
import CameraCapture from '$lib/components/CameraCapture.svelte';
import WatchCountdown from '$lib/components/WatchCountdown.svelte';
import PhotoPreview from '$lib/components/PhotoPreview.svelte';
import DraftBadge from '$lib/components/DraftBadge.svelte';
import SilentCapture from '$lib/components/SilentCapture.svelte';
const usePolling = getContext<boolean>('convex-use-polling') ?? false; const usePolling = getContext<boolean>('convex-use-polling') ?? false;
let mnemonic = $derived(page.params.mnemonic); let mnemonic = $derived(page.params.mnemonic);
let lastMessageElement: HTMLDivElement | null = $state(null);
let showScrollButton = $state(false);
let deviceId = $state('');
let hasCamera = $state(false);
let showCamera = $state(false);
let showWatchCountdown = $state(false);
let activeRequestId: Id<'photoRequests'> | null = $state(null);
let previewPhoto: {
thumbnail: string;
mediaType: string;
requestId: Id<'photoRequests'>;
} | null = $state(null);
let shownPreviewIds = new SvelteSet<string>();
let silentCaptureRef: SilentCapture | null = $state(null);
let processedCaptureNowIds = new SvelteSet<string>();
function generateId(): string {
if (typeof crypto !== 'undefined' && crypto.randomUUID) {
return crypto.randomUUID();
}
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {
const r = (Math.random() * 16) | 0;
const v = c === 'x' ? r : (r & 0x3) | 0x8;
return v.toString(16);
});
}
function getOrCreateDeviceId(): string {
if (!browser) return '';
let id = localStorage.getItem('stealth-device-id');
if (!id) {
id = generateId();
localStorage.setItem('stealth-device-id', id);
}
return id;
}
async function checkCamera(): Promise<boolean> {
if (!browser) return false;
if (!navigator.mediaDevices?.enumerateDevices) return false;
try {
const devices = await navigator.mediaDevices.enumerateDevices();
return devices.some((d) => d.kind === 'videoinput');
} catch {
return false;
}
}
onMount(() => {
deviceId = getOrCreateDeviceId();
checkCamera().then((has) => {
hasCamera = has;
});
});
$effect(() => {
if (!lastMessageElement) return;
const observer = new IntersectionObserver(
([entry]) => {
showScrollButton = !entry.isIntersecting;
},
{ threshold: 0, rootMargin: '0px 0px -90% 0px' }
);
observer.observe(lastMessageElement);
return () => observer.disconnect();
});
function scrollToLastMessage() {
lastMessageElement?.scrollIntoView({ behavior: 'smooth', block: 'start' });
}
const chatDataWs = usePolling const chatDataWs = usePolling
? null ? null
: useQuery(api.chats.getWithUser, () => (mnemonic ? { mnemonic } : 'skip')); : useQuery(api.chats.getWithUser, () => (mnemonic ? { mnemonic } : 'skip'));
@@ -19,15 +106,13 @@
: null; : null;
const chatData = $derived(usePolling ? chatDataPoll! : chatDataWs!); const chatData = $derived(usePolling ? chatDataPoll! : chatDataWs!);
const chatId = $derived(chatData.data?.chat?._id);
const messagesQueryWs = usePolling const messagesQueryWs = usePolling
? null ? null
: useQuery(api.messages.listByChat, () => : useQuery(api.messages.listByChat, () => (chatId ? { chatId } : 'skip'));
chatData.data?.chat?._id ? { chatId: chatData.data.chat._id } : 'skip'
);
const messagesQueryPoll = usePolling const messagesQueryPoll = usePolling
? usePollingQuery(api.messages.listByChat, () => ? usePollingQuery(api.messages.listByChat, () => (chatId ? { chatId } : 'skip'))
chatData.data?.chat?._id ? { chatId: chatData.data.chat._id } : 'skip'
)
: null; : null;
const messagesQuery = $derived(usePolling ? messagesQueryPoll! : messagesQueryWs!); const messagesQuery = $derived(usePolling ? messagesQueryPoll! : messagesQueryWs!);
@@ -39,6 +124,110 @@
: [] : []
); );
const myDeviceWs = usePolling
? null
: useQuery(api.devicePairings.getMyDevice, () =>
chatId && deviceId ? { chatId, deviceId } : 'skip'
);
const myDevicePoll = usePolling
? usePollingQuery(api.devicePairings.getMyDevice, () =>
chatId && deviceId ? { chatId, deviceId } : 'skip'
)
: null;
const myDevice = $derived(usePolling ? myDevicePoll! : myDeviceWs!);
const pairedDeviceWs = usePolling
? null
: useQuery(api.devicePairings.getPairedDevice, () =>
chatId && deviceId ? { chatId, deviceId } : 'skip'
);
const pairedDevicePoll = usePolling
? usePollingQuery(api.devicePairings.getPairedDevice, () =>
chatId && deviceId ? { chatId, deviceId } : 'skip'
)
: null;
const pairedDevice = $derived(usePolling ? pairedDevicePoll! : pairedDeviceWs!);
const isPaired = $derived(!!myDevice.data?.pairedWithDeviceId && !!pairedDevice.data);
const pendingPairingWs = usePolling
? null
: useQuery(api.pairingRequests.getPending, () =>
chatId && deviceId ? { chatId, excludeDeviceId: deviceId } : 'skip'
);
const pendingPairingPoll = usePolling
? usePollingQuery(api.pairingRequests.getPending, () =>
chatId && deviceId ? { chatId, excludeDeviceId: deviceId } : 'skip'
)
: null;
const pendingPairing = $derived(usePolling ? pendingPairingPoll! : pendingPairingWs!);
const captureNowRequestWs = usePolling
? null
: useQuery(api.photoRequests.getCaptureNowRequest, () => (chatId ? { chatId } : 'skip'));
const captureNowRequestPoll = usePolling
? usePollingQuery(api.photoRequests.getCaptureNowRequest, () => (chatId ? { chatId } : 'skip'))
: null;
const captureNowRequest = $derived(usePolling ? captureNowRequestPoll! : captureNowRequestWs!);
const myActiveRequestWs = usePolling
? null
: useQuery(api.photoRequests.getMyActiveRequest, () =>
chatId && deviceId ? { chatId, deviceId } : 'skip'
);
const myActiveRequestPoll = usePolling
? usePollingQuery(api.photoRequests.getMyActiveRequest, () =>
chatId && deviceId ? { chatId, deviceId } : 'skip'
)
: null;
const myActiveRequest = $derived(usePolling ? myActiveRequestPoll! : myActiveRequestWs!);
const photoDraftWs = usePolling
? null
: useQuery(api.photoDrafts.get, () => (chatId && deviceId ? { chatId, deviceId } : 'skip'));
const photoDraftPoll = usePolling
? usePollingQuery(api.photoDrafts.get, () =>
chatId && deviceId ? { chatId, deviceId } : 'skip'
)
: null;
const photoDraft = $derived(usePolling ? photoDraftPoll! : photoDraftWs!);
const draftPhotos = $derived(photoDraft.data?.photos ?? []);
$effect(() => {
const req = captureNowRequest.data;
if (req && hasCamera && !processedCaptureNowIds.has(req._id)) {
processedCaptureNowIds.add(req._id);
const tryCapture = () => {
const success = silentCaptureRef?.capture();
if (!success) {
setTimeout(tryCapture, 100);
}
};
tryCapture();
}
});
$effect(() => {
const req = myActiveRequest.data;
if (req?.status === 'captured' && req.photoMediaType) {
if (shownPreviewIds.has(req._id)) return;
shownPreviewIds.add(req._id);
const client = pollingClient ?? clientWs;
if (client) {
client.query(api.photoRequests.getPhotoPreview, { requestId: req._id }).then((data) => {
if (data) {
previewPhoto = {
thumbnail: data.thumbnailBase64,
mediaType: data.photoMediaType,
requestId: req._id
};
}
});
}
}
});
let prevMessageCount = 0; let prevMessageCount = 0;
let prevLastMessageId: string | undefined; let prevLastMessageId: string | undefined;
@@ -53,42 +242,93 @@
}); });
const clientWs = usePolling ? null : useConvexClient(); const clientWs = usePolling ? null : useConvexClient();
const pollingClient = usePolling ? usePollingClient() : null;
const createMessagePoll = usePolling ? usePollingMutation(api.messages.create) : null; const createMessagePoll = usePolling ? usePollingMutation(api.messages.create) : null;
const registerDevicePoll = usePolling ? usePollingMutation(api.devicePairings.register) : null;
const heartbeatPoll = usePolling ? usePollingMutation(api.devicePairings.heartbeat) : null;
const addPhotoPoll = usePolling ? usePollingMutation(api.photoDrafts.addPhoto) : null;
const removePhotoPoll = usePolling ? usePollingMutation(api.photoDrafts.removePhoto) : null;
const createPairingPoll = usePolling ? usePollingMutation(api.pairingRequests.create) : null;
const acceptPairingPoll = usePolling ? usePollingMutation(api.pairingRequests.accept) : null;
const rejectPairingPoll = usePolling ? usePollingMutation(api.pairingRequests.reject) : null;
const unpairPoll = usePolling ? usePollingMutation(api.pairingRequests.unpair) : null;
const createRequestPoll = usePolling ? usePollingMutation(api.photoRequests.create) : null;
const markCaptureNowPoll = usePolling
? usePollingMutation(api.photoRequests.markCaptureNow)
: null;
const submitPhotoPoll = usePolling ? usePollingMutation(api.photoRequests.submitPhoto) : null;
const markRejectedPoll = usePolling ? usePollingMutation(api.photoRequests.markRejected) : null;
const acceptPhotoToDraftPoll = usePolling
? usePollingMutation(api.photoRequests.acceptPhotoToDraft)
: null;
async function sendMessage(content: string) { $effect(() => {
if (!chatId || !deviceId) return;
if (usePolling && registerDevicePoll) {
registerDevicePoll({ chatId, deviceId, hasCamera });
} else if (clientWs) {
clientWs.mutation(api.devicePairings.register, {
chatId,
deviceId,
hasCamera
});
}
const interval = setInterval(() => {
if (usePolling && heartbeatPoll) {
heartbeatPoll({ chatId, deviceId });
} else if (clientWs) {
clientWs.mutation(api.devicePairings.heartbeat, { chatId, deviceId });
}
}, 10000);
return () => clearInterval(interval);
});
function sendMessage(content: string) {
const chat = chatData.data?.chat; const chat = chatData.data?.chat;
if (!chat) return; if (!chat) return;
const photos = draftPhotos;
const photoDraftIds = photos.length > 0 ? photos.map((p) => p._id) : undefined;
const messageContent =
content || (photos.length > 0 ? 'Process images according to your task' : '');
if (!messageContent) return;
if (usePolling && createMessagePoll) { if (usePolling && createMessagePoll) {
await createMessagePoll({ createMessagePoll({
chatId: chat._id, chatId: chat._id,
role: 'user', role: 'user',
content, content: messageContent,
source: 'web' source: 'web',
photoDraftIds
}); });
} else if (clientWs) { } else if (clientWs) {
await clientWs.mutation(api.messages.create, { clientWs.mutation(api.messages.create, {
chatId: chat._id, chatId: chat._id,
role: 'user', role: 'user',
content, content: messageContent,
source: 'web' source: 'web',
photoDraftIds
}); });
} }
} }
async function summarize() { function summarize() {
const chat = chatData.data?.chat; const chat = chatData.data?.chat;
if (!chat) return; if (!chat) return;
if (usePolling && createMessagePoll) { if (usePolling && createMessagePoll) {
await createMessagePoll({ createMessagePoll({
chatId: chat._id, chatId: chat._id,
role: 'user', role: 'user',
content: '/summarize', content: '/summarize',
source: 'web' source: 'web'
}); });
} else if (clientWs) { } else if (clientWs) {
await clientWs.mutation(api.messages.create, { clientWs.mutation(api.messages.create, {
chatId: chat._id, chatId: chat._id,
role: 'user', role: 'user',
content: '/summarize', content: '/summarize',
@@ -96,6 +336,191 @@
}); });
} }
} }
// Open the in-app camera overlay so the user can take a photo for the draft.
function handleTakePhoto() {
  showCamera = true;
}
// Close the camera overlay and attach the captured frame to this chat's draft,
// using whichever transport (polling mutation or WS client) is active.
function handleCameraCapture(base64: string, mediaType: string) {
  showCamera = false;
  if (!chatId) return;
  const payload = { chatId, deviceId, photo: { base64, mediaType } };
  if (usePolling && addPhotoPoll) {
    addPhotoPoll(payload);
  } else if (clientWs) {
    clientWs.mutation(api.photoDrafts.addPhoto, payload);
  }
}
// Dismiss the camera overlay without capturing.
function handleCameraClose() {
  showCamera = false;
}
// Create a pairing request originating from this device for the current chat.
function handlePair() {
  if (!chatId) return;
  const args = { chatId, fromDeviceId: deviceId };
  if (usePolling && createPairingPoll) {
    createPairingPoll(args);
  } else if (clientWs) {
    clientWs.mutation(api.pairingRequests.create, args);
  }
}
// Approve the pending pairing request on behalf of this device.
function handleAcceptPairing() {
  const request = pendingPairing.data;
  if (!request) return;
  const args = { requestId: request._id, acceptingDeviceId: deviceId };
  if (usePolling && acceptPairingPoll) {
    acceptPairingPoll(args);
  } else if (clientWs) {
    clientWs.mutation(api.pairingRequests.accept, args);
  }
}
// Decline the pending pairing request.
function handleRejectPairing() {
  const request = pendingPairing.data;
  if (!request) return;
  const args = { requestId: request._id };
  if (usePolling && rejectPairingPoll) {
    rejectPairingPoll(args);
  } else if (clientWs) {
    clientWs.mutation(api.pairingRequests.reject, args);
  }
}
// Tear down the pairing between this device and its partner for this chat.
function handleUnpair() {
  if (!chatId) return;
  const args = { chatId, deviceId };
  if (usePolling && unpairPoll) {
    unpairPoll(args);
  } else if (clientWs) {
    clientWs.mutation(api.pairingRequests.unpair, args);
  }
}
// Ask the paired camera device to take a photo. On success, remember the new
// request id and start the local countdown overlay.
function handleRequestPhoto() {
  if (!chatId || !pairedDevice.data) return;
  const captureDeviceId = pairedDevice.data.deviceId;
  if (usePolling && createRequestPoll) {
    createRequestPoll({ chatId, requesterId: deviceId, captureDeviceId }).then((id) => {
      if (id) {
        activeRequestId = id as Id<'photoRequests'>;
        showWatchCountdown = true;
      }
    });
  } else if (clientWs) {
    clientWs
      .mutation(api.photoRequests.create, {
        chatId,
        requesterId: deviceId,
        captureDeviceId
      })
      .then((id) => {
        // Mirror the polling branch: only arm the countdown when the server
        // actually returned a request id. Previously this branch set state
        // unconditionally, inconsistent with the polling path.
        if (id) {
          activeRequestId = id;
          showWatchCountdown = true;
        }
      });
  }
}
// The countdown finished: clear local request state first, then tell the
// backend to fire the capture on the paired device.
function handleWatchCountdownComplete() {
  showWatchCountdown = false;
  if (!activeRequestId) return;
  const requestId = activeRequestId;
  activeRequestId = null;
  if (usePolling && markCaptureNowPoll) {
    markCaptureNowPoll({ requestId });
  } else if (clientWs) {
    clientWs.mutation(api.photoRequests.markCaptureNow, { requestId });
  }
}
// The user cancelled the countdown: mark the outstanding photo request
// rejected and clear local state.
function handleWatchCountdownCancel() {
  showWatchCountdown = false;
  if (activeRequestId) {
    // BUGFIX: the old guard was `if (activeRequestId && markRejectedPoll)`,
    // but markRejectedPoll is null in WebSocket mode, so the clientWs branch
    // was unreachable and WS-mode cancels never notified the server. Follow
    // the `usePolling && poll / else if (clientWs)` pattern used everywhere
    // else in this component.
    if (usePolling && markRejectedPoll) {
      markRejectedPoll({ requestId: activeRequestId });
    } else if (clientWs) {
      clientWs.mutation(api.photoRequests.markRejected, { requestId: activeRequestId });
    }
  }
  activeRequestId = null;
}
// A capture-now request fired on this device: upload the silently captured
// photo (plus its thumbnail) against that request.
function handleSilentCapture(base64: string, mediaType: string, thumbnailBase64: string) {
  const request = captureNowRequest.data;
  if (!request) return;
  const args = {
    requestId: request._id,
    photoBase64: base64,
    photoMediaType: mediaType,
    thumbnailBase64
  };
  if (usePolling && submitPhotoPoll) {
    submitPhotoPoll(args);
  } else if (clientWs) {
    clientWs.mutation(api.photoRequests.submitPhoto, args);
  }
}
// The requester approved the preview: move the captured photo into this
// chat's draft and close the preview dialog.
function handlePreviewAccept() {
  if (!previewPhoto || !chatId) return;
  const { requestId } = previewPhoto;
  previewPhoto = null;
  const args = { requestId, chatId, deviceId };
  if (usePolling && acceptPhotoToDraftPoll) {
    acceptPhotoToDraftPoll(args);
  } else if (clientWs) {
    clientWs.mutation(api.photoRequests.acceptPhotoToDraft, args);
  }
}
// The requester discarded the preview: mark the photo request rejected and
// close the dialog.
function handlePreviewReject() {
  if (!previewPhoto) return;
  const { requestId } = previewPhoto;
  previewPhoto = null;
  if (usePolling && markRejectedPoll) {
    markRejectedPoll({ requestId });
  } else if (clientWs) {
    clientWs.mutation(api.photoRequests.markRejected, { requestId });
  }
}
// Remove the draft photo at `index` for this chat/device pair.
function handleRemoveDraftPhoto(index: number) {
  if (!chatId) return;
  const args = { chatId, deviceId, index };
  if (usePolling && removePhotoPoll) {
    removePhotoPoll(args);
  } else if (clientWs) {
    clientWs.mutation(api.photoDrafts.removePhoto, args);
  }
}
</script> </script>
<svelte:head> <svelte:head>
@@ -118,12 +543,22 @@
<div class="py-4 text-center text-xs text-neutral-500">Not found</div> <div class="py-4 text-center text-xs text-neutral-500">Not found</div>
{:else} {:else}
<div class="space-y-1"> <div class="space-y-1">
{#each messages as message (message._id)} {#each messages as message, i (message._id)}
{#if i === messages.length - 1}
<div bind:this={lastMessageElement}>
<ChatMessage <ChatMessage
role={message.role} role={message.role}
content={message.content} content={message.content}
isStreaming={message.isStreaming} isStreaming={message.isStreaming}
/> />
</div>
{:else}
<ChatMessage
role={message.role}
content={message.content}
isStreaming={message.isStreaming}
/>
{/if}
{/each} {/each}
</div> </div>
@@ -133,16 +568,111 @@
</div> </div>
{/if} {/if}
<div class="mt-2 flex gap-1"> <div class="mt-3 space-y-2">
<div class="flex gap-2">
{#if hasCamera}
<button
onclick={handleTakePhoto}
class="flex-1 rounded bg-neutral-800 py-2 text-xs text-neutral-300"
>
+ photo
</button>
{/if}
{#if isPaired && pairedDevice.data?.hasCamera}
<button
onclick={handleRequestPhoto}
class="flex-1 rounded bg-neutral-800 py-2 text-xs text-neutral-300"
>
request
</button>
{/if}
{#if isPaired}
<button
onclick={handleUnpair}
class="flex-1 rounded bg-red-900/50 py-2 text-xs text-red-300"
>
unpair
</button>
{:else}
<button
onclick={handlePair}
class="flex-1 rounded bg-neutral-800 py-2 text-xs text-neutral-300"
>
pair
</button>
{/if}
<button <button
onclick={summarize} onclick={summarize}
class="shrink-0 rounded bg-neutral-800 px-1.5 py-0.5 text-[8px] text-neutral-400" class="flex-1 rounded bg-neutral-800 py-2 text-xs text-neutral-300"
> >
/sum /sum
</button> </button>
<div class="flex-1"> </div>
<ChatInput onsubmit={sendMessage} /> {#if draftPhotos.length > 0}
<DraftBadge photos={draftPhotos} onremove={handleRemoveDraftPhoto} />
{/if}
<ChatInput onsubmit={sendMessage} allowEmpty={draftPhotos.length > 0} />
</div>
{/if}
{#if showScrollButton}
<button
onclick={scrollToLastMessage}
class="fixed right-3 bottom-12 z-50 flex h-8 w-8 animate-pulse items-center justify-center rounded-full bg-blue-600 text-white shadow-lg"
>
</button>
{/if}
<StealthOverlay />
{#if showCamera}
<CameraCapture oncapture={handleCameraCapture} onclose={handleCameraClose} />
{/if}
{#if pendingPairing.data && !isPaired}
<div class="fixed inset-0 z-50 flex items-center justify-center bg-black/90" data-camera-ui>
<div class="rounded-lg bg-neutral-900 p-6 text-center">
<p class="mb-4 text-sm text-white">Accept pairing request?</p>
<div class="flex gap-3">
<button
onclick={handleAcceptPairing}
class="flex-1 rounded bg-blue-600 py-2 text-sm text-white"
>
Accept
</button>
<button
onclick={handleRejectPairing}
class="flex-1 rounded bg-neutral-700 py-2 text-sm text-white"
>
Reject
</button>
</div>
</div> </div>
</div> </div>
{/if} {/if}
{#if showWatchCountdown}
<WatchCountdown
oncomplete={handleWatchCountdownComplete}
oncancel={handleWatchCountdownCancel}
/>
{/if}
{#if previewPhoto}
<PhotoPreview
base64={previewPhoto.thumbnail}
mediaType={previewPhoto.mediaType}
onaccept={handlePreviewAccept}
onreject={handlePreviewReject}
/>
{/if}
{#if hasCamera && isPaired}
<SilentCapture
bind:this={silentCaptureRef}
oncapture={handleSilentCapture}
onunpair={handleUnpair}
/>
{/if}
</div> </div>

View File

@@ -2,4 +2,7 @@ import tailwindcss from '@tailwindcss/vite';
import { sveltekit } from '@sveltejs/kit/vite'; import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite'; import { defineConfig } from 'vite';
export default defineConfig({ plugins: [tailwindcss(), sveltekit()] }); export default defineConfig({
plugins: [tailwindcss(), sveltekit()],
server: { allowedHosts: ['reasonable-duncan-stations-parking.trycloudflare.com'] }
});