feat(solaris): move to a service system to support multi-chat
This commit is contained in:
@@ -1,18 +1,31 @@
|
||||
import json
|
||||
from dataclasses import asdict
|
||||
|
||||
from google import genai
|
||||
from google.genai import chats, types
|
||||
|
||||
from ..content_configs import generate_respond_config
|
||||
from utils.config import dconfig
|
||||
|
||||
from ..constants import SAFETY_SETTINGS
|
||||
from ..structures import InputMessage, OutputMessage
|
||||
|
||||
# Gemini model used for the respond agent's chat sessions (passed to chats.create).
RESPOND_MODEL = "gemini-2.5-flash"
|
||||
|
||||
|
||||
class RespondAgent:
|
||||
def __init__(self, client: genai.client.AsyncClient, prompt: str) -> None:
|
||||
self.chat = client.chats.create(
|
||||
model=RESPOND_MODEL, config=generate_respond_config(prompt=prompt)
|
||||
chat: chats.AsyncChat
|
||||
|
||||
def __init__(self, client: genai.client.AsyncClient) -> None:
|
||||
self.client = client
|
||||
|
||||
async def load_chat(self, history: list[types.Content], system_prompt: str):
|
||||
self.chat = self.client.chats.create(
|
||||
model=(await dconfig()).models.respond_model,
|
||||
config=types.GenerateContentConfig(
|
||||
system_instruction=system_prompt,
|
||||
thinking_config=types.ThinkingConfig(thinking_budget=0),
|
||||
response_mime_type="application/json",
|
||||
response_schema=list[OutputMessage],
|
||||
safety_settings=SAFETY_SETTINGS,
|
||||
),
|
||||
history=history,
|
||||
)
|
||||
|
||||
async def send_messages(self, messages: list[InputMessage]) -> list[OutputMessage]:
|
||||
|
||||
Reference in New Issue
Block a user