display the current model in use

Charlotte Som 2025-03-01 12:50:52 +00:00
parent 877a4932d3
commit 3a73e21c5b
3 changed files with 10 additions and 1 deletion

@@ -108,6 +108,8 @@ function conversationUI(id: string) {
         inFlightMessages.get(message.r)?.element?.remove();
         inFlightMessages.delete(message.r);
       }
+    } else if ("m" in message) {
+      chatlog.append(<article className="system info">model: {message.m}</article>);
     }
     if (scrolledToBottom) chatlog.scrollTop = chatlog.scrollHeight - chatlog.clientHeight;
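
For context on the first hunk, a minimal client-side sketch of where the new branch sits. It assumes a JSX factory that returns DOM nodes and a handler shaped roughly as the diff implies; ws, chatlog, inFlightMessages, and the "r"/"m" keys come from the surrounding code, while everything else is illustrative rather than the repository's actual structure.

    // Hedged sketch (not from the repo): the websocket dispatch around the new "m" branch.
    type InFlight = { element?: HTMLElement };

    declare const chatlog: HTMLElement;
    declare const inFlightMessages: Map<string, InFlight>;
    declare const ws: WebSocket;

    ws.addEventListener("message", (event: MessageEvent<string>) => {
      const message = JSON.parse(event.data);
      const scrolledToBottom =
        chatlog.scrollTop >= chatlog.scrollHeight - chatlog.clientHeight - 1;

      if ("r" in message) {
        // existing behaviour: drop the in-flight placeholder this reply refers to
        inFlightMessages.get(message.r)?.element?.remove();
        inFlightMessages.delete(message.r);
      } else if ("m" in message) {
        // new in this commit: show the model id as a system-info line
        chatlog.append(<article className="system info">model: {message.m}</article>);
      }

      // stay pinned to the bottom if the user was already there
      if (scrolledToBottom) chatlog.scrollTop = chatlog.scrollHeight - chatlog.clientHeight;
    });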

@@ -75,7 +75,7 @@ main,
 article {
   white-space: preserve;
-  padding: 1em;
+  padding: 1rem;
   border-bottom: 1px solid rgb(var(--color-fg) / 0.5);
 }
@@ -90,6 +90,11 @@ main,
   font-size: 0.5em;
   font-family: var(--font-mono);
   color: rgb(var(--color-fg) / 0.5);
+  &.info {
+    font-size: 1rem;
+    color: rgb(123 191 250 / 1);
+  }
 }
 article.system.error {

@@ -40,6 +40,8 @@ async def connect_to_conversation(ws: WebSocket):
     system_prompt = llm.cli.load_template("girlypop").system
     if not continuing:
+        await ws.send_text(json({"m": conversation.model.model_id}))
         for response in conversation.responses:
             response: llm.AsyncResponse = response
             if not response._done: continue
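
Taken together, the three hunks add one message kind to the conversation socket: on a fresh (non-continuing) connection the server announces the conversation's model id under the "m" key, and the client renders it as a styled system-info line in the chat log. Below is a hedged TypeScript summary of the shapes involved; only the "m" and "r" keys come from this diff, and every other name is invented for illustration.

    // Illustrative wire-format summary; not the repository's actual type definitions.
    type ModelAnnouncement = { m: string }; // model_id, sent once right after a fresh connection
    type InFlightCleanup = { r: string };   // pre-existing: id of an in-flight message to finalize

    // The client dispatches purely on key presence, in the same order as the handler above.
    function describeMessage(message: object): string {
      if ("r" in message) return `cleanup in-flight ${(message as InFlightCleanup).r}`;
      if ("m" in message) return `model: ${(message as ModelAnnouncement).m}`;
      return "other message kind (not covered by this commit)";
    }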