From 3a73e21c5bdc68ce128441cbbc10e3f575a68ff8 Mon Sep 17 00:00:00 2001 From: Charlotte Som Date: Sat, 1 Mar 2025 12:50:52 +0000 Subject: [PATCH] display the current model in use --- client/main.tsx | 2 ++ client/web/css/styles.css | 7 ++++++- server/inference.py | 2 ++ 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/client/main.tsx b/client/main.tsx index 08aa7fb..eca5bdf 100644 --- a/client/main.tsx +++ b/client/main.tsx @@ -108,6 +108,8 @@ function conversationUI(id: string) { inFlightMessages.get(message.r)?.element?.remove(); inFlightMessages.delete(message.r); } + } else if ("m" in message) { + chatlog.append(
<article class="system info">model: {message.m}</article>,
); } if (scrolledToBottom) chatlog.scrollTop = chatlog.scrollHeight - chatlog.clientHeight; diff --git a/client/web/css/styles.css b/client/web/css/styles.css index c8faec3..2cfbf42 100644 --- a/client/web/css/styles.css +++ b/client/web/css/styles.css @@ -75,7 +75,7 @@ main, article { white-space: preserve; - padding: 1em; + padding: 1rem; border-bottom: 1px solid rgb(var(--color-fg) / 0.5); } @@ -90,6 +90,11 @@ main, font-size: 0.5em; font-family: var(--font-mono); color: rgb(var(--color-fg) / 0.5); + + &.info { + font-size: 1rem; + color: rgb(123 191 250 / 1); + } } article.system.error { diff --git a/server/inference.py b/server/inference.py index c872454..3864d5f 100644 --- a/server/inference.py +++ b/server/inference.py @@ -40,6 +40,8 @@ async def connect_to_conversation(ws: WebSocket): system_prompt = llm.cli.load_template("girlypop").system if not continuing: + await ws.send_text(json({"m": conversation.model.model_id})) + for response in conversation.responses: response: llm.AsyncResponse = response if not response._done: continue