skip sending previous context if we're continuing a connection
This commit is contained in:
parent
deb2f6e8e5
commit
2e828600ea
1 changed file with 9 additions and 7 deletions
|
@@ -14,6 +14,7 @@ async def list_conversations(request: Request):
|
|||
girlypop_prompt = llm.cli.load_template("girlypop").system
|
||||
|
||||
async def connect_to_conversation(ws: WebSocket):
|
||||
continuing = bool(ws.query_params["continue"])
|
||||
conversation_id = ws.path_params["conversation"]
|
||||
if conversation_id == "new":
|
||||
conversation = llm.AsyncConversation(llm.get_async_model())
|
||||
|
@@ -31,13 +32,14 @@ async def connect_to_conversation(ws: WebSocket):
|
|||
# only send the system prompt at the start of a conversation
|
||||
system_prompt = girlypop_prompt
|
||||
|
||||
for response in conversation.responses:
|
||||
response: llm.AsyncResponse = response
|
||||
if not response._done: continue
|
||||
if response.prompt.system:
|
||||
system_prompt = None
|
||||
await ws.send_text(json.dumps({"u": response.prompt.prompt})) # user
|
||||
await ws.send_text(json.dumps({"f": response.text_or_raise()})) # full
|
||||
if not continuing:
|
||||
for response in conversation.responses:
|
||||
response: llm.AsyncResponse = response
|
||||
if not response._done: continue
|
||||
if response.prompt.system:
|
||||
system_prompt = None
|
||||
await ws.send_text(json.dumps({"u": response.prompt.prompt})) # user
|
||||
await ws.send_text(json.dumps({"f": response.text_or_raise()})) # full
|
||||
|
||||
async for message in ws.iter_text():
|
||||
response = conversation.prompt(message, system=system_prompt, stream=True)
|
||||
|
|
Loading…
Reference in a new issue