skip sending previous context if we're continuing a connection
parent deb2f6e8e5
commit 2e828600ea

1 changed file with 9 additions and 7 deletions
@@ -14,6 +14,7 @@ async def list_conversations(request: Request):
 girlypop_prompt = llm.cli.load_template("girlypop").system
 
 async def connect_to_conversation(ws: WebSocket):
+    continuing = bool(ws.query_params["continue"])
     conversation_id = ws.path_params["conversation"]
     if conversation_id == "new":
         conversation = llm.AsyncConversation(llm.get_async_model())
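The new continuing flag is read straight from the query string. As a minimal sketch of the truthiness this relies on (assuming clients always include the parameter, since Starlette's QueryParams raises KeyError for a missing key):

    bool("")       # False -> replay previous context to the client
    bool("1")      # True  -> skip the replay in the hunk below
    bool("false")  # also True; bool() only tests for a non-empty string
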
@@ -31,13 +32,14 @@ async def connect_to_conversation(ws: WebSocket):
     # only send the system prompt at the start of a conversation
     system_prompt = girlypop_prompt
 
-    for response in conversation.responses:
-        response: llm.AsyncResponse = response
-        if not response._done: continue
-        if response.prompt.system:
-            system_prompt = None
-        await ws.send_text(json.dumps({"u": response.prompt.prompt})) # user
-        await ws.send_text(json.dumps({"f": response.text_or_raise()})) # full
+    if not continuing:
+        for response in conversation.responses:
+            response: llm.AsyncResponse = response
+            if not response._done: continue
+            if response.prompt.system:
+                system_prompt = None
+            await ws.send_text(json.dumps({"u": response.prompt.prompt})) # user
+            await ws.send_text(json.dumps({"f": response.text_or_raise()})) # full
 
     async for message in ws.iter_text():
         response = conversation.prompt(message, system=system_prompt, stream=True)
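
For illustration, a client of this endpoint might look like the sketch below. This is an assumption, not part of the commit: the URL, route layout, conversation id, and the websockets library are placeholders; only the continue query parameter and the "u"/"f" message keys come from the handler above.

    import asyncio
    import json

    import websockets  # assumed client library; any WebSocket client works

    async def main():
        # hypothetical route; only the "continue" parameter is real.
        # An empty value means "not continuing", so the server replays the
        # stored history as {"u": ...} and {"f": ...} frames first; any
        # non-empty value skips that replay.
        url = "ws://localhost:8000/conversations/abc123?continue=1"
        async with websockets.connect(url) as ws:
            await ws.send("hello again!")
            # assuming reply frames are JSON, as in the history replay above
            print(json.loads(await ws.recv()))

    asyncio.run(main())

With a non-empty continue value the server goes straight to the iter_text() loop instead of replaying history, which is the behavior this commit adds.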