Commit a487b1e

🐛 Fix LMDB history mismatch (#4)
1 parent ab0396f commit a487b1e

2 files changed: +4 −3 lines

app/server/chat.py

Lines changed: 2 additions & 1 deletion
@@ -98,10 +98,11 @@ async def create_chat_completion(
 
     # Format and clean the output
     model_output = client.extract_output(response)
+    stored_output = client.extract_output(response, include_thoughts=False)
 
     # After cleaning, persist the conversation
     try:
-        last_message = Message(role="assistant", content=model_output)
+        last_message = Message(role="assistant", content=stored_output)
         conv = ConversationInStore(
             model=model.model_name,
             metadata=session.metadata,
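
In plain terms, the handler now builds two views of the same response: model_output (thoughts included, returned to the caller) and stored_output (thoughts stripped, persisted to the conversation store), so the LMDB-backed history no longer carries <think> blocks. Below is a minimal sketch of that split using stand-in dataclasses; the real Message, ConversationInStore, LMDB store, and model name are not reproduced here and the sample strings are invented for illustration.

# Sketch only: stand-in types to illustrate the persisted-vs-returned split.
from dataclasses import dataclass, field

@dataclass
class Message:
    role: str
    content: str

@dataclass
class ConversationInStore:
    model: str
    metadata: dict
    messages: list[Message] = field(default_factory=list)

# Pretend these came from client.extract_output(...) with and without thoughts.
model_output = "<think>compare the two timestamps</think>The later date is 2024-05-01."
stored_output = "The later date is 2024-05-01."

# What the API response carries back to the caller: thoughts kept.
api_content = model_output

# What gets written to the LMDB-backed history: thoughts dropped.
last_message = Message(role="assistant", content=stored_output)
conv = ConversationInStore(model="example-model", metadata={}, messages=[last_message])

assert "<think>" not in conv.messages[-1].content  # stored history stays thought-free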

app/services/client.py

Lines changed: 2 additions & 2 deletions
@@ -87,13 +87,13 @@ async def process_conversation(messages: list[Message], tempdir: Path | None = N
         return "\n".join(conversation), files
 
     @staticmethod
-    def extract_output(response: ModelOutput):
+    def extract_output(response: ModelOutput, include_thoughts: bool = True):
         """
         Extract and format the output text from the Gemini response.
         """
         text = ""
 
-        if response.thoughts:
+        if include_thoughts and response.thoughts:
             text += f"<think>{response.thoughts}</think>"
 
         if response.text:
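
Because include_thoughts defaults to True, existing callers keep their behaviour; only call sites that opt out, like the new stored_output above, drop the <think> block. A rough usage sketch follows, with ModelOutput stubbed out since its real definition isn't shown in this diff, and with the tail of the helper (appending response.text and returning the string) assumed rather than taken from the hunk.

# Illustration only: ModelOutput is a stub assumed to expose .thoughts and .text.
from dataclasses import dataclass

@dataclass
class ModelOutput:
    thoughts: str | None = None
    text: str | None = None

def extract_output(response: ModelOutput, include_thoughts: bool = True) -> str:
    """Approximation of the patched helper, for demonstration."""
    text = ""
    if include_thoughts and response.thoughts:
        text += f"<think>{response.thoughts}</think>"
    if response.text:
        text += response.text  # tail of the real helper not shown in the hunk; assumed
    return text                # return value assumed as well

resp = ModelOutput(thoughts="step-by-step reasoning", text="Final answer.")
print(extract_output(resp))                          # <think>step-by-step reasoning</think>Final answer.
print(extract_output(resp, include_thoughts=False))  # Final answer.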

0 commit comments