Compare commits

...

13 Commits

Author SHA1 Message Date
Ryan Chen
f3ae76ce68 Fix mobile conversation launch resetting to homepage
Remove the useEffect on selectedConversation.id that race-conditions
with handleQuestionSubmit — it fetches the (still-empty) conversation
and wipes messages, sending the user back to the empty state. Refresh
conversation list after streaming completes instead to pick up the
auto-generated title.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-09 22:08:26 -04:00
ryan
7ee3bdef84 Merge pull request 'Simplify conversation naming to first message truncation' (#26) from feat/conversation-name-truncation into main
Reviewed-on: #26
2026-04-09 22:04:33 -04:00
Ryan Chen
500c44feb1 Simplify conversation naming to truncate first message
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-09 22:02:10 -04:00
ryan
896501deb1 Merge pull request 'Add user memory for cross-conversation recall' (#25) from feat/user-memory into main
Reviewed-on: #25
2026-04-09 21:54:04 -04:00
Ryan Chen
c95800e65d Add user memory feature for cross-conversation recall
Give the LangChain agent a save_user_memory tool so users can ask it to
remember preferences and personal facts. Memories are stored per-user in
a new user_memories table and injected into the system prompt on each
conversation turn.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-09 21:53:14 -04:00
ryan
90372a6a6d Merge pull request 'Order conversations by recency and auto-name from first message' (#24) from feat/conversation-ordering-and-naming into main
Reviewed-on: #24
2026-04-05 10:43:09 -04:00
Ryan Chen
c01764243f Order conversations by recency and auto-name from first message
Conversations are now returned sorted by most recently updated first.
New conversations are named using the first 100 characters of the
user's initial message instead of a username+timestamp placeholder.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-05 10:34:48 -04:00
ryan
dfaac4caf8 Merge pull request 'Extend JWT token expiration times' (#23) from extend-jwt-expiration into main
Reviewed-on: #23
2026-04-05 10:13:29 -04:00
ryan
17c3a2f888 Merge pull request 'Add redeploy Makefile target' (#20) from feat/makefile-redeploy into main
Reviewed-on: #20
2026-04-05 10:13:01 -04:00
ryan
fa0f68e3b4 Merge pull request 'Fix OIDC login crash when groups claim is null' (#22) from fix/oidc-null-groups into main
Reviewed-on: #22
2026-04-05 10:12:55 -04:00
Ryan Chen
a6c698c6bd Fix OIDC login crash when groups claim is null
Use `claims.get("groups") or []` instead of `claims.get("groups", [])`
so that an explicit `null` value is coerced to an empty list, preventing
a ValueError on the non-nullable ldap_groups field.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-05 10:12:12 -04:00
Ryan Chen
07c272c96a Extend JWT token expiration times
Access tokens now last 1 hour (up from default 15 min) and refresh
tokens last 30 days, reducing frequent re-authentication.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-05 10:10:47 -04:00
Ryan Chen
3671926430 Add redeploy Makefile target for quick pull-and-restart
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-04 09:10:10 -04:00
11 changed files with 228 additions and 78 deletions

View File

@@ -1,8 +1,11 @@
.PHONY: deploy build up down restart logs migrate migrate-new frontend test
.PHONY: deploy redeploy build up down restart logs migrate migrate-new frontend test
# Build and deploy
deploy: build up
redeploy:
git pull && $(MAKE) down && $(MAKE) up
build:
docker compose build raggr

12
app.py
View File

@@ -1,5 +1,6 @@
import logging
import os
from datetime import timedelta
from dotenv import load_dotenv
from quart import Quart, jsonify, render_template, request, send_from_directory
@@ -38,6 +39,8 @@ app = Quart(
)
app.config["JWT_SECRET_KEY"] = os.getenv("JWT_SECRET_KEY", "SECRET_KEY")
app.config["JWT_ACCESS_TOKEN_EXPIRES"] = timedelta(hours=1)
app.config["JWT_REFRESH_TOKEN_EXPIRES"] = timedelta(days=30)
app.config["MAX_CONTENT_LENGTH"] = 10 * 1024 * 1024 # 10 MB upload limit
jwt = JWTManager(app)
@@ -132,17 +135,10 @@ async def get_messages():
}
)
name = conversation.name
if len(messages) > 8:
name = await blueprints.conversation.logic.rename_conversation(
user=user,
conversation=conversation,
)
return jsonify(
{
"id": str(conversation.id),
"name": name,
"name": conversation.name,
"messages": messages,
"created_at": conversation.created_at.isoformat(),
"updated_at": conversation.updated_at.isoformat(),

View File

@@ -1,4 +1,3 @@
import datetime
import json
import logging
import uuid
@@ -20,8 +19,8 @@ from .agents import main_agent
from .logic import (
add_message_to_conversation,
get_conversation_by_id,
rename_conversation,
)
from .memory import get_memories_for_user
from .models import (
Conversation,
PydConversation,
@@ -36,15 +35,27 @@ conversation_blueprint = Blueprint(
_SYSTEM_PROMPT = SIMBA_SYSTEM_PROMPT
async def _build_system_prompt_with_memories(user_id: str) -> str:
    """Return the system prompt, extended with the user's stored memories.

    Fetches all memories for *user_id*; when there are none the base
    prompt is returned untouched, otherwise a bulleted memory block is
    appended under a "USER MEMORIES" header.
    """
    memories = await get_memories_for_user(user_id)
    if not memories:
        return _SYSTEM_PROMPT
    bullet_block = "\n".join(f"- {entry}" for entry in memories)
    header = "\n\nUSER MEMORIES (facts the user has asked you to remember):\n"
    return _SYSTEM_PROMPT + header + bullet_block
def _build_messages_payload(
conversation, query_text: str, image_description: str | None = None
conversation,
query_text: str,
image_description: str | None = None,
system_prompt: str | None = None,
) -> list:
recent_messages = (
conversation.messages[-10:]
if len(conversation.messages) > 10
else conversation.messages
)
messages_payload = [{"role": "system", "content": _SYSTEM_PROMPT}]
messages_payload = [{"role": "system", "content": system_prompt or _SYSTEM_PROMPT}]
for msg in recent_messages[:-1]: # Exclude the message we just added
role = "user" if msg.speaker == "user" else "assistant"
text = msg.text
@@ -80,10 +91,14 @@ async def query():
user=user,
)
messages_payload = _build_messages_payload(conversation, query)
system_prompt = await _build_system_prompt_with_memories(str(user.id))
messages_payload = _build_messages_payload(
conversation, query, system_prompt=system_prompt
)
payload = {"messages": messages_payload}
agent_config = {"configurable": {"user_id": str(user.id)}}
response = await main_agent.ainvoke(payload)
response = await main_agent.ainvoke(payload, config=agent_config)
message = response.get("messages", [])[-1].content
await add_message_to_conversation(
conversation=conversation,
@@ -163,15 +178,19 @@ async def stream_query():
logging.error(f"Failed to analyze image: {e}")
image_description = "[Image could not be analyzed]"
system_prompt = await _build_system_prompt_with_memories(str(user.id))
messages_payload = _build_messages_payload(
conversation, query_text or "", image_description
conversation, query_text or "", image_description, system_prompt=system_prompt
)
payload = {"messages": messages_payload}
agent_config = {"configurable": {"user_id": str(user.id)}}
async def event_generator():
final_message = None
try:
async for event in main_agent.astream_events(payload, version="v2"):
async for event in main_agent.astream_events(
payload, version="v2", config=agent_config
):
event_type = event.get("event")
if event_type == "on_tool_start":
@@ -221,8 +240,6 @@ async def stream_query():
@jwt_refresh_token_required
async def get_conversation(conversation_id: str):
conversation = await Conversation.get(id=conversation_id)
current_user_uuid = get_jwt_identity()
user = await blueprints.users.models.User.get(id=current_user_uuid)
await conversation.fetch_related("messages")
# Manually serialize the conversation with messages
@@ -237,18 +254,10 @@ async def get_conversation(conversation_id: str):
"image_key": msg.image_key,
}
)
name = conversation.name
if len(messages) > 8 and "datetime" in name.lower():
name = await rename_conversation(
user=user,
conversation=conversation,
)
print(name)
return jsonify(
{
"id": str(conversation.id),
"name": name,
"name": conversation.name,
"messages": messages,
"created_at": conversation.created_at.isoformat(),
"updated_at": conversation.updated_at.isoformat(),
@@ -262,7 +271,7 @@ async def create_conversation():
user_uuid = get_jwt_identity()
user = await blueprints.users.models.User.get(id=user_uuid)
conversation = await Conversation.create(
name=f"{user.username} {datetime.datetime.now().timestamp}",
name="New Conversation",
user=user,
)
@@ -275,7 +284,7 @@ async def create_conversation():
async def get_all_conversations():
user_uuid = get_jwt_identity()
user = await blueprints.users.models.User.get(id=user_uuid)
conversations = Conversation.filter(user=user)
conversations = Conversation.filter(user=user).order_by("-updated_at")
serialized_conversations = await PydListConversation.from_queryset(conversations)
return jsonify(serialized_conversations.model_dump())

View File

@@ -5,9 +5,11 @@ from dotenv import load_dotenv
from langchain.agents import create_agent
from langchain.chat_models import BaseChatModel
from langchain.tools import tool
from langchain_core.runnables import RunnableConfig
from langchain_openai import ChatOpenAI
from tavily import AsyncTavilyClient
from blueprints.conversation.memory import save_memory
from blueprints.rag.logic import query_vector_store
from utils.obsidian_service import ObsidianService
from utils.ynab_service import YNABService
@@ -589,8 +591,35 @@ async def obsidian_create_task(
return f"Error creating task: {str(e)}"
@tool
async def save_user_memory(content: str, config: RunnableConfig) -> str:
    """Save a fact or preference about the user for future conversations.

    Use this tool when the user:
    - Explicitly asks you to remember something ("remember that...", "keep in mind...")
    - Shares a personal preference that would be useful in future conversations
      (e.g., "I prefer metric units", "my cat's name is Luna")
    - Tells you a meaningful personal fact (e.g., "I'm allergic to peanuts")

    Do NOT save:
    - Trivial or ephemeral info (e.g., "I'm tired today")
    - Information already in the system prompt or documents
    - Conversation-specific context that won't matter later

    Args:
        content: A concise statement of the fact or preference to remember.
            Write it as a standalone sentence (e.g., "User prefers dark mode"
            rather than "likes dark mode").

    Returns:
        Confirmation that the memory was saved.
    """
    # user_id is injected per request by the conversation routes via
    # agent config: {"configurable": {"user_id": str(user.id)}}.
    user_id = config["configurable"]["user_id"]
    return await save_memory(user_id=user_id, content=content)
# Create tools list based on what's available
tools = [get_current_date, simba_search, web_search]
tools = [get_current_date, simba_search, web_search, save_user_memory]
if ynab_enabled:
tools.extend(
[

View File

@@ -1,9 +1,8 @@
import tortoise.exceptions
from langchain_openai import ChatOpenAI
import blueprints.users.models
from .models import Conversation, ConversationMessage, RenameConversationOutputSchema
from .models import Conversation, ConversationMessage
async def create_conversation(name: str = "") -> Conversation:
@@ -19,6 +18,12 @@ async def add_message_to_conversation(
image_key: str | None = None,
) -> ConversationMessage:
print(conversation, message, speaker)
# Name the conversation after the first user message
if speaker == "user" and not await conversation.messages.all().exists():
conversation.name = message[:100]
await conversation.save()
message = await ConversationMessage.create(
text=message,
speaker=speaker,
@@ -61,22 +66,3 @@ async def get_conversation_transcript(
messages.append(f"{message.speaker} at {message.created_at}: {message.text}")
return "\n".join(messages)
async def rename_conversation(
    user: blueprints.users.models.User,
    conversation: Conversation,
) -> str:
    """Ask an LLM for a short title for *conversation*, persist it, and return it.

    Builds a full transcript of the conversation, prompts gpt-4o-mini for a
    structured one-liner title, stores it on the conversation, and returns
    the new name.
    """
    messages: str = await get_conversation_transcript(
        user=user, conversation=conversation
    )
    # NOTE(review): synchronous `.invoke` inside an async function blocks the
    # event loop for the duration of the LLM call — `ainvoke` would avoid that.
    llm = ChatOpenAI(model="gpt-4o-mini")
    structured_llm = llm.with_structured_output(RenameConversationOutputSchema)
    prompt = f"Summarize the following conversation into a sassy one-liner title:\n\n{messages}"
    response = structured_llm.invoke(prompt)
    # NOTE(review): `.get(...)` assumes the structured output is a dict; with a
    # dataclass schema some LangChain versions return a schema instance instead
    # — confirm against the installed langchain version.
    new_name: str = response.get("title", "")
    conversation.name = new_name
    await conversation.save()
    return new_name

View File

@@ -0,0 +1,19 @@
from .models import UserMemory
async def get_memories_for_user(user_id: str) -> list[str]:
    """Return all memory content strings for a user, newest-updated first."""
    queryset = UserMemory.filter(user_id=user_id).order_by("-updated_at")
    rows = await queryset
    return [row.content for row in rows]
async def save_memory(user_id: str, content: str) -> str:
    """Persist a memory for *user_id*, deduplicating on exact content match.

    A brand-new memory is created when no identical row exists; otherwise
    the existing row's `updated_at` is refreshed so it sorts as most recent.
    """
    duplicate = await UserMemory.filter(user_id=user_id, content=content).first()
    if duplicate is None:
        await UserMemory.create(user_id=user_id, content=content)
        return "Memory saved."
    duplicate.updated_at = None  # auto_now=True will refresh it on save
    await duplicate.save(update_fields=["updated_at"])
    return "Memory already exists (refreshed)."

View File

@@ -1,5 +1,4 @@
import enum
from dataclasses import dataclass
from tortoise import fields
from tortoise.contrib.pydantic import (
@@ -9,12 +8,6 @@ from tortoise.contrib.pydantic import (
from tortoise.models import Model
@dataclass
class RenameConversationOutputSchema:
    # Structured-output schema for the conversation-rename LLM call.
    # `title` is the generated one-liner; `justification` is the model's
    # rationale and is not read by rename_conversation.
    title: str
    justification: str
class Speaker(enum.Enum):
USER = "user"
SIMBA = "simba"
@@ -47,6 +40,17 @@ class ConversationMessage(Model):
table = "conversation_messages"
class UserMemory(Model):
    # A persisted fact/preference the user asked the assistant to remember,
    # injected into the system prompt on later conversation turns.
    id = fields.UUIDField(primary_key=True)
    # Owning user; reverse accessor is `user.memories`.
    user = fields.ForeignKeyField("models.User", related_name="memories")
    # Free-text memory content (e.g. "User prefers dark mode").
    content = fields.TextField()
    created_at = fields.DatetimeField(auto_now_add=True)
    # auto_now=True: refreshed on every save — used to order memories by recency.
    updated_at = fields.DatetimeField(auto_now=True)

    class Meta:
        table = "user_memories"
PydConversationMessage = pydantic_model_creator(ConversationMessage)
PydConversation = pydantic_model_creator(
Conversation, name="Conversation", allow_cycles=True, exclude=("user",)

View File

@@ -54,4 +54,7 @@ You have access to Ryan's daily journal notes. Each note lives at journal/YYYY/Y
- Use journal_get_tasks to list tasks (done/pending) for today or a specific date
- Use journal_add_task to add a new task to today's (or a given date's) note
- Use journal_complete_task to check off a task as done
Use these tools when Ryan asks about today's tasks, wants to add something to his list, or wants to mark a task complete."""
Use these tools when Ryan asks about today's tasks, wants to add something to his list, or wants to mark a task complete.
USER MEMORY:
You can remember facts about the user across conversations using the save_user_memory tool. When a user explicitly asks you to remember something, or shares a meaningful preference or personal fact, save it. Saved memories will automatically appear at the end of this prompt in future conversations under "USER MEMORIES"."""

View File

@@ -35,7 +35,7 @@ class OIDCUserService:
claims.get("preferred_username") or claims.get("name") or user.username
)
# Update LDAP groups from claims
user.ldap_groups = claims.get("groups", [])
user.ldap_groups = claims.get("groups") or []
await user.save()
return user
@@ -48,7 +48,7 @@ class OIDCUserService:
user.oidc_subject = oidc_subject
user.auth_provider = "oidc"
user.password = None # Clear password
user.ldap_groups = claims.get("groups", [])
user.ldap_groups = claims.get("groups") or []
await user.save()
return user
@@ -61,7 +61,7 @@ class OIDCUserService:
)
# Extract LDAP groups from claims
groups = claims.get("groups", [])
groups = claims.get("groups") or []
user = await User.create(
id=uuid4(),

View File

@@ -0,0 +1,112 @@
from tortoise import BaseDBAsyncClient
RUN_IN_TRANSACTION = True
async def upgrade(db: BaseDBAsyncClient) -> str:
    # Creates the user_memories table plus the email ingestion tables
    # (email_accounts, emails, email_sync_status). Parents are created
    # before the children that reference them.
    return """
CREATE TABLE IF NOT EXISTS "user_memories" (
"id" UUID NOT NULL PRIMARY KEY,
"content" TEXT NOT NULL,
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" UUID NOT NULL REFERENCES "users" ("id") ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS "email_accounts" (
"id" UUID NOT NULL PRIMARY KEY,
"email_address" VARCHAR(255) NOT NULL UNIQUE,
"display_name" VARCHAR(255),
"imap_host" VARCHAR(255) NOT NULL,
"imap_port" INT NOT NULL DEFAULT 993,
"imap_username" VARCHAR(255) NOT NULL,
"imap_password" TEXT NOT NULL,
"is_active" BOOL NOT NULL DEFAULT True,
"last_error" TEXT,
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" UUID NOT NULL REFERENCES "users" ("id") ON DELETE CASCADE
);
COMMENT ON TABLE "email_accounts" IS 'Email account configuration for IMAP connections.';
CREATE TABLE IF NOT EXISTS "emails" (
"id" UUID NOT NULL PRIMARY KEY,
"message_id" VARCHAR(255) NOT NULL UNIQUE,
"subject" VARCHAR(500) NOT NULL,
"from_address" VARCHAR(255) NOT NULL,
"to_address" TEXT NOT NULL,
"date" TIMESTAMPTZ NOT NULL,
"body_text" TEXT,
"body_html" TEXT,
"chromadb_doc_id" VARCHAR(255),
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"expires_at" TIMESTAMPTZ NOT NULL,
"account_id" UUID NOT NULL REFERENCES "email_accounts" ("id") ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS "idx_emails_message_981ddd" ON "emails" ("message_id");
COMMENT ON TABLE "emails" IS 'Email message metadata and content.';
CREATE TABLE IF NOT EXISTS "email_sync_status" (
"id" UUID NOT NULL PRIMARY KEY,
"last_sync_date" TIMESTAMPTZ,
"last_message_uid" INT NOT NULL DEFAULT 0,
"message_count" INT NOT NULL DEFAULT 0,
"consecutive_failures" INT NOT NULL DEFAULT 0,
"last_failure_date" TIMESTAMPTZ,
"updated_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"account_id" UUID NOT NULL REFERENCES "email_accounts" ("id") ON DELETE CASCADE
);
COMMENT ON TABLE "email_sync_status" IS 'Tracks sync progress and state per email account.';"""
async def downgrade(db: BaseDBAsyncClient) -> str:
    """Drop the tables created by upgrade(), children before parents.

    "emails" and "email_sync_status" both hold foreign keys into
    "email_accounts", so PostgreSQL would refuse to drop "email_accounts"
    first (DROP TABLE without CASCADE fails on a referenced table).
    "user_memories" only references "users" and can go last.
    """
    return """
DROP TABLE IF EXISTS "emails";
DROP TABLE IF EXISTS "email_sync_status";
DROP TABLE IF EXISTS "email_accounts";
DROP TABLE IF EXISTS "user_memories";"""
MODELS_STATE = (
"eJztXGtv2zYU/SuCPrVAFjTPbcUwwE7czVudDLGz9ZFCoCXa1ixRGkk1NYr+911Skq0HZV"
"t+RUr1oU1C8lLU4SV57tGVvuquZ2GHHV955DOmDHHbI/pr7atOkIvhF2X9kaYj31/UigKO"
"ho40MBMtZQ0aMk6RyaFyhByGocjCzKS2H12MBI4jCj0TGtpkvCgKiP1fgA3ujTGfYAoVHz"
"9BsU0s/AWz+E9/aoxs7FipcduWuLYsN/jMl2X3993rN7KluNzQMD0ncMmitT/jE4/MmweB"
"bR0LG1E3xgRTxLGVuA0xyui246JwxFDAaYDnQ7UWBRYeocARYOi/jAJiCgw0eSXx3/mveg"
"l4AGoBrU24wOLrt/CuFvcsS3VxqavfW3cvzi5fyrv0GB9TWSkR0b9JQ8RRaCpxXQApf+ag"
"vJogqoYybp8BEwa6CYxxwQLHhQ/FQMYAbYaa7qIvhoPJmE/gz9OLiyUw/t26k0hCKwmlB3"
"4dev1NVHUa1glIFxCaFItbNhDPA3kNNdx2sRrMtGUGUisyPY5/qSjAcA/WLXFm0SJYgu+g"
"2+v0B63eX+JOXMb+cyRErUFH1JzK0lmm9MVlZirmnWj/dAe/a+JP7cPtTSfr+/N2gw+6GB"
"MKuGcQ79FAVmK9xqUxMKmJDXxrw4lNWzYT+6QTGw0+Ma8MU6PcCZIw2eIYicZ2wEnc/NAQ"
"R+9oqjwzBBh58N54FNtj8ieeSQi7MA5ETNVhEZGO+6ibqoK2KF2MgqLHORtJOgXcHdwT5u"
"Hp2epfta47usRwiMzpI6KWUQCmixlDY8zygLYjyzd/3mFnTs3UWCYJXC/ssZq7ShG2Eivv"
"1EtglEIvX+WeutkSROC+reja4kpL0FnBghMgrkeGjeRENqS41qSY4y+KI38ApWoo4/Z1Ic"
"XLjvLOu0HqFI+p74te693L1En+9vbmt7h5gipfvb1tNwz5ORKpPENmPkZTFRkQAWSHBG6O"
"CqRmN2H+xEtHv+937l5r4kR/IP1ur916rTHbHSJ9vSlORZknr9YIMk9eFcaYoiq9gGwXTh"
"ZjimdlQvWU0Ub4Hp56pYG8ODldA0loVQilrMtsRslDu9yRqTDd5flZ03DAzIiHW4YFWS2y"
"siiujA8U7lI2TtgnKxbxVw+7Hp3pCjKcqF3KgWUQ5IqGdsN9nwH3hYtwTErR34RJw4AbBv"
"xdMeBGI34WE1sdjbham2FdROIKs8AtVOJ9s78i3rea8TVMr/5MT8xj2cf/SZu6cL0DpAD4"
"iLFHjyo8s20TRGdqMJNWGTCHMx5GU5VTaJaA1xa8N3m6A2Tt7k3r7r2aOsftk37bfj/otD"
"LoYhfZThkvnRvsxkVXr/hdOujJq/Xkw2X6YU5AfJwgzmBLN0jgDosEWzWYCtOdiImHRfVs"
"HVDPijE9y0EqnczARNyeauF7noMRWeKgSdvs8gfjfW2mZY/qEuv/9vZtav23u9nQ+L7X7o"
"DzSpihkR1Soe7NQAnuxEUmcIQpVuiKK1Z/xraGHntyuc42kI2QErvAZdZjPdsyDRYM/8Wm"
"IlotBjRrV0Mw93LqQ/w4MXzqfbatcltqzvBwVEp3PBM5W3DRzBOadbbVi+Jt9SK3rToW8o"
"0x9QJfkRLzR//2Rg1pxiwD6D2Bu/xo2SY/0hyb8U97g/fjp/3wfHHny1XJrACZIVaig0aV"
"fJbiVaNKPtOJnSfG5VShVVmFudc0dpNaWOWINJ9SmFwRySeUm2ORfihaPc9fC4qQHyPT9A"
"JhthUgHdFXK+yqZpDsU1yVsOgKdbUTKxPF8qqcnvX0VV12p0WZp/CTI6H2aYhYWvRQ9ljP"
"oLSOzQN5IH3uwbal+YgybGlyUJps+GjzCYTTP1hoplEs2sNgjrW3NpkyjXva1YR6LrpuP5"
"CRR7XPEDPAD4YRNSeaiXw0tCHsg5UoR9bowDvih1vowJErKJ92Fccwaas6Cm17iQk3CK+3"
"jayfXFK/WEuxvFiiWF7kFcsR7CKCGIEfK86oYjSzdvWEdC++CbyyENAlye1eHeE8dIKPiI"
"jKxlqxTT2jrJpEVfFtL42Xh541M8q+9ZEyqkl+9aGXhcRowl3F47sVwMZGDbDqhELJsuGK"
"MLSSzE1hWhOQm5f5G+VsU0kUf/Ft6G2DiU1b1nNiazKRax3WkXJVMjszbdUkaMaA5DEsna"
"NZXxHwKJOrmXaSKqVrpjAuEhYTc7BCX0zJv+vqjJGNkAlH9jigUhvWhMrX7bX+EsUES7mL"
"FamOJXpIaJBzK4otITfCGEMVEhOTznxwNC1OpTtK9KExzDlcnh09EKFuxt2AO/OAHWv9wP"
"c9ypnmgovZvoPjFkzzMZXvgjYaZUU0yshpy8tBOcNGqYwVC5v5DpoZZVOAs3ZN7JB4S9s3"
"JuDYZeBMGdVFXTsUmGJ/zoPZJQXCQcomg6W9P27y889nW0Apv0Xzw+nJ+Y/nP51dnv8ETe"
"RQ5iU/LgE3nzkpMdgktT9n2DhjxhkLk/w7MQ8p1rRyPdQF3UMLWzYE2sBHPit8d2lKdcru"
"gOnU84O/wtnUDmLcwJR6iizVYpdNW9XkmG9e7G70wiaFspnY5sXu5sXu6r7YnRE2dpGEWS"
"8s0zlTM2IaoSq3AyD60Ft/3lmNINm7fJxApkhBToO3SkTOTNxqHXkA1VOmCTvNp57YdJjM"
"PBWdYCm74qRQnNeRS/cgdOSegBn+MU1w2tBYnL1g4/pHYSF0ZkJf2JqnxsJOGCnHI+gwoF"
"gTdzeFgYg0Vxaqx5oNsR92MfTvhB0LA8matQn86kDzRkWuiIosIxrptJuka+Wtd8DtqhUh"
"VYjKrfUoWE5JnIkcqNZJoVaoMj2cZPhqC2q+Y8EwxqDgaXAhgDm77xI90T02AyE8GdExoS"
"AxhSAWmX+XWMolGaGw+Q6d7aDZpJ94k26UlOeppDR5WM8sD2vfSQ31z8JqYWqbE10RPUc1"
"R8uCZrRoU5kv5xU/S1+TEUcT+KTJMjvhIcVho3j9Xflt8+KH6QmTujzoPcQHc2BplAAxal"
"5PAPfyGbfCj3MXfxin+OPcB/sozt4O3Z19FKfENzZ2f7x8+x8fHBMe"
)

View File

@@ -120,20 +120,6 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
scrollToBottom();
}, [messages]);
useEffect(() => {
const load = async () => {
if (!selectedConversation) return;
try {
const conv = await conversationService.getConversation(selectedConversation.id);
setSelectedConversation({ id: conv.id, title: conv.name });
setMessages(conv.messages.map((m) => ({ text: m.text, speaker: m.speaker, image_key: m.image_key })));
} catch (err) {
console.error("Failed to load messages:", err);
}
};
load();
}, [selectedConversation?.id]);
const handleQuestionSubmit = useCallback(async () => {
if ((!query.trim() && !pendingImage) || isLoading) return;
@@ -215,7 +201,10 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
}
}
} finally {
if (isMountedRef.current) setIsLoading(false);
if (isMountedRef.current) {
setIsLoading(false);
loadConversations();
}
abortControllerRef.current = null;
}
}, [query, pendingImage, isLoading, selectedConversation, simbaMode, messages, setAuthenticated]);