Compare commits

..

6 Commits

Author SHA1 Message Date
Ryan Chen
ffbe992f64 Add management command to rename conversations
Conversations with >10 messages get an LLM-generated summary title.
Conversations with <=10 messages get the first user message truncated
to 100 chars. Supports --dry-run for previewing changes.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-10 13:26:47 -04:00
ryan
9ed4ca126a Merge pull request 'Fix mobile conversation launch resetting to homepage' (#27) from fix/mobile-conversation-launch into main
Reviewed-on: #27
2026-04-09 22:09:55 -04:00
Ryan Chen
f3ae76ce68 Fix mobile conversation launch resetting to homepage
Remove the useEffect on selectedConversation.id that race-conditions
with handleQuestionSubmit — it fetches the (still-empty) conversation
and wipes messages, sending the user back to the empty state. Refresh
conversation list after streaming completes instead to pick up the
auto-generated title.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-09 22:08:26 -04:00
ryan
7ee3bdef84 Merge pull request 'Simplify conversation naming to first message truncation' (#26) from feat/conversation-name-truncation into main
Reviewed-on: #26
2026-04-09 22:04:33 -04:00
Ryan Chen
500c44feb1 Simplify conversation naming to truncate first message
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-09 22:02:10 -04:00
ryan
896501deb1 Merge pull request 'Add user memory for cross-conversation recall' (#25) from feat/user-memory into main
Reviewed-on: #25
2026-04-09 21:54:04 -04:00
6 changed files with 105 additions and 65 deletions

9
app.py
View File

@@ -135,17 +135,10 @@ async def get_messages():
}
)
name = conversation.name
if len(messages) > 8:
name = await blueprints.conversation.logic.rename_conversation(
user=user,
conversation=conversation,
)
return jsonify(
{
"id": str(conversation.id),
"name": name,
"name": conversation.name,
"messages": messages,
"created_at": conversation.created_at.isoformat(),
"updated_at": conversation.updated_at.isoformat(),

View File

@@ -1,4 +1,3 @@
import datetime
import json
import logging
import uuid
@@ -20,7 +19,6 @@ from .agents import main_agent
from .logic import (
add_message_to_conversation,
get_conversation_by_id,
rename_conversation,
)
from .memory import get_memories_for_user
from .models import (
@@ -242,8 +240,6 @@ async def stream_query():
@jwt_refresh_token_required
async def get_conversation(conversation_id: str):
conversation = await Conversation.get(id=conversation_id)
current_user_uuid = get_jwt_identity()
user = await blueprints.users.models.User.get(id=current_user_uuid)
await conversation.fetch_related("messages")
# Manually serialize the conversation with messages
@@ -258,18 +254,10 @@ async def get_conversation(conversation_id: str):
"image_key": msg.image_key,
}
)
name = conversation.name
if len(messages) > 8 and "datetime" in name.lower():
name = await rename_conversation(
user=user,
conversation=conversation,
)
print(name)
return jsonify(
{
"id": str(conversation.id),
"name": name,
"name": conversation.name,
"messages": messages,
"created_at": conversation.created_at.isoformat(),
"updated_at": conversation.updated_at.isoformat(),
@@ -283,7 +271,7 @@ async def create_conversation():
user_uuid = get_jwt_identity()
user = await blueprints.users.models.User.get(id=user_uuid)
conversation = await Conversation.create(
name=f"{user.username} {datetime.datetime.now().timestamp}",
name="New Conversation",
user=user,
)

View File

@@ -1,9 +1,8 @@
import tortoise.exceptions
from langchain_openai import ChatOpenAI
import blueprints.users.models
from .models import Conversation, ConversationMessage, RenameConversationOutputSchema
from .models import Conversation, ConversationMessage
async def create_conversation(name: str = "") -> Conversation:
@@ -67,22 +66,3 @@ async def get_conversation_transcript(
messages.append(f"{message.speaker} at {message.created_at}: {message.text}")
return "\n".join(messages)
async def rename_conversation(
user: blueprints.users.models.User,
conversation: Conversation,
) -> str:
messages: str = await get_conversation_transcript(
user=user, conversation=conversation
)
llm = ChatOpenAI(model="gpt-4o-mini")
structured_llm = llm.with_structured_output(RenameConversationOutputSchema)
prompt = f"Summarize the following conversation into a sassy one-liner title:\n\n{messages}"
response = structured_llm.invoke(prompt)
new_name: str = response.get("title", "")
conversation.name = new_name
await conversation.save()
return new_name

View File

@@ -1,5 +1,4 @@
import enum
from dataclasses import dataclass
from tortoise import fields
from tortoise.contrib.pydantic import (
@@ -9,12 +8,6 @@ from tortoise.contrib.pydantic import (
from tortoise.models import Model
@dataclass
class RenameConversationOutputSchema:
title: str
justification: str
class Speaker(enum.Enum):
USER = "user"
SIMBA = "simba"

View File

@@ -120,20 +120,6 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
scrollToBottom();
}, [messages]);
useEffect(() => {
const load = async () => {
if (!selectedConversation) return;
try {
const conv = await conversationService.getConversation(selectedConversation.id);
setSelectedConversation({ id: conv.id, title: conv.name });
setMessages(conv.messages.map((m) => ({ text: m.text, speaker: m.speaker, image_key: m.image_key })));
} catch (err) {
console.error("Failed to load messages:", err);
}
};
load();
}, [selectedConversation?.id]);
const handleQuestionSubmit = useCallback(async () => {
if ((!query.trim() && !pendingImage) || isLoading) return;
@@ -215,7 +201,10 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
}
}
} finally {
if (isMountedRef.current) setIsLoading(false);
if (isMountedRef.current) {
setIsLoading(false);
loadConversations();
}
abortControllerRef.current = null;
}
}, [query, pendingImage, isLoading, selectedConversation, simbaMode, messages, setAuthenticated]);

View File

@@ -0,0 +1,97 @@
#!/usr/bin/env python3
"""
Management command to rename all conversations.
- Conversations with >10 messages: renamed to an LLM-generated summary
- Conversations with <=10 messages: renamed to a truncation of the first user message
"""
import argparse
import asyncio
import os
from tortoise import Tortoise
from blueprints.conversation.models import Conversation, Speaker
from llm import LLMClient
async def rename_conversations(dry_run: bool = False):
    """Rename all conversations based on message count.

    Conversations with more than 10 messages get an LLM-generated summary
    title; all others get the first user message truncated to 100 chars.
    Conversations with no user messages, or whose name already matches the
    computed one, are skipped.

    Args:
        dry_run: When True, print the would-be renames without saving.
    """
    database_url = os.getenv("DATABASE_URL", "sqlite://raggr.db")
    await Tortoise.init(
        db_url=database_url,
        modules={
            "models": [
                "blueprints.users.models",
                "blueprints.conversation.models",
            ]
        },
    )
    try:
        # Lazily construct the LLM client so that runs touching only short
        # conversations don't require LLM credentials to be configured.
        llm = None
        conversations = await Conversation.all().prefetch_related("messages")
        renamed = 0
        skipped = 0
        for conversation in conversations:
            messages = sorted(conversation.messages, key=lambda m: m.created_at)
            user_messages = [m for m in messages if m.speaker == Speaker.USER]
            if not user_messages:
                # Nothing to derive a fallback name from.
                skipped += 1
                continue
            if len(messages) > 10:
                # Summarize via LLM; cap the transcript at 30 messages to
                # keep the prompt bounded.
                if llm is None:
                    llm = LLMClient()
                message_text = "\n".join(
                    f"{m.speaker.value}: {m.text}" for m in messages[:30]
                )
                new_name = llm.chat(
                    prompt=message_text,
                    system_prompt=(
                        "You are naming a conversation. Given the messages below, "
                        "produce a short, descriptive title (max 8 words). "
                        "Reply with ONLY the title, nothing else."
                    ),
                )
                new_name = new_name.strip().strip('"').strip("'")[:100]
                if not new_name:
                    # The model returned an empty/whitespace title; fall back
                    # to the truncation rule so we never save a blank name.
                    new_name = user_messages[0].text[:100]
            else:
                # Truncate first user message
                new_name = user_messages[0].text[:100]
            old_name = conversation.name
            if old_name == new_name:
                skipped += 1
                continue
            if dry_run:
                print(f" [dry-run] '{old_name}' -> '{new_name}'")
            else:
                conversation.name = new_name
                await conversation.save()
                print(f" '{old_name}' -> '{new_name}'")
            renamed += 1
        print(f"\nRenamed: {renamed} Skipped: {skipped}")
        if dry_run:
            print("(dry run — no changes were saved)")
    finally:
        # Always release DB connections, even if the rename loop raised.
        await Tortoise.close_connections()
if __name__ == "__main__":
    # CLI entry point: parse flags, then run the async rename job to
    # completion on a fresh event loop.
    arg_parser = argparse.ArgumentParser(
        description="Rename conversations based on message count"
    )
    arg_parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Preview renames without saving",
    )
    cli_args = arg_parser.parse_args()
    asyncio.run(rename_conversations(dry_run=cli_args.dry_run))