Commit: 5054b4a859
Author: Ryan Chen
Message: Added conversation history
Date: 2025-10-23 22:28:41 -04:00

61 changed files with 637 additions and 5080 deletions

View File

@@ -1,28 +0,0 @@
# Database Configuration
# Use DATABASE_PATH for simple relative/absolute paths (e.g., "database/raggr.db" or "dev.db")
# Or use DATABASE_URL for full connection strings (e.g., "sqlite://database/raggr.db")
DATABASE_PATH=database/raggr.db
# DATABASE_URL=sqlite://database/raggr.db
# JWT Configuration
JWT_SECRET_KEY=your-secret-key-here
# Paperless Configuration
PAPERLESS_TOKEN=your-paperless-token
BASE_URL=192.168.1.5:8000
# Ollama Configuration
OLLAMA_URL=http://192.168.1.14:11434
OLLAMA_HOST=http://192.168.1.14:11434
# ChromaDB Configuration
CHROMADB_PATH=/path/to/chromadb
# OpenAI Configuration
OPENAI_API_KEY=your-openai-api-key
# Immich Configuration
IMMICH_URL=http://192.168.1.5:2283
IMMICH_API_KEY=your-immich-api-key
SEARCH_QUERY=simba cat
DOWNLOAD_DIR=./simba_photos
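
A note on the two database settings documented in this removed example: the config.py that is also deleted later in this diff resolves them with DATABASE_URL taking precedence over DATABASE_PATH. A minimal sketch of that resolution, reusing the defaults shown there:

```python
import os

# DATABASE_PATH is a plain file path; DATABASE_URL is a full connection string that overrides it
# (defaults copied from the config.py removed later in this commit)
DATABASE_PATH = os.getenv("DATABASE_PATH", "database/raggr.db")
DATABASE_URL = os.getenv("DATABASE_URL", f"sqlite://{DATABASE_PATH}")
print(DATABASE_URL)
```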

.gitignore (vendored, 7 changed lines)
View File

@@ -9,10 +9,5 @@ wheels/
 # Virtual environments
 .venv
-# Environment files
-.env
-# Database files
-chromadb/
-database/
-*.db
+.env

View File

@@ -23,8 +23,6 @@ RUN uv pip install --system -e .
 # Copy application code
 COPY *.py ./
-COPY blueprints ./blueprints
-COPY migrations ./migrations
 COPY startup.sh ./
 RUN chmod +x startup.sh
@@ -34,8 +32,8 @@ WORKDIR /app/raggr-frontend
 RUN yarn install && yarn build
 WORKDIR /app
-# Create ChromaDB and database directories
-RUN mkdir -p /app/chromadb /app/database
+# Create ChromaDB directory
+RUN mkdir -p /app/chromadb
 # Expose port
 EXPOSE 8080

app.py (new file, 102 lines)
View File

@@ -0,0 +1,102 @@
import os
from quart import Quart, request, jsonify, render_template, send_from_directory
from tortoise.contrib.quart import register_tortoise
from quart_jwt_extended import JWTManager
from main import consult_simba_oracle
from blueprints.conversation.logic import (
get_the_only_conversation,
add_message_to_conversation,
)
app = Quart(
__name__,
static_folder="raggr-frontend/dist/static",
template_folder="raggr-frontend/dist",
)
app.config["JWT_SECRET_KEY"] = os.getenv("JWT_SECRET_KEY", "SECRET_KEY")
jwt = JWTManager(app)
# Initialize Tortoise ORM
register_tortoise(
app,
db_url=os.getenv("DATABASE_URL", "sqlite://raggr.db"),
modules={"models": ["blueprints.conversation.models"]},
generate_schemas=True,
)
# Serve React static files
@app.route("/static/<path:filename>")
async def static_files(filename):
return await send_from_directory(app.static_folder, filename)
# Serve the React app for all routes (catch-all)
@app.route("/", defaults={"path": ""})
@app.route("/<path:path>")
async def serve_react_app(path):
if path and os.path.exists(os.path.join(app.template_folder, path)):
return await send_from_directory(app.template_folder, path)
return await render_template("index.html")
@app.route("/api/query", methods=["POST"])
async def query():
data = await request.get_json()
query = data.get("query")
# add message to database
conversation = await get_the_only_conversation()
print(conversation)
await add_message_to_conversation(
conversation=conversation, message=query, speaker="user"
)
response = consult_simba_oracle(query)
await add_message_to_conversation(
conversation=conversation, message=response, speaker="simba"
)
return jsonify({"response": response})
@app.route("/api/messages", methods=["GET"])
async def get_messages():
conversation = await get_the_only_conversation()
# Prefetch related messages
await conversation.fetch_related("messages")
# Manually serialize the conversation with messages
messages = []
for msg in conversation.messages:
messages.append(
{
"id": str(msg.id),
"text": msg.text,
"speaker": msg.speaker.value,
"created_at": msg.created_at.isoformat(),
}
)
return jsonify(
{
"id": str(conversation.id),
"name": conversation.name,
"messages": messages,
"created_at": conversation.created_at.isoformat(),
"updated_at": conversation.updated_at.isoformat(),
}
)
# @app.route("/api/ingest", methods=["POST"])
# def webhook():
# data = request.get_json()
# print(data)
# return jsonify({"status": "received"})
if __name__ == "__main__":
app.run(host="0.0.0.0", port=8080, debug=True)
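
A quick smoke test for the two endpoints defined above, assuming the app is running locally on port 8080; the URL and the question are placeholders, and httpx is already a listed project dependency:

```python
import httpx

# POST a question: app.py stores it, consults the oracle, then stores and returns the reply
reply = httpx.post(
    "http://localhost:8080/api/query",
    json={"query": "How much does Simba weigh?"},
    timeout=60.0,
)
print(reply.json()["response"])

# GET the transcript of the single shared conversation
history = httpx.get("http://localhost:8080/api/messages").json()
for message in history["messages"]:
    print(f"{message['speaker']}: {message['text']}")
```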

View File

@@ -0,0 +1,17 @@
from quart import Blueprint, jsonify
from .models import (
Conversation,
PydConversation,
)
conversation_blueprint = Blueprint(
"conversation_api", __name__, url_prefix="/api/conversation"
)
@conversation_blueprint.route("/<conversation_id>")
async def get_conversation(conversation_id: str):
conversation = await Conversation.get(id=conversation_id)
serialized_conversation = await PydConversation.from_tortoise_orm(conversation)
return jsonify(serialized_conversation.model_dump_json())

View File

@@ -1,9 +1,5 @@
-import tortoise.exceptions
 from .models import Conversation, ConversationMessage
-import blueprints.users.models


 async def create_conversation(name: str = "") -> Conversation:
     conversation = await Conversation.create(name=name)
@@ -14,7 +10,6 @@ async def add_message_to_conversation(
     conversation: Conversation,
     message: str,
     speaker: str,
-    user: blueprints.users.models.User,
 ) -> ConversationMessage:
     print(conversation, message, speaker)
     message = await ConversationMessage.create(
@@ -35,26 +30,3 @@ async def get_the_only_conversation() -> Conversation:
         conversation = await Conversation.create(name="simba_chat")
         return conversation
-
-
-async def get_conversation_for_user(user: blueprints.users.models.User) -> Conversation:
-    try:
-        return await Conversation.get(user=user)
-    except tortoise.exceptions.DoesNotExist:
-        await Conversation.get_or_create(name=f"{user.username}'s chat", user=user)
-        return await Conversation.get(user=user)
-
-
-async def get_conversation_by_id(id: str) -> Conversation:
-    return await Conversation.get(id=id)
-
-
-async def get_conversation_transcript(
-    user: blueprints.users.models.User, conversation: Conversation
-) -> str:
-    messages = []
-    for message in conversation.messages:
-        messages.append(f"{message.speaker} at {message.created_at}: {message.text}")
-
-    return "\n".join(messages)

View File

@@ -18,9 +18,6 @@ class Conversation(Model):
     name = fields.CharField(max_length=255)
     created_at = fields.DatetimeField(auto_now_add=True)
     updated_at = fields.DatetimeField(auto_now=True)
-    user: fields.ForeignKeyRelation = fields.ForeignKeyField(
-        "models.User", related_name="conversations", null=True
-    )

     class Meta:
         table = "conversations"
@@ -40,15 +37,5 @@ class ConversationMessage(Model):
 PydConversationMessage = pydantic_model_creator(ConversationMessage)
-PydConversation = pydantic_model_creator(
-    Conversation, name="Conversation", allow_cycles=True, exclude=("user",)
-)
-PydConversationWithMessages = pydantic_model_creator(
-    Conversation,
-    name="ConversationWithMessages",
-    allow_cycles=True,
-    exclude=("user",),
-    include=("messages",),
-)
-PydListConversation = pydantic_queryset_creator(Conversation)
+PydConversation = pydantic_model_creator(Conversation, name="Conversation")
 PydListConversationMessage = pydantic_queryset_creator(ConversationMessage)

View File

@@ -14,7 +14,7 @@ from llm import LLMClient
 load_dotenv()

 ollama_client = Client(
-    host=os.getenv("OLLAMA_HOST", "http://localhost:11434"), timeout=1.0
+    host=os.getenv("OLLAMA_HOST", "http://localhost:11434"), timeout=10.0
 )

View File

@@ -2,9 +2,6 @@ version: "3.8"
 services:
   raggr:
-    build:
-      context: ./services/raggr
-      dockerfile: Dockerfile
     image: torrtle/simbarag:latest
     network_mode: host
     environment:
@@ -15,8 +12,6 @@ services:
      - OPENAI_API_KEY=${OPENAI_API_KEY}
     volumes:
      - chromadb_data:/app/chromadb
-     - database_data:/app/database

 volumes:
   chromadb_data:
-  database_data:

View File

@@ -27,7 +27,7 @@ headers = {"x-api-key": API_KEY, "Content-Type": "application/json"}
 VISITED = {}

 if __name__ == "__main__":
-    conn = sqlite3.connect("./database/visited.db")
+    conn = sqlite3.connect("./visited.db")
     c = conn.cursor()
     c.execute("select immich_id from visited")
     rows = c.fetchall()

llm.py (new file, 64 lines)
View File

@@ -0,0 +1,64 @@
import os
from ollama import Client
from openai import OpenAI
import logging
logging.basicConfig(level=logging.INFO)
class LLMClient:
def __init__(self):
try:
self.ollama_client = Client(
host=os.getenv("OLLAMA_URL", "http://localhost:11434"), timeout=10.0
)
self.ollama_client.chat(
model="gemma3:4b", messages=[{"role": "system", "content": "test"}]
)
self.PROVIDER = "ollama"
logging.info("Using Ollama as LLM backend")
except Exception as e:
print(e)
self.openai_client = OpenAI()
self.PROVIDER = "openai"
logging.info("Using OpenAI as LLM backend")
def chat(
self,
prompt: str,
system_prompt: str,
):
if self.PROVIDER == "ollama":
response = self.ollama_client.chat(
model="gemma3:4b",
messages=[
{
"role": "system",
"content": system_prompt,
},
{"role": "user", "content": prompt},
],
)
print(response)
output = response.message.content
elif self.PROVIDER == "openai":
response = self.openai_client.responses.create(
model="gpt-4o-mini",
input=[
{
"role": "system",
"content": system_prompt,
},
{"role": "user", "content": prompt},
],
)
output = response.output_text
return output
if __name__ == "__main__":
client = Client()
client.chat(model="gemma3:4b", messages=[{"role": "system", "promp": "hack"}])
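
A minimal usage sketch of the client above: the constructor probes Ollama once and falls back to OpenAI when that probe fails, so callers only deal with chat(). The prompt text here is made up:

```python
from llm import LLMClient

client = LLMClient()  # probes Ollama; falls back to OpenAI if unreachable
answer = client.chat(
    prompt="Summarize Simba's most recent vet visit.",
    system_prompt="You are a helpful assistant that understands veterinary terms.",
)
print(client.PROVIDER, answer)
```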

View File

@@ -7,8 +7,6 @@ import argparse
 import chromadb
 import ollama
-import time
-
 from request import PaperlessNGXService
 from chunker import Chunker
@@ -38,7 +36,6 @@ parser.add_argument("query", type=str, help="questions about simba's health")
 parser.add_argument(
     "--reindex", action="store_true", help="re-index the simba documents"
 )
-parser.add_argument("--classify", action="store_true", help="test classification")
 parser.add_argument("--index", help="index a file")

 ppngx = PaperlessNGXService()
@@ -80,7 +77,7 @@ def chunk_data(docs, collection, doctypes):
     logging.info(f"chunking {len(docs)} documents")
     texts: list[str] = [doc["content"] for doc in docs]

-    with sqlite3.connect("database/visited.db") as conn:
+    with sqlite3.connect("visited.db") as conn:
         to_insert = []
         c = conn.cursor()
         for index, text in enumerate(texts):
@@ -116,22 +113,9 @@ def chunk_text(texts: list[str], collection):
     )


-def classify_query(query: str, transcript: str) -> bool:
-    logging.info("Starting query generation")
-    qg_start = time.time()
-    qg = QueryGenerator()
-    query_type = qg.get_query_type(input=query, transcript=transcript)
-    logging.info(query_type)
-    qg_end = time.time()
-    logging.info(f"Query generation took {qg_end - qg_start:.2f} seconds")
-    return query_type == "Simba"
-
-
-def consult_oracle(
-    input: str,
-    collection,
-    transcript: str = "",
-):
+def consult_oracle(input: str, collection):
+    import time
+
     chunker = Chunker(collection)
     start_time = time.time()
@@ -169,10 +153,7 @@ def consult_oracle(
     logging.info("Starting LLM generation")
     llm_start = time.time()
     system_prompt = "You are a helpful assistant that understands veterinary terms."
-    transcript_prompt = f"Here is the message transcript thus far {transcript}."
-    prompt = f"""Using the following data, help answer the user's query by providing as many details as possible.
-    Using this data: {results}. {transcript_prompt if len(transcript) > 0 else ""}
-    Respond to this prompt: {input}"""
+    prompt = f"Using the following data, help answer the user's query by providing as many details as possible. Using this data: {results}. Respond to this prompt: {input}"
     output = llm_client.chat(prompt=prompt, system_prompt=system_prompt)
     llm_end = time.time()
     logging.info(f"LLM generation took {llm_end - llm_start:.2f} seconds")
@@ -183,16 +164,6 @@ def consult_oracle(
     return output


-def llm_chat(input: str, transcript: str = "") -> str:
-    system_prompt = "You are a helpful assistant that understands veterinary terms."
-    transcript_prompt = f"Here is the message transcript thus far {transcript}."
-    prompt = f"""Answer the user in as if you were a cat named Simba. Don't act too catlike. Be assertive.
-    {transcript_prompt if len(transcript) > 0 else ""}
-    Respond to this prompt: {input}"""
-    output = llm_client.chat(prompt=prompt, system_prompt=system_prompt)
-    return output
-
-
 def paperless_workflow(input):
     # Step 1: Get the text
     ppngx = PaperlessNGXService()
@@ -202,24 +173,15 @@ def paperless_workflow(input):
     consult_oracle(input, simba_docs)


-def consult_simba_oracle(input: str, transcript: str = ""):
-    is_simba_related = classify_query(query=input, transcript=transcript)
-
-    if is_simba_related:
-        logging.info("Query is related to simba")
-        return consult_oracle(
-            input=input,
-            collection=simba_docs,
-            transcript=transcript,
-        )
-    logging.info("Query is NOT related to simba")
-    return llm_chat(input=input, transcript=transcript)
+def consult_simba_oracle(input: str):
+    return consult_oracle(
+        input=input,
+        collection=simba_docs,
+    )


 def filter_indexed_files(docs):
-    with sqlite3.connect("database/visited.db") as conn:
+    with sqlite3.connect("visited.db") as conn:
         c = conn.cursor()
         c.execute(
             "CREATE TABLE IF NOT EXISTS indexed_documents (id INTEGER PRIMARY KEY AUTOINCREMENT, paperless_id INTEGER)"
@@ -232,45 +194,42 @@ def filter_indexed_files(docs):
     return [doc for doc in docs if doc["id"] not in visited]


-def reindex():
-    with sqlite3.connect("database/visited.db") as conn:
-        c = conn.cursor()
-        c.execute("DELETE FROM indexed_documents")
-        conn.commit()
-
-    # Delete all documents from the collection
-    all_docs = simba_docs.get()
-    if all_docs["ids"]:
-        simba_docs.delete(ids=all_docs["ids"])
-
-    logging.info("Fetching documents from Paperless-NGX")
-    ppngx = PaperlessNGXService()
-    docs = ppngx.get_data()
-    docs = filter_indexed_files(docs)
-    logging.info(f"Fetched {len(docs)} documents")
-
-    # Delete all chromadb data
-    ids = simba_docs.get(ids=None, limit=None, offset=0)
-    all_ids = ids["ids"]
-    if len(all_ids) > 0:
-        simba_docs.delete(ids=all_ids)
-
-    # Chunk documents
-    logging.info("Chunking documents now ...")
-    doctype_lookup = ppngx.get_doctypes()
-    chunk_data(docs, collection=simba_docs, doctypes=doctype_lookup)
-    logging.info("Done chunking documents")
-
-
 if __name__ == "__main__":
     args = parser.parse_args()

     if args.reindex:
-        reindex()
-
-    if args.classify:
-        consult_simba_oracle(input="yohohoho testing")
-        consult_simba_oracle(input="write an email")
-        consult_simba_oracle(input="how much does simba weigh")
+        with sqlite3.connect("./visited.db") as conn:
+            c = conn.cursor()
+            c.execute("DELETE FROM indexed_documents")
+
+        logging.info("Fetching documents from Paperless-NGX")
+        ppngx = PaperlessNGXService()
+        docs = ppngx.get_data()
+        docs = filter_indexed_files(docs)
+        logging.info(f"Fetched {len(docs)} documents")
+
+        # Delete all chromadb data
+        ids = simba_docs.get(ids=None, limit=None, offset=0)
+        all_ids = ids["ids"]
+        if len(all_ids) > 0:
+            simba_docs.delete(ids=all_ids)
+
+        # Chunk documents
+        logging.info("Chunking documents now ...")
+        tag_lookup = ppngx.get_tags()
+        doctype_lookup = ppngx.get_doctypes()
+        chunk_data(docs, collection=simba_docs, doctypes=doctype_lookup)
+        logging.info("Done chunking documents")
+
+    # if args.index:
+    #     with open(args.index) as file:
+    #         extension = args.index.split(".")[-1]
+    #         if extension == "pdf":
+    #             pdf_path = ppngx.download_pdf_from_id(id=document_id)
+    #             image_paths = pdf_to_image(filepath=pdf_path)
+    #             print(f"summarizing {file}")
+    #             generated_summary = summarize_pdf_image(filepaths=image_paths)
+    #         elif extension in [".md", ".txt"]:
+    #             chunk_text(texts=[file.readall()], collection=simba_docs)

     if args.query:
         logging.info("Consulting oracle ...")

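With the classification and transcript paths removed, consult_simba_oracle is now a thin wrapper that always retrieves from the simba_docs collection, which is how the new app.py above calls it. A sketch of that call, with a made-up question:

```python
from main import consult_simba_oracle

# single-argument entry point used by app.py after this change
print(consult_simba_oracle(input="When was Simba's last rabies shot?"))
```
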
pyproject.toml (new file, 27 lines)
View File

@@ -0,0 +1,27 @@
[project]
name = "raggr"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"chromadb>=1.1.0",
"python-dotenv>=1.0.0",
"flask>=3.1.2",
"httpx>=0.28.1",
"ollama>=0.6.0",
"openai>=2.0.1",
"pydantic>=2.11.9",
"pillow>=10.0.0",
"pymupdf>=1.24.0",
"black>=25.9.0",
"pillow-heif>=1.1.1",
"flask-jwt-extended>=4.7.1",
"bcrypt>=5.0.0",
"pony>=0.7.19",
"flask-login>=0.6.3",
"quart>=0.20.0",
"tortoise-orm>=0.25.1",
"quart-jwt-extended>=0.1.0",
"pre-commit>=4.3.0",
]

View File

@@ -49,20 +49,11 @@ DOCTYPE_OPTIONS = [
     "Letter",
 ]

-QUERY_TYPE_OPTIONS = [
-    "Simba",
-    "Other",
-]


 class DocumentType(BaseModel):
     type: list[str] = Field(description="type of document", enum=DOCTYPE_OPTIONS)


-class QueryType(BaseModel):
-    type: str = Field(desciption="type of query", enum=QUERY_TYPE_OPTIONS)
-
-
 PROMPT = """
 You are an information specialist that processes user queries. The current year is 2025. The user queries are all about
 a cat, Simba, and its records. The types of records are listed below. Using the query, extract the
@@ -120,27 +111,6 @@ Query: "Who does Simba know?"
 Tags: ["Letter", "Documentation"]
 """

-QUERY_TYPE_PROMPT = f"""You are an information specialist that processes user queries.
-A query can have one tag attached from the following options. Based on the query and the transcript which is listed below, determine
-which of the following options is most appropriate: {",".join(QUERY_TYPE_OPTIONS)}
-
-### Example 1
-Query: "Who is Simba's current vet?"
-Tags: ["Simba"]
-
-### Example 2
-Query: "What is the capital of Tokyo?"
-Tags: ["Other"]
-
-### Example 3
-Query: "Can you help me write an email?"
-Tags: ["Other"]
-
-TRANSCRIPT:
-"""


 class QueryGenerator:
     def __init__(self) -> None:
@@ -184,33 +154,6 @@ class QueryGenerator:
         metadata_query = {"document_type": {"$in": type_data["type"]}}
         return metadata_query

-    def get_query_type(self, input: str, transcript: str):
-        client = OpenAI()
-        response = client.chat.completions.create(
-            messages=[
-                {
-                    "role": "system",
-                    "content": "You are an information specialist that is really good at deciding what tags a query should have",
-                },
-                {
-                    "role": "user",
-                    "content": f"{QUERY_TYPE_PROMPT}\nTRANSCRIPT:\n{transcript}\nQUERY:{input}",
-                },
-            ],
-            model="gpt-4o",
-            response_format={
-                "type": "json_schema",
-                "json_schema": {
-                    "name": "query_type",
-                    "schema": QueryType.model_json_schema(),
-                },
-            },
-        )
-        response_json_str = response.choices[0].message.content
-        type_data = json.loads(response_json_str)
-        return type_data["type"]
-
     def get_query(self, input: str):
         client = OpenAI()
         response = client.responses.parse(

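What remains of QueryGenerator builds a ChromaDB metadata filter from the extracted document types. A hedged sketch of how a filter of that shape is applied as a where clause; the collection name, query text, and document types here are invented for illustration:

```python
import chromadb

# stand-in collection; in the project the collection is created in main.py
client = chromadb.Client()
collection = client.get_or_create_collection("simba_docs")

# same shape as the metadata_query built by QueryGenerator.get_query()
metadata_query = {"document_type": {"$in": ["Test Results", "Invoice"]}}
results = collection.query(
    query_texts=["latest bloodwork results"],
    n_results=5,
    where=metadata_query,
)
print(results["documents"])
```
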
View File

@@ -6,18 +6,14 @@
   "scripts": {
     "build": "rsbuild build",
     "dev": "rsbuild dev --open",
-    "preview": "rsbuild preview",
-    "watch": "npm-watch build",
-    "watch:build": "rsbuild build --watch"
+    "preview": "rsbuild preview"
   },
   "dependencies": {
     "axios": "^1.12.2",
     "marked": "^16.3.0",
-    "npm-watch": "^0.13.0",
     "react": "^19.1.1",
     "react-dom": "^19.1.1",
-    "react-markdown": "^10.1.0",
-    "watch": "^1.0.2"
+    "react-markdown": "^10.1.0"
   },
   "devDependencies": {
     "@rsbuild/core": "^1.5.6",
@@ -26,16 +22,5 @@
     "@types/react": "^19.1.13",
     "@types/react-dom": "^19.1.9",
     "typescript": "^5.9.2"
-  },
-  "watch": {
-    "build": {
-      "patterns": [
-        "src"
-      ],
-      "extensions": "ts,tsx,css,js,jsx",
-      "delay": 1000,
-      "quiet": false,
-      "inherit": true
-    }
   }
 }

View File

@@ -3,8 +3,4 @@ import { pluginReact } from '@rsbuild/plugin-react';

 export default defineConfig({
   plugins: [pluginReact()],
-  html: {
-    title: 'Raggr',
-    favicon: './src/assets/favicon.svg',
-  },
 });

Binary file not shown.

raggr-frontend/src/App.tsx (new file, 204 lines)
View File

@@ -0,0 +1,204 @@
import { useEffect, useState } from "react";
import axios from "axios";
import ReactMarkdown from "react-markdown";
import "./App.css";
type QuestionAnswer = {
question: string;
answer: string;
};
type QuestionBubbleProps = {
text: string;
};
type AnswerBubbleProps = {
text: string;
loading: string;
};
type QuestionAnswerPairProps = {
question: string;
answer: string;
loading: boolean;
};
type Conversation = {
title: string;
id: string;
};
type Message = {
text: string;
speaker: "simba" | "user";
};
type ConversationMenuProps = {
conversations: Conversation[];
};
const ConversationMenu = ({ conversations }: ConversationMenuProps) => {
return (
<div className="absolute bg-white w-md rounded-md shadow-xl m-4 p-4">
<p className="py-2 px-4 rounded-md w-full text-xl font-bold">askSimba!</p>
{conversations.map((conversation) => (
<p className="py-2 px-4 rounded-md hover:bg-stone-200 w-full text-xl font-bold cursor-pointer">
{conversation.title}
</p>
))}
</div>
);
};
const QuestionBubble = ({ text }: QuestionBubbleProps) => {
return <div className="rounded-md bg-stone-200 p-3">🤦: {text}</div>;
};
const AnswerBubble = ({ text, loading }: AnswerBubbleProps) => {
return (
<div className="rounded-md bg-orange-100 p-3">
{loading ? (
<div className="flex flex-col w-full animate-pulse gap-2">
<div className="flex flex-row gap-2 w-full">
<div className="bg-gray-400 w-1/2 p-3 rounded-lg" />
<div className="bg-gray-400 w-1/2 p-3 rounded-lg" />
</div>
<div className="flex flex-row gap-2 w-full">
<div className="bg-gray-400 w-1/3 p-3 rounded-lg" />
<div className="bg-gray-400 w-2/3 p-3 rounded-lg" />
</div>
</div>
) : (
<div className="flex flex-col">
<ReactMarkdown>{"🐈: " + text}</ReactMarkdown>
</div>
)}
</div>
);
};
const QuestionAnswerPair = ({
question,
answer,
loading,
}: QuestionAnswerPairProps) => {
return (
<div className="flex flex-col gap-4">
<QuestionBubble text={question} />
<AnswerBubble text={answer} loading={loading} />
</div>
);
};
const App = () => {
const [query, setQuery] = useState<string>("");
const [answer, setAnswer] = useState<string>("");
const [simbaMode, setSimbaMode] = useState<boolean>(false);
const [questionsAnswers, setQuestionsAnswers] = useState<QuestionAnswer[]>(
[],
);
const [messages, setMessages] = useState<Message[]>([]);
const [conversations, setConversations] = useState<Conversation[]>([
{ title: "simba meow meow", id: "uuid" },
]);
const simbaAnswers = ["meow.", "hiss...", "purrrrrr", "yowOWROWWowowr"];
useEffect(() => {
axios.get("/api/messages").then((result) => {
setMessages(
result.data.messages.map((message) => {
return {
text: message.text,
speaker: message.speaker,
};
}),
);
});
}, []);
const handleQuestionSubmit = () => {
let currMessages = messages.concat([{ text: query, speaker: "user" }]);
setMessages(currMessages);
if (simbaMode) {
console.log("simba mode activated");
const randomIndex = Math.floor(Math.random() * simbaAnswers.length);
const randomElement = simbaAnswers[randomIndex];
setAnswer(randomElement);
setQuestionsAnswers(
questionsAnswers.concat([
{
question: query,
answer: randomElement,
},
]),
);
return;
}
const payload = { query: query };
axios.post("/api/query", payload).then((result) => {
setQuestionsAnswers(
questionsAnswers.concat([
{ question: query, answer: result.data.response },
]),
);
setMessages(
currMessages.concat([{ text: result.data.response, speaker: "simba" }]),
);
});
};
const handleQueryChange = (event) => {
setQuery(event.target.value);
};
return (
<div className="h-screen bg-opacity-20">
<div className="bg-white/85 h-screen">
<div className="flex flex-row justify-center py-4">
<div className="flex flex-col gap-4 min-w-xl max-w-xl">
<header className="flex flex-row justify-center gap-2 grow sticky top-0 z-10 bg-white">
<h1 className="text-3xl">ask simba!</h1>
</header>
{/*{questionsAnswers.map((qa) => (
<QuestionAnswerPair question={qa.question} answer={qa.answer} />
))}*/}
{messages.map((msg) => {
if (msg.speaker == "simba") {
return <AnswerBubble text={msg.text} loading="" />;
}
return <QuestionBubble text={msg.text} />;
})}
<footer className="flex flex-col gap-2 sticky bottom-0">
<div className="flex flex-row justify-between gap-2 grow">
<textarea
type="text"
className="p-4 border border-blue-200 rounded-md grow bg-white"
onChange={handleQueryChange}
/>
</div>
<div className="flex flex-row justify-between gap-2 grow">
<button
className="p-4 border border-blue-400 bg-blue-200 hover:bg-blue-400 cursor-pointer rounded-md flex-grow"
onClick={() => handleQuestionSubmit()}
type="submit"
>
Submit
</button>
</div>
<div className="flex flex-row justify-center gap-2 grow">
<input
type="checkbox"
onChange={(event) => setSimbaMode(event.target.checked)}
/>
<p>simba mode?</p>
</div>
</footer>
</div>
</div>
</div>
</div>
);
};
export default App;

View File

Binary image file changed (3.4 MiB before, 3.4 MiB after).

View File

Binary image file changed (2.1 MiB before, 2.1 MiB after).

View File

@@ -1,54 +0,0 @@
# Database Migrations with Aerich
## Initial Setup (Run Once)
1. Install dependencies:
```bash
uv pip install -e .
```
2. Initialize Aerich:
```bash
aerich init-db
```
This will:
- Create a `migrations/` directory
- Generate the initial migration based on your models
- Create all tables in the database
## When You Add/Change Models
1. Generate a new migration:
```bash
aerich migrate --name "describe_your_changes"
```
Example:
```bash
aerich migrate --name "add_user_profile_model"
```
2. Apply the migration:
```bash
aerich upgrade
```
## Common Commands
- `aerich init-db` - Initialize database (first time only)
- `aerich migrate --name "description"` - Generate new migration
- `aerich upgrade` - Apply pending migrations
- `aerich downgrade` - Rollback last migration
- `aerich history` - Show migration history
- `aerich heads` - Show current migration heads
## Docker Setup
In Docker, migrations run automatically on container startup via the startup script.
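
The startup script itself is not part of this diff, so the following is only an assumed Python equivalent of that startup step, built from the same CLI commands documented above:

```python
import subprocess

# assumed stand-in for the startup.sh step described above (the real script is not shown)
subprocess.run(["aerich", "upgrade"], check=True)  # apply any pending migrations
subprocess.run(["python", "app.py"], check=True)   # then start the application
```
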
## Notes
- Migration files are stored in `migrations/models/`
- Always commit migration files to version control
- Don't modify migration files manually after they're created

View File

@@ -1,146 +0,0 @@
# GENERATED BY CLAUDE
import os
import sys
import uuid
import asyncio
from tortoise import Tortoise
from blueprints.users.models import User
from dotenv import load_dotenv
load_dotenv()
# Database configuration with environment variable support
DATABASE_PATH = os.getenv("DATABASE_PATH", "database/raggr.db")
DATABASE_URL = os.getenv("DATABASE_URL", f"sqlite://{DATABASE_PATH}")
print(DATABASE_URL)
async def add_user(username: str, email: str, password: str):
"""Add a new user to the database"""
await Tortoise.init(
db_url=DATABASE_URL,
modules={
"models": [
"blueprints.users.models",
"blueprints.conversation.models",
]
},
)
try:
# Check if user already exists
existing_user = await User.filter(email=email).first()
if existing_user:
print(f"Error: User with email '{email}' already exists!")
return False
existing_username = await User.filter(username=username).first()
if existing_username:
print(f"Error: Username '{username}' is already taken!")
return False
# Create new user
user = User(
id=uuid.uuid4(),
username=username,
email=email,
)
user.set_password(password)
await user.save()
print("✓ User created successfully!")
print(f" Username: {username}")
print(f" Email: {email}")
print(f" ID: {user.id}")
return True
except Exception as e:
print(f"Error creating user: {e}")
return False
finally:
await Tortoise.close_connections()
async def list_users():
"""List all users in the database"""
await Tortoise.init(
db_url=DATABASE_URL,
modules={
"models": [
"blueprints.users.models",
"blueprints.conversation.models",
]
},
)
try:
users = await User.all()
if not users:
print("No users found in database.")
return
print(f"\nFound {len(users)} user(s):")
print("-" * 60)
for user in users:
print(f"Username: {user.username}")
print(f"Email: {user.email}")
print(f"ID: {user.id}")
print(f"Created: {user.created_at}")
print("-" * 60)
except Exception as e:
print(f"Error listing users: {e}")
finally:
await Tortoise.close_connections()
def print_usage():
"""Print usage instructions"""
print("Usage:")
print(" python add_user.py add <username> <email> <password>")
print(" python add_user.py list")
print("\nExamples:")
print(" python add_user.py add ryan ryan@example.com mypassword123")
print(" python add_user.py list")
print("\nEnvironment Variables:")
print(" DATABASE_PATH - Path to database file (default: database/raggr.db)")
print(" DATABASE_URL - Full database URL (overrides DATABASE_PATH)")
print("\n Example with custom database:")
print(" DATABASE_PATH=dev.db python add_user.py list")
async def main():
if len(sys.argv) < 2:
print_usage()
sys.exit(1)
command = sys.argv[1].lower()
if command == "add":
if len(sys.argv) != 5:
print("Error: Missing arguments for 'add' command")
print_usage()
sys.exit(1)
username = sys.argv[2]
email = sys.argv[3]
password = sys.argv[4]
success = await add_user(username, email, password)
sys.exit(0 if success else 1)
elif command == "list":
await list_users()
sys.exit(0)
else:
print(f"Error: Unknown command '{command}'")
print_usage()
sys.exit(1)
if __name__ == "__main__":
asyncio.run(main())

View File

@@ -1,20 +0,0 @@
import os
# Database configuration with environment variable support
# Use DATABASE_PATH for relative paths or DATABASE_URL for full connection strings
DATABASE_PATH = os.getenv("DATABASE_PATH", "database/raggr.db")
DATABASE_URL = os.getenv("DATABASE_URL", f"sqlite://{DATABASE_PATH}")
TORTOISE_ORM = {
"connections": {"default": DATABASE_URL},
"apps": {
"models": {
"models": [
"blueprints.conversation.models",
"blueprints.users.models",
"aerich.models",
],
"default_connection": "default",
},
},
}

View File

@@ -1,135 +0,0 @@
import os
from quart import Quart, jsonify, render_template, request, send_from_directory
from quart_jwt_extended import JWTManager, get_jwt_identity, jwt_refresh_token_required
from tortoise.contrib.quart import register_tortoise
import blueprints.conversation
import blueprints.conversation.logic
import blueprints.users
import blueprints.users.models
from main import consult_simba_oracle
app = Quart(
__name__,
static_folder="raggr-frontend/dist/static",
template_folder="raggr-frontend/dist",
)
app.config["JWT_SECRET_KEY"] = os.getenv("JWT_SECRET_KEY", "SECRET_KEY")
jwt = JWTManager(app)
# Register blueprints
app.register_blueprint(blueprints.users.user_blueprint)
app.register_blueprint(blueprints.conversation.conversation_blueprint)
# Database configuration with environment variable support
DATABASE_PATH = os.getenv("DATABASE_PATH", "database/raggr.db")
TORTOISE_CONFIG = {
"connections": {"default": f"sqlite://{DATABASE_PATH}"},
"apps": {
"models": {
"models": [
"blueprints.conversation.models",
"blueprints.users.models",
"aerich.models",
]
},
},
}
# Initialize Tortoise ORM
register_tortoise(
app,
config=TORTOISE_CONFIG,
generate_schemas=False, # Disabled - using Aerich for migrations
)
# Serve React static files
@app.route("/static/<path:filename>")
async def static_files(filename):
return await send_from_directory(app.static_folder, filename)
# Serve the React app for all routes (catch-all)
@app.route("/", defaults={"path": ""})
@app.route("/<path:path>")
async def serve_react_app(path):
if path and os.path.exists(os.path.join(app.template_folder, path)):
return await send_from_directory(app.template_folder, path)
return await render_template("index.html")
@app.route("/api/query", methods=["POST"])
@jwt_refresh_token_required
async def query():
current_user_uuid = get_jwt_identity()
user = await blueprints.users.models.User.get(id=current_user_uuid)
data = await request.get_json()
query = data.get("query")
conversation_id = data.get("conversation_id")
conversation = await blueprints.conversation.logic.get_conversation_by_id(
conversation_id
)
await conversation.fetch_related("messages")
await blueprints.conversation.logic.add_message_to_conversation(
conversation=conversation,
message=query,
speaker="user",
user=user,
)
transcript = await blueprints.conversation.logic.get_conversation_transcript(
user=user, conversation=conversation
)
response = consult_simba_oracle(input=query, transcript=transcript)
await blueprints.conversation.logic.add_message_to_conversation(
conversation=conversation,
message=response,
speaker="simba",
user=user,
)
return jsonify({"response": response})
@app.route("/api/messages", methods=["GET"])
@jwt_refresh_token_required
async def get_messages():
current_user_uuid = get_jwt_identity()
user = await blueprints.users.models.User.get(id=current_user_uuid)
conversation = await blueprints.conversation.logic.get_conversation_for_user(
user=user
)
# Prefetch related messages
await conversation.fetch_related("messages")
# Manually serialize the conversation with messages
messages = []
for msg in conversation.messages:
messages.append(
{
"id": str(msg.id),
"text": msg.text,
"speaker": msg.speaker.value,
"created_at": msg.created_at.isoformat(),
}
)
return jsonify(
{
"id": str(conversation.id),
"name": conversation.name,
"messages": messages,
"created_at": conversation.created_at.isoformat(),
"updated_at": conversation.updated_at.isoformat(),
}
)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=8080, debug=True)

View File

@@ -1 +0,0 @@
# Blueprints package

View File

@@ -1,72 +0,0 @@
import datetime
from quart_jwt_extended import (
jwt_refresh_token_required,
get_jwt_identity,
)
from quart import Blueprint, jsonify
from .models import (
Conversation,
PydConversation,
PydListConversation,
)
import blueprints.users.models
conversation_blueprint = Blueprint(
"conversation_api", __name__, url_prefix="/api/conversation"
)
@conversation_blueprint.route("/<conversation_id>")
async def get_conversation(conversation_id: str):
conversation = await Conversation.get(id=conversation_id)
await conversation.fetch_related("messages")
# Manually serialize the conversation with messages
messages = []
for msg in conversation.messages:
messages.append(
{
"id": str(msg.id),
"text": msg.text,
"speaker": msg.speaker.value,
"created_at": msg.created_at.isoformat(),
}
)
return jsonify(
{
"id": str(conversation.id),
"name": conversation.name,
"messages": messages,
"created_at": conversation.created_at.isoformat(),
"updated_at": conversation.updated_at.isoformat(),
}
)
@conversation_blueprint.post("/")
@jwt_refresh_token_required
async def create_conversation():
user_uuid = get_jwt_identity()
user = await blueprints.users.models.User.get(id=user_uuid)
conversation = await Conversation.create(
name=f"{user.username} {datetime.datetime.now().timestamp}",
user=user,
)
serialized_conversation = await PydConversation.from_tortoise_orm(conversation)
return jsonify(serialized_conversation.model_dump())
@conversation_blueprint.get("/")
@jwt_refresh_token_required
async def get_all_conversations():
user_uuid = get_jwt_identity()
user = await blueprints.users.models.User.get(id=user_uuid)
conversations = Conversation.filter(user=user)
serialized_conversations = await PydListConversation.from_queryset(conversations)
return jsonify(serialized_conversations.model_dump())

View File

@@ -1,40 +0,0 @@
from quart import Blueprint, jsonify, request
from quart_jwt_extended import (
create_access_token,
create_refresh_token,
jwt_refresh_token_required,
get_jwt_identity,
)
from .models import User
user_blueprint = Blueprint("user_api", __name__, url_prefix="/api/user")
@user_blueprint.route("/login", methods=["POST"])
async def login():
data = await request.get_json()
username = data.get("username")
password = data.get("password")
user = await User.filter(username=username).first()
if not user or not user.verify_password(password):
return jsonify({"msg": "Invalid credentials"}), 401
access_token = create_access_token(identity=str(user.id))
refresh_token = create_refresh_token(identity=str(user.id))
return jsonify(
access_token=access_token,
refresh_token=refresh_token,
user={"id": user.id, "username": user.username},
)
@user_blueprint.route("/refresh", methods=["POST"])
@jwt_refresh_token_required
async def refresh():
user_id = get_jwt_identity()
new_token = create_access_token(identity=user_id)
return jsonify(access_token=new_token)

View File

@@ -1,26 +0,0 @@
from tortoise.models import Model
from tortoise import fields
import bcrypt
class User(Model):
id = fields.UUIDField(primary_key=True)
username = fields.CharField(max_length=255)
password = fields.BinaryField() # Hashed
email = fields.CharField(max_length=100, unique=True)
created_at = fields.DatetimeField(auto_now_add=True)
updated_at = fields.DatetimeField(auto_now=True)
class Meta:
table = "users"
def set_password(self, plain_password: str):
self.password = bcrypt.hashpw(
plain_password.encode("utf-8"),
bcrypt.gensalt(),
)
def verify_password(self, plain_password: str):
return bcrypt.checkpw(plain_password.encode("utf-8"), self.password)

View File

@@ -1,73 +0,0 @@
import os
from ollama import Client
from openai import OpenAI
import logging
from dotenv import load_dotenv
load_dotenv()
logging.basicConfig(level=logging.INFO)
TRY_OLLAMA = os.getenv("TRY_OLLAMA", False)
class LLMClient:
def __init__(self):
try:
self.ollama_client = Client(
host=os.getenv("OLLAMA_URL", "http://localhost:11434"), timeout=1.0
)
self.ollama_client.chat(
model="gemma3:4b", messages=[{"role": "system", "content": "test"}]
)
self.PROVIDER = "ollama"
logging.info("Using Ollama as LLM backend")
except Exception as e:
print(e)
self.openai_client = OpenAI()
self.PROVIDER = "openai"
logging.info("Using OpenAI as LLM backend")
def chat(
self,
prompt: str,
system_prompt: str,
):
# Instituting a fallback if my gaming PC is not on
if self.PROVIDER == "ollama":
try:
response = self.ollama_client.chat(
model="gemma3:4b",
messages=[
{
"role": "system",
"content": system_prompt,
},
{"role": "user", "content": prompt},
],
)
output = response.message.content
return output
except Exception as e:
logging.error(f"Could not connect to OLLAMA: {str(e)}")
response = self.openai_client.responses.create(
model="gpt-4o-mini",
input=[
{
"role": "system",
"content": system_prompt,
},
{"role": "user", "content": prompt},
],
)
output = response.output_text
return output
if __name__ == "__main__":
client = Client()
client.chat(model="gemma3:4b", messages=[{"role": "system", "promp": "hack"}])

View File

@@ -1,63 +0,0 @@
from tortoise import BaseDBAsyncClient
RUN_IN_TRANSACTION = True
async def upgrade(db: BaseDBAsyncClient) -> str:
return """
CREATE TABLE IF NOT EXISTS "conversations" (
"id" CHAR(36) NOT NULL PRIMARY KEY,
"name" VARCHAR(255) NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS "conversation_messages" (
"id" CHAR(36) NOT NULL PRIMARY KEY,
"text" TEXT NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"speaker" VARCHAR(10) NOT NULL /* USER: user\nSIMBA: simba */,
"conversation_id" CHAR(36) NOT NULL REFERENCES "conversations" ("id") ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS "users" (
"id" CHAR(36) NOT NULL PRIMARY KEY,
"username" VARCHAR(255) NOT NULL,
"password" BLOB NOT NULL,
"email" VARCHAR(100) NOT NULL UNIQUE,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS "aerich" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
"version" VARCHAR(255) NOT NULL,
"app" VARCHAR(100) NOT NULL,
"content" JSON NOT NULL
);"""
async def downgrade(db: BaseDBAsyncClient) -> str:
return """
"""
MODELS_STATE = (
"eJztmG1v4jgQx79KlFddaa9q2W53VZ1OCpTecrvACcLdPtwqMskAVhMnazvboorvfrbJE4"
"kJpWq3UPGmhRkPtn8ztv/2nRmEHvjsuBWSn0AZ4jgk5oVxZxIUgPig9b82TBRFuVcaOBr7"
"KsAttFQeNGacIpcL5wT5DITJA+ZSHCWdkdj3pTF0RUNMprkpJvhHDA4Pp8BnQIXj23dhxs"
"SDW2Dp1+jamWDwvZVxY0/2rewOn0fKNhp1Lq9US9nd2HFDPw5I3jqa81lIsuZxjL1jGSN9"
"UyBAEQevMA05ymTaqWk5YmHgNIZsqF5u8GCCYl/CMH+fxMSVDAzVk/xz9oe5BR6BWqLFhE"
"sWd4vlrPI5K6spu2p9sAZHb85fqVmGjE+pcioi5kIFIo6WoYprDlL9r6BszRDVo0zbl2CK"
"gT4EY2rIOeY1lIJMAT2MmhmgW8cHMuUz8bXx9m0Nxn+sgSIpWimUoajrZdX3Eldj6ZNIc4"
"QuBTllB/EqyEvh4TgAPczVyBJSLwk9Tj/sKGAxB69P/HmyCGr42p1ue2hb3b/lTALGfvgK"
"kWW3paehrPOS9ei8lIrsR4x/O/YHQ341vvZ77XLtZ+3sr6YcE4p56JDwxkFeYb2m1hTMSm"
"LjyHtgYlcjD4l91sSqwcuTZHJd2AKlYYzc6xtEPWfFUzgdgTE0BVZNfzOJvPo4AD87NkuJ"
"1hyu3eUv7mbGF2kZp9YivLARrqNXdQWNoGxBRMzbS/qWPdXQ2aBQChDvJ1ScYiIPgmWvBQ"
"uHW812bAurHmXafl8ES9022/5sr+ywqSw56lqfX63ssp/6vT/T5gUZ0/rUbx7Uy0s85Krq"
"hUWAroHqxX2bxIHKakfgQMSFSnYL4c+8dMzRsD24MGIG9D8y7HSb1oXBcDBG5gNuAKcn97"
"gAnJ6s1f/SVVpAxYNmu21eE/qYe/6zblYbtviKHtMDrdK8CingKfkI80r9bpZfO02xoruE"
"maKbTEzoykV8EJMEvlzY1rBlXbbNxXpt+5RKbsSUJKpIN2Wv1WpyaR+02f5rM5nHbR+Uij"
"H7otF+waNShBi7CammMpuYIDrXwyxGlWCO53x5/9k9nDX0mlKwFvWWYNbs9KzBF73mTdsX"
"C7f5xW5bJbwQIOxvU6ZZwOPU6OYl/5gVenpyP9VTJ3uquudwcXiZF4fDs+eLSOy2z55PKQ"
"0toNid6cRh4qmVhyhvszP6sEPWvDdp5aHU9KVqTxL2rIeEemr9rXF69u7s/Zvzs/eiiRpJ"
"ZnlXU/2dnr1BDsrLivYOt/6YLYQcxGAGUi6NLSAmzfcT4NNolZBwIJrz7K9hv7f2bSYNKY"
"EcETHBbx52+WvDx4x/302sNRTlrOsfkstvxqXDSP5AU/eK8yuPl8X/Etg7Fw=="
)

View File

@@ -1,60 +0,0 @@
from tortoise import BaseDBAsyncClient
RUN_IN_TRANSACTION = True
async def upgrade(db: BaseDBAsyncClient) -> str:
return """
-- SQLite doesn't support ADD CONSTRAINT, so we need to recreate the table
CREATE TABLE "conversations_new" (
"id" CHAR(36) NOT NULL PRIMARY KEY,
"name" VARCHAR(255) NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"user_id" CHAR(36),
FOREIGN KEY ("user_id") REFERENCES "users" ("id") ON DELETE CASCADE
);
INSERT INTO "conversations_new" ("id", "name", "created_at", "updated_at")
SELECT "id", "name", "created_at", "updated_at" FROM "conversations";
DROP TABLE "conversations";
ALTER TABLE "conversations_new" RENAME TO "conversations";"""
async def downgrade(db: BaseDBAsyncClient) -> str:
return """
-- Recreate table without user_id column
CREATE TABLE "conversations_new" (
"id" CHAR(36) NOT NULL PRIMARY KEY,
"name" VARCHAR(255) NOT NULL,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
INSERT INTO "conversations_new" ("id", "name", "created_at", "updated_at")
SELECT "id", "name", "created_at", "updated_at" FROM "conversations";
DROP TABLE "conversations";
ALTER TABLE "conversations_new" RENAME TO "conversations";"""
MODELS_STATE = (
"eJztmWtP2zAUhv9KlE8gbQg6xhCaJqWlbB20ndp0F9gUuYnbWiROiJ1Bhfjvs91cnMRNKe"
"PSon6B9vic2H5s57w+vdU934Eu2Wn4+C8MCaDIx/qRdqtj4EH2Qdn+RtNBEGSt3EDB0BUB"
"tuQpWsCQ0BDYlDWOgEsgMzmQ2CEK4s5w5Lrc6NvMEeFxZoowuoqgRf0xpBMYsoaLP8yMsA"
"NvIEm+BpfWCEHXyY0bObxvYbfoNBC2waB1fCI8eXdDy/bdyMOZdzClEx+n7lGEnB0ew9vG"
"EMMQUOhI0+CjjKedmGYjZgYaRjAdqpMZHDgCkcth6B9HEbY5A030xP/sf9KXwMNQc7QIU8"
"7i9m42q2zOwqrzrhpfjN7Wu4NtMUuf0HEoGgUR/U4EAgpmoYJrBlL8L6FsTECoRpn4F2Cy"
"gT4EY2LIOGZ7KAGZAHoYNd0DN5YL8ZhO2Nfa+/cVGL8bPUGSeQmUPtvXs13fiZtqszaONE"
"Noh5BP2QK0DPKYtVDkQTXMfGQBqROH7iQfVhQwm4PTxe40PgQVfM1Wu9k3jfY3PhOPkCtX"
"IDLMJm+pCeu0YN06KCxF+hDtR8v8ovGv2nm30yzu/dTPPNf5mEBEfQv71xZwpPOaWBMwuY"
"WNAueBC5uP3Czsiy5sPHhpXQkMreUyiBTyH2kkHtszLuLDkwZPvaNLZc7gMMrwTvwQojE+"
"hVOBsMXGAbCtShax6BjEj1lVaJk1G0UIrlM1Im8KNjs2J0hn2dPoN4zjpi4YDoF9eQ1Cx5"
"oD04OEgDEkZaD1OPLktAfdVJqpWcoCrj174mq+VeaxFaz8mi8xytErN3k1r2gBmM3bifvm"
"PVXQWaCCJYj3E8OWvJAbUbzWopjCG0XKN5lVjTLxXxdRXJXKmz/NXBZPpO9W2/i5ncvkZ9"
"3O58RdksqNs259o5Bfo5AqK2QSQHCpEgP8AtnEkVeSArnVlcJf+Ojog36zd6TxjP4b91vt"
"unGkEeQNgX6/Jc7dMvd273HJ3Nude8fkTYUDJCea5V7zitDHfOevqYS1CwWv/5SyxfrZyl"
"JcqGkV22VZbfuUSk7cGRTSLblLzNdq/GhvtNn6azO+jssWLeWYddFoz1C4DAAh136o2Jl1"
"hEE4VcOUowowh1M6u/+sHs4KenUuWGW9xZjVWx2j90uteRN/eePWf5lNo4AXegC5y2zTNO"
"Bx9ujiI/+YO3Rv936qp0r2lHXP5uLwOi8Om9L6q1jYtHJXEoCLyp6l35Efp/a5VvXkJ615"
"GjBE9kRXaOW4pVItg8xnZeRyC88pvynVMsdc2Azxyr9ozhSV57e1vf0P+4fvDvYPmYsYSW"
"r5UPEyaHXMBeqYHwTllXa+6pBCNto4BcmPxhIQY/f1BPg00s3HFGJFev/a73bmlqqSkALI"
"AWYTvHCQTd9oLiL0z2piraDIZ11dVy+W0Au5mT+gripqPWch5u4f/FVgYA=="
)

View File

@@ -1,12 +0,0 @@
[project]
name = "raggr"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = ["chromadb>=1.1.0", "python-dotenv>=1.0.0", "flask>=3.1.2", "httpx>=0.28.1", "ollama>=0.6.0", "openai>=2.0.1", "pydantic>=2.11.9", "pillow>=10.0.0", "pymupdf>=1.24.0", "black>=25.9.0", "pillow-heif>=1.1.1", "flask-jwt-extended>=4.7.1", "bcrypt>=5.0.0", "pony>=0.7.19", "flask-login>=0.6.3", "quart>=0.20.0", "tortoise-orm>=0.25.1", "quart-jwt-extended>=0.1.0", "pre-commit>=4.3.0", "tortoise-orm-stubs>=1.0.2", "aerich>=0.8.0", "tomlkit>=0.13.3"]
[tool.aerich]
tortoise_orm = "app.TORTOISE_CONFIG"
location = "./migrations"
src_folder = "./."

View File

@@ -1,63 +0,0 @@
# Token Refresh Implementation
## Overview
The API services now automatically handle token refresh when access tokens expire. This provides a seamless user experience without requiring manual re-authentication.
## How It Works
### 1. **userService.ts**
The `userService` now includes:
- **`refreshToken()`**: Automatically gets the refresh token from localStorage, calls the `/api/user/refresh` endpoint, and updates the access token
- **`fetchWithAuth()`**: A wrapper around `fetch()` that:
1. Automatically adds the Authorization header with the access token
2. Detects 401 (Unauthorized) responses
3. Automatically refreshes the token using the refresh token
4. Retries the original request with the new access token
5. Throws an error if refresh fails (e.g., refresh token expired)
### 2. **conversationService.ts**
Now uses `userService.fetchWithAuth()` for all API calls:
- `sendQuery()` - No longer needs token parameter
- `getMessages()` - No longer needs token parameter
### 3. **Components Updated**
**ChatScreen.tsx**:
- Removed manual token handling
- Now simply calls `conversationService.sendQuery(query)` and `conversationService.getMessages()`
## Benefits
**Automatic token refresh** - Users stay logged in longer
**Transparent retry logic** - Failed requests due to expired tokens are automatically retried
**Cleaner code** - Components don't need to manage tokens
**Better UX** - No interruptions when access token expires
**Centralized auth logic** - All auth handling in one place
## Error Handling
- If refresh token is missing or invalid, the error is thrown
- Components can catch these errors and redirect to login
- LocalStorage is automatically cleared when refresh fails
## Usage Example
```typescript
// Old way (manual token management)
const token = localStorage.getItem("access_token");
const result = await conversationService.sendQuery(query, token);
// New way (automatic token refresh)
const result = await conversationService.sendQuery(query);
```
## Token Storage
- **Access Token**: `localStorage.getItem("access_token")`
- **Refresh Token**: `localStorage.getItem("refresh_token")`
Both are automatically managed by the services.
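
The retry-on-401 flow described above lives in the removed TypeScript services, but the same idea can be sketched in Python with httpx for illustration; the base URL is assumed, and the refresh call follows the /api/user/refresh route shown elsewhere in this diff:

```python
import httpx

BASE = "http://localhost:8080"  # assumed local address


def fetch_with_auth(path: str, access_token: str, refresh_token: str) -> httpx.Response:
    """Retry-on-401 flow mirroring fetchWithAuth(), sketched for illustration only."""
    headers = {"Authorization": f"Bearer {access_token}"}
    response = httpx.get(f"{BASE}{path}", headers=headers)
    if response.status_code == 401:
        # access token expired: exchange the refresh token for a new access token
        refreshed = httpx.post(
            f"{BASE}/api/user/refresh",
            headers={"Authorization": f"Bearer {refresh_token}"},
        )
        refreshed.raise_for_status()
        new_access = refreshed.json()["access_token"]
        # retry the original request once with the new token
        response = httpx.get(f"{BASE}{path}", headers={"Authorization": f"Bearer {new_access}"})
    return response
```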

File diff suppressed because it is too large.

View File

@@ -1,72 +0,0 @@
import { useState, useEffect } from "react";
import "./App.css";
import { AuthProvider } from "./contexts/AuthContext";
import { ChatScreen } from "./components/ChatScreen";
import { LoginScreen } from "./components/LoginScreen";
import { conversationService } from "./api/conversationService";
const AppContainer = () => {
const [isAuthenticated, setAuthenticated] = useState<boolean>(false);
const [isChecking, setIsChecking] = useState<boolean>(true);
useEffect(() => {
const checkAuth = async () => {
const accessToken = localStorage.getItem("access_token");
const refreshToken = localStorage.getItem("refresh_token");
// No tokens at all, not authenticated
if (!accessToken && !refreshToken) {
setIsChecking(false);
setAuthenticated(false);
return;
}
// Try to verify token by making a request
try {
await conversationService.getAllConversations();
// If successful, user is authenticated
setAuthenticated(true);
} catch (error) {
// Token is invalid or expired
console.error("Authentication check failed:", error);
localStorage.removeItem("access_token");
localStorage.removeItem("refresh_token");
setAuthenticated(false);
} finally {
setIsChecking(false);
}
};
checkAuth();
}, []);
// Show loading state while checking authentication
if (isChecking) {
return (
<div className="h-screen flex items-center justify-center bg-white/85">
<div className="text-xl">Loading...</div>
</div>
);
}
return (
<>
{isAuthenticated ? (
<ChatScreen setAuthenticated={setAuthenticated} />
) : (
<LoginScreen setAuthenticated={setAuthenticated} />
)}
</>
);
};
const App = () => {
return (
<AuthProvider>
<AppContainer />
</AuthProvider>
);
};
export default App;

View File

@@ -1,115 +0,0 @@
import { userService } from "./userService";
interface Message {
id: string;
text: string;
speaker: "user" | "simba";
created_at: string;
}
interface Conversation {
id: string;
name: string;
messages?: Message[];
created_at: string;
updated_at: string;
user_id?: string;
}
interface QueryRequest {
query: string;
}
interface QueryResponse {
response: string;
}
interface CreateConversationRequest {
user_id: string;
}
class ConversationService {
private baseUrl = "/api";
private conversationBaseUrl = "/api/conversation";
async sendQuery(
query: string,
conversation_id: string,
): Promise<QueryResponse> {
const response = await userService.fetchWithRefreshToken(
`${this.baseUrl}/query`,
{
method: "POST",
body: JSON.stringify({ query, conversation_id }),
},
);
if (!response.ok) {
throw new Error("Failed to send query");
}
return await response.json();
}
async getMessages(): Promise<Conversation> {
const response = await userService.fetchWithRefreshToken(
`${this.baseUrl}/messages`,
{
method: "GET",
},
);
if (!response.ok) {
throw new Error("Failed to fetch messages");
}
return await response.json();
}
async getConversation(conversationId: string): Promise<Conversation> {
const response = await userService.fetchWithRefreshToken(
`${this.conversationBaseUrl}/${conversationId}`,
{
method: "GET",
},
);
if (!response.ok) {
throw new Error("Failed to fetch conversation");
}
return await response.json();
}
async createConversation(): Promise<Conversation> {
const response = await userService.fetchWithRefreshToken(
`${this.conversationBaseUrl}/`,
{
method: "POST",
},
);
if (!response.ok) {
throw new Error("Failed to create conversation");
}
return await response.json();
}
async getAllConversations(): Promise<Conversation[]> {
const response = await userService.fetchWithRefreshToken(
`${this.conversationBaseUrl}/`,
{
method: "GET",
},
);
if (!response.ok) {
throw new Error("Failed to fetch conversations");
}
return await response.json();
}
}
export const conversationService = new ConversationService();

View File

@@ -1,138 +0,0 @@
interface LoginResponse {
access_token: string;
refresh_token: string;
user: {
id: string;
username: string;
};
}
interface RefreshResponse {
access_token: string;
}
class UserService {
private baseUrl = "/api/user";
async login(username: string, password: string): Promise<LoginResponse> {
const response = await fetch(`${this.baseUrl}/login`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ username, password }),
});
if (!response.ok) {
throw new Error("Invalid credentials");
}
return await response.json();
}
async refreshToken(): Promise<string> {
const refreshToken = localStorage.getItem("refresh_token");
if (!refreshToken) {
throw new Error("No refresh token available");
}
const response = await fetch(`${this.baseUrl}/refresh`, {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${refreshToken}`,
},
});
if (!response.ok) {
// Refresh token is invalid or expired, clear storage
localStorage.removeItem("access_token");
localStorage.removeItem("refresh_token");
throw new Error("Failed to refresh token");
}
const data: RefreshResponse = await response.json();
localStorage.setItem("access_token", data.access_token);
return data.access_token;
}
async validateToken(): Promise<boolean> {
const refreshToken = localStorage.getItem("refresh_token");
if (!refreshToken) {
return false;
}
try {
await this.refreshToken();
return true;
} catch (error) {
return false;
}
}
async fetchWithAuth(
url: string,
options: RequestInit = {},
): Promise<Response> {
const accessToken = localStorage.getItem("access_token");
// Add authorization header
const headers = {
"Content-Type": "application/json",
...(options.headers || {}),
...(accessToken && { Authorization: `Bearer ${accessToken}` }),
};
let response = await fetch(url, { ...options, headers });
// If unauthorized, try refreshing the token
if (response.status === 401) {
try {
const newAccessToken = await this.refreshToken();
// Retry the request with new token
headers.Authorization = `Bearer ${newAccessToken}`;
response = await fetch(url, { ...options, headers });
} catch (error) {
// Refresh failed, redirect to login or throw error
throw new Error("Session expired. Please log in again.");
}
}
return response;
}
async fetchWithRefreshToken(
url: string,
options: RequestInit = {},
): Promise<Response> {
const refreshToken = localStorage.getItem("refresh_token");
// Add authorization header
const headers = {
"Content-Type": "application/json",
...(options.headers || {}),
...(refreshToken && { Authorization: `Bearer ${refreshToken}` }),
};
let response = await fetch(url, { ...options, headers });
// If unauthorized, try refreshing the token
if (response.status === 401) {
try {
const newAccessToken = await this.refreshToken();
// Retry the request with new token
headers.Authorization = `Bearer ${newAccessToken}`;
response = await fetch(url, { ...options, headers });
} catch (error) {
// Refresh failed, redirect to login or throw error
throw new Error("Session expired. Please log in again.");
}
}
return response;
}
}
export const userService = new UserService();

View File

@@ -1,3 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 100">
<text y="80" font-size="80" font-family="system-ui, -apple-system, sans-serif">🐱</text>
</svg>

Image file removed (was 163 B).

View File

@@ -1,31 +0,0 @@
import ReactMarkdown from "react-markdown";
type AnswerBubbleProps = {
text: string;
loading?: boolean;
};
export const AnswerBubble = ({ text, loading }: AnswerBubbleProps) => {
return (
<div className="rounded-md bg-orange-100 p-3 sm:p-4">
{loading ? (
<div className="flex flex-col w-full animate-pulse gap-2">
<div className="flex flex-row gap-2 w-full">
<div className="bg-gray-400 w-1/2 p-3 rounded-lg" />
<div className="bg-gray-400 w-1/2 p-3 rounded-lg" />
</div>
<div className="flex flex-row gap-2 w-full">
<div className="bg-gray-400 w-1/3 p-3 rounded-lg" />
<div className="bg-gray-400 w-2/3 p-3 rounded-lg" />
</div>
</div>
) : (
<div className="flex flex-col break-words overflow-wrap-anywhere">
<ReactMarkdown className="text-sm sm:text-base [&>*]:break-words">
{"🐈: " + text}
</ReactMarkdown>
</div>
)}
</div>
);
};

View File

@@ -1,252 +0,0 @@
import { useEffect, useState, useRef } from "react";
import { conversationService } from "../api/conversationService";
import { QuestionBubble } from "./QuestionBubble";
import { AnswerBubble } from "./AnswerBubble";
import { ConversationList } from "./ConversationList";
type Message = {
text: string;
speaker: "simba" | "user";
};
type QuestionAnswer = {
question: string;
answer: string;
};
type Conversation = {
title: string;
id: string;
};
type ChatScreenProps = {
setAuthenticated: (isAuth: boolean) => void;
};
export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
const [query, setQuery] = useState<string>("");
const [answer, setAnswer] = useState<string>("");
const [simbaMode, setSimbaMode] = useState<boolean>(false);
const [questionsAnswers, setQuestionsAnswers] = useState<QuestionAnswer[]>(
[],
);
const [messages, setMessages] = useState<Message[]>([]);
const [conversations, setConversations] = useState<Conversation[]>([
{ title: "simba meow meow", id: "uuid" },
]);
const [showConversations, setShowConversations] = useState<boolean>(false);
const [selectedConversation, setSelectedConversation] =
useState<Conversation | null>(null);
const messagesEndRef = useRef<HTMLDivElement>(null);
const simbaAnswers = ["meow.", "hiss...", "purrrrrr", "yowOWROWWowowr"];
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
};
const handleSelectConversation = (conversation: Conversation) => {
setShowConversations(false);
setSelectedConversation(conversation);
const loadMessages = async () => {
try {
const fetchedConversation = await conversationService.getConversation(
conversation.id,
);
setMessages(
fetchedConversation.messages.map((message) => ({
text: message.text,
speaker: message.speaker,
})),
);
} catch (error) {
console.error("Failed to load messages:", error);
}
};
loadMessages();
};
const loadConversations = async () => {
try {
const fetchedConversations =
await conversationService.getAllConversations();
const parsedConversations = fetchedConversations.map((conversation) => ({
id: conversation.id,
title: conversation.name,
}));
setConversations(parsedConversations);
setSelectedConversation(parsedConversations[0]);
console.log(parsedConversations);
} catch (error) {
console.error("Failed to load messages:", error);
}
};
const handleCreateNewConversation = async () => {
const newConversation = await conversationService.createConversation();
await loadConversations();
setSelectedConversation({
title: newConversation.name,
id: newConversation.id,
});
};
useEffect(() => {
loadConversations();
}, []);
useEffect(() => {
scrollToBottom();
}, [messages]);
useEffect(() => {
const loadMessages = async () => {
if (selectedConversation == null) return;
try {
const conversation = await conversationService.getConversation(
selectedConversation.id,
);
setMessages(
conversation.messages.map((message) => ({
text: message.text,
speaker: message.speaker,
})),
);
} catch (error) {
console.error("Failed to load messages:", error);
}
};
loadMessages();
}, [selectedConversation]);
const handleQuestionSubmit = async () => {
if (!query.trim()) return; // Don't submit empty messages
const currMessages = messages.concat([{ text: query, speaker: "user" }]);
setMessages(currMessages);
setQuery(""); // Clear input immediately after submission
if (simbaMode) {
console.log("simba mode activated");
const randomIndex = Math.floor(Math.random() * simbaAnswers.length);
const randomElement = simbaAnswers[randomIndex];
setAnswer(randomElement);
setQuestionsAnswers(
questionsAnswers.concat([
{
question: query,
answer: randomElement,
},
]),
);
return;
}
    if (!selectedConversation) return;
    try {
      const result = await conversationService.sendQuery(
        query,
        selectedConversation.id,
      );
setQuestionsAnswers(
questionsAnswers.concat([{ question: query, answer: result.response }]),
);
setMessages(
currMessages.concat([{ text: result.response, speaker: "simba" }]),
);
} catch (error) {
console.error("Failed to send query:", error);
// If session expired, redirect to login
if (error instanceof Error && error.message.includes("Session expired")) {
setAuthenticated(false);
}
}
};
const handleQueryChange = (event: React.ChangeEvent<HTMLTextAreaElement>) => {
setQuery(event.target.value);
};
const handleKeyDown = (event: React.KeyboardEvent<HTMLTextAreaElement>) => {
// Submit on Enter, but allow Shift+Enter for new line
if (event.key === "Enter" && !event.shiftKey) {
event.preventDefault();
handleQuestionSubmit();
}
};
return (
<div className="h-screen bg-opacity-20">
<div className="bg-white/85 h-screen">
<div className="flex flex-row justify-center py-4">
<div className="flex flex-col gap-4 w-full px-4 sm:w-11/12 sm:max-w-2xl lg:max-w-4xl sm:px-0">
<div className="flex flex-col sm:flex-row gap-3 sm:gap-0 sm:justify-between">
<header className="flex flex-row justify-center gap-2 sticky top-0 z-10 bg-white">
<h1 className="text-2xl sm:text-3xl">ask simba!</h1>
</header>
<div className="flex flex-row gap-2 justify-center sm:justify-end">
<button
className="p-2 h-11 border border-green-400 bg-green-200 hover:bg-green-400 cursor-pointer rounded-md text-sm sm:text-base"
onClick={() => setShowConversations(!showConversations)}
>
{showConversations
? "hide conversations"
: "show conversations"}
</button>
<button
className="p-2 h-11 border border-red-400 bg-red-200 hover:bg-red-400 cursor-pointer rounded-md text-sm sm:text-base"
onClick={() => setAuthenticated(false)}
>
logout
</button>
</div>
</div>
{showConversations && (
<ConversationList
conversations={conversations}
onCreateNewConversation={handleCreateNewConversation}
onSelectConversation={handleSelectConversation}
/>
)}
{messages.map((msg, index) => {
if (msg.speaker === "simba") {
return <AnswerBubble key={index} text={msg.text} />;
}
return <QuestionBubble key={index} text={msg.text} />;
})}
<div ref={messagesEndRef} />
<footer className="flex flex-col gap-2 sticky bottom-0">
<div className="flex flex-row justify-between gap-2 grow">
<textarea
className="p-3 sm:p-4 border border-blue-200 rounded-md grow bg-white min-h-[44px] resize-y"
onChange={handleQueryChange}
onKeyDown={handleKeyDown}
value={query}
rows={2}
placeholder="Type your message... (Press Enter to send, Shift+Enter for new line)"
/>
</div>
<div className="flex flex-row justify-between gap-2 grow">
<button
className="p-3 sm:p-4 min-h-[44px] border border-blue-400 bg-blue-200 hover:bg-blue-400 cursor-pointer rounded-md flex-grow text-sm sm:text-base"
onClick={() => handleQuestionSubmit()}
type="submit"
>
Submit
</button>
</div>
<div className="flex flex-row justify-center gap-2 grow items-center">
<input
type="checkbox"
onChange={(event) => setSimbaMode(event.target.checked)}
className="w-5 h-5 cursor-pointer"
/>
<p className="text-sm sm:text-base">simba mode?</p>
</div>
</footer>
</div>
</div>
</div>
</div>
);
};

View File

@@ -1,69 +0,0 @@
import { useState, useEffect } from "react";
import { conversationService } from "../api/conversationService";
type Conversation = {
title: string;
id: string;
};
type ConversationProps = {
conversations: Conversation[];
onSelectConversation: (conversation: Conversation) => void;
onCreateNewConversation: () => void;
};
export const ConversationList = ({
conversations,
onSelectConversation,
onCreateNewConversation,
}: ConversationProps) => {
  const [conversationList, setConversations] = useState(conversations);
useEffect(() => {
const loadConversations = async () => {
try {
let fetchedConversations =
await conversationService.getAllConversations();
        if (fetchedConversations.length === 0) {
await conversationService.createConversation();
fetchedConversations =
await conversationService.getAllConversations();
}
setConversations(
fetchedConversations.map((conversation) => ({
id: conversation.id,
title: conversation.name,
})),
);
} catch (error) {
console.error("Failed to load messages:", error);
}
};
loadConversations();
}, []);
return (
<div className="bg-indigo-300 rounded-md p-3 sm:p-4 flex flex-col gap-1">
      {conversationList.map((conversation) => {
return (
<div
key={conversation.id}
className="border-blue-400 bg-indigo-300 hover:bg-indigo-200 cursor-pointer rounded-md p-3 min-h-[44px] flex items-center"
onClick={() => onSelectConversation(conversation)}
>
<p className="text-sm sm:text-base break-words">
{conversation.title}
</p>
</div>
);
})}
<div
className="border-blue-400 bg-indigo-300 hover:bg-indigo-200 cursor-pointer rounded-md p-3 min-h-[44px] flex items-center"
onClick={() => onCreateNewConversation()}
>
<p className="text-sm sm:text-base"> + Start a new thread</p>
</div>
</div>
);
};

View File

@@ -1,24 +0,0 @@
type Conversation = {
title: string;
id: string;
};
type ConversationMenuProps = {
conversations: Conversation[];
};
export const ConversationMenu = ({ conversations }: ConversationMenuProps) => {
return (
<div className="absolute bg-white w-md rounded-md shadow-xl m-4 p-4">
<p className="py-2 px-4 rounded-md w-full text-xl font-bold">askSimba!</p>
{conversations.map((conversation) => (
<p
key={conversation.id}
className="py-2 px-4 rounded-md hover:bg-stone-200 w-full text-xl font-bold cursor-pointer"
>
{conversation.title}
</p>
))}
</div>
);
};

View File

@@ -1,122 +0,0 @@
import { useState, useEffect } from "react";
import { userService } from "../api/userService";
type LoginScreenProps = {
setAuthenticated: (isAuth: boolean) => void;
};
export const LoginScreen = ({ setAuthenticated }: LoginScreenProps) => {
const [username, setUsername] = useState<string>("");
const [password, setPassword] = useState<string>("");
const [error, setError] = useState<string>("");
const [isChecking, setIsChecking] = useState<boolean>(true);
useEffect(() => {
// Check if user is already authenticated
const checkAuth = async () => {
const isValid = await userService.validateToken();
if (isValid) {
setAuthenticated(true);
}
setIsChecking(false);
};
checkAuth();
}, [setAuthenticated]);
const handleLogin = async (e?: React.FormEvent) => {
e?.preventDefault();
if (!username || !password) {
setError("Please enter username and password");
return;
}
try {
const result = await userService.login(username, password);
localStorage.setItem("access_token", result.access_token);
localStorage.setItem("refresh_token", result.refresh_token);
setAuthenticated(true);
setError("");
} catch (err) {
setError("Login failed. Please check your credentials.");
console.error("Login error:", err);
}
};
const handleKeyPress = (e: React.KeyboardEvent) => {
if (e.key === "Enter") {
handleLogin();
}
};
// Show loading state while checking authentication
if (isChecking) {
return (
<div className="h-screen bg-opacity-20">
<div className="bg-white/85 h-screen flex items-center justify-center">
<div className="text-center">
<p className="text-lg sm:text-xl">Checking authentication...</p>
</div>
</div>
</div>
);
}
return (
<div className="h-screen bg-opacity-20">
<div className="bg-white/85 h-screen">
<div className="flex flex-row justify-center py-4">
<div className="flex flex-col gap-4 w-full px-4 sm:w-11/12 sm:max-w-2xl lg:max-w-4xl sm:px-0">
<div className="flex flex-col gap-1">
<div className="flex flex-grow justify-center w-full bg-amber-400 p-2">
<h1 className="text-base sm:text-xl font-bold text-center">
I AM LOOKING FOR A DESIGNER. THIS APP WILL REMAIN UGLY UNTIL A
DESIGNER COMES.
</h1>
</div>
<header className="flex flex-row justify-center gap-2 grow sticky top-0 z-10 bg-white">
<h1 className="text-2xl sm:text-3xl">ask simba!</h1>
</header>
<label htmlFor="username" className="text-sm sm:text-base">
username
</label>
<input
type="text"
id="username"
name="username"
value={username}
onChange={(e) => setUsername(e.target.value)}
onKeyPress={handleKeyPress}
className="border border-s-slate-950 p-3 rounded-md min-h-[44px]"
/>
<label htmlFor="password" className="text-sm sm:text-base">
password
</label>
<input
type="password"
id="password"
name="password"
value={password}
onChange={(e) => setPassword(e.target.value)}
onKeyPress={handleKeyPress}
className="border border-s-slate-950 p-3 rounded-md min-h-[44px]"
/>
{error && (
<div className="text-red-600 font-semibold text-sm sm:text-base">
{error}
</div>
)}
</div>
<button
className="p-3 sm:p-4 min-h-[44px] border border-blue-400 bg-blue-200 hover:bg-blue-400 cursor-pointer rounded-md flex-grow text-sm sm:text-base"
onClick={handleLogin}
>
login
</button>
</div>
</div>
</div>
</div>
);
};
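For context, a parent component along the following lines can wire `LoginScreen` and `ChatScreen` together through the shared `setAuthenticated` callback. This `App` sketch and its import paths are assumptions and do not appear in the diff above.

// Sketch of a parent component (assumed, not part of the diff); import paths are illustrative.
import { useState } from "react";
import { LoginScreen } from "./components/LoginScreen";
import { ChatScreen } from "./components/ChatScreen";

export const App = () => {
  const [authenticated, setAuthenticated] = useState<boolean>(false);

  // LoginScreen calls setAuthenticated(true) after a successful login or a valid stored
  // refresh token; ChatScreen calls setAuthenticated(false) on logout or session expiry.
  return authenticated ? (
    <ChatScreen setAuthenticated={setAuthenticated} />
  ) : (
    <LoginScreen setAuthenticated={setAuthenticated} />
  );
};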

View File

@@ -1,11 +0,0 @@
type QuestionBubbleProps = {
text: string;
};
export const QuestionBubble = ({ text }: QuestionBubbleProps) => {
return (
<div className="rounded-md bg-stone-200 p-3 sm:p-4 break-words overflow-wrap-anywhere text-sm sm:text-base">
🤦: {text}
</div>
);
};

View File

@@ -1,56 +0,0 @@
import { createContext, useContext, useState, ReactNode } from "react";
import { userService } from "../api/userService";
interface AuthContextType {
token: string | null;
login: (username: string, password: string) => Promise<any>;
logout: () => void;
isAuthenticated: () => boolean;
}
const AuthContext = createContext<AuthContextType | undefined>(undefined);
interface AuthProviderProps {
children: ReactNode;
}
export const AuthProvider = ({ children }: AuthProviderProps) => {
const [token, setToken] = useState(localStorage.getItem("access_token"));
const login = async (username: string, password: string) => {
try {
const data = await userService.login(username, password);
setToken(data.access_token);
localStorage.setItem("access_token", data.access_token);
localStorage.setItem("refresh_token", data.refresh_token);
return data;
} catch (error) {
console.error("Login failed:", error);
throw error;
}
};
const logout = () => {
setToken(null);
localStorage.removeItem("access_token");
localStorage.removeItem("refresh_token");
};
const isAuthenticated = () => {
return token !== null && token !== undefined && token !== "";
};
return (
<AuthContext.Provider value={{ token, login, logout, isAuthenticated }}>
{children}
</AuthContext.Provider>
);
};
export const useAuth = () => {
const context = useContext(AuthContext);
if (context === undefined) {
throw new Error("useAuth must be used within an AuthProvider");
}
return context;
};
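A brief usage sketch for the context above, assuming the tree is wrapped in `AuthProvider` (the component names and import path below are illustrative): `useAuth` throws when called outside the provider, so the hook is only safe inside its children.

// Illustrative consumer of AuthContext; component names and the import path are assumptions.
import { AuthProvider, useAuth } from "./context/AuthContext";

const LogoutButton = () => {
  const { logout, isAuthenticated } = useAuth();
  if (!isAuthenticated()) return null;
  return <button onClick={logout}>logout</button>;
};

// useAuth throws if the calling component is not rendered under AuthProvider.
export const Root = () => (
  <AuthProvider>
    <LogoutButton />
  </AuthProvider>
);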

View File

@@ -1,8 +1,5 @@
#!/bin/bash
echo "Running database migrations..."
aerich upgrade
echo "Starting reindex process..." echo "Starting reindex process..."
python main.py "" --reindex python main.py "" --reindex

View File

@@ -2,21 +2,6 @@ version = 1
revision = 2
requires-python = ">=3.13"
[[package]]
name = "aerich"
version = "0.9.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "asyncclick" },
{ name = "dictdiffer" },
{ name = "tortoise-orm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c4/60/5d3885f531fab2cecec67510e7b821efc403940ed9eefd034b2c21350f3c/aerich-0.9.2.tar.gz", hash = "sha256:02d58658714eebe396fe7bd9f9401db3a60a44dc885910ad3990920d0357317d", size = 74231, upload-time = "2025-10-10T05:53:49.632Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/87/1a/956c6b1e35881bb9835a33c8db1565edcd133f8e45321010489092a0df40/aerich-0.9.2-py3-none-any.whl", hash = "sha256:d0f007acb21f6559f1eccd4e404fb039cf48af2689e0669afa62989389c0582d", size = 46451, upload-time = "2025-10-10T05:53:48.71Z" },
]
[[package]]
name = "aiofiles"
version = "25.1.0"
@@ -60,18 +45,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" },
] ]
[[package]]
name = "asyncclick"
version = "8.3.0.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f9/ca/25e426d16bd0e91c1c9259112cecd17b2c2c239bdd8e5dba430f3bd5e3ef/asyncclick-8.3.0.7.tar.gz", hash = "sha256:8a80d8ac613098ee6a9a8f0248f60c66c273e22402cf3f115ed7f071acfc71d3", size = 277634, upload-time = "2025-10-11T08:35:44.841Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/01/d9/782ffcb4c97b889bc12d8276637d2739b99520390ee8fec77c07416c5d12/asyncclick-8.3.0.7-py3-none-any.whl", hash = "sha256:7607046de39a3f315867cad818849f973e29d350c10d92f251db3ff7600c6c7d", size = 109925, upload-time = "2025-10-11T08:35:43.378Z" },
]
[[package]]
name = "attrs"
version = "25.3.0"
@@ -333,15 +306,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" },
] ]
[[package]]
name = "dictdiffer"
version = "0.9.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/61/7b/35cbccb7effc5d7e40f4c55e2b79399e1853041997fcda15c9ff160abba0/dictdiffer-0.9.0.tar.gz", hash = "sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578", size = 31513, upload-time = "2021-07-22T13:24:29.276Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/47/ef/4cb333825d10317a36a1154341ba37e6e9c087bac99c1990ef07ffdb376f/dictdiffer-0.9.0-py2.py3-none-any.whl", hash = "sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595", size = 16754, upload-time = "2021-07-22T13:24:26.783Z" },
]
[[package]]
name = "distlib"
version = "0.4.0"
@@ -1705,7 +1669,6 @@ name = "raggr"
version = "0.1.0" version = "0.1.0"
source = { virtual = "." } source = { virtual = "." }
dependencies = [ dependencies = [
{ name = "aerich" },
{ name = "bcrypt" }, { name = "bcrypt" },
{ name = "black" }, { name = "black" },
{ name = "chromadb" }, { name = "chromadb" },
@@ -1724,14 +1687,11 @@ dependencies = [
{ name = "python-dotenv" }, { name = "python-dotenv" },
{ name = "quart" }, { name = "quart" },
{ name = "quart-jwt-extended" }, { name = "quart-jwt-extended" },
{ name = "tomlkit" },
{ name = "tortoise-orm" }, { name = "tortoise-orm" },
{ name = "tortoise-orm-stubs" },
]
[package.metadata]
requires-dist = [
{ name = "aerich", specifier = ">=0.8.0" },
{ name = "bcrypt", specifier = ">=5.0.0" }, { name = "bcrypt", specifier = ">=5.0.0" },
{ name = "black", specifier = ">=25.9.0" }, { name = "black", specifier = ">=25.9.0" },
{ name = "chromadb", specifier = ">=1.1.0" }, { name = "chromadb", specifier = ">=1.1.0" },
@@ -1750,9 +1710,7 @@ requires-dist = [
{ name = "python-dotenv", specifier = ">=1.0.0" }, { name = "python-dotenv", specifier = ">=1.0.0" },
{ name = "quart", specifier = ">=0.20.0" }, { name = "quart", specifier = ">=0.20.0" },
{ name = "quart-jwt-extended", specifier = ">=0.1.0" }, { name = "quart-jwt-extended", specifier = ">=0.1.0" },
{ name = "tomlkit", specifier = ">=0.13.3" },
{ name = "tortoise-orm", specifier = ">=0.25.1" }, { name = "tortoise-orm", specifier = ">=0.25.1" },
{ name = "tortoise-orm-stubs", specifier = ">=1.0.2" },
]
[[package]]
@@ -1960,15 +1918,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" },
] ]
[[package]]
name = "tomlkit"
version = "0.13.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cc/18/0bbf3884e9eaa38819ebe46a7bd25dcd56b67434402b66a58c4b8e552575/tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1", size = 185207, upload-time = "2025-06-05T07:13:44.947Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bd/75/8539d011f6be8e29f339c42e633aae3cb73bffa95dd0f9adec09b9c58e85/tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0", size = 38901, upload-time = "2025-06-05T07:13:43.546Z" },
]
[[package]]
name = "tortoise-orm"
version = "0.25.1"
@@ -1984,18 +1933,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/70/55/2bda7f4445f4c07b734385b46d1647a388d05160cf5b8714a713e8709378/tortoise_orm-0.25.1-py3-none-any.whl", hash = "sha256:df0ef7e06eb0650a7e5074399a51ee6e532043308c612db2cac3882486a3fd9f", size = 167723, upload-time = "2025-06-05T10:43:29.309Z" }, { url = "https://files.pythonhosted.org/packages/70/55/2bda7f4445f4c07b734385b46d1647a388d05160cf5b8714a713e8709378/tortoise_orm-0.25.1-py3-none-any.whl", hash = "sha256:df0ef7e06eb0650a7e5074399a51ee6e532043308c612db2cac3882486a3fd9f", size = 167723, upload-time = "2025-06-05T10:43:29.309Z" },
] ]
[[package]]
name = "tortoise-orm-stubs"
version = "1.0.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "tortoise-orm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/49/45b06cda907e55226b8ed4ddc71d13ff61505bfe366d72276462eeee9d2b/tortoise_orm_stubs-1.0.2.tar.gz", hash = "sha256:f4d6a810f295bebd83aa71b05ebd2decd883517f3c9530bd2376b9209b0777c6", size = 4559, upload-time = "2023-11-20T14:48:26.806Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/33/b1/f0b111dcf9381987f8acb143dd95b77934a3e9120a6c63b2cf4255c2934c/tortoise_orm_stubs-1.0.2-py3-none-any.whl", hash = "sha256:5ae3c2b0eb0286669563634b98202bbdf46349966b1c85659f3160de4fb655d6", size = 4681, upload-time = "2023-11-20T14:48:22.536Z" },
]
[[package]]
name = "tqdm"
version = "4.67.1"