Compare commits: quart-logi...async-rein (12 commits)

| SHA1 |
|---|
| e577cb335b |
| 591788dfa4 |
| 561b5bddce |
| ddd455a4c6 |
| 07424e77e0 |
| a56f752917 |
| e8264e80ce |
| 04350045d3 |
| f16e13fccc |
| 245db92524 |
| 7161c09a4e |
| 68d73b62e8 |
Dockerfile

```diff
@@ -24,7 +24,6 @@ RUN uv pip install --system -e .
 # Copy application code
 COPY *.py ./
 COPY blueprints ./blueprints
 COPY aerich.toml ./
 COPY migrations ./migrations
 COPY startup.sh ./
 RUN chmod +x startup.sh
@@ -35,8 +34,8 @@ WORKDIR /app/raggr-frontend
 RUN yarn install && yarn build
 WORKDIR /app

-# Create ChromaDB directory
-RUN mkdir -p /app/chromadb
+# Create ChromaDB and database directories
+RUN mkdir -p /app/chromadb /app/database

 # Expose port
 EXPOSE 8080
```
```diff
@@ -10,7 +10,7 @@ from blueprints.users.models import User
 async def add_user(username: str, email: str, password: str):
     """Add a new user to the database"""
     await Tortoise.init(
-        db_url="sqlite://raggr.db",
+        db_url="sqlite://database/raggr.db",
         modules={
             "models": [
                 "blueprints.users.models",
@@ -56,7 +56,7 @@ async def add_user(username: str, email: str, password: str):
 async def list_users():
     """List all users in the database"""
     await Tortoise.init(
-        db_url="sqlite://raggr.db",
+        db_url="sqlite://database/raggr.db",
         modules={
             "models": [
                 "blueprints.users.models",
```
```diff
@@ -1,7 +1,7 @@
 import os

 TORTOISE_ORM = {
-    "connections": {"default": os.getenv("DATABASE_URL", "sqlite:///app/raggr.db")},
+    "connections": {"default": os.getenv("DATABASE_URL", "sqlite:///app/database/raggr.db")},
     "apps": {
         "models": {
             "models": [
```
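A note on the URL shape: `sqlite://database/raggr.db` resolves relative to the working directory, while the three-slash form `sqlite:///app/database/raggr.db` is the absolute path `/app/database/raggr.db`. A minimal sketch of how a config like this is consumed, assuming the dict lives in a module the app imports (the function name and standalone script form are illustrative, not from the diff):

```python
# Illustrative sketch: initialize Tortoise from the same env-overridable URL.
# "DATABASE_URL" and the default path come from the diff; the rest is assumed.
import os

from tortoise import Tortoise, run_async


async def init_db() -> None:
    await Tortoise.init(
        db_url=os.getenv("DATABASE_URL", "sqlite:///app/database/raggr.db"),
        modules={"models": ["blueprints.users.models"]},
    )


run_async(init_db())
```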
app.py (14 changes)
```diff
@@ -27,7 +27,7 @@ app.register_blueprint(blueprints.conversation.conversation_blueprint)

 TORTOISE_CONFIG = {
-    "connections": {"default": "sqlite://raggr.db"},
+    "connections": {"default": "sqlite://database/raggr.db"},
     "apps": {
         "models": {
             "models": [
@@ -69,9 +69,11 @@ async def query():
     user = await blueprints.users.models.User.get(id=current_user_uuid)
     data = await request.get_json()
     query = data.get("query")
-    conversation = await blueprints.conversation.logic.get_conversation_for_user(
-        user=user
+    conversation_id = data.get("conversation_id")
+    conversation = await blueprints.conversation.logic.get_conversation_by_id(
+        conversation_id
     )
+    await conversation.fetch_related("messages")
     await blueprints.conversation.logic.add_message_to_conversation(
         conversation=conversation,
         message=query,
@@ -79,7 +81,11 @@ async def query():
         user=user,
     )

-    response = consult_simba_oracle(query)
+    transcript = await blueprints.conversation.logic.get_conversation_transcript(
+        user=user, conversation=conversation
+    )
+
+    response = consult_simba_oracle(input=query, transcript=transcript)
     await blueprints.conversation.logic.add_message_to_conversation(
         conversation=conversation,
         message=response,
```
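With this change the query endpoint keys off an explicit `conversation_id` from the request body instead of a per-user singleton conversation. A hedged client-side sketch; the host, port, placeholder IDs, and auth header are assumptions for illustration, while the JSON keys (`query`, `conversation_id`, `response`) come from the diff:

```python
# Hypothetical call against the updated /api/query endpoint.
import httpx

payload = {"query": "how much does simba weigh", "conversation_id": "<conversation-uuid>"}
r = httpx.post(
    "http://localhost:8080/api/query",
    json=payload,
    headers={"Authorization": "Bearer <token>"},  # placeholder token
)
print(r.json()["response"])
```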
```diff
@@ -1,9 +1,19 @@
+import datetime
+
+from quart_jwt_extended import (
+    jwt_refresh_token_required,
+    get_jwt_identity,
+)
+
 from quart import Blueprint, jsonify
 from .models import (
     Conversation,
     PydConversation,
+    PydListConversation,
 )

+import blueprints.users.models
+
 conversation_blueprint = Blueprint(
     "conversation_api", __name__, url_prefix="/api/conversation"
 )
@@ -12,6 +22,51 @@ conversation_blueprint = Blueprint(
 @conversation_blueprint.route("/<conversation_id>")
 async def get_conversation(conversation_id: str):
     conversation = await Conversation.get(id=conversation_id)
-    serialized_conversation = await PydConversation.from_tortoise_orm(conversation)
+    await conversation.fetch_related("messages")

-    return jsonify(serialized_conversation.model_dump_json())
+    # Manually serialize the conversation with messages
+    messages = []
+    for msg in conversation.messages:
+        messages.append(
+            {
+                "id": str(msg.id),
+                "text": msg.text,
+                "speaker": msg.speaker.value,
+                "created_at": msg.created_at.isoformat(),
+            }
+        )
+
+    return jsonify(
+        {
+            "id": str(conversation.id),
+            "name": conversation.name,
+            "messages": messages,
+            "created_at": conversation.created_at.isoformat(),
+            "updated_at": conversation.updated_at.isoformat(),
+        }
+    )
+
+
+@conversation_blueprint.post("/")
+@jwt_refresh_token_required
+async def create_conversation():
+    user_uuid = get_jwt_identity()
+    user = await blueprints.users.models.User.get(id=user_uuid)
+    conversation = await Conversation.create(
+        name=f"{user.username} {datetime.datetime.now().timestamp()}",
+        user=user,
+    )
+
+    serialized_conversation = await PydConversation.from_tortoise_orm(conversation)
+    return jsonify(serialized_conversation.model_dump())
+
+
+@conversation_blueprint.get("/")
+@jwt_refresh_token_required
+async def get_all_conversations():
+    user_uuid = get_jwt_identity()
+    user = await blueprints.users.models.User.get(id=user_uuid)
+    conversations = Conversation.filter(user=user)
+    serialized_conversations = await PydListConversation.from_queryset(conversations)
+
+    return jsonify(serialized_conversations.model_dump())
```
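The switch to manual serialization in `get_conversation` also sidesteps a double-encoding pitfall: `model_dump_json()` returns a JSON string, and passing a string to `jsonify()` encodes it a second time. A minimal, self-contained illustration with plain pydantic (the Tortoise-generated models behave the same way here):

```python
# Sketch: why jsonify(model_dump_json()) double-encodes while a dict does not.
import json

from pydantic import BaseModel


class Msg(BaseModel):
    text: str


m = Msg(text="meow")
print(json.dumps(m.model_dump()))       # {"text": "meow"}        -- encoded once
print(json.dumps(m.model_dump_json()))  # "{\"text\":\"meow\"}"   -- a string, re-encoded
```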
```diff
@@ -44,3 +44,17 @@ async def get_conversation_for_user(user: blueprints.users.models.User) -> Conve
     await Conversation.get_or_create(name=f"{user.username}'s chat", user=user)

     return await Conversation.get(user=user)
+
+
+async def get_conversation_by_id(id: str) -> Conversation:
+    return await Conversation.get(id=id)
+
+
+async def get_conversation_transcript(
+    user: blueprints.users.models.User, conversation: Conversation
+) -> str:
+    messages = []
+    for message in conversation.messages:
+        messages.append(f"{message.speaker} at {message.created_at}: {message.text}")
+
+    return "\n".join(messages)
```
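`get_conversation_transcript` walks `conversation.messages`, so callers must have run `fetch_related("messages")` first, as app.py now does. A sketch of the transcript format with plain objects standing in for ORM rows (field names mirror the message model; the data is made up):

```python
# Illustrative only: FakeMessage stands in for a fetched ConversationMessage.
import datetime
from dataclasses import dataclass


@dataclass
class FakeMessage:
    speaker: str
    created_at: datetime.datetime
    text: str


messages = [
    FakeMessage("user", datetime.datetime(2025, 1, 1, 9, 0), "how much does simba weigh"),
    FakeMessage("simba", datetime.datetime(2025, 1, 1, 9, 1), "a gentleman never tells."),
]
transcript = "\n".join(f"{m.speaker} at {m.created_at}: {m.text}" for m in messages)
print(transcript)
# user at 2025-01-01 09:00:00: how much does simba weigh
# simba at 2025-01-01 09:01:00: a gentleman never tells.
```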
```diff
@@ -40,5 +40,15 @@ class ConversationMessage(Model):


 PydConversationMessage = pydantic_model_creator(ConversationMessage)
-PydConversation = pydantic_model_creator(Conversation, name="Conversation")
+PydConversation = pydantic_model_creator(
+    Conversation, name="Conversation", allow_cycles=True, exclude=("user",)
+)
+PydConversationWithMessages = pydantic_model_creator(
+    Conversation,
+    name="ConversationWithMessages",
+    allow_cycles=True,
+    exclude=("user",),
+    include=("messages",),
+)
 PydListConversation = pydantic_queryset_creator(Conversation)
 PydListConversationMessage = pydantic_queryset_creator(ConversationMessage)
```
```diff
@@ -14,7 +14,7 @@ from llm import LLMClient
 load_dotenv()

 ollama_client = Client(
-    host=os.getenv("OLLAMA_HOST", "http://localhost:11434"), timeout=10.0
+    host=os.getenv("OLLAMA_HOST", "http://localhost:11434"), timeout=1.0
 )
```
docker-compose file

```diff
@@ -12,6 +12,8 @@ services:
       - OPENAI_API_KEY=${OPENAI_API_KEY}
     volumes:
       - chromadb_data:/app/chromadb
+      - database_data:/app/database

 volumes:
   chromadb_data:
+  database_data:
```
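Taken together with the Dockerfile and path changes above, this is the through-line of the branch's persistence fix: every SQLite file (raggr.db, visited.db) now lives under /app/database, and that directory is backed by the new `database_data` named volume, so user accounts and the indexing ledger survive container rebuilds the same way the `chromadb_data` volume already preserved embeddings.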
```diff
@@ -27,7 +27,7 @@ headers = {"x-api-key": API_KEY, "Content-Type": "application/json"}
 VISITED = {}

 if __name__ == "__main__":
-    conn = sqlite3.connect("./visited.db")
+    conn = sqlite3.connect("./database/visited.db")
     c = conn.cursor()
     c.execute("select immich_id from visited")
     rows = c.fetchall()
```
llm.py (59 changes)
```diff
@@ -4,15 +4,20 @@ from ollama import Client
 from openai import OpenAI

+import logging
 from dotenv import load_dotenv

 load_dotenv()

+logging.basicConfig(level=logging.INFO)
+
 TRY_OLLAMA = os.getenv("TRY_OLLAMA", False)


 class LLMClient:
     def __init__(self):
         try:
             self.ollama_client = Client(
-                host=os.getenv("OLLAMA_URL", "http://localhost:11434"), timeout=10.0
+                host=os.getenv("OLLAMA_URL", "http://localhost:11434"), timeout=1.0
             )
             self.ollama_client.chat(
                 model="gemma3:4b", messages=[{"role": "system", "content": "test"}]
@@ -30,31 +35,35 @@ class LLMClient:
         prompt: str,
         system_prompt: str,
     ):
         # Instituting a fallback if my gaming PC is not on
-        if self.PROVIDER == "ollama":
-            response = self.ollama_client.chat(
-                model="gemma3:4b",
-                messages=[
-                    {
-                        "role": "system",
-                        "content": system_prompt,
-                    },
-                    {"role": "user", "content": prompt},
-                ],
-            )
-            print(response)
-            output = response.message.content
-        elif self.PROVIDER == "openai":
-            response = self.openai_client.responses.create(
-                model="gpt-4o-mini",
-                input=[
-                    {
-                        "role": "system",
-                        "content": system_prompt,
-                    },
-                    {"role": "user", "content": prompt},
-                ],
-            )
-            output = response.output_text
+        try:
+            response = self.ollama_client.chat(
+                model="gemma3:4b",
+                messages=[
+                    {
+                        "role": "system",
+                        "content": system_prompt,
+                    },
+                    {"role": "user", "content": prompt},
+                ],
+            )
+            output = response.message.content
+            return output
+        except Exception as e:
+            logging.error(f"Could not connect to OLLAMA: {str(e)}")
+
+            response = self.openai_client.responses.create(
+                model="gpt-4o-mini",
+                input=[
+                    {
+                        "role": "system",
+                        "content": system_prompt,
+                    },
+                    {"role": "user", "content": prompt},
+                ],
+            )
+            output = response.output_text

         return output
```
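The net effect of the llm.py change: provider selection moves from a one-time check of `self.PROVIDER` to a per-call attempt against the local Ollama server with a 1-second timeout, falling back to OpenAI on any failure. The same pattern as a standalone sketch (model names, env vars, and fallback order come from the diff; the free function in place of LLMClient is for illustration, and an OpenAI key in the env is assumed):

```python
# Sketch of the per-call local-first fallback.
import logging
import os

from ollama import Client
from openai import OpenAI

logging.basicConfig(level=logging.INFO)


def chat_with_fallback(prompt: str, system_prompt: str) -> str:
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": prompt},
    ]
    try:
        # timeout=1.0 makes the "gaming PC is off" case fail fast
        ollama_client = Client(
            host=os.getenv("OLLAMA_URL", "http://localhost:11434"), timeout=1.0
        )
        return ollama_client.chat(model="gemma3:4b", messages=messages).message.content
    except Exception as e:
        logging.error(f"Could not connect to OLLAMA: {str(e)}")
        response = OpenAI().responses.create(model="gpt-4o-mini", input=messages)
        return response.output_text
```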
main.py (125 changes)
```diff
@@ -7,6 +7,8 @@ import argparse
 import chromadb
 import ollama

+import time
+
 from request import PaperlessNGXService
 from chunker import Chunker
@@ -36,6 +38,7 @@ parser.add_argument("query", type=str, help="questions about simba's health")
 parser.add_argument(
     "--reindex", action="store_true", help="re-index the simba documents"
 )
+parser.add_argument("--classify", action="store_true", help="test classification")
 parser.add_argument("--index", help="index a file")

 ppngx = PaperlessNGXService()
@@ -77,7 +80,7 @@ def chunk_data(docs, collection, doctypes):
     logging.info(f"chunking {len(docs)} documents")
     texts: list[str] = [doc["content"] for doc in docs]
-    with sqlite3.connect("visited.db") as conn:
+    with sqlite3.connect("database/visited.db") as conn:
         to_insert = []
         c = conn.cursor()
         for index, text in enumerate(texts):
@@ -113,9 +116,22 @@ def chunk_text(texts: list[str], collection):
     )


-def consult_oracle(input: str, collection):
-    import time
+def classify_query(query: str, transcript: str) -> bool:
+    logging.info("Starting query generation")
+    qg_start = time.time()
+    qg = QueryGenerator()
+    query_type = qg.get_query_type(input=query, transcript=transcript)
+    logging.info(query_type)
+    qg_end = time.time()
+    logging.info(f"Query generation took {qg_end - qg_start:.2f} seconds")
+    return query_type == "Simba"
+
+
+def consult_oracle(
+    input: str,
+    collection,
+    transcript: str = "",
+):
     chunker = Chunker(collection)

     start_time = time.time()
@@ -153,7 +169,10 @@ def consult_oracle(input: str, collection):
     logging.info("Starting LLM generation")
     llm_start = time.time()
     system_prompt = "You are a helpful assistant that understands veterinary terms."
-    prompt = f"Using the following data, help answer the user's query by providing as many details as possible. Using this data: {results}. Respond to this prompt: {input}"
+    transcript_prompt = f"Here is the message transcript thus far {transcript}."
+    prompt = f"""Using the following data, help answer the user's query by providing as many details as possible.
+    Using this data: {results}. {transcript_prompt if len(transcript) > 0 else ""}
+    Respond to this prompt: {input}"""
     output = llm_client.chat(prompt=prompt, system_prompt=system_prompt)
     llm_end = time.time()
     logging.info(f"LLM generation took {llm_end - llm_start:.2f} seconds")
@@ -164,6 +183,16 @@ def consult_oracle(input: str, collection):
     return output


+def llm_chat(input: str, transcript: str = "") -> str:
+    system_prompt = "You are a helpful assistant that understands veterinary terms."
+    transcript_prompt = f"Here is the message transcript thus far {transcript}."
+    prompt = f"""Answer the user in a humorous way as if you were a cat named Simba. Be very coy.
+    {transcript_prompt if len(transcript) > 0 else ""}
+    Respond to this prompt: {input}"""
+    output = llm_client.chat(prompt=prompt, system_prompt=system_prompt)
+    return output
+
+
 def paperless_workflow(input):
     # Step 1: Get the text
     ppngx = PaperlessNGXService()
@@ -173,15 +202,24 @@ def paperless_workflow(input):
     consult_oracle(input, simba_docs)


-def consult_simba_oracle(input: str):
-    return consult_oracle(
-        input=input,
-        collection=simba_docs,
-    )
+def consult_simba_oracle(input: str, transcript: str = ""):
+    is_simba_related = classify_query(query=input, transcript=transcript)
+
+    if is_simba_related:
+        logging.info("Query is related to simba")
+        return consult_oracle(
+            input=input,
+            collection=simba_docs,
+            transcript=transcript,
+        )
+
+    logging.info("Query is NOT related to simba")
+
+    return llm_chat(input=input, transcript=transcript)


 def filter_indexed_files(docs):
-    with sqlite3.connect("visited.db") as conn:
+    with sqlite3.connect("database/visited.db") as conn:
         c = conn.cursor()
         c.execute(
             "CREATE TABLE IF NOT EXISTS indexed_documents (id INTEGER PRIMARY KEY AUTOINCREMENT, paperless_id INTEGER)"
@@ -194,42 +232,45 @@ def filter_indexed_files(docs):
     return [doc for doc in docs if doc["id"] not in visited]


+def reindex():
+    with sqlite3.connect("database/visited.db") as conn:
+        c = conn.cursor()
+        c.execute("DELETE FROM indexed_documents")
+        conn.commit()
+
+    # Delete all documents from the collection
+    all_docs = simba_docs.get()
+    if all_docs["ids"]:
+        simba_docs.delete(ids=all_docs["ids"])
+
+    logging.info("Fetching documents from Paperless-NGX")
+    ppngx = PaperlessNGXService()
+    docs = ppngx.get_data()
+    docs = filter_indexed_files(docs)
+    logging.info(f"Fetched {len(docs)} documents")
+
+    # Delete all chromadb data
+    ids = simba_docs.get(ids=None, limit=None, offset=0)
+    all_ids = ids["ids"]
+    if len(all_ids) > 0:
+        simba_docs.delete(ids=all_ids)
+
+    # Chunk documents
+    logging.info("Chunking documents now ...")
+    doctype_lookup = ppngx.get_doctypes()
+    chunk_data(docs, collection=simba_docs, doctypes=doctype_lookup)
+    logging.info("Done chunking documents")
+
+
 if __name__ == "__main__":
     args = parser.parse_args()
     if args.reindex:
-        with sqlite3.connect("./visited.db") as conn:
-            c = conn.cursor()
-            c.execute("DELETE FROM indexed_documents")
-
-        logging.info("Fetching documents from Paperless-NGX")
-        ppngx = PaperlessNGXService()
-        docs = ppngx.get_data()
-        docs = filter_indexed_files(docs)
-        logging.info(f"Fetched {len(docs)} documents")
-
-        # Delete all chromadb data
-        ids = simba_docs.get(ids=None, limit=None, offset=0)
-        all_ids = ids["ids"]
-        if len(all_ids) > 0:
-            simba_docs.delete(ids=all_ids)
-
-        # Chunk documents
-        logging.info("Chunking documents now ...")
-        tag_lookup = ppngx.get_tags()
-        doctype_lookup = ppngx.get_doctypes()
-        chunk_data(docs, collection=simba_docs, doctypes=doctype_lookup)
-        logging.info("Done chunking documents")
+        reindex()

     # if args.index:
     #     with open(args.index) as file:
     #         extension = args.index.split(".")[-1]
     #         if extension == "pdf":
     #             pdf_path = ppngx.download_pdf_from_id(id=document_id)
     #             image_paths = pdf_to_image(filepath=pdf_path)
     #             print(f"summarizing {file}")
     #             generated_summary = summarize_pdf_image(filepaths=image_paths)
     #         elif extension in [".md", ".txt"]:
     #             chunk_text(texts=[file.readall()], collection=simba_docs)
+    if args.classify:
+        consult_simba_oracle(input="yohohoho testing")
+        consult_simba_oracle(input="write an email")
+        consult_simba_oracle(input="how much does simba weigh")

     if args.query:
         logging.info("Consulting oracle ...")
```
query.py (57 changes)
```diff
@@ -49,11 +49,20 @@ DOCTYPE_OPTIONS = [
     "Letter",
 ]

+QUERY_TYPE_OPTIONS = [
+    "Simba",
+    "Other",
+]
+

 class DocumentType(BaseModel):
     type: list[str] = Field(description="type of document", enum=DOCTYPE_OPTIONS)


+class QueryType(BaseModel):
+    type: str = Field(description="type of query", enum=QUERY_TYPE_OPTIONS)
+
+
 PROMPT = """
 You are an information specialist that processes user queries. The current year is 2025. The user queries are all about
 a cat, Simba, and its records. The types of records are listed below. Using the query, extract the
@@ -111,6 +120,27 @@ Query: "Who does Simba know?"
 Tags: ["Letter", "Documentation"]
 """

+QUERY_TYPE_PROMPT = f"""You are an information specialist that processes user queries.
+A query can have one tag attached from the following options. Based on the query and the transcript which is listed below, determine
+which of the following options is most appropriate: {",".join(QUERY_TYPE_OPTIONS)}
+
+### Example 1
+Query: "Who is Simba's current vet?"
+Tags: ["Simba"]
+
+
+### Example 2
+Query: "What is the capital of Tokyo?"
+Tags: ["Other"]
+
+
+### Example 3
+Query: "Can you help me write an email?"
+Tags: ["Other"]
+
+TRANSCRIPT:
+"""
+

 class QueryGenerator:
     def __init__(self) -> None:
@@ -154,6 +184,33 @@ class QueryGenerator:
         metadata_query = {"document_type": {"$in": type_data["type"]}}
         return metadata_query

+    def get_query_type(self, input: str, transcript: str):
+        client = OpenAI()
+        response = client.chat.completions.create(
+            messages=[
+                {
+                    "role": "system",
+                    "content": "You are an information specialist that is really good at deciding what tags a query should have",
+                },
+                {
+                    "role": "user",
+                    "content": f"{QUERY_TYPE_PROMPT}\nTRANSCRIPT:\n{transcript}\nQUERY:{input}",
+                },
+            ],
+            model="gpt-4o",
+            response_format={
+                "type": "json_schema",
+                "json_schema": {
+                    "name": "query_type",
+                    "schema": QueryType.model_json_schema(),
+                },
+            },
+        )
+
+        response_json_str = response.choices[0].message.content
+        type_data = json.loads(response_json_str)
+        return type_data["type"]
+
     def get_query(self, input: str):
         client = OpenAI()
         response = client.responses.parse(
```
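`get_query_type` leans on OpenAI's `json_schema` response_format to force the reply into the QueryType shape, then reads the single `type` field back out. A self-contained version of that call, assuming OPENAI_API_KEY is set; the model, schema name, and options follow the diff, and the printed result is only the expected classification:

```python
# Standalone sketch of the structured-output classification call.
import json

from openai import OpenAI
from pydantic import BaseModel, Field

QUERY_TYPE_OPTIONS = ["Simba", "Other"]


class QueryType(BaseModel):
    type: str = Field(description="type of query", enum=QUERY_TYPE_OPTIONS)


client = OpenAI()
response = client.chat.completions.create(
    model="gpt-4o",
    messages=[{"role": "user", "content": "Query: how much does simba weigh"}],
    response_format={
        "type": "json_schema",
        "json_schema": {
            "name": "query_type",
            "schema": QueryType.model_json_schema(),
        },
    },
)
type_data = json.loads(response.choices[0].message.content)
print(type_data["type"])  # expected: "Simba"
```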
raggr-frontend/package-lock.json (2677 changes, generated, new file)
File diff suppressed because it is too large.
```diff
@@ -6,14 +6,18 @@
   "scripts": {
     "build": "rsbuild build",
     "dev": "rsbuild dev --open",
-    "preview": "rsbuild preview"
+    "preview": "rsbuild preview",
+    "watch": "npm-watch build",
+    "watch:build": "rsbuild build --watch"
   },
   "dependencies": {
     "axios": "^1.12.2",
     "marked": "^16.3.0",
+    "npm-watch": "^0.13.0",
     "react": "^19.1.1",
     "react-dom": "^19.1.1",
-    "react-markdown": "^10.1.0"
+    "react-markdown": "^10.1.0",
+    "watch": "^1.0.2"
   },
   "devDependencies": {
     "@rsbuild/core": "^1.5.6",
@@ -22,5 +26,16 @@
     "@types/react": "^19.1.13",
     "@types/react-dom": "^19.1.9",
     "typescript": "^5.9.2"
   },
+  "watch": {
+    "build": {
+      "patterns": [
+        "src"
+      ],
+      "extensions": "ts,tsx,css,js,jsx",
+      "delay": 1000,
+      "quiet": false,
+      "inherit": true
+    }
+  }
 }
```
```diff
@@ -3,4 +3,8 @@ import { pluginReact } from '@rsbuild/plugin-react';

 export default defineConfig({
   plugins: [pluginReact()],
+  html: {
+    title: 'Raggr',
+    favicon: './src/assets/favicon.svg',
+  },
 });
```
```diff
@@ -10,9 +10,10 @@ interface Message {
 interface Conversation {
   id: string;
   name: string;
-  messages: Message[];
+  messages?: Message[];
   created_at: string;
   updated_at: string;
+  user_id?: string;
 }

 interface QueryRequest {
@@ -23,15 +24,23 @@ interface QueryResponse {
   response: string;
 }

+interface CreateConversationRequest {
+  user_id: string;
+}
+
 class ConversationService {
   private baseUrl = "/api";
+  private conversationBaseUrl = "/api/conversation";

-  async sendQuery(query: string): Promise<QueryResponse> {
+  async sendQuery(
+    query: string,
+    conversation_id: string,
+  ): Promise<QueryResponse> {
     const response = await userService.fetchWithRefreshToken(
       `${this.baseUrl}/query`,
       {
         method: "POST",
-        body: JSON.stringify({ query }),
+        body: JSON.stringify({ query, conversation_id }),
       },
     );
@@ -56,6 +65,51 @@ class ConversationService {

     return await response.json();
   }
+
+  async getConversation(conversationId: string): Promise<Conversation> {
+    const response = await userService.fetchWithRefreshToken(
+      `${this.conversationBaseUrl}/${conversationId}`,
+      {
+        method: "GET",
+      },
+    );
+
+    if (!response.ok) {
+      throw new Error("Failed to fetch conversation");
+    }
+
+    return await response.json();
+  }
+
+  async createConversation(): Promise<Conversation> {
+    const response = await userService.fetchWithRefreshToken(
+      `${this.conversationBaseUrl}/`,
+      {
+        method: "POST",
+      },
+    );
+
+    if (!response.ok) {
+      throw new Error("Failed to create conversation");
+    }
+
+    return await response.json();
+  }
+
+  async getAllConversations(): Promise<Conversation[]> {
+    const response = await userService.fetchWithRefreshToken(
+      `${this.conversationBaseUrl}/`,
+      {
+        method: "GET",
+      },
+    );
+
+    if (!response.ok) {
+      throw new Error("Failed to fetch conversations");
+    }
+
+    return await response.json();
+  }
 }

 export const conversationService = new ConversationService();
```
raggr-frontend/src/assets/favicon.svg (new file, 3 lines, 163 B)

```diff
@@ -0,0 +1,3 @@
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 100">
+  <text y="80" font-size="80" font-family="system-ui, -apple-system, sans-serif">🐱</text>
+</svg>
```
```diff
@@ -2,6 +2,8 @@ import { useEffect, useState } from "react";
 import { conversationService } from "../api/conversationService";
 import { QuestionBubble } from "./QuestionBubble";
 import { AnswerBubble } from "./AnswerBubble";
+import { ConversationList } from "./ConversationList";
+import { parse } from "node:path/win32";

 type Message = {
   text: string;
@@ -33,13 +35,69 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
   const [conversations, setConversations] = useState<Conversation[]>([
     { title: "simba meow meow", id: "uuid" },
   ]);
+  const [showConversations, setShowConversations] = useState<boolean>(false);
+  const [selectedConversation, setSelectedConversation] =
+    useState<Conversation | null>(null);

   const simbaAnswers = ["meow.", "hiss...", "purrrrrr", "yowOWROWWowowr"];

-  useEffect(() => {
+  const handleSelectConversation = (conversation: Conversation) => {
+    setShowConversations(false);
+    setSelectedConversation(conversation);
     const loadMessages = async () => {
       try {
-        const conversation = await conversationService.getMessages();
+        const fetchedConversation = await conversationService.getConversation(
+          conversation.id,
+        );
         setMessages(
-          conversation.messages.map((message) => ({
+          fetchedConversation.messages.map((message) => ({
             text: message.text,
+            speaker: message.speaker,
+          })),
+        );
+      } catch (error) {
+        console.error("Failed to load messages:", error);
+      }
+    };
+    loadMessages();
+  };
+
+  const loadConversations = async () => {
+    try {
+      const fetchedConversations =
+        await conversationService.getAllConversations();
+      const parsedConversations = fetchedConversations.map((conversation) => ({
+        id: conversation.id,
+        title: conversation.name,
+      }));
+      setConversations(parsedConversations);
+      setSelectedConversation(parsedConversations[0]);
+      console.log(parsedConversations);
+    } catch (error) {
+      console.error("Failed to load messages:", error);
+    }
+  };
+
+  const handleCreateNewConversation = async () => {
+    const newConversation = await conversationService.createConversation();
+    await loadConversations();
+    setSelectedConversation({
+      title: newConversation.name,
+      id: newConversation.id,
+    });
+  };
+
+  useEffect(() => {
+    loadConversations();
+  }, []);
+
+  useEffect(() => {
+    const loadMessages = async () => {
+      if (selectedConversation == null) return;
+      try {
+        const conversation = await conversationService.getConversation(
+          selectedConversation.id,
+        );
+        setMessages(
+          conversation.messages.map((message) => ({
+            text: message.text,
@@ -51,7 +109,7 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
       }
     };
     loadMessages();
-  }, []);
+  }, [selectedConversation]);

   const handleQuestionSubmit = async () => {
     const currMessages = messages.concat([{ text: query, speaker: "user" }]);
@@ -74,7 +132,10 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
     }

     try {
-      const result = await conversationService.sendQuery(query);
+      const result = await conversationService.sendQuery(
+        query,
+        selectedConversation.id,
+      );
       setQuestionsAnswers(
         questionsAnswers.concat([{ question: query, answer: result.response }]),
       );
@@ -101,16 +162,33 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
     <div className="flex flex-row justify-center py-4">
       <div className="flex flex-col gap-4 min-w-xl max-w-xl">
         <div className="flex flex-row justify-between">
-          <header className="flex flex-row justify-center gap-2 grow sticky top-0 z-10 bg-white">
+          <header className="flex flex-row justify-center gap-2 sticky top-0 z-10 bg-white">
             <h1 className="text-3xl">ask simba!</h1>
           </header>
-          <button
-            className="p-4 border border-red-400 bg-red-200 hover:bg-red-400 cursor-pointer rounded-md"
-            onClick={() => setAuthenticated(false)}
-          >
-            logout
-          </button>
+          <div className="flex flex-row gap-2">
+            <button
+              className="p-2 border border-green-400 bg-green-200 hover:bg-green-400 cursor-pointer rounded-md"
+              onClick={() => setShowConversations(!showConversations)}
+            >
+              {showConversations
+                ? "hide conversations"
+                : "show conversations"}
+            </button>
+            <button
+              className="p-2 border border-red-400 bg-red-200 hover:bg-red-400 cursor-pointer rounded-md"
+              onClick={() => setAuthenticated(false)}
+            >
+              logout
+            </button>
+          </div>
         </div>
+        {showConversations && (
+          <ConversationList
+            conversations={conversations}
+            onCreateNewConversation={handleCreateNewConversation}
+            onSelectConversation={handleSelectConversation}
+          />
+        )}
         {messages.map((msg, index) => {
           if (msg.speaker === "simba") {
             return <AnswerBubble key={index} text={msg.text} />;
```
raggr-frontend/src/components/ConversationList.tsx (new file, 60 lines)
```diff
@@ -0,0 +1,60 @@
+import { useState, useEffect } from "react";
+
+import { conversationService } from "../api/conversationService";
+
+type Conversation = {
+  title: string;
+  id: string;
+};
+
+type ConversationProps = {
+  conversations: Conversation[];
+  onSelectConversation: (conversation: Conversation) => void;
+  onCreateNewConversation: () => void;
+};
+
+export const ConversationList = ({
+  conversations,
+  onSelectConversation,
+  onCreateNewConversation,
+}: ConversationProps) => {
+  const [conservations, setConversations] = useState(conversations);
+
+  useEffect(() => {
+    const loadConversations = async () => {
+      try {
+        const fetchedConversations =
+          await conversationService.getAllConversations();
+        setConversations(
+          fetchedConversations.map((conversation) => ({
+            id: conversation.id,
+            title: conversation.name,
+          })),
+        );
+      } catch (error) {
+        console.error("Failed to load messages:", error);
+      }
+    };
+    loadConversations();
+  }, []);
+
+  return (
+    <div className="bg-indigo-300 rounded-md p-3 flex flex-col">
+      {conservations.map((conversation) => {
+        return (
+          <div
+            className="border-blue-400 bg-indigo-300 hover:bg-indigo-200 cursor-pointer rounded-md p-2"
+            onClick={() => onSelectConversation(conversation)}
+          >
+            <p>{conversation.title}</p>
+          </div>
+        );
+      })}
+      <div
+        className="border-blue-400 bg-indigo-300 hover:bg-indigo-200 cursor-pointer rounded-md p-2"
+        onClick={() => onCreateNewConversation()}
+      >
+        <p> + Start a new thread</p>
+      </div>
+    </div>
+  );
+};
```