Compare commits
17 Commits
worktree-c
...
2fcf84f5d2
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2fcf84f5d2 | ||
|
|
142fac3a84 | ||
|
|
0415610d64 | ||
|
|
ac9c821ec7 | ||
|
|
0f88d211de | ||
|
|
6917f331d8 | ||
|
|
6a7b1369ad | ||
|
|
4621755c54 | ||
|
|
b6cd4e85f0 | ||
|
|
30d7f0a060 | ||
|
|
da9b52dda1 | ||
|
|
d1cb55ff1a | ||
|
|
53b2b3b366 | ||
|
|
03c7e0c951 | ||
|
|
97be5262a8 | ||
|
|
86cc269b3a | ||
|
|
0e3684031b |
40
.env.example
40
.env.example
@@ -54,3 +54,43 @@ OIDC_USE_DISCOVERY=true
|
|||||||
YNAB_ACCESS_TOKEN=your-ynab-personal-access-token
|
YNAB_ACCESS_TOKEN=your-ynab-personal-access-token
|
||||||
# Optional: Specify a budget ID, or leave empty to use the default/first budget
|
# Optional: Specify a budget ID, or leave empty to use the default/first budget
|
||||||
YNAB_BUDGET_ID=
|
YNAB_BUDGET_ID=
|
||||||
|
|
||||||
|
# Twilio Configuration (WhatsApp)
|
||||||
|
TWILIO_ACCOUNT_SID=your-twilio-account-sid
|
||||||
|
TWILIO_AUTH_TOKEN=your-twilio-auth-token
|
||||||
|
TWILIO_WHATSAPP_NUMBER=whatsapp:+14155238886
|
||||||
|
# Comma-separated list of WhatsApp numbers allowed to use the service (e.g., whatsapp:+1234567890)
|
||||||
|
# Use * to allow any number
|
||||||
|
ALLOWED_WHATSAPP_NUMBERS=
|
||||||
|
# Set to false to disable Twilio signature validation in development
|
||||||
|
TWILIO_SIGNATURE_VALIDATION=true
|
||||||
|
# If behind a reverse proxy, set this to your public webhook URL so signature validation works
|
||||||
|
# TWILIO_WEBHOOK_URL=https://your-domain.com/api/whatsapp/webhook
|
||||||
|
# Rate limiting: max messages per window (default: 10 messages per 60 seconds)
|
||||||
|
# WHATSAPP_RATE_LIMIT_MAX=10
|
||||||
|
# WHATSAPP_RATE_LIMIT_WINDOW=60
|
||||||
|
|
||||||
|
# Mailgun Configuration (Email channel)
|
||||||
|
MAILGUN_API_KEY=
|
||||||
|
MAILGUN_DOMAIN=
|
||||||
|
MAILGUN_WEBHOOK_SIGNING_KEY=
|
||||||
|
EMAIL_HMAC_SECRET=
|
||||||
|
# Rate limiting: max emails per window (default: 5 per 300 seconds)
|
||||||
|
# EMAIL_RATE_LIMIT_MAX=5
|
||||||
|
# EMAIL_RATE_LIMIT_WINDOW=300
|
||||||
|
# Set to false to disable Mailgun signature validation in development
|
||||||
|
MAILGUN_SIGNATURE_VALIDATION=true
|
||||||
|
|
||||||
|
# Obsidian Configuration (headless sync)
|
||||||
|
# Auth token from Obsidian account (Settings → Account → API token)
|
||||||
|
OBSIDIAN_AUTH_TOKEN=your-obsidian-auth-token
|
||||||
|
# Vault ID to sync (found in Obsidian sync settings)
|
||||||
|
OBSIDIAN_VAULT_ID=your-vault-id
|
||||||
|
# End-to-end encryption password (if vault uses E2E encryption)
|
||||||
|
OBSIDIAN_E2E_PASSWORD=
|
||||||
|
# Device name shown in Obsidian sync activity
|
||||||
|
OBSIDIAN_DEVICE_NAME=simbarag
|
||||||
|
# Set to true to run continuous sync in the background
|
||||||
|
OBSIDIAN_CONTINUOUS_SYNC=false
|
||||||
|
# Local path to Obsidian vault (where files are synced)
|
||||||
|
OBSIDIAN_VAULT_PATH=/app/data/obsidian
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -18,3 +18,6 @@ chromadb_openai/
|
|||||||
chroma_db/
|
chroma_db/
|
||||||
database/
|
database/
|
||||||
*.db
|
*.db
|
||||||
|
|
||||||
|
obvault/
|
||||||
|
.claude
|
||||||
|
|||||||
10
CLAUDE.md
10
CLAUDE.md
@@ -11,21 +11,21 @@ SimbaRAG is a RAG (Retrieval-Augmented Generation) conversational AI system for
|
|||||||
### Development
|
### Development
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Start dev environment with hot reload
|
# Start environment
|
||||||
docker compose -f docker-compose.dev.yml up --build
|
docker compose up --build
|
||||||
|
|
||||||
# View logs
|
# View logs
|
||||||
docker compose -f docker-compose.dev.yml logs -f raggr
|
docker compose logs -f raggr
|
||||||
```
|
```
|
||||||
|
|
||||||
### Database Migrations (Aerich/Tortoise ORM)
|
### Database Migrations (Aerich/Tortoise ORM)
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Generate migration (must run in Docker with DB access)
|
# Generate migration (must run in Docker with DB access)
|
||||||
docker compose -f docker-compose.dev.yml exec raggr aerich migrate --name describe_change
|
docker compose exec raggr aerich migrate --name describe_change
|
||||||
|
|
||||||
# Apply migrations (auto-runs on startup, manual if needed)
|
# Apply migrations (auto-runs on startup, manual if needed)
|
||||||
docker compose -f docker-compose.dev.yml exec raggr aerich upgrade
|
docker compose exec raggr aerich upgrade
|
||||||
|
|
||||||
# View migration history
|
# View migration history
|
||||||
docker compose exec raggr aerich history
|
docker compose exec raggr aerich history
|
||||||
|
|||||||
@@ -6,9 +6,9 @@ WORKDIR /app
|
|||||||
RUN apt-get update && apt-get install -y \
|
RUN apt-get update && apt-get install -y \
|
||||||
build-essential \
|
build-essential \
|
||||||
curl \
|
curl \
|
||||||
&& curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
|
&& curl -fsSL https://deb.nodesource.com/setup_22.x | bash - \
|
||||||
&& apt-get install -y nodejs \
|
&& apt-get install -y nodejs \
|
||||||
&& npm install -g yarn \
|
&& npm install -g yarn obsidian-headless \
|
||||||
&& rm -rf /var/lib/apt/lists/* \
|
&& rm -rf /var/lib/apt/lists/* \
|
||||||
&& curl -LsSf https://astral.sh/uv/install.sh | sh
|
&& curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||||
|
|
||||||
|
|||||||
45
app.py
45
app.py
@@ -1,20 +1,27 @@
|
|||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
from quart import Quart, jsonify, render_template, request, send_from_directory
|
from quart import Quart, jsonify, render_template, request, send_from_directory
|
||||||
from quart_jwt_extended import JWTManager, get_jwt_identity, jwt_refresh_token_required
|
from quart_jwt_extended import JWTManager, get_jwt_identity, jwt_refresh_token_required
|
||||||
from tortoise.contrib.quart import register_tortoise
|
from tortoise import Tortoise
|
||||||
|
|
||||||
import blueprints.conversation
|
import blueprints.conversation
|
||||||
import blueprints.conversation.logic
|
import blueprints.conversation.logic
|
||||||
import blueprints.rag
|
import blueprints.rag
|
||||||
import blueprints.users
|
import blueprints.users
|
||||||
|
import blueprints.whatsapp
|
||||||
|
import blueprints.email
|
||||||
import blueprints.users.models
|
import blueprints.users.models
|
||||||
|
from config.db import TORTOISE_CONFIG
|
||||||
from main import consult_simba_oracle
|
from main import consult_simba_oracle
|
||||||
|
|
||||||
# Load environment variables
|
# Load environment variables
|
||||||
load_dotenv()
|
load_dotenv()
|
||||||
|
|
||||||
|
# Configure logging
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
|
||||||
app = Quart(
|
app = Quart(
|
||||||
__name__,
|
__name__,
|
||||||
static_folder="raggr-frontend/dist/static",
|
static_folder="raggr-frontend/dist/static",
|
||||||
@@ -22,38 +29,26 @@ app = Quart(
|
|||||||
)
|
)
|
||||||
|
|
||||||
app.config["JWT_SECRET_KEY"] = os.getenv("JWT_SECRET_KEY", "SECRET_KEY")
|
app.config["JWT_SECRET_KEY"] = os.getenv("JWT_SECRET_KEY", "SECRET_KEY")
|
||||||
|
app.config["MAX_CONTENT_LENGTH"] = 10 * 1024 * 1024 # 10 MB upload limit
|
||||||
jwt = JWTManager(app)
|
jwt = JWTManager(app)
|
||||||
|
|
||||||
# Register blueprints
|
# Register blueprints
|
||||||
app.register_blueprint(blueprints.users.user_blueprint)
|
app.register_blueprint(blueprints.users.user_blueprint)
|
||||||
app.register_blueprint(blueprints.conversation.conversation_blueprint)
|
app.register_blueprint(blueprints.conversation.conversation_blueprint)
|
||||||
app.register_blueprint(blueprints.rag.rag_blueprint)
|
app.register_blueprint(blueprints.rag.rag_blueprint)
|
||||||
|
app.register_blueprint(blueprints.whatsapp.whatsapp_blueprint)
|
||||||
|
app.register_blueprint(blueprints.email.email_blueprint)
|
||||||
|
|
||||||
|
|
||||||
# Database configuration with environment variable support
|
# Initialize Tortoise ORM with lifecycle hooks
|
||||||
DATABASE_URL = os.getenv(
|
@app.while_serving
|
||||||
"DATABASE_URL", "postgres://raggr:raggr_dev_password@localhost:5432/raggr"
|
async def lifespan():
|
||||||
)
|
logging.info("Initializing Tortoise ORM...")
|
||||||
|
await Tortoise.init(config=TORTOISE_CONFIG)
|
||||||
TORTOISE_CONFIG = {
|
logging.info("Tortoise ORM initialized successfully")
|
||||||
"connections": {"default": DATABASE_URL},
|
yield
|
||||||
"apps": {
|
logging.info("Closing Tortoise ORM connections...")
|
||||||
"models": {
|
await Tortoise.close_connections()
|
||||||
"models": [
|
|
||||||
"blueprints.conversation.models",
|
|
||||||
"blueprints.users.models",
|
|
||||||
"aerich.models",
|
|
||||||
]
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# Initialize Tortoise ORM
|
|
||||||
register_tortoise(
|
|
||||||
app,
|
|
||||||
config=TORTOISE_CONFIG,
|
|
||||||
generate_schemas=False, # Disabled - using Aerich for migrations
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# Serve React static files
|
# Serve React static files
|
||||||
|
|||||||
@@ -1,12 +1,19 @@
|
|||||||
import datetime
|
import datetime
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
|
||||||
from quart import Blueprint, jsonify, request
|
from quart import Blueprint, Response, jsonify, make_response, request
|
||||||
from quart_jwt_extended import (
|
from quart_jwt_extended import (
|
||||||
get_jwt_identity,
|
get_jwt_identity,
|
||||||
jwt_refresh_token_required,
|
jwt_refresh_token_required,
|
||||||
)
|
)
|
||||||
|
|
||||||
import blueprints.users.models
|
import blueprints.users.models
|
||||||
|
from utils.image_process import analyze_user_image
|
||||||
|
from utils.image_upload import ImageValidationError, process_image
|
||||||
|
from utils.s3_client import get_image as s3_get_image
|
||||||
|
from utils.s3_client import upload_image as s3_upload_image
|
||||||
|
|
||||||
from .agents import main_agent
|
from .agents import main_agent
|
||||||
from .logic import (
|
from .logic import (
|
||||||
@@ -19,11 +26,41 @@ from .models import (
|
|||||||
PydConversation,
|
PydConversation,
|
||||||
PydListConversation,
|
PydListConversation,
|
||||||
)
|
)
|
||||||
|
from .prompts import SIMBA_SYSTEM_PROMPT
|
||||||
|
|
||||||
conversation_blueprint = Blueprint(
|
conversation_blueprint = Blueprint(
|
||||||
"conversation_api", __name__, url_prefix="/api/conversation"
|
"conversation_api", __name__, url_prefix="/api/conversation"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
_SYSTEM_PROMPT = SIMBA_SYSTEM_PROMPT
|
||||||
|
|
||||||
|
|
||||||
|
def _build_messages_payload(
|
||||||
|
conversation, query_text: str, image_description: str | None = None
|
||||||
|
) -> list:
|
||||||
|
recent_messages = (
|
||||||
|
conversation.messages[-10:]
|
||||||
|
if len(conversation.messages) > 10
|
||||||
|
else conversation.messages
|
||||||
|
)
|
||||||
|
messages_payload = [{"role": "system", "content": _SYSTEM_PROMPT}]
|
||||||
|
for msg in recent_messages[:-1]: # Exclude the message we just added
|
||||||
|
role = "user" if msg.speaker == "user" else "assistant"
|
||||||
|
text = msg.text
|
||||||
|
if msg.image_key and role == "user":
|
||||||
|
text = f"[User sent an image]\n{text}"
|
||||||
|
messages_payload.append({"role": role, "content": text})
|
||||||
|
|
||||||
|
# Build the current user message with optional image description
|
||||||
|
if image_description:
|
||||||
|
content = f"[Image analysis: {image_description}]"
|
||||||
|
if query_text:
|
||||||
|
content = f"{query_text}\n\n{content}"
|
||||||
|
else:
|
||||||
|
content = query_text
|
||||||
|
messages_payload.append({"role": "user", "content": content})
|
||||||
|
return messages_payload
|
||||||
|
|
||||||
|
|
||||||
@conversation_blueprint.post("/query")
|
@conversation_blueprint.post("/query")
|
||||||
@jwt_refresh_token_required
|
@jwt_refresh_token_required
|
||||||
@@ -42,68 +79,7 @@ async def query():
|
|||||||
user=user,
|
user=user,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Build conversation history from recent messages (last 10 for context)
|
messages_payload = _build_messages_payload(conversation, query)
|
||||||
recent_messages = (
|
|
||||||
conversation.messages[-10:]
|
|
||||||
if len(conversation.messages) > 10
|
|
||||||
else conversation.messages
|
|
||||||
)
|
|
||||||
|
|
||||||
messages_payload = [
|
|
||||||
{
|
|
||||||
"role": "system",
|
|
||||||
"content": """You are a helpful cat assistant named Simba that understands veterinary terms. When there are questions to you specifically, they are referring to Simba the cat. Answer the user in as if you were a cat named Simba. Don't act too catlike. Be assertive.
|
|
||||||
|
|
||||||
SIMBA FACTS (as of January 2026):
|
|
||||||
- Name: Simba
|
|
||||||
- Species: Feline (Domestic Short Hair / American Short Hair)
|
|
||||||
- Sex: Male, Neutered
|
|
||||||
- Date of Birth: August 8, 2016 (approximately 9 years 5 months old)
|
|
||||||
- Color: Orange
|
|
||||||
- Current Weight: 16 lbs (as of 1/8/2026)
|
|
||||||
- Owner: Ryan Chen
|
|
||||||
- Location: Long Island City, NY
|
|
||||||
- Veterinarian: Court Square Animal Hospital
|
|
||||||
|
|
||||||
Medical Conditions:
|
|
||||||
- Hypertrophic Cardiomyopathy (HCM): Diagnosed 12/11/2025. Concentric left ventricular hypertrophy with no left atrial dilation. Grade II-III/VI systolic heart murmur. No cardiac medications currently needed. Must avoid Domitor, acepromazine, and ketamine during anesthesia.
|
|
||||||
- Dental Issues: Prior extraction of teeth 307 and 407 due to resorption. Tooth 107 extracted on 1/8/2026. Early resorption lesions present on teeth 207, 309, and 409.
|
|
||||||
|
|
||||||
Recent Medical Events:
|
|
||||||
- 1/8/2026: Dental cleaning and tooth 107 extraction. Prescribed Onsior for 3 days. Oravet sealant applied.
|
|
||||||
- 12/11/2025: Echocardiogram confirming HCM diagnosis. Pre-op bloodwork was normal.
|
|
||||||
- 12/1/2025: Visited for decreased appetite/nausea. Received subcutaneous fluids and Cerenia.
|
|
||||||
|
|
||||||
Diet & Lifestyle:
|
|
||||||
- Diet: Hill's I/D wet and dry food
|
|
||||||
- Supplements: Plaque Off
|
|
||||||
- Indoor only cat, only pet in the household
|
|
||||||
|
|
||||||
Upcoming Appointments:
|
|
||||||
- Rabies Vaccine: Due 2/19/2026
|
|
||||||
- Routine Examination: Due 6/1/2026
|
|
||||||
- FVRCP-3yr Vaccine: Due 10/2/2026
|
|
||||||
|
|
||||||
IMPORTANT: When users ask factual questions about Simba's health, medical history, veterinary visits, medications, weight, or any information that would be in documents, you MUST use the simba_search tool to retrieve accurate information before answering. Do not rely on general knowledge - always search the documents for factual questions.
|
|
||||||
|
|
||||||
BUDGET & FINANCE (YNAB Integration):
|
|
||||||
You have access to Ryan's budget data through YNAB (You Need A Budget). When users ask about financial matters, use the appropriate YNAB tools:
|
|
||||||
- Use ynab_budget_summary for overall budget health and status questions
|
|
||||||
- Use ynab_search_transactions to find specific purchases or spending at particular stores
|
|
||||||
- Use ynab_category_spending to analyze spending by category for a month
|
|
||||||
- Use ynab_insights to provide spending trends, patterns, and recommendations
|
|
||||||
Always use these tools when asked about budgets, spending, transactions, or financial health.""",
|
|
||||||
}
|
|
||||||
]
|
|
||||||
|
|
||||||
# Add recent conversation history
|
|
||||||
for msg in recent_messages[:-1]: # Exclude the message we just added
|
|
||||||
role = "user" if msg.speaker == "user" else "assistant"
|
|
||||||
messages_payload.append({"role": role, "content": msg.text})
|
|
||||||
|
|
||||||
# Add current query
|
|
||||||
messages_payload.append({"role": "user", "content": query})
|
|
||||||
|
|
||||||
payload = {"messages": messages_payload}
|
payload = {"messages": messages_payload}
|
||||||
|
|
||||||
response = await main_agent.ainvoke(payload)
|
response = await main_agent.ainvoke(payload)
|
||||||
@@ -117,6 +93,142 @@ Always use these tools when asked about budgets, spending, transactions, or fina
|
|||||||
return jsonify({"response": message})
|
return jsonify({"response": message})
|
||||||
|
|
||||||
|
|
||||||
|
@conversation_blueprint.post("/upload-image")
|
||||||
|
@jwt_refresh_token_required
|
||||||
|
async def upload_image():
|
||||||
|
current_user_uuid = get_jwt_identity()
|
||||||
|
await blueprints.users.models.User.get(id=current_user_uuid)
|
||||||
|
|
||||||
|
files = await request.files
|
||||||
|
form = await request.form
|
||||||
|
file = files.get("file")
|
||||||
|
conversation_id = form.get("conversation_id")
|
||||||
|
|
||||||
|
if not file or not conversation_id:
|
||||||
|
return jsonify({"error": "file and conversation_id are required"}), 400
|
||||||
|
|
||||||
|
file_bytes = file.read()
|
||||||
|
content_type = file.content_type or "image/jpeg"
|
||||||
|
|
||||||
|
try:
|
||||||
|
processed_bytes, output_content_type = process_image(file_bytes, content_type)
|
||||||
|
except ImageValidationError as e:
|
||||||
|
return jsonify({"error": str(e)}), 400
|
||||||
|
|
||||||
|
ext = output_content_type.split("/")[-1]
|
||||||
|
if ext == "jpeg":
|
||||||
|
ext = "jpg"
|
||||||
|
key = f"conversations/{conversation_id}/{uuid.uuid4()}.{ext}"
|
||||||
|
|
||||||
|
await s3_upload_image(processed_bytes, key, output_content_type)
|
||||||
|
|
||||||
|
return jsonify(
|
||||||
|
{
|
||||||
|
"image_key": key,
|
||||||
|
"image_url": f"/api/conversation/image/{key}",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@conversation_blueprint.get("/image/<path:image_key>")
|
||||||
|
@jwt_refresh_token_required
|
||||||
|
async def serve_image(image_key: str):
|
||||||
|
try:
|
||||||
|
image_bytes, content_type = await s3_get_image(image_key)
|
||||||
|
except Exception:
|
||||||
|
return jsonify({"error": "Image not found"}), 404
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
image_bytes,
|
||||||
|
content_type=content_type,
|
||||||
|
headers={"Cache-Control": "private, max-age=3600"},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@conversation_blueprint.post("/stream-query")
|
||||||
|
@jwt_refresh_token_required
|
||||||
|
async def stream_query():
|
||||||
|
current_user_uuid = get_jwt_identity()
|
||||||
|
user = await blueprints.users.models.User.get(id=current_user_uuid)
|
||||||
|
data = await request.get_json()
|
||||||
|
query_text = data.get("query")
|
||||||
|
conversation_id = data.get("conversation_id")
|
||||||
|
image_key = data.get("image_key")
|
||||||
|
conversation = await get_conversation_by_id(conversation_id)
|
||||||
|
await conversation.fetch_related("messages")
|
||||||
|
await add_message_to_conversation(
|
||||||
|
conversation=conversation,
|
||||||
|
message=query_text or "",
|
||||||
|
speaker="user",
|
||||||
|
user=user,
|
||||||
|
image_key=image_key,
|
||||||
|
)
|
||||||
|
|
||||||
|
# If an image was uploaded, analyze it with the vision model
|
||||||
|
image_description = None
|
||||||
|
if image_key:
|
||||||
|
try:
|
||||||
|
image_bytes, _ = await s3_get_image(image_key)
|
||||||
|
image_description = await analyze_user_image(image_bytes)
|
||||||
|
logging.info(f"Image analysis complete for {image_key}")
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(f"Failed to analyze image: {e}")
|
||||||
|
image_description = "[Image could not be analyzed]"
|
||||||
|
|
||||||
|
messages_payload = _build_messages_payload(
|
||||||
|
conversation, query_text or "", image_description
|
||||||
|
)
|
||||||
|
payload = {"messages": messages_payload}
|
||||||
|
|
||||||
|
async def event_generator():
|
||||||
|
final_message = None
|
||||||
|
try:
|
||||||
|
async for event in main_agent.astream_events(payload, version="v2"):
|
||||||
|
event_type = event.get("event")
|
||||||
|
|
||||||
|
if event_type == "on_tool_start":
|
||||||
|
yield f"data: {json.dumps({'type': 'tool_start', 'tool': event['name']})}\n\n"
|
||||||
|
|
||||||
|
elif event_type == "on_tool_end":
|
||||||
|
yield f"data: {json.dumps({'type': 'tool_end', 'tool': event['name']})}\n\n"
|
||||||
|
|
||||||
|
elif event_type == "on_chain_end":
|
||||||
|
output = event.get("data", {}).get("output")
|
||||||
|
if isinstance(output, dict):
|
||||||
|
msgs = output.get("messages", [])
|
||||||
|
if msgs:
|
||||||
|
last_msg = msgs[-1]
|
||||||
|
content = getattr(last_msg, "content", None)
|
||||||
|
if isinstance(content, str) and content:
|
||||||
|
final_message = content
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
yield f"data: {json.dumps({'type': 'error', 'message': str(e)})}\n\n"
|
||||||
|
|
||||||
|
if final_message:
|
||||||
|
await add_message_to_conversation(
|
||||||
|
conversation=conversation,
|
||||||
|
message=final_message,
|
||||||
|
speaker="simba",
|
||||||
|
user=user,
|
||||||
|
)
|
||||||
|
yield f"data: {json.dumps({'type': 'response', 'message': final_message})}\n\n"
|
||||||
|
else:
|
||||||
|
yield f"data: {json.dumps({'type': 'error', 'message': 'No response generated'})}\n\n"
|
||||||
|
|
||||||
|
yield "data: [DONE]\n\n"
|
||||||
|
|
||||||
|
return await make_response(
|
||||||
|
event_generator(),
|
||||||
|
200,
|
||||||
|
{
|
||||||
|
"Content-Type": "text/event-stream",
|
||||||
|
"Cache-Control": "no-cache",
|
||||||
|
"X-Accel-Buffering": "no",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@conversation_blueprint.route("/<conversation_id>")
|
@conversation_blueprint.route("/<conversation_id>")
|
||||||
@jwt_refresh_token_required
|
@jwt_refresh_token_required
|
||||||
async def get_conversation(conversation_id: str):
|
async def get_conversation(conversation_id: str):
|
||||||
@@ -134,6 +246,7 @@ async def get_conversation(conversation_id: str):
|
|||||||
"text": msg.text,
|
"text": msg.text,
|
||||||
"speaker": msg.speaker.value,
|
"speaker": msg.speaker.value,
|
||||||
"created_at": msg.created_at.isoformat(),
|
"created_at": msg.created_at.isoformat(),
|
||||||
|
"image_key": msg.image_key,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
name = conversation.name
|
name = conversation.name
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ from langchain_openai import ChatOpenAI
|
|||||||
from tavily import AsyncTavilyClient
|
from tavily import AsyncTavilyClient
|
||||||
|
|
||||||
from blueprints.rag.logic import query_vector_store
|
from blueprints.rag.logic import query_vector_store
|
||||||
|
from utils.obsidian_service import ObsidianService
|
||||||
from utils.ynab_service import YNABService
|
from utils.ynab_service import YNABService
|
||||||
|
|
||||||
# Load environment variables
|
# Load environment variables
|
||||||
@@ -40,6 +41,32 @@ except (ValueError, Exception) as e:
|
|||||||
print(f"YNAB service not initialized: {e}")
|
print(f"YNAB service not initialized: {e}")
|
||||||
ynab_enabled = False
|
ynab_enabled = False
|
||||||
|
|
||||||
|
# Initialize Obsidian service (will only work if OBSIDIAN_VAULT_PATH is set)
|
||||||
|
try:
|
||||||
|
obsidian_service = ObsidianService()
|
||||||
|
obsidian_enabled = True
|
||||||
|
except (ValueError, Exception) as e:
|
||||||
|
print(f"Obsidian service not initialized: {e}")
|
||||||
|
obsidian_enabled = False
|
||||||
|
|
||||||
|
|
||||||
|
@tool
|
||||||
|
def get_current_date() -> str:
|
||||||
|
"""Get today's date in a human-readable format.
|
||||||
|
|
||||||
|
Use this tool when you need to:
|
||||||
|
- Reference today's date in your response
|
||||||
|
- Answer questions like "what is today's date"
|
||||||
|
- Format dates in messages or documents
|
||||||
|
- Calculate time periods relative to today
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Today's date in YYYY-MM-DD format
|
||||||
|
"""
|
||||||
|
from datetime import date
|
||||||
|
|
||||||
|
return date.today().isoformat()
|
||||||
|
|
||||||
|
|
||||||
@tool
|
@tool
|
||||||
async def web_search(query: str) -> str:
|
async def web_search(query: str) -> str:
|
||||||
@@ -279,8 +306,291 @@ def ynab_insights(months_back: int = 3) -> str:
|
|||||||
return f"Error generating insights: {str(e)}"
|
return f"Error generating insights: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
|
@tool
|
||||||
|
async def obsidian_search_notes(query: str) -> str:
|
||||||
|
"""Search through Obsidian vault notes for information.
|
||||||
|
|
||||||
|
Use this tool when you need to:
|
||||||
|
- Find information in personal notes
|
||||||
|
- Research past ideas or thoughts from your vault
|
||||||
|
- Look up information stored in markdown files
|
||||||
|
- Search for content that would be in your notes
|
||||||
|
|
||||||
|
Args:
|
||||||
|
query: The search query to look up in your Obsidian vault
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Relevant notes with their content and metadata
|
||||||
|
"""
|
||||||
|
if not obsidian_enabled:
|
||||||
|
return "Obsidian integration is not configured. Please set OBSIDIAN_VAULT_PATH environment variable."
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Query ChromaDB for obsidian documents
|
||||||
|
serialized, docs = await query_vector_store(query=query)
|
||||||
|
return serialized
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
return f"Error searching Obsidian notes: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
|
@tool
|
||||||
|
async def obsidian_read_note(relative_path: str) -> str:
|
||||||
|
"""Read a specific note from your Obsidian vault.
|
||||||
|
|
||||||
|
Use this tool when you want to:
|
||||||
|
- Read the full content of a specific note
|
||||||
|
- Get detailed information from a particular markdown file
|
||||||
|
- Access content from a known note path
|
||||||
|
|
||||||
|
Args:
|
||||||
|
relative_path: Path to note relative to vault root (e.g., "notes/my-note.md")
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Full content and metadata of the requested note
|
||||||
|
"""
|
||||||
|
if not obsidian_enabled:
|
||||||
|
return "Obsidian integration is not configured. Please set OBSIDIAN_VAULT_PATH environment variable."
|
||||||
|
|
||||||
|
try:
|
||||||
|
note = obsidian_service.read_note(relative_path)
|
||||||
|
content_data = note["content"]
|
||||||
|
|
||||||
|
result = f"File: {note['path']}\n\n"
|
||||||
|
result += f"Frontmatter:\n{content_data['metadata']}\n\n"
|
||||||
|
result += f"Content:\n{content_data['content']}\n\n"
|
||||||
|
result += f"Tags: {', '.join(content_data['tags'])}\n"
|
||||||
|
result += f"Contains {len(content_data['wikilinks'])} wikilinks and {len(content_data['embeds'])} embeds"
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
except FileNotFoundError:
|
||||||
|
return f"Note not found at '{relative_path}'. Please check the path is correct."
|
||||||
|
except Exception as e:
|
||||||
|
return f"Error reading note: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
|
@tool
|
||||||
|
async def obsidian_create_note(
|
||||||
|
title: str,
|
||||||
|
content: str,
|
||||||
|
folder: str = "notes",
|
||||||
|
tags: str = "",
|
||||||
|
) -> str:
|
||||||
|
"""Create a new note in your Obsidian vault.
|
||||||
|
|
||||||
|
Use this tool when you want to:
|
||||||
|
- Save research findings or ideas to your vault
|
||||||
|
- Create a new document with a specific title
|
||||||
|
- Write notes for future reference
|
||||||
|
|
||||||
|
Args:
|
||||||
|
title: The title of the note (will be used as filename)
|
||||||
|
content: The body content of the note
|
||||||
|
folder: The folder where to create the note (default: "notes")
|
||||||
|
tags: Comma-separated list of tags to add (default: "")
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to the created note
|
||||||
|
"""
|
||||||
|
if not obsidian_enabled:
|
||||||
|
return "Obsidian integration is not configured. Please set OBSIDIAN_VAULT_PATH environment variable."
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Parse tags from comma-separated string
|
||||||
|
tag_list = [tag.strip() for tag in tags.split(",") if tag.strip()]
|
||||||
|
|
||||||
|
relative_path = obsidian_service.create_note(
|
||||||
|
title=title,
|
||||||
|
content=content,
|
||||||
|
folder=folder,
|
||||||
|
tags=tag_list,
|
||||||
|
)
|
||||||
|
|
||||||
|
return f"Successfully created note: {relative_path}"
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
return f"Error creating note: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
|
@tool
|
||||||
|
def journal_get_today() -> str:
|
||||||
|
"""Get today's daily journal note, including all tasks and log entries.
|
||||||
|
|
||||||
|
Use this tool when the user asks about:
|
||||||
|
- What's on their plate today
|
||||||
|
- Today's tasks or to-do list
|
||||||
|
- Today's journal entry
|
||||||
|
- What they've logged today
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The full content of today's daily note, or a message if it doesn't exist.
|
||||||
|
"""
|
||||||
|
if not obsidian_enabled:
|
||||||
|
return "Obsidian integration is not configured."
|
||||||
|
|
||||||
|
try:
|
||||||
|
note = obsidian_service.get_daily_note()
|
||||||
|
if not note["found"]:
|
||||||
|
return f"No daily note found for {note['date']}. Use journal_add_task to create one."
|
||||||
|
return f"Daily note for {note['date']}:\n\n{note['content']}"
|
||||||
|
except Exception as e:
|
||||||
|
return f"Error reading daily note: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
|
@tool
|
||||||
|
def journal_get_tasks(date: str = "") -> str:
|
||||||
|
"""Get tasks from a daily journal note.
|
||||||
|
|
||||||
|
Use this tool when the user asks about:
|
||||||
|
- Open or pending tasks for a day
|
||||||
|
- What tasks are done or not done
|
||||||
|
- Task status for today or a specific date
|
||||||
|
|
||||||
|
Args:
|
||||||
|
date: Date in YYYY-MM-DD format (optional, defaults to today)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of tasks with their completion status.
|
||||||
|
"""
|
||||||
|
if not obsidian_enabled:
|
||||||
|
return "Obsidian integration is not configured."
|
||||||
|
|
||||||
|
try:
|
||||||
|
from datetime import datetime as dt
|
||||||
|
|
||||||
|
parsed_date = dt.strptime(date, "%Y-%m-%d") if date else None
|
||||||
|
result = obsidian_service.get_daily_tasks(parsed_date)
|
||||||
|
|
||||||
|
if not result["found"]:
|
||||||
|
return f"No daily note found for {result['date']}."
|
||||||
|
|
||||||
|
if not result["tasks"]:
|
||||||
|
return f"No tasks found in the {result['date']} note."
|
||||||
|
|
||||||
|
lines = [f"Tasks for {result['date']}:"]
|
||||||
|
for task in result["tasks"]:
|
||||||
|
status = "[x]" if task["done"] else "[ ]"
|
||||||
|
lines.append(f"- {status} {task['text']}")
|
||||||
|
return "\n".join(lines)
|
||||||
|
except Exception as e:
|
||||||
|
return f"Error reading tasks: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
|
@tool
|
||||||
|
def journal_add_task(task: str, date: str = "") -> str:
|
||||||
|
"""Add a task to a daily journal note.
|
||||||
|
|
||||||
|
Use this tool when the user wants to:
|
||||||
|
- Add a task or to-do to today's note
|
||||||
|
- Remind themselves to do something
|
||||||
|
- Track a new item in their daily note
|
||||||
|
|
||||||
|
Args:
|
||||||
|
task: The task description to add
|
||||||
|
date: Date in YYYY-MM-DD format (optional, defaults to today)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Confirmation of the added task.
|
||||||
|
"""
|
||||||
|
if not obsidian_enabled:
|
||||||
|
return "Obsidian integration is not configured."
|
||||||
|
|
||||||
|
try:
|
||||||
|
from datetime import datetime as dt
|
||||||
|
|
||||||
|
parsed_date = dt.strptime(date, "%Y-%m-%d") if date else None
|
||||||
|
result = obsidian_service.add_task_to_daily_note(task, parsed_date)
|
||||||
|
|
||||||
|
if result["success"]:
|
||||||
|
note_date = date or dt.now().strftime("%Y-%m-%d")
|
||||||
|
extra = " (created new note)" if result["created_note"] else ""
|
||||||
|
return f"Added task '{task}' to {note_date}{extra}."
|
||||||
|
return "Failed to add task."
|
||||||
|
except Exception as e:
|
||||||
|
return f"Error adding task: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
|
@tool
def journal_complete_task(task: str, date: str = "") -> str:
    """Check off a task in a daily journal note.

    Intended for requests like:
    - Check off a task as done
    - Mark something as completed
    - Update task status in their daily note

    Args:
        task: The task text to mark complete (exact or partial match)
        date: Date in YYYY-MM-DD format (optional, defaults to today)

    Returns:
        Confirmation that the task was marked complete.
    """
    if not obsidian_enabled:
        return "Obsidian integration is not configured."

    try:
        from datetime import datetime as dt

        target_day = dt.strptime(date, "%Y-%m-%d") if date else None
        outcome = obsidian_service.complete_task_in_daily_note(task, target_day)
        if not outcome["success"]:
            return f"Could not complete task: {outcome.get('error', 'unknown error')}"
        return f"Marked '{outcome['completed_task']}' as complete."
    except Exception as e:
        return f"Error completing task: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
|
@tool
async def obsidian_create_task(
    title: str,
    content: str = "",
    folder: str = "tasks",
    due_date: str = "",
    tags: str = "",
) -> str:
    """Create a new task note in your Obsidian vault.

    Reach for this tool to:
    - Create a task to remember to do something
    - Add a task with a due date
    - Track tasks in your vault

    Args:
        title: The title of the task
        content: The description of the task (optional)
        folder: The folder to place the task (default: "tasks")
        due_date: Due date in YYYY-MM-DD format (optional)
        tags: Comma-separated list of tags to add (optional)

    Returns:
        Path to the created task note
    """
    if not obsidian_enabled:
        return "Obsidian integration is not configured. Please set OBSIDIAN_VAULT_PATH environment variable."

    try:
        # Normalize the comma-separated tag string, dropping blanks.
        cleaned_tags = []
        for raw_tag in tags.split(","):
            stripped = raw_tag.strip()
            if stripped:
                cleaned_tags.append(stripped)

        note_path = obsidian_service.create_task(
            title=title,
            content=content,
            folder=folder,
            due_date=due_date or None,
            tags=cleaned_tags,
        )
        return f"Successfully created task: {note_path}"
    except Exception as e:
        return f"Error creating task: {str(e)}"
|
||||||
|
|
||||||
|
|
||||||
# Create tools list based on what's available
|
# Create tools list based on what's available
|
||||||
tools = [simba_search, web_search]
|
tools = [get_current_date, simba_search, web_search]
|
||||||
if ynab_enabled:
|
if ynab_enabled:
|
||||||
tools.extend(
|
tools.extend(
|
||||||
[
|
[
|
||||||
@@ -290,6 +600,19 @@ if ynab_enabled:
|
|||||||
ynab_insights,
|
ynab_insights,
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
if obsidian_enabled:
|
||||||
|
tools.extend(
|
||||||
|
[
|
||||||
|
obsidian_search_notes,
|
||||||
|
obsidian_read_note,
|
||||||
|
obsidian_create_note,
|
||||||
|
obsidian_create_task,
|
||||||
|
journal_get_today,
|
||||||
|
journal_get_tasks,
|
||||||
|
journal_add_task,
|
||||||
|
journal_complete_task,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
# Llama 3.1 supports native function calling via OpenAI-compatible API
|
# Llama 3.1 supports native function calling via OpenAI-compatible API
|
||||||
main_agent = create_agent(model=model_with_fallback, tools=tools)
|
main_agent = create_agent(model=model_with_fallback, tools=tools)
|
||||||
|
|||||||
@@ -16,12 +16,14 @@ async def add_message_to_conversation(
|
|||||||
message: str,
|
message: str,
|
||||||
speaker: str,
|
speaker: str,
|
||||||
user: blueprints.users.models.User,
|
user: blueprints.users.models.User,
|
||||||
|
image_key: str | None = None,
|
||||||
) -> ConversationMessage:
|
) -> ConversationMessage:
|
||||||
print(conversation, message, speaker)
|
print(conversation, message, speaker)
|
||||||
message = await ConversationMessage.create(
|
message = await ConversationMessage.create(
|
||||||
text=message,
|
text=message,
|
||||||
speaker=speaker,
|
speaker=speaker,
|
||||||
conversation=conversation,
|
conversation=conversation,
|
||||||
|
image_key=image_key,
|
||||||
)
|
)
|
||||||
|
|
||||||
return message
|
return message
|
||||||
|
|||||||
@@ -41,6 +41,7 @@ class ConversationMessage(Model):
|
|||||||
)
|
)
|
||||||
created_at = fields.DatetimeField(auto_now_add=True)
|
created_at = fields.DatetimeField(auto_now_add=True)
|
||||||
speaker = fields.CharEnumField(enum_type=Speaker, max_length=10)
|
speaker = fields.CharEnumField(enum_type=Speaker, max_length=10)
|
||||||
|
image_key = fields.CharField(max_length=512, null=True, default=None)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
table = "conversation_messages"
|
table = "conversation_messages"
|
||||||
|
|||||||
57
blueprints/conversation/prompts.py
Normal file
57
blueprints/conversation/prompts.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
SIMBA_SYSTEM_PROMPT = """You are a helpful cat assistant named Simba that understands veterinary terms. When there are questions to you specifically, they are referring to Simba the cat. Answer the user in as if you were a cat named Simba. Don't act too catlike. Be assertive.
|
||||||
|
|
||||||
|
SIMBA FACTS (as of January 2026):
|
||||||
|
- Name: Simba
|
||||||
|
- Species: Feline (Domestic Short Hair / American Short Hair)
|
||||||
|
- Sex: Male, Neutered
|
||||||
|
- Date of Birth: August 8, 2016 (approximately 9 years 5 months old)
|
||||||
|
- Color: Orange
|
||||||
|
- Current Weight: 16 lbs (as of 1/8/2026)
|
||||||
|
- Owner: Ryan Chen
|
||||||
|
- Location: Long Island City, NY
|
||||||
|
- Veterinarian: Court Square Animal Hospital
|
||||||
|
|
||||||
|
Medical Conditions:
|
||||||
|
- Hypertrophic Cardiomyopathy (HCM): Diagnosed 12/11/2025. Concentric left ventricular hypertrophy with no left atrial dilation. Grade II-III/VI systolic heart murmur. No cardiac medications currently needed. Must avoid Domitor, acepromazine, and ketamine during anesthesia.
|
||||||
|
- Dental Issues: Prior extraction of teeth 307 and 407 due to resorption. Tooth 107 extracted on 1/8/2026. Early resorption lesions present on teeth 207, 309, and 409.
|
||||||
|
|
||||||
|
Recent Medical Events:
|
||||||
|
- 1/8/2026: Dental cleaning and tooth 107 extraction. Prescribed Onsior for 3 days. Oravet sealant applied.
|
||||||
|
- 12/11/2025: Echocardiogram confirming HCM diagnosis. Pre-op bloodwork was normal.
|
||||||
|
- 12/1/2025: Visited for decreased appetite/nausea. Received subcutaneous fluids and Cerenia.
|
||||||
|
|
||||||
|
Diet & Lifestyle:
|
||||||
|
- Diet: Hill's I/D wet and dry food
|
||||||
|
- Supplements: Plaque Off
|
||||||
|
- Indoor only cat, only pet in the household
|
||||||
|
|
||||||
|
Upcoming Appointments:
|
||||||
|
- Rabies Vaccine: Due 2/19/2026
|
||||||
|
- Routine Examination: Due 6/1/2026
|
||||||
|
- FVRCP-3yr Vaccine: Due 10/2/2026
|
||||||
|
|
||||||
|
IMPORTANT: When users ask factual questions about Simba's health, medical history, veterinary visits, medications, weight, or any information that would be in documents, you MUST use the simba_search tool to retrieve accurate information before answering. Do not rely on general knowledge - always search the documents for factual questions.
|
||||||
|
|
||||||
|
BUDGET & FINANCE (YNAB Integration):
|
||||||
|
You have access to Ryan's budget data through YNAB (You Need A Budget). When users ask about financial matters, use the appropriate YNAB tools:
|
||||||
|
- Use ynab_budget_summary for overall budget health and status questions
|
||||||
|
- Use ynab_search_transactions to find specific purchases or spending at particular stores
|
||||||
|
- Use ynab_category_spending to analyze spending by category for a month
|
||||||
|
- Use ynab_insights to provide spending trends, patterns, and recommendations
|
||||||
|
Always use these tools when asked about budgets, spending, transactions, or financial health.
|
||||||
|
|
||||||
|
NOTES & RESEARCH (Obsidian Integration):
|
||||||
|
You have access to Ryan's Obsidian vault through the Obsidian integration. When users ask about research, personal notes, or information that might be stored in markdown files, use the appropriate Obsidian tools:
|
||||||
|
- Use obsidian_search_notes to search through your vault for relevant information
|
||||||
|
- Use obsidian_read_note to read the full content of a specific note by path
|
||||||
|
- Use obsidian_create_note to save new findings, ideas, or research to your vault
|
||||||
|
- Use obsidian_create_task to create task notes with due dates
|
||||||
|
Always use these tools when users ask about notes, research, ideas, tasks, or when you want to save information for future reference.
|
||||||
|
|
||||||
|
DAILY JOURNAL (Task Tracking):
|
||||||
|
You have access to Ryan's daily journal notes. Each note lives at journal/YYYY/YYYY-MM-DD.md and has two sections: tasks and log.
|
||||||
|
- Use journal_get_today to read today's full daily note (tasks + log)
|
||||||
|
- Use journal_get_tasks to list tasks (done/pending) for today or a specific date
|
||||||
|
- Use journal_add_task to add a new task to today's (or a given date's) note
|
||||||
|
- Use journal_complete_task to check off a task as done
|
||||||
|
Use these tools when Ryan asks about today's tasks, wants to add something to his list, or wants to mark a task complete."""
|
||||||
217
blueprints/email/__init__.py
Normal file
217
blueprints/email/__init__.py
Normal file
@@ -0,0 +1,217 @@
|
|||||||
|
import os
|
||||||
|
import hmac
|
||||||
|
import hashlib
|
||||||
|
import logging
|
||||||
|
import functools
|
||||||
|
import time
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from quart import Blueprint, request
|
||||||
|
|
||||||
|
from blueprints.users.models import User
|
||||||
|
from blueprints.conversation.logic import (
|
||||||
|
get_conversation_for_user,
|
||||||
|
add_message_to_conversation,
|
||||||
|
get_conversation_transcript,
|
||||||
|
)
|
||||||
|
from blueprints.conversation.agents import main_agent
|
||||||
|
from blueprints.conversation.prompts import SIMBA_SYSTEM_PROMPT
|
||||||
|
from .helpers import generate_email_token, get_user_email_address # noqa: F401
|
||||||
|
|
||||||
|
email_blueprint = Blueprint("email_api", __name__, url_prefix="/api/email")
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Rate limiting: per-sender message timestamps
|
||||||
|
_rate_limit_store: dict[str, list[float]] = defaultdict(list)
|
||||||
|
|
||||||
|
RATE_LIMIT_MAX = int(os.getenv("EMAIL_RATE_LIMIT_MAX", "5"))
|
||||||
|
RATE_LIMIT_WINDOW = int(os.getenv("EMAIL_RATE_LIMIT_WINDOW", "300"))
|
||||||
|
|
||||||
|
MAX_MESSAGE_LENGTH = 2000
|
||||||
|
|
||||||
|
|
||||||
|
# --- Mailgun signature validation ---
|
||||||
|
|
||||||
|
def validate_mailgun_signature(f):
    """Decorator that rejects requests lacking a valid Mailgun webhook signature.

    Validation can be switched off for local development by setting
    MAILGUN_SIGNATURE_VALIDATION=false. Unverifiable requests receive an
    empty-body 406 response.
    """
    @functools.wraps(f)
    async def wrapper(*args, **kwargs):
        # Explicit opt-out, e.g. for local development behind no proxy.
        if os.getenv("MAILGUN_SIGNATURE_VALIDATION", "true").lower() == "false":
            return await f(*args, **kwargs)

        signing_key = os.getenv("MAILGUN_WEBHOOK_SIGNING_KEY")
        if not signing_key:
            logger.error("MAILGUN_WEBHOOK_SIGNING_KEY not set — rejecting request")
            return "", 406

        form = await request.form
        timestamp = form.get("timestamp", "")
        token = form.get("token", "")
        signature = form.get("signature", "")
        if not (timestamp and token and signature):
            logger.warning("Missing Mailgun signature fields")
            return "", 406

        # Mailgun signs HMAC-SHA256(signing_key, timestamp + token).
        computed = hmac.new(
            signing_key.encode(), f"{timestamp}{token}".encode(), hashlib.sha256
        ).hexdigest()
        # Constant-time comparison to avoid timing side channels.
        if not hmac.compare_digest(computed, signature):
            logger.warning("Invalid Mailgun signature")
            return "", 406

        return await f(*args, **kwargs)

    return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
# --- Rate limiting ---
|
||||||
|
|
||||||
|
def _check_rate_limit(sender: str) -> bool:
    """Sliding-window rate limiter keyed by sender address.

    Discards timestamps older than RATE_LIMIT_WINDOW seconds, then allows
    and records this request unless RATE_LIMIT_MAX recent ones remain.

    Returns True if the request is allowed, False if rate-limited.
    """
    now = time.monotonic()
    window_start = now - RATE_LIMIT_WINDOW

    recent = [stamp for stamp in _rate_limit_store[sender] if stamp > window_start]
    _rate_limit_store[sender] = recent

    if len(recent) >= RATE_LIMIT_MAX:
        return False

    recent.append(now)
    return True
|
||||||
|
|
||||||
|
|
||||||
|
# --- Send reply via Mailgun API ---
|
||||||
|
|
||||||
|
async def send_email_reply(to: str, subject: str, body: str, in_reply_to: str | None = None):
    """Send a plain-text reply email via the Mailgun messages API.

    Args:
        to: Recipient address.
        subject: Subject of the email being replied to. A "Re: " prefix is
            added unless one is already present (checked case-insensitively).
        body: Plain-text message body.
        in_reply_to: Optional Message-Id to thread the reply under.

    Delivery is best-effort: missing configuration or an API failure is
    logged and the function returns without raising.
    """
    api_key = os.getenv("MAILGUN_API_KEY")
    domain = os.getenv("MAILGUN_DOMAIN")
    if not api_key or not domain:
        logger.error("MAILGUN_API_KEY or MAILGUN_DOMAIN not configured")
        return

    data = {
        "from": f"Simba <simba@{domain}>",
        "to": to,
        # Case-insensitive check so "RE: x" / "re: x" don't become "Re: RE: x".
        "subject": subject if subject.lower().startswith("re:") else f"Re: {subject}",
        "text": body,
    }
    if in_reply_to:
        # Lets mail clients thread the reply under the original message.
        data["h:In-Reply-To"] = in_reply_to

    async with httpx.AsyncClient() as client:
        resp = await client.post(
            f"https://api.mailgun.net/v3/{domain}/messages",
            auth=("api", api_key),
            data=data,
        )
    if resp.status_code != 200:
        logger.error(f"Mailgun send failed ({resp.status_code}): {resp.text}")
    else:
        logger.info(f"Sent email reply to {to}")
|
||||||
|
|
||||||
|
|
||||||
|
# --- Webhook route ---
|
||||||
|
|
||||||
|
@email_blueprint.route("/webhook", methods=["POST"])
@validate_mailgun_signature
async def webhook():
    """Handle inbound emails forwarded by Mailgun.

    Pipeline: parse the form → resolve the user from the recipient token →
    rate-limit → run the agent over recent conversation history → persist
    both messages → send the reply email.

    Every early-exit path returns 200 so Mailgun does not retry emails we
    deliberately ignored (unknown token, rate-limited, empty body, errors).
    """
    form_data = await request.form
    sender = form_data.get("sender", "")
    recipient = form_data.get("recipient", "")
    # "stripped-text" is Mailgun's body with quoted reply chains removed.
    body = form_data.get("stripped-text", "")
    subject = form_data.get("subject", "(no subject)")
    message_id = form_data.get("Message-Id", "")

    # Extract token from recipient: ask+<token>@domain
    local_part = recipient.split("@")[0] if "@" in recipient else ""
    if "+" not in local_part:
        logger.info(f"Ignoring email to {recipient} — no token in address")
        return "", 200

    token = local_part.split("+", 1)[1]

    # Lookup user by token; requires the email channel to be enabled.
    user = await User.filter(email_hmac_token=token, email_enabled=True).first()
    if not user:
        logger.info(f"No user found for email token {token}")
        return "", 200

    # Rate limit (per sender address, sliding window)
    if not _check_rate_limit(sender):
        logger.warning(f"Rate limit exceeded for email sender {sender}")
        return "", 200

    # Clean up body
    body = (body or "").strip()
    if not body:
        logger.info(f"Ignoring empty email from {sender}")
        return "", 200

    # Cap input size before it reaches the LLM.
    if len(body) > MAX_MESSAGE_LENGTH:
        body = body[:MAX_MESSAGE_LENGTH]
        logger.info(f"Truncated long email from {sender} to {MAX_MESSAGE_LENGTH} chars")

    logger.info(f"Processing email from {sender} for user {user.username}: {body[:100]}")

    # Get or create conversation
    try:
        conversation = await get_conversation_for_user(user=user)
        await conversation.fetch_related("messages")
    except Exception as e:
        logger.error(f"Failed to get conversation for user {user.username}: {e}")
        return "", 200

    # Persist the inbound message before invoking the agent.
    await add_message_to_conversation(
        conversation=conversation,
        message=body,
        speaker="user",
        user=user,
    )

    # Build messages payload: system prompt + last 10 messages of history
    # (excluding the just-saved inbound message, re-appended explicitly last).
    try:
        messages = await conversation.messages.all()
        recent_messages = list(messages)[-10:]

        messages_payload = [{"role": "system", "content": SIMBA_SYSTEM_PROMPT}]
        for msg in recent_messages[:-1]:
            role = "user" if msg.speaker == "user" else "assistant"
            messages_payload.append({"role": role, "content": msg.text})
        messages_payload.append({"role": "user", "content": body})

        logger.info(f"Invoking LangChain agent with {len(messages_payload)} messages")
        response = await main_agent.ainvoke({"messages": messages_payload})
        # The agent returns the full message list; the last entry is the reply.
        response_text = response.get("messages", [])[-1].content
    except Exception as e:
        # Fall back to a canned reply rather than failing the webhook.
        logger.error(f"Error invoking agent for email: {e}")
        response_text = "Sorry, I'm having trouble thinking right now."

    # Save response
    await add_message_to_conversation(
        conversation=conversation,
        message=response_text,
        speaker="simba",
        user=user,
    )

    # Send reply email, threaded under the original via Message-Id.
    await send_email_reply(
        to=sender,
        subject=subject,
        body=response_text,
        in_reply_to=message_id,
    )

    return "", 200
|
||||||
14
blueprints/email/helpers.py
Normal file
14
blueprints/email/helpers.py
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import hmac
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
|
||||||
|
def generate_email_token(user_id: str, secret: str) -> str:
    """Derive a stable 16-character hex token for a user's inbound address.

    The token is the truncated hex digest of HMAC-SHA256(secret, user_id):
    deterministic per user, unguessable without the secret.
    """
    digest = hmac.new(secret.encode(), str(user_id).encode(), hashlib.sha256)
    return digest.hexdigest()[:16]
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_email_address(token: str, domain: str) -> str:
    """Return the routable inbound address (ask+<token>@<domain>) for a token."""
    return "ask+" + token + "@" + domain
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
from quart import Blueprint, jsonify
|
from quart import Blueprint, jsonify
|
||||||
from quart_jwt_extended import jwt_refresh_token_required
|
from quart_jwt_extended import jwt_refresh_token_required
|
||||||
|
|
||||||
from .logic import get_vector_store_stats, index_documents, vector_store
|
from .logic import fetch_obsidian_documents, get_vector_store_stats, index_documents, index_obsidian_documents, vector_store
|
||||||
from blueprints.users.decorators import admin_required
|
from blueprints.users.decorators import admin_required
|
||||||
|
|
||||||
rag_blueprint = Blueprint("rag_api", __name__, url_prefix="/api/rag")
|
rag_blueprint = Blueprint("rag_api", __name__, url_prefix="/api/rag")
|
||||||
@@ -45,3 +45,15 @@ async def trigger_reindex():
|
|||||||
return jsonify({"status": "success", "stats": stats})
|
return jsonify({"status": "success", "stats": stats})
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return jsonify({"status": "error", "message": str(e)}), 500
|
return jsonify({"status": "error", "message": str(e)}), 500
|
||||||
|
|
||||||
|
|
||||||
|
@rag_blueprint.post("/index-obsidian")
@admin_required
async def trigger_obsidian_index():
    """Index all Obsidian markdown documents into vector store. Admin only."""
    try:
        outcome = await index_obsidian_documents()
        store_stats = get_vector_store_stats()
    except Exception as e:
        return jsonify({"status": "error", "message": str(e)}), 500
    return jsonify({"status": "success", "result": outcome, "stats": store_stats})
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ from langchain_openai import OpenAIEmbeddings
|
|||||||
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
||||||
|
|
||||||
from .fetchers import PaperlessNGXService
|
from .fetchers import PaperlessNGXService
|
||||||
|
from utils.obsidian_service import ObsidianService
|
||||||
|
|
||||||
# Load environment variables
|
# Load environment variables
|
||||||
load_dotenv()
|
load_dotenv()
|
||||||
@@ -58,12 +59,75 @@ async def fetch_documents_from_paperless_ngx() -> list[Document]:
|
|||||||
|
|
||||||
|
|
||||||
async def index_documents():
|
async def index_documents():
|
||||||
|
"""Index Paperless-NGX documents into vector store."""
|
||||||
documents = await fetch_documents_from_paperless_ngx()
|
documents = await fetch_documents_from_paperless_ngx()
|
||||||
|
|
||||||
splits = text_splitter.split_documents(documents)
|
splits = text_splitter.split_documents(documents)
|
||||||
await vector_store.aadd_documents(documents=splits)
|
await vector_store.aadd_documents(documents=splits)
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_obsidian_documents() -> list[Document]:
    """Fetch all markdown documents from the Obsidian vault.

    Unreadable files are logged and skipped rather than aborting the scan.

    Returns:
        List of LangChain Document objects with source='obsidian' metadata.
    """
    service = ObsidianService()
    docs: list[Document] = []

    for md_path in service.walk_vault():
        try:
            with open(md_path, "r", encoding="utf-8") as handle:
                raw = handle.read()

            # Split frontmatter/tags from the note body.
            parsed = service.parse_markdown(raw, md_path)

            # Base metadata first, then overlay note frontmatter
            # (minus bookkeeping keys handled above).
            meta = {
                "source": "obsidian",
                "filepath": parsed["filepath"],
                "tags": parsed["tags"],
                "created_at": parsed["metadata"].get("created_at"),
            }
            for key, value in parsed["metadata"].items():
                if key not in ("created_at", "created_by"):
                    meta[key] = value

            docs.append(Document(page_content=parsed["content"], metadata=meta))
        except Exception as e:
            print(f"Error reading {md_path}: {e}")
            continue

    return docs
|
||||||
|
|
||||||
|
|
||||||
|
async def index_obsidian_documents():
    """Index all Obsidian markdown documents into the vector store.

    Deletes existing obsidian-source chunks first so repeated runs do not
    accumulate duplicates.

    Returns:
        Dict with the number of source documents indexed, e.g. {"indexed": 3}.
    """
    # NOTE: the previous version constructed an unused ObsidianService() here;
    # fetch_obsidian_documents() creates its own, so the dead local is removed.
    documents = await fetch_obsidian_documents()

    if not documents:
        print("No Obsidian documents found to index")
        return {"indexed": 0}

    # Delete existing obsidian chunks before re-indexing.
    existing_results = vector_store.get(where={"source": "obsidian"})
    stale_ids = existing_results.get("ids")
    if stale_ids:
        await vector_store.adelete(stale_ids)

    # Split into chunks and index.
    splits = text_splitter.split_documents(documents)
    await vector_store.aadd_documents(documents=splits)

    return {"indexed": len(documents)}
|
||||||
|
|
||||||
|
|
||||||
async def query_vector_store(query: str):
|
async def query_vector_store(query: str):
|
||||||
retrieved_docs = await vector_store.asimilarity_search(query, k=2)
|
retrieved_docs = await vector_store.asimilarity_search(query, k=2)
|
||||||
serialized = "\n\n".join(
|
serialized = "\n\n".join(
|
||||||
|
|||||||
@@ -7,7 +7,9 @@ from quart_jwt_extended import (
|
|||||||
)
|
)
|
||||||
from .models import User
|
from .models import User
|
||||||
from .oidc_service import OIDCUserService
|
from .oidc_service import OIDCUserService
|
||||||
|
from .decorators import admin_required
|
||||||
from config.oidc_config import oidc_config
|
from config.oidc_config import oidc_config
|
||||||
|
import os
|
||||||
import secrets
|
import secrets
|
||||||
import httpx
|
import httpx
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
@@ -131,6 +133,21 @@ async def oidc_callback():
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
return jsonify({"error": f"ID token verification failed: {str(e)}"}), 400
|
return jsonify({"error": f"ID token verification failed: {str(e)}"}), 400
|
||||||
|
|
||||||
|
# Fetch userinfo to get groups (older Authelia versions only include groups there)
|
||||||
|
userinfo_endpoint = discovery.get("userinfo_endpoint")
|
||||||
|
if userinfo_endpoint:
|
||||||
|
access_token_str = tokens.get("access_token")
|
||||||
|
if access_token_str:
|
||||||
|
async with httpx.AsyncClient() as client:
|
||||||
|
userinfo_response = await client.get(
|
||||||
|
userinfo_endpoint,
|
||||||
|
headers={"Authorization": f"Bearer {access_token_str}"},
|
||||||
|
)
|
||||||
|
if userinfo_response.status_code == 200:
|
||||||
|
userinfo = userinfo_response.json()
|
||||||
|
if "groups" in userinfo and "groups" not in claims:
|
||||||
|
claims["groups"] = userinfo["groups"]
|
||||||
|
|
||||||
# Get or create user from OIDC claims
|
# Get or create user from OIDC claims
|
||||||
user = await OIDCUserService.get_or_create_user_from_oidc(claims)
|
user = await OIDCUserService.get_or_create_user_from_oidc(claims)
|
||||||
|
|
||||||
@@ -186,3 +203,122 @@ async def login():
|
|||||||
refresh_token=refresh_token,
|
refresh_token=refresh_token,
|
||||||
user={"id": str(user.id), "username": user.username},
|
user={"id": str(user.id), "username": user.username},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@user_blueprint.route("/me", methods=["GET"])
@jwt_refresh_token_required
async def me():
    """Return the profile of the currently authenticated user."""
    current = await User.get_or_none(id=get_jwt_identity())
    if current is None:
        return jsonify({"error": "User not found"}), 404
    profile = {
        "id": str(current.id),
        "username": current.username,
        "email": current.email,
        "is_admin": current.is_admin(),
    }
    return jsonify(profile)
|
||||||
|
|
||||||
|
|
||||||
|
@user_blueprint.route("/admin/users", methods=["GET"])
@admin_required
async def list_users():
    """List every account with its channel linkage details. Admin only."""
    # Imported here to avoid a circular import with the email blueprint.
    from blueprints.email.helpers import get_user_email_address

    mailgun_domain = os.getenv("MAILGUN_DOMAIN", "")
    payload = []
    for account in await User.all().order_by("username"):
        # Only expose the inbound address when the email channel is active.
        email_address = None
        if account.email_hmac_token and account.email_enabled:
            email_address = get_user_email_address(account.email_hmac_token, mailgun_domain)
        payload.append({
            "id": str(account.id),
            "username": account.username,
            "email": account.email,
            "whatsapp_number": account.whatsapp_number,
            "auth_provider": account.auth_provider,
            "email_enabled": account.email_enabled,
            "email_address": email_address,
        })
    return jsonify(payload)
|
||||||
|
|
||||||
|
|
||||||
|
@user_blueprint.route("/admin/users/<user_id>/whatsapp", methods=["PUT"])
@admin_required
async def set_whatsapp(user_id):
    """Link a WhatsApp number to a user account. Admin only.

    Expects JSON body {"whatsapp_number": "..."}; the number must not be
    linked to another account. Returns the updated user summary.
    """
    data = await request.get_json()
    # `or ""` guards against a JSON null value, which previously raised
    # AttributeError on .strip() and produced a 500 instead of a 400.
    number = ((data or {}).get("whatsapp_number") or "").strip()
    if not number:
        return jsonify({"error": "whatsapp_number is required"}), 400

    user = await User.get_or_none(id=user_id)
    if not user:
        return jsonify({"error": "User not found"}), 404

    # Numbers are unique per account; reject if someone else already owns it.
    conflict = await User.filter(whatsapp_number=number).exclude(id=user_id).first()
    if conflict:
        return jsonify({"error": "That WhatsApp number is already linked to another account"}), 409

    user.whatsapp_number = number
    await user.save()
    return jsonify({
        "id": str(user.id),
        "username": user.username,
        "email": user.email,
        "whatsapp_number": user.whatsapp_number,
        "auth_provider": user.auth_provider,
    })
|
||||||
|
|
||||||
|
|
||||||
|
@user_blueprint.route("/admin/users/<user_id>/whatsapp", methods=["DELETE"])
@admin_required
async def unlink_whatsapp(user_id):
    """Remove the WhatsApp number linked to a user account. Admin only."""
    target = await User.get_or_none(id=user_id)
    if target is None:
        return jsonify({"error": "User not found"}), 404
    target.whatsapp_number = None
    await target.save()
    return jsonify({"ok": True})
|
||||||
|
|
||||||
|
|
||||||
|
@user_blueprint.route("/admin/users/<user_id>/email", methods=["PUT"])
@admin_required
async def toggle_email(user_id):
    """Enable email channel for a user, generating an HMAC token."""
    # Imported here to avoid a circular import with the email blueprint.
    from blueprints.email.helpers import generate_email_token, get_user_email_address

    target = await User.get_or_none(id=user_id)
    if target is None:
        return jsonify({"error": "User not found"}), 404

    email_secret = os.getenv("EMAIL_HMAC_SECRET")
    if not email_secret:
        return jsonify({"error": "EMAIL_HMAC_SECRET not configured"}), 500

    mailgun_domain = os.getenv("MAILGUN_DOMAIN", "")

    # Tokens are stable per user: mint one only the first time email is enabled.
    if not target.email_hmac_token:
        target.email_hmac_token = generate_email_token(target.id, email_secret)
    target.email_enabled = True
    await target.save()

    return jsonify({
        "id": str(target.id),
        "username": target.username,
        "email": target.email,
        "whatsapp_number": target.whatsapp_number,
        "auth_provider": target.auth_provider,
        "email_enabled": target.email_enabled,
        "email_address": get_user_email_address(target.email_hmac_token, mailgun_domain),
    })
|
||||||
|
|
||||||
|
|
||||||
|
@user_blueprint.route("/admin/users/<user_id>/email", methods=["DELETE"])
@admin_required
async def disable_email(user_id):
    """Disable email channel and clear the token."""
    target = await User.get_or_none(id=user_id)
    if target is None:
        return jsonify({"error": "User not found"}), 404
    target.email_enabled = False
    target.email_hmac_token = None
    await target.save()
    return jsonify({"ok": True})
|
||||||
|
|||||||
@@ -10,6 +10,11 @@ class User(Model):
|
|||||||
username = fields.CharField(max_length=255)
|
username = fields.CharField(max_length=255)
|
||||||
password = fields.BinaryField(null=True) # Hashed - nullable for OIDC users
|
password = fields.BinaryField(null=True) # Hashed - nullable for OIDC users
|
||||||
email = fields.CharField(max_length=100, unique=True)
|
email = fields.CharField(max_length=100, unique=True)
|
||||||
|
whatsapp_number = fields.CharField(max_length=30, unique=True, null=True, index=True)
|
||||||
|
|
||||||
|
# Email channel fields
|
||||||
|
email_enabled = fields.BooleanField(default=False)
|
||||||
|
email_hmac_token = fields.CharField(max_length=16, unique=True, null=True, index=True)
|
||||||
|
|
||||||
# OIDC fields
|
# OIDC fields
|
||||||
oidc_subject = fields.CharField(
|
oidc_subject = fields.CharField(
|
||||||
|
|||||||
212
blueprints/whatsapp/__init__.py
Normal file
212
blueprints/whatsapp/__init__.py
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import asyncio
|
||||||
|
import functools
|
||||||
|
import time
|
||||||
|
from collections import defaultdict
|
||||||
|
from quart import Blueprint, request, jsonify, abort
|
||||||
|
from twilio.request_validator import RequestValidator
|
||||||
|
from twilio.twiml.messaging_response import MessagingResponse
|
||||||
|
|
||||||
|
from blueprints.users.models import User
|
||||||
|
from blueprints.conversation.logic import (
|
||||||
|
get_conversation_for_user,
|
||||||
|
add_message_to_conversation,
|
||||||
|
get_conversation_transcript,
|
||||||
|
)
|
||||||
|
from blueprints.conversation.agents import main_agent
|
||||||
|
from blueprints.conversation.prompts import SIMBA_SYSTEM_PROMPT
|
||||||
|
|
||||||
|
whatsapp_blueprint = Blueprint("whatsapp_api", __name__, url_prefix="/api/whatsapp")
|
||||||
|
|
||||||
|
# Configure logging
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Rate limiting: per-number message timestamps
|
||||||
|
# Format: {phone_number: [timestamp1, timestamp2, ...]}
|
||||||
|
_rate_limit_store: dict[str, list[float]] = defaultdict(list)
|
||||||
|
|
||||||
|
# Configurable via env: max messages per window (default: 10 per 60s)
|
||||||
|
RATE_LIMIT_MAX = int(os.getenv("WHATSAPP_RATE_LIMIT_MAX", "10"))
|
||||||
|
RATE_LIMIT_WINDOW = int(os.getenv("WHATSAPP_RATE_LIMIT_WINDOW", "60"))
|
||||||
|
|
||||||
|
# Max message length to process (WhatsApp max is 4096, but we cap for LLM sanity)
|
||||||
|
MAX_MESSAGE_LENGTH = 2000
|
||||||
|
|
||||||
|
|
||||||
|
def _twiml_response(text: str) -> tuple[str, int]:
|
||||||
|
"""Helper to return a TwiML MessagingResponse."""
|
||||||
|
resp = MessagingResponse()
|
||||||
|
resp.message(text)
|
||||||
|
return str(resp), 200
|
||||||
|
|
||||||
|
|
||||||
|
def _check_rate_limit(phone_number: str) -> bool:
|
||||||
|
"""Check if a phone number has exceeded the rate limit.
|
||||||
|
|
||||||
|
Returns True if the request is allowed, False if rate-limited.
|
||||||
|
Also cleans up expired entries.
|
||||||
|
"""
|
||||||
|
now = time.monotonic()
|
||||||
|
cutoff = now - RATE_LIMIT_WINDOW
|
||||||
|
|
||||||
|
# Remove expired timestamps
|
||||||
|
timestamps = _rate_limit_store[phone_number]
|
||||||
|
_rate_limit_store[phone_number] = [t for t in timestamps if t > cutoff]
|
||||||
|
|
||||||
|
if len(_rate_limit_store[phone_number]) >= RATE_LIMIT_MAX:
|
||||||
|
return False
|
||||||
|
|
||||||
|
_rate_limit_store[phone_number].append(now)
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def validate_twilio_request(f):
|
||||||
|
"""Decorator to validate that the request comes from Twilio.
|
||||||
|
|
||||||
|
Validates the X-Twilio-Signature header using the TWILIO_AUTH_TOKEN.
|
||||||
|
Set TWILIO_WEBHOOK_URL if behind a reverse proxy (e.g., ngrok, Caddy)
|
||||||
|
so the validated URL matches what Twilio signed against.
|
||||||
|
Set TWILIO_SIGNATURE_VALIDATION=false to disable in development.
|
||||||
|
"""
|
||||||
|
@functools.wraps(f)
|
||||||
|
async def decorated_function(*args, **kwargs):
|
||||||
|
if os.getenv("TWILIO_SIGNATURE_VALIDATION", "true").lower() == "false":
|
||||||
|
return await f(*args, **kwargs)
|
||||||
|
|
||||||
|
auth_token = os.getenv("TWILIO_AUTH_TOKEN")
|
||||||
|
if not auth_token:
|
||||||
|
logger.error("TWILIO_AUTH_TOKEN not set — rejecting request")
|
||||||
|
abort(403)
|
||||||
|
|
||||||
|
twilio_signature = request.headers.get("X-Twilio-Signature")
|
||||||
|
if not twilio_signature:
|
||||||
|
logger.warning("Missing X-Twilio-Signature header")
|
||||||
|
abort(403)
|
||||||
|
|
||||||
|
# Use configured webhook URL if behind a proxy, otherwise use request URL
|
||||||
|
url = os.getenv("TWILIO_WEBHOOK_URL") or request.url
|
||||||
|
form_data = await request.form
|
||||||
|
|
||||||
|
validator = RequestValidator(auth_token)
|
||||||
|
if not validator.validate(url, form_data, twilio_signature):
|
||||||
|
logger.warning(f"Invalid Twilio signature for URL: {url}")
|
||||||
|
abort(403)
|
||||||
|
|
||||||
|
return await f(*args, **kwargs)
|
||||||
|
return decorated_function
|
||||||
|
|
||||||
|
|
||||||
|
@whatsapp_blueprint.route("/webhook", methods=["POST"])
|
||||||
|
@validate_twilio_request
|
||||||
|
async def webhook():
|
||||||
|
"""
|
||||||
|
Handle incoming WhatsApp messages from Twilio.
|
||||||
|
"""
|
||||||
|
form_data = await request.form
|
||||||
|
from_number = form_data.get("From") # e.g., "whatsapp:+1234567890"
|
||||||
|
body = form_data.get("Body")
|
||||||
|
|
||||||
|
if not from_number or not body:
|
||||||
|
return _twiml_response("Invalid message received.") if from_number else ("Missing From or Body", 400)
|
||||||
|
|
||||||
|
# Strip whitespace and check for empty body
|
||||||
|
body = body.strip()
|
||||||
|
if not body:
|
||||||
|
return _twiml_response("I received an empty message. Please send some text!")
|
||||||
|
|
||||||
|
# Rate limiting
|
||||||
|
if not _check_rate_limit(from_number):
|
||||||
|
logger.warning(f"Rate limit exceeded for {from_number}")
|
||||||
|
return _twiml_response("You're sending messages too quickly. Please wait a moment and try again.")
|
||||||
|
|
||||||
|
# Truncate overly long messages
|
||||||
|
if len(body) > MAX_MESSAGE_LENGTH:
|
||||||
|
body = body[:MAX_MESSAGE_LENGTH]
|
||||||
|
logger.info(f"Truncated long message from {from_number} to {MAX_MESSAGE_LENGTH} chars")
|
||||||
|
|
||||||
|
logger.info(f"Received WhatsApp message from {from_number}: {body[:100]}")
|
||||||
|
|
||||||
|
# Identify or create user
|
||||||
|
user = await User.filter(whatsapp_number=from_number).first()
|
||||||
|
|
||||||
|
if not user:
|
||||||
|
# Check if number is in allowlist
|
||||||
|
allowed_numbers = os.getenv("ALLOWED_WHATSAPP_NUMBERS", "").split(",")
|
||||||
|
if from_number not in allowed_numbers and "*" not in allowed_numbers:
|
||||||
|
return _twiml_response("Sorry, you are not authorized to use this service.")
|
||||||
|
|
||||||
|
# Create a new user for this WhatsApp number
|
||||||
|
username = f"wa_{from_number.split(':')[-1]}"
|
||||||
|
try:
|
||||||
|
user = await User.create(
|
||||||
|
username=username,
|
||||||
|
email=f"{username}@whatsapp.simbarag.local",
|
||||||
|
whatsapp_number=from_number,
|
||||||
|
auth_provider="whatsapp"
|
||||||
|
)
|
||||||
|
logger.info(f"Created new user for WhatsApp: {username}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to create user for {from_number}: {e}")
|
||||||
|
return _twiml_response("Sorry, something went wrong setting up your account. Please try again later.")
|
||||||
|
|
||||||
|
# Get or create a conversation for this user
|
||||||
|
try:
|
||||||
|
conversation = await get_conversation_for_user(user=user)
|
||||||
|
await conversation.fetch_related("messages")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get conversation for user {user.username}: {e}")
|
||||||
|
return _twiml_response("Sorry, something went wrong. Please try again later.")
|
||||||
|
|
||||||
|
# Add user message to conversation
|
||||||
|
await add_message_to_conversation(
|
||||||
|
conversation=conversation,
|
||||||
|
message=body,
|
||||||
|
speaker="user",
|
||||||
|
user=user,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get transcript for context
|
||||||
|
transcript = await get_conversation_transcript(user=user, conversation=conversation)
|
||||||
|
|
||||||
|
# Build messages payload for LangChain agent with system prompt and conversation history
|
||||||
|
try:
|
||||||
|
# Get last 10 messages for conversation history
|
||||||
|
messages = await conversation.messages.all()
|
||||||
|
recent_messages = list(messages)[-10:]
|
||||||
|
|
||||||
|
# Build messages payload
|
||||||
|
messages_payload = [{"role": "system", "content": SIMBA_SYSTEM_PROMPT}]
|
||||||
|
|
||||||
|
# Add recent conversation history (exclude the message we just added)
|
||||||
|
for msg in recent_messages[:-1]:
|
||||||
|
role = "user" if msg.speaker == "user" else "assistant"
|
||||||
|
messages_payload.append({"role": role, "content": msg.text})
|
||||||
|
|
||||||
|
# Add current query
|
||||||
|
messages_payload.append({"role": "user", "content": body})
|
||||||
|
|
||||||
|
# Invoke LangChain agent
|
||||||
|
logger.info(f"Invoking LangChain agent with {len(messages_payload)} messages")
|
||||||
|
response = await main_agent.ainvoke({"messages": messages_payload})
|
||||||
|
response_text = response.get("messages", [])[-1].content
|
||||||
|
|
||||||
|
# Log YNAB availability
|
||||||
|
if os.getenv("YNAB_ACCESS_TOKEN"):
|
||||||
|
logger.info("YNAB integration is available for this conversation")
|
||||||
|
else:
|
||||||
|
logger.info("YNAB integration is not configured")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error invoking agent: {e}")
|
||||||
|
response_text = "Sorry, I'm having trouble thinking right now. 😿"
|
||||||
|
|
||||||
|
# Add Simba's response to conversation
|
||||||
|
await add_message_to_conversation(
|
||||||
|
conversation=conversation,
|
||||||
|
message=response_text,
|
||||||
|
speaker="simba",
|
||||||
|
user=user,
|
||||||
|
)
|
||||||
|
|
||||||
|
return _twiml_response(response_text)
|
||||||
@@ -1,15 +1,14 @@
|
|||||||
import os
|
import os
|
||||||
|
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
# Load environment variables
|
|
||||||
load_dotenv()
|
load_dotenv()
|
||||||
|
|
||||||
# Database configuration with environment variable support
|
DATABASE_URL = os.getenv(
|
||||||
# Use DATABASE_PATH for relative paths or DATABASE_URL for full connection strings
|
"DATABASE_URL", "postgres://raggr:raggr_dev_password@localhost:5432/raggr"
|
||||||
DATABASE_PATH = os.getenv("DATABASE_PATH", "database/raggr.db")
|
)
|
||||||
DATABASE_URL = os.getenv("DATABASE_URL", f"sqlite://{DATABASE_PATH}")
|
|
||||||
|
|
||||||
TORTOISE_ORM = {
|
TORTOISE_CONFIG = {
|
||||||
"connections": {"default": DATABASE_URL},
|
"connections": {"default": DATABASE_URL},
|
||||||
"apps": {
|
"apps": {
|
||||||
"models": {
|
"models": {
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres:16-alpine
|
|
||||||
environment:
|
|
||||||
- POSTGRES_USER=raggr
|
|
||||||
- POSTGRES_PASSWORD=raggr_dev_password
|
|
||||||
- POSTGRES_DB=raggr
|
|
||||||
ports:
|
|
||||||
- "5432:5432"
|
|
||||||
volumes:
|
|
||||||
- postgres_data:/var/lib/postgresql/data
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD-SHELL", "pg_isready -U raggr"]
|
|
||||||
interval: 5s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 5
|
|
||||||
|
|
||||||
# raggr service disabled - run locally for development
|
|
||||||
# raggr:
|
|
||||||
# build:
|
|
||||||
# context: .
|
|
||||||
# dockerfile: Dockerfile.dev
|
|
||||||
# image: torrtle/simbarag:dev
|
|
||||||
# ports:
|
|
||||||
# - "8080:8080"
|
|
||||||
# env_file:
|
|
||||||
# - .env
|
|
||||||
# environment:
|
|
||||||
# - PAPERLESS_TOKEN=${PAPERLESS_TOKEN}
|
|
||||||
# - BASE_URL=${BASE_URL}
|
|
||||||
# - OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434}
|
|
||||||
# - CHROMADB_PATH=/app/data/chromadb
|
|
||||||
# - OPENAI_API_KEY=${OPENAI_API_KEY}
|
|
||||||
# - JWT_SECRET_KEY=${JWT_SECRET_KEY}
|
|
||||||
# - OIDC_ISSUER=${OIDC_ISSUER}
|
|
||||||
# - OIDC_CLIENT_ID=${OIDC_CLIENT_ID}
|
|
||||||
# - OIDC_CLIENT_SECRET=${OIDC_CLIENT_SECRET}
|
|
||||||
# - OIDC_REDIRECT_URI=${OIDC_REDIRECT_URI}
|
|
||||||
# - OIDC_USE_DISCOVERY=${OIDC_USE_DISCOVERY:-true}
|
|
||||||
# - DATABASE_URL=postgres://raggr:raggr_dev_password@postgres:5432/raggr
|
|
||||||
# - FLASK_ENV=development
|
|
||||||
# - PYTHONUNBUFFERED=1
|
|
||||||
# - NODE_ENV=development
|
|
||||||
# - TAVILY_KEY=${TAVILIY_KEY}
|
|
||||||
# depends_on:
|
|
||||||
# postgres:
|
|
||||||
# condition: service_healthy
|
|
||||||
# volumes:
|
|
||||||
# - chromadb_data:/app/data/chromadb
|
|
||||||
# - ./migrations:/app/migrations # Bind mount for migrations (bidirectional)
|
|
||||||
# develop:
|
|
||||||
# watch:
|
|
||||||
# # Sync+restart on any file change in root directory
|
|
||||||
# - action: sync+restart
|
|
||||||
# path: .
|
|
||||||
# target: /app
|
|
||||||
# ignore:
|
|
||||||
# - __pycache__/
|
|
||||||
# - "*.pyc"
|
|
||||||
# - "*.pyo"
|
|
||||||
# - "*.pyd"
|
|
||||||
# - .git/
|
|
||||||
# - chromadb/
|
|
||||||
# - node_modules/
|
|
||||||
# - raggr-frontend/dist/
|
|
||||||
# - docs/
|
|
||||||
# - .venv/
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
chromadb_data:
|
|
||||||
postgres_data:
|
|
||||||
@@ -32,18 +32,41 @@ services:
|
|||||||
- CHROMADB_PATH=/app/data/chromadb
|
- CHROMADB_PATH=/app/data/chromadb
|
||||||
- OPENAI_API_KEY=${OPENAI_API_KEY}
|
- OPENAI_API_KEY=${OPENAI_API_KEY}
|
||||||
- JWT_SECRET_KEY=${JWT_SECRET_KEY}
|
- JWT_SECRET_KEY=${JWT_SECRET_KEY}
|
||||||
|
- LLAMA_SERVER_URL=${LLAMA_SERVER_URL}
|
||||||
|
- LLAMA_MODEL_NAME=${LLAMA_MODEL_NAME}
|
||||||
- OIDC_ISSUER=${OIDC_ISSUER}
|
- OIDC_ISSUER=${OIDC_ISSUER}
|
||||||
- OIDC_CLIENT_ID=${OIDC_CLIENT_ID}
|
- OIDC_CLIENT_ID=${OIDC_CLIENT_ID}
|
||||||
- OIDC_CLIENT_SECRET=${OIDC_CLIENT_SECRET}
|
- OIDC_CLIENT_SECRET=${OIDC_CLIENT_SECRET}
|
||||||
- OIDC_REDIRECT_URI=${OIDC_REDIRECT_URI}
|
- OIDC_REDIRECT_URI=${OIDC_REDIRECT_URI}
|
||||||
- OIDC_USE_DISCOVERY=${OIDC_USE_DISCOVERY:-true}
|
- OIDC_USE_DISCOVERY=${OIDC_USE_DISCOVERY:-true}
|
||||||
- DATABASE_URL=${DATABASE_URL:-postgres://raggr:changeme@postgres:5432/raggr}
|
- DATABASE_URL=${DATABASE_URL:-postgres://raggr:changeme@postgres:5432/raggr}
|
||||||
- TAVILY_KEY=${TAVILIY_KEY}
|
- TAVILY_API_KEY=${TAVILIY_API_KEY}
|
||||||
|
- YNAB_ACCESS_TOKEN=${YNAB_ACCESS_TOKEN}
|
||||||
|
- YNAB_BUDGET_ID=${YNAB_BUDGET_ID}
|
||||||
|
- TWILIO_ACCOUNT_SID=${TWILIO_ACCOUNT_SID}
|
||||||
|
- TWILIO_AUTH_TOKEN=${TWILIO_AUTH_TOKEN}
|
||||||
|
- TWILIO_WHATSAPP_NUMBER=${TWILIO_WHATSAPP_NUMBER}
|
||||||
|
- ALLOWED_WHATSAPP_NUMBERS=${ALLOWED_WHATSAPP_NUMBERS}
|
||||||
|
- TWILIO_SIGNATURE_VALIDATION=${TWILIO_SIGNATURE_VALIDATION:-true}
|
||||||
|
- TWILIO_WEBHOOK_URL=${TWILIO_WEBHOOK_URL:-}
|
||||||
|
- OBSIDIAN_AUTH_TOKEN=${OBSIDIAN_AUTH_TOKEN}
|
||||||
|
- OBSIDIAN_VAULT_ID=${OBSIDIAN_VAULT_ID}
|
||||||
|
- OBSIDIAN_E2E_PASSWORD=${OBSIDIAN_E2E_PASSWORD}
|
||||||
|
- OBSIDIAN_DEVICE_NAME=${OBSIDIAN_DEVICE_NAME}
|
||||||
|
- OBSIDIAN_CONTINUOUS_SYNC=${OBSIDIAN_CONTINUOUS_SYNC:-false}
|
||||||
|
- OBSIDIAN_VAULT_PATH=${OBSIDIAN_VAULT_PATH:-/app/data/obsidian}
|
||||||
|
- S3_ENDPOINT_URL=${S3_ENDPOINT_URL}
|
||||||
|
- S3_ACCESS_KEY_ID=${S3_ACCESS_KEY_ID}
|
||||||
|
- S3_SECRET_ACCESS_KEY=${S3_SECRET_ACCESS_KEY}
|
||||||
|
- S3_BUCKET_NAME=${S3_BUCKET_NAME:-asksimba-images}
|
||||||
|
- S3_REGION=${S3_REGION:-garage}
|
||||||
|
- OLLAMA_HOST=${OLLAMA_HOST:-http://localhost:11434}
|
||||||
depends_on:
|
depends_on:
|
||||||
postgres:
|
postgres:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
volumes:
|
volumes:
|
||||||
- chromadb_data:/app/data/chromadb
|
- chromadb_data:/app/data/chromadb
|
||||||
|
- ./obvault:/app/data/obsidian
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
|
|||||||
4
main.py
4
main.py
@@ -225,6 +225,10 @@ def filter_indexed_files(docs):
|
|||||||
def reindex():
|
def reindex():
|
||||||
with sqlite3.connect("database/visited.db") as conn:
|
with sqlite3.connect("database/visited.db") as conn:
|
||||||
c = conn.cursor()
|
c = conn.cursor()
|
||||||
|
# Ensure the table exists before trying to delete from it
|
||||||
|
c.execute(
|
||||||
|
"CREATE TABLE IF NOT EXISTS indexed_documents (id INTEGER PRIMARY KEY AUTOINCREMENT, paperless_id INTEGER)"
|
||||||
|
)
|
||||||
c.execute("DELETE FROM indexed_documents")
|
c.execute("DELETE FROM indexed_documents")
|
||||||
conn.commit()
|
conn.commit()
|
||||||
|
|
||||||
|
|||||||
42
migrations/models/2_20260228125713_add_whatsapp_number.py
Normal file
42
migrations/models/2_20260228125713_add_whatsapp_number.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
from tortoise import BaseDBAsyncClient
|
||||||
|
|
||||||
|
RUN_IN_TRANSACTION = True
|
||||||
|
|
||||||
|
|
||||||
|
async def upgrade(db: BaseDBAsyncClient) -> str:
|
||||||
|
return """
|
||||||
|
ALTER TABLE "users" ADD "whatsapp_number" VARCHAR(20) UNIQUE;"""
|
||||||
|
|
||||||
|
|
||||||
|
async def downgrade(db: BaseDBAsyncClient) -> str:
|
||||||
|
return """
|
||||||
|
DROP INDEX IF EXISTS "uid_users_whatsap_e6b586";
|
||||||
|
ALTER TABLE "users" DROP COLUMN "whatsapp_number";"""
|
||||||
|
|
||||||
|
|
||||||
|
MODELS_STATE = (
|
||||||
|
"eJztmm1v4jgQx78Kyquu1KtatnRX1emkQOkttwuceNinXhWZxECuiZ2NnaWo6nc/2yTESR"
|
||||||
|
"wgFCjs8aYtYw+2fx5n/p70SXOxBR1yVsPoJ/QJoDZG2nXpSUPAhewPZftpSQOeF7dyAwUD"
|
||||||
|
"RziYUk/RAgaE+sCkrHEIHAKZyYLE9G0vHAwFjsON2GQdbTSKTQGyfwTQoHgE6Rj6rOHunp"
|
||||||
|
"ltZMFHSKKP3oMxtKFjJeZtW3xsYTfo1BO2fr9xcyt68uEGhomdwEVxb29KxxjNuweBbZ1x"
|
||||||
|
"H942ggj6gEJLWgafZbjsyDSbMTNQP4DzqVqxwYJDEDgchvb7MEAmZ1ASI/Efl39oBfAw1B"
|
||||||
|
"ytjShn8fQ8W1W8ZmHV+FC1D3rn5O3VG7FKTOjIF42CiPYsHAEFM1fBNQYpfmdQ1sbAV6OM"
|
||||||
|
"+qdgsomugzEyxBzjGIpARoDWo6a54NFwIBrRMftYrlQWYPysdwRJ1kugxCyuZ1HfCpvKsz"
|
||||||
|
"aONEZo+pAv2QA0C/KGtVDbhWqYSc8UUit0PYv+2FPAbA1WGznT8BAs4NtrNOvdnt78m6/E"
|
||||||
|
"JeSHIxDpvTpvKQvrNGU9uUptxfxLSl8avQ8l/rH0vd2qp2N/3q/3XeNzAgHFBsITA1jSeY"
|
||||||
|
"2sEZjExgaetebGJj2PG/uqGxtOXtpXAn2jWAaRXF6QRsK57XAT108aPPUOH5Q5g8PIwrvF"
|
||||||
|
"PrRH6COcCoQNNg+ATFWyCEVHP/yafYUWW+NZ+GAyVyNyULDVsTVBOsueerem39Q1wXAAzI"
|
||||||
|
"cJ8C0jB6YLCQEjSLJAq6Hn7ccOdObSTM1SFnDN2Tfu51Mlj61ghctYYpSgl21yy27aAhBb"
|
||||||
|
"txWOzUdaQGeJCpYgriaGDXkjj6L4oEUxhY+KlN9jVjXKqP+hiOJFqbz+tZfI4pH0PWnqX9"
|
||||||
|
"8kMvmnduvPqLsklWuf2tWjQv4VhVRWIRMPggeVGOAXyDoK3IwUSOyu5P7KR0frd+ud6xLP"
|
||||||
|
"6P+gbqNZ1a9LxHYHQFttixO3zIvzFS6ZF+e5d0zelDpAcqIp9phXuG7ymX+gEtZMFbxeKG"
|
||||||
|
"XT9bO9pbhU0yrCpai23aaSE3cGhXSL7hL5Wo0f7aM2O3xtxvexaNFS9jkUjbaDwqUHCJlg"
|
||||||
|
"XxGZVRsBf6qGKXulYA6mdHb/2dcrvQpeletVWW4xZNVGS+98U0veqL8ct9VvvbqeogtdYD"
|
||||||
|
"tFonTusJkQXX7iNxmgF+eriZ5FqicjeyZjQAl7pBtMSQ7yZKYapsJ1LazpUN0t1fIqUMv5"
|
||||||
|
"TMsZpNi2TIMEg3+hqbiM5fNM+x0izG08Q9n1aGx4Pv5pW8UCNOO4u8SkOdgEzgsye5JrZZ"
|
||||||
|
"UgreQHaSUTpI4FPGPk48BTlEX/6rZbaqQptxTQPmKrvLNsk56WHJvQ+63hvbvfjmriK19c"
|
||||||
|
"m0mXYVJpin/BsTbzP6nNHN9e/hIbO385krljL3uzlPlXnc28Xtpnfb/b10o69G1zrCnKEW"
|
||||||
|
"HL6aKCBIj77E1FooFy3nAoCxIccyoYwp1/1XuJeLn3W/ni8t3l+7dXl+9ZFzGTueXdgodB"
|
||||||
|
"o9VbUoDgB0FZNczXepLLsfwQS2d2NIoI5ln3wwS4lesxG5FCpEjv+RJZcnkteby1Qs7G5H"
|
||||||
|
"GBbLv59PL8Hy/ZG1k="
|
||||||
|
)
|
||||||
46
migrations/models/3_20260313000000_add_email_fields.py
Normal file
46
migrations/models/3_20260313000000_add_email_fields.py
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
from tortoise import BaseDBAsyncClient
|
||||||
|
|
||||||
|
RUN_IN_TRANSACTION = True
|
||||||
|
|
||||||
|
|
||||||
|
async def upgrade(db: BaseDBAsyncClient) -> str:
|
||||||
|
return """
|
||||||
|
ALTER TABLE "users" ADD "email_enabled" BOOL NOT NULL DEFAULT FALSE;
|
||||||
|
ALTER TABLE "users" ADD "email_hmac_token" VARCHAR(16) UNIQUE;
|
||||||
|
CREATE INDEX "idx_users_email_h_a1b2c3" ON "users" ("email_hmac_token");"""
|
||||||
|
|
||||||
|
|
||||||
|
async def downgrade(db: BaseDBAsyncClient) -> str:
|
||||||
|
return """
|
||||||
|
DROP INDEX IF EXISTS "idx_users_email_h_a1b2c3";
|
||||||
|
ALTER TABLE "users" DROP COLUMN "email_hmac_token";
|
||||||
|
ALTER TABLE "users" DROP COLUMN "email_enabled";"""
|
||||||
|
|
||||||
|
|
||||||
|
MODELS_STATE = (
|
||||||
|
"eJztmm1v4jgQx78Kyquu1KtaKN1VdTopUHrLbYEThX3qVZFJXMg1sbOJsxRV/e5nm4Q4jg"
|
||||||
|
"OEAoU93rRl7CH2z2PP35M+ay62oBOc1DH6Cf0AEBsj7bL0rCHgQvqHsv24pAHPS1qZgYCB"
|
||||||
|
"wx1MoSdvAYOA+MAktPEBOAGkJgsGpm970cNQ6DjMiE3a0UbDxBQi+0cIDYKHkIygTxvu7q"
|
||||||
|
"nZRhZ8gkH80Xs0HmzoWKlx2xZ7NrcbZOJxW7/fvLrmPdnjBoaJndBFSW9vQkYYzbqHoW2d"
|
||||||
|
"MB/WNoQI+oBAS5gGG2U07dg0HTE1ED+Es6FaicGCDyB0GAzt94cQmYxBiT+J/Tj/QyuAh6"
|
||||||
|
"JmaG1EGIvnl+mskjlzq8YeVf+od48qF+/4LHFAhj5v5ES0F+4ICJi6cq4JSP47g7I+Ar4a"
|
||||||
|
"ZdxfgkkHugrG2JBwTGIoBhkDWo2a5oInw4FoSEb0Y7lanYPxs97lJGkvjhLTuJ5GfTtqKk"
|
||||||
|
"/bGNIEoelDNmUDkCzIK9pCbBeqYaY9JaRW5HoS/7GjgOkcrA5yJtEmmMO312w1bnt66282"
|
||||||
|
"EzcIfjgckd5rsJYyt04k69GFtBSzLyl9afY+ltjH0vdOuyHH/qxf77vGxgRCgg2ExwawhP"
|
||||||
|
"0aW2MwqYUNPWvFhU17Hhb2TRc2GrywrgH0jWIZRHB5RRqJxrbFRVw9abDU+/CozBkMRhbe"
|
||||||
|
"NfahPUSf4IQjbNJxAGSqkkUkOvrR1+wqtMSajMIH45kaEYOCzo7OCZJp9tRv6/pVQ+MMB8"
|
||||||
|
"B8HAPfMnJgujAIwBAGWaC1yPP6Uxc6M2mmZikKuNb0G3fzVMljy1nhMhYYpehlm9yyK1sA"
|
||||||
|
"ovO2omezJ82hs0AFCxCXE8OGuJAHUbzXopjAJ0XK71GrGmXcf19E8bxU3vjaS2XxWPoetf"
|
||||||
|
"Sv71KZ/KbT/jPuLkjl+k2ndlDIv6KQyirkwIPgUSUG2AWygUI3IwVSqyu4v/HW0fq3je5l"
|
||||||
|
"iWX0f9Bts1XTL0uB7Q6AttwSp26ZZ6dLXDLPTnPvmKxJ2kBioil2zCtc13nm76mENaWC1y"
|
||||||
|
"ulrFw/21mKCzWtIlyKattNKjl+Z1BIt/guka/V2NY+aLP912ZsHYsWLUWffdFoWyhceiAI"
|
||||||
|
"xthXRGbNRsCfqGGKXhLMwYRM7z+7eqVXwasxvSrKLYqs1mzr3W9qyRv3F+O29q3X0CW60A"
|
||||||
|
"W2UyRKZw7rCdHFO36dAXp2upzomad6MrJnPAIkoEe6QZXkIE9mqmEqXFfCKofqdqlWloFa"
|
||||||
|
"yWdaySDlQWZAxKan2vgYOxCgOQEq+srbnzpv6jAtmqoL7P9O5ya1/2tN+Urbb9UaNHg5Zt"
|
||||||
|
"rJnkqhZrunhDtygUk1wiNUKMsFu1/y3cOIPbtY5hiQr6zCKXAhRyy2LdMIwsG/0FSUD/KB"
|
||||||
|
"yn57CHMjWZ9e6EeG5+OftlXsSM04bk9KaQ42gfMKLZrmWl3mWK3mH6vVzLHqWMAzhj4OPU"
|
||||||
|
"Uh/6/bTluNVHKTgPYRneWdZZvkuOTYAbnfGN67+83ofDbz+dVEuXAoCSv2BYdq4v+kmnh4"
|
||||||
|
"3/5LLOzsdV6mKrToXWjmn8vW80J0l2+k230RqkPfNkeaooAWtRzPK6GBpM/O1NCaKOednL"
|
||||||
|
"KExjBLwRCt/JvepPnr6N/KZ+fvzz9ULs4/0C58JDPL+zmHQXwNyS+ZsY2grHPnaz3B5VAw"
|
||||||
|
"S6Qz3RpFBPO0+34C3EhBhz6RQKRI7/kSWXB5K3m8sdLj2uRxgWy7/vTy8h9Mf/k3"
|
||||||
|
)
|
||||||
43
migrations/models/4_20260404080201_add_image_key.py
Normal file
43
migrations/models/4_20260404080201_add_image_key.py
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
from tortoise import BaseDBAsyncClient
|
||||||
|
|
||||||
|
RUN_IN_TRANSACTION = True
|
||||||
|
|
||||||
|
|
||||||
|
async def upgrade(db: BaseDBAsyncClient) -> str:
|
||||||
|
return """
|
||||||
|
ALTER TABLE "conversation_messages" ADD "image_key" VARCHAR(512);"""
|
||||||
|
|
||||||
|
|
||||||
|
async def downgrade(db: BaseDBAsyncClient) -> str:
|
||||||
|
return """
|
||||||
|
ALTER TABLE "conversation_messages" DROP COLUMN "image_key";"""
|
||||||
|
|
||||||
|
|
||||||
|
MODELS_STATE = (
|
||||||
|
"eJztmmtv4jgUhv8KyqeO1K0KvcyoWq0UWrrDToFVC3PrVpFJXPCS2JnEGYqq/ve1TUIcx6"
|
||||||
|
"GkBQqzfGnLsQ+xH1/Oe076aHjEgW54cE7wTxiEgCKCjbPKo4GBB9kf2vb9igF8P23lBgr6"
|
||||||
|
"rnCwpZ6iBfRDGgCbssZ74IaQmRwY2gHy44fhyHW5kdisI8KD1BRh9COCFiUDSIcwYA23d8"
|
||||||
|
"yMsAMfYJh89EfWPYKukxk3cvizhd2iE1/Yer3mxaXoyR/Xt2ziRh5Oe/sTOiR41j2KkHPA"
|
||||||
|
"fXjbAGIYAAodaRp8lPG0E9N0xMxAgwjOhuqkBgfeg8jlMIzf7yNscwYV8ST+4/gPowQehp"
|
||||||
|
"qjRZhyFo9P01mlcxZWgz/q/KN5vXd0+k7MkoR0EIhGQcR4Eo6Agqmr4JqCFL9zKM+HINCj"
|
||||||
|
"TPorMNlAX4IxMaQc0z2UgEwAvYya4YEHy4V4QIfsY+3kZA7Gz+a1IMl6CZSE7evprm/HTb"
|
||||||
|
"VpG0eaIrQDyKdsAZoHecFaKPKgHmbWU0HqxK4HyR8bCpjNwelgdxIfgjl8u81W46Zrtv7m"
|
||||||
|
"M/HC8IcrEJndBm+pCetEse6dKksx+5LKl2b3Y4V/rHzvtBvq3p/16343+JhARImFydgCjn"
|
||||||
|
"ReE2sCJrOwke+8cGGznruFfdOFjQcvrWsIA6tcBJFcXhFG4rGtcRFfHjR46L0faWMGh5GH"
|
||||||
|
"d0kCiAb4E5wIhE02DoBtXbCIRUcv/ppNhZZa01EEYDxTI/KmYLNjc4J0Gj3Nm3PzomEIhn"
|
||||||
|
"1gj8YgcKwCmB4MQzCAYR5oPfa8/HQN3Zk007OUBVxr+o2beasUsRWsSI1IjDL08k1ezVMt"
|
||||||
|
"ALN5O/Gz+ZPm0HlGBUsQFxPDlryQO1G81aKYwgdNyO8yqx5l0n9bRPG8UN742s1E8UT67r"
|
||||||
|
"XMr+8ykfyq0/4z6S5J5fOrTn2nkH9FIZVXyKEPwUgnBngC2cCRl5MCmdWV3N/46Bi9m8b1"
|
||||||
|
"WYVH9H/wTbNVN88qIfL6wFhsiTNZZvVwgSSzeliYY/Km7AFCHoss1ghOyqTqGacX8V2/9M"
|
||||||
|
"qCPKnWFiDJehWiFG3KZSQH7XIhU+O6zPi5pemArRQPX5kWqLXIjaX4bH6g2S5l84RVqmKR"
|
||||||
|
"f2lkcJKXFetefk3udO7261y+jmULwLLPtujdNRSBfRCGYxJodmYdYRBM9DBlLwVmf0Knue"
|
||||||
|
"TGxeg58Opc+8vSlSGrN9vm9Td9+pD0l/dt/Vu3YSp0oQeQW2aXzhyWs0WfP/HL3KDVw8UE"
|
||||||
|
"5DwFmZOQ4yGgIbvSLabK+0WSXQ9T47oUObleqkeLQD0qZnqUQyo2mQUxn57u4BPiQoDnbF"
|
||||||
|
"DZVz3+zHlVl2nZUF3i/Hc6V5nzX2+q5YFeq95gm1dgZp3QVAo1210t3KEHbKYRRlCjLJ85"
|
||||||
|
"/YrvFu7Y6uki14Ca/ku3wKm6YwlybCuM+v9CW1OKKQaq+m0hzJVEfRDRoeUH5Cdyyl2pOc"
|
||||||
|
"f1SSnDJTZwX6FFlRx9kWv1pPhaPcldq64DfGsQkMjXvBT566bT1iNV3BSgPcxmeesgm+5X"
|
||||||
|
"XBTSu5Xhvb1bjc7nM59fmVWLsIqw4l+wq8z+Tyqzu/9d+CUWdvZqNFcVeu69cu4f9Zbzcn"
|
||||||
|
"mTM9L1vlQ2YYDsoaEpoMUt+/NKaCDtszE1tCYueL+pLaFxzMpmiFf+TTNp8Wr/t1r1+P3x"
|
||||||
|
"h6PT4w+sixjJzPJ+zmWQpCHFJTN+ELR17mKtJ7nsCmapdGZHo4xgnnbfToArKeiwJ1KINe"
|
||||||
|
"G9WCJLLm8lj1dWelyaPC4RbZcfXp7+AzcBYwM="
|
||||||
|
)
|
||||||
@@ -20,7 +20,7 @@ dependencies = [
|
|||||||
"pony>=0.7.19",
|
"pony>=0.7.19",
|
||||||
"flask-login>=0.6.3",
|
"flask-login>=0.6.3",
|
||||||
"quart>=0.20.0",
|
"quart>=0.20.0",
|
||||||
"tortoise-orm>=0.25.1",
|
"tortoise-orm>=0.25.1,<1.0.0",
|
||||||
"quart-jwt-extended>=0.1.0",
|
"quart-jwt-extended>=0.1.0",
|
||||||
"pre-commit>=4.3.0",
|
"pre-commit>=4.3.0",
|
||||||
"tortoise-orm-stubs>=1.0.2",
|
"tortoise-orm-stubs>=1.0.2",
|
||||||
@@ -35,9 +35,12 @@ dependencies = [
|
|||||||
"jq>=1.10.0",
|
"jq>=1.10.0",
|
||||||
"tavily-python>=0.7.17",
|
"tavily-python>=0.7.17",
|
||||||
"ynab>=1.3.0",
|
"ynab>=1.3.0",
|
||||||
|
"ollama>=0.6.1",
|
||||||
|
"twilio>=9.10.2",
|
||||||
|
"aioboto3>=13.0.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.aerich]
|
[tool.aerich]
|
||||||
tortoise_orm = "app.TORTOISE_CONFIG"
|
tortoise_orm = "config.db.TORTOISE_CONFIG"
|
||||||
location = "./migrations"
|
location = "./migrations"
|
||||||
src_folder = "./."
|
src_folder = "./."
|
||||||
|
|||||||
@@ -12,11 +12,15 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"axios": "^1.12.2",
|
"axios": "^1.12.2",
|
||||||
|
"class-variance-authority": "^0.7.1",
|
||||||
|
"clsx": "^2.1.1",
|
||||||
|
"lucide-react": "^0.577.0",
|
||||||
"marked": "^16.3.0",
|
"marked": "^16.3.0",
|
||||||
"npm-watch": "^0.13.0",
|
"npm-watch": "^0.13.0",
|
||||||
"react": "^19.1.1",
|
"react": "^19.1.1",
|
||||||
"react-dom": "^19.1.1",
|
"react-dom": "^19.1.1",
|
||||||
"react-markdown": "^10.1.0",
|
"react-markdown": "^10.1.0",
|
||||||
|
"tailwind-merge": "^3.5.0",
|
||||||
"watch": "^1.0.2"
|
"watch": "^1.0.2"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
|||||||
BIN
raggr-frontend/public/apple-touch-icon.png
Normal file
BIN
raggr-frontend/public/apple-touch-icon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 17 KiB |
14
raggr-frontend/public/manifest.json
Normal file
14
raggr-frontend/public/manifest.json
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"name": "Ask Simba",
|
||||||
|
"short_name": "Simba",
|
||||||
|
"description": "Chat with Simba - your AI cat companion",
|
||||||
|
"start_url": "/",
|
||||||
|
"display": "standalone",
|
||||||
|
"background_color": "#FAF8F2",
|
||||||
|
"theme_color": "#2A4D38",
|
||||||
|
"icons": [
|
||||||
|
{ "src": "/pwa-icon-192.png", "sizes": "192x192", "type": "image/png" },
|
||||||
|
{ "src": "/pwa-icon-512.png", "sizes": "512x512", "type": "image/png" },
|
||||||
|
{ "src": "/pwa-icon-512.png", "sizes": "512x512", "type": "image/png", "purpose": "maskable" }
|
||||||
|
]
|
||||||
|
}
|
||||||
BIN
raggr-frontend/public/pwa-icon-192.png
Normal file
BIN
raggr-frontend/public/pwa-icon-192.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 19 KiB |
BIN
raggr-frontend/public/pwa-icon-512.png
Normal file
BIN
raggr-frontend/public/pwa-icon-512.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 91 KiB |
46
raggr-frontend/public/sw.js
Normal file
46
raggr-frontend/public/sw.js
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
const CACHE = 'simba-v1';
|
||||||
|
|
||||||
|
self.addEventListener('install', (e) => {
|
||||||
|
self.skipWaiting();
|
||||||
|
});
|
||||||
|
|
||||||
|
self.addEventListener('activate', (e) => {
|
||||||
|
e.waitUntil(
|
||||||
|
caches.keys().then((keys) =>
|
||||||
|
Promise.all(keys.filter((k) => k !== CACHE).map((k) => caches.delete(k)))
|
||||||
|
)
|
||||||
|
);
|
||||||
|
self.clients.claim();
|
||||||
|
});
|
||||||
|
|
||||||
|
self.addEventListener('fetch', (e) => {
|
||||||
|
const { request } = e;
|
||||||
|
const url = new URL(request.url);
|
||||||
|
|
||||||
|
// Network-only for API calls
|
||||||
|
if (url.pathname.startsWith('/api/')) return;
|
||||||
|
|
||||||
|
// Cache-first for fingerprinted static assets
|
||||||
|
if (url.pathname.startsWith('/static/')) {
|
||||||
|
e.respondWith(
|
||||||
|
caches.match(request).then(
|
||||||
|
(cached) =>
|
||||||
|
cached ||
|
||||||
|
fetch(request).then((res) => {
|
||||||
|
const clone = res.clone();
|
||||||
|
caches.open(CACHE).then((c) => c.put(request, clone));
|
||||||
|
return res;
|
||||||
|
})
|
||||||
|
)
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Network-first for navigation (offline fallback to cache)
|
||||||
|
if (request.mode === 'navigate') {
|
||||||
|
e.respondWith(
|
||||||
|
fetch(request).catch(() => caches.match(request))
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
});
|
||||||
@@ -4,7 +4,16 @@ import { pluginReact } from '@rsbuild/plugin-react';
|
|||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
plugins: [pluginReact()],
|
plugins: [pluginReact()],
|
||||||
html: {
|
html: {
|
||||||
title: 'Raggr',
|
title: 'Ask Simba',
|
||||||
favicon: './src/assets/favicon.svg',
|
favicon: './src/assets/favicon.svg',
|
||||||
|
tags: [
|
||||||
|
{ tag: 'link', attrs: { rel: 'manifest', href: '/manifest.json' } },
|
||||||
|
{ tag: 'meta', attrs: { name: 'theme-color', content: '#2A4D38' } },
|
||||||
|
{ tag: 'link', attrs: { rel: 'apple-touch-icon', href: '/apple-touch-icon.png' } },
|
||||||
|
{ tag: 'meta', attrs: { name: 'apple-mobile-web-app-capable', content: 'yes' } },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
output: {
|
||||||
|
copy: [{ from: './public', to: '.' }],
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,7 +1,173 @@
|
|||||||
|
@import url('https://fonts.googleapis.com/css2?family=Nunito:wght@400;500;600;700;800&family=Playfair+Display:ital,wght@0,600;0,700;1,600&display=swap');
|
||||||
@import "tailwindcss";
|
@import "tailwindcss";
|
||||||
|
|
||||||
body {
|
@theme {
|
||||||
margin: 0;
|
/* === Animal Crossing × Claude Palette === */
|
||||||
font-family: Inter, Avenir, Helvetica, Arial, sans-serif;
|
|
||||||
background-color: #F9F5EB;
|
/* Backgrounds */
|
||||||
|
--color-cream: #FAF8F2;
|
||||||
|
--color-cream-dark: #F0EBDF;
|
||||||
|
--color-warm-white: #FFFDF9;
|
||||||
|
|
||||||
|
/* Forest / Nook Green system */
|
||||||
|
--color-forest: #2A4D38;
|
||||||
|
--color-forest-mid: #345E46;
|
||||||
|
--color-forest-light: #4D7A5E;
|
||||||
|
--color-leaf: #5E9E70;
|
||||||
|
--color-leaf-dark: #3D7A52;
|
||||||
|
--color-leaf-light: #B8DEC4;
|
||||||
|
--color-leaf-pale: #EBF7EE;
|
||||||
|
|
||||||
|
/* Amber / warm accents */
|
||||||
|
--color-amber-glow: #E8943A;
|
||||||
|
--color-amber-dark: #C97828;
|
||||||
|
--color-amber-soft: #F5C882;
|
||||||
|
--color-amber-pale: #FFF4E0;
|
||||||
|
|
||||||
|
/* Neutrals */
|
||||||
|
--color-charcoal: #2C2420;
|
||||||
|
--color-warm-gray: #7A7268;
|
||||||
|
--color-sand: #DECFB8;
|
||||||
|
--color-sand-light: #EDE3D4;
|
||||||
|
--color-blush: #F2D1B3;
|
||||||
|
|
||||||
|
/* Sidebar */
|
||||||
|
--color-sidebar-bg: #2A4D38;
|
||||||
|
--color-sidebar-hover: #345E46;
|
||||||
|
--color-sidebar-active: #3D6E52;
|
||||||
|
|
||||||
|
/* Fonts */
|
||||||
|
--font-display: 'Playfair Display', Georgia, serif;
|
||||||
|
--font-body: 'Nunito', 'Nunito Sans', system-ui, sans-serif;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
* {
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
font-family: var(--font-body);
|
||||||
|
background-color: var(--color-cream);
|
||||||
|
color: var(--color-charcoal);
|
||||||
|
-webkit-font-smoothing: antialiased;
|
||||||
|
-moz-osx-font-smoothing: grayscale;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── Scrollbar ─────────────────────────────────────── */
|
||||||
|
::-webkit-scrollbar { width: 5px; }
|
||||||
|
::-webkit-scrollbar-track { background: transparent; }
|
||||||
|
::-webkit-scrollbar-thumb { background: var(--color-sand); border-radius: 99px; }
|
||||||
|
::-webkit-scrollbar-thumb:hover { background: var(--color-warm-gray); }
|
||||||
|
|
||||||
|
/* ── Markdown in answer bubbles ─────────────────────── */
|
||||||
|
.markdown-content p { margin: 0.5em 0; line-height: 1.7; }
|
||||||
|
.markdown-content p:first-child { margin-top: 0; }
|
||||||
|
.markdown-content p:last-child { margin-bottom: 0; }
|
||||||
|
|
||||||
|
.markdown-content h1,
|
||||||
|
.markdown-content h2,
|
||||||
|
.markdown-content h3 {
|
||||||
|
font-family: var(--font-display);
|
||||||
|
font-weight: 600;
|
||||||
|
margin: 1em 0 0.4em;
|
||||||
|
line-height: 1.3;
|
||||||
|
color: var(--color-charcoal);
|
||||||
|
}
|
||||||
|
.markdown-content h1 { font-size: 1.2rem; }
|
||||||
|
.markdown-content h2 { font-size: 1.05rem; }
|
||||||
|
.markdown-content h3 { font-size: 0.95rem; }
|
||||||
|
|
||||||
|
.markdown-content ul,
|
||||||
|
.markdown-content ol { padding-left: 1.4em; margin: 0.5em 0; }
|
||||||
|
.markdown-content li { margin: 0.3em 0; line-height: 1.6; }
|
||||||
|
|
||||||
|
.markdown-content code {
|
||||||
|
background: rgba(0,0,0,0.06);
|
||||||
|
padding: 0.15em 0.4em;
|
||||||
|
border-radius: 5px;
|
||||||
|
font-size: 0.85em;
|
||||||
|
font-family: 'SF Mono', 'Fira Code', 'Cascadia Code', monospace;
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-content pre {
|
||||||
|
background: var(--color-charcoal);
|
||||||
|
color: #F0EBDF;
|
||||||
|
padding: 1em 1.1em;
|
||||||
|
border-radius: 12px;
|
||||||
|
overflow-x: auto;
|
||||||
|
margin: 0.8em 0;
|
||||||
|
}
|
||||||
|
.markdown-content pre code { background: none; padding: 0; color: inherit; }
|
||||||
|
|
||||||
|
.markdown-content a {
|
||||||
|
color: var(--color-leaf-dark);
|
||||||
|
text-decoration: underline;
|
||||||
|
text-underline-offset: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-content blockquote {
|
||||||
|
border-left: 3px solid var(--color-amber-soft);
|
||||||
|
padding-left: 1em;
|
||||||
|
margin: 0.75em 0;
|
||||||
|
color: var(--color-warm-gray);
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
|
||||||
|
.markdown-content strong { font-weight: 700; }
|
||||||
|
.markdown-content em { font-style: italic; }
|
||||||
|
|
||||||
|
/* ── Animations ─────────────────────────────────────── */
|
||||||
|
@keyframes fadeSlideUp {
|
||||||
|
from { opacity: 0; transform: translateY(10px); }
|
||||||
|
to { opacity: 1; transform: translateY(0); }
|
||||||
|
}
|
||||||
|
.message-enter {
|
||||||
|
animation: fadeSlideUp 0.3s ease-out forwards;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes catPulse {
|
||||||
|
0%, 80%, 100% { opacity: 0.25; transform: scale(0.75); }
|
||||||
|
40% { opacity: 1; transform: scale(1); }
|
||||||
|
}
|
||||||
|
.loading-dot { animation: catPulse 1.4s ease-in-out infinite; }
|
||||||
|
.loading-dot:nth-child(2) { animation-delay: 0.2s; }
|
||||||
|
.loading-dot:nth-child(3) { animation-delay: 0.4s; }
|
||||||
|
|
||||||
|
@keyframes shimmer {
|
||||||
|
0% { background-position: -200% 0; }
|
||||||
|
100% { background-position: 200% 0; }
|
||||||
|
}
|
||||||
|
.skeleton-shimmer {
|
||||||
|
background: linear-gradient(90deg,
|
||||||
|
var(--color-sand-light) 25%,
|
||||||
|
var(--color-cream) 50%,
|
||||||
|
var(--color-sand-light) 75%
|
||||||
|
);
|
||||||
|
background-size: 200% 100%;
|
||||||
|
animation: shimmer 1.8s ease-in-out infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ── Toggle switch ──────────────────────────────────── */
|
||||||
|
.toggle-track {
|
||||||
|
width: 36px;
|
||||||
|
height: 20px;
|
||||||
|
border-radius: 99px;
|
||||||
|
background: var(--color-sand);
|
||||||
|
position: relative;
|
||||||
|
transition: background 0.2s;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
.toggle-track.checked { background: var(--color-leaf); }
|
||||||
|
.toggle-thumb {
|
||||||
|
width: 14px;
|
||||||
|
height: 14px;
|
||||||
|
background: white;
|
||||||
|
border-radius: 99px;
|
||||||
|
position: absolute;
|
||||||
|
top: 3px;
|
||||||
|
left: 3px;
|
||||||
|
transition: transform 0.2s;
|
||||||
|
box-shadow: 0 1px 3px rgba(0,0,0,0.15);
|
||||||
|
}
|
||||||
|
.toggle-track.checked .toggle-thumb { transform: translateX(16px); }
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import { AuthProvider } from "./contexts/AuthContext";
|
|||||||
import { ChatScreen } from "./components/ChatScreen";
|
import { ChatScreen } from "./components/ChatScreen";
|
||||||
import { LoginScreen } from "./components/LoginScreen";
|
import { LoginScreen } from "./components/LoginScreen";
|
||||||
import { conversationService } from "./api/conversationService";
|
import { conversationService } from "./api/conversationService";
|
||||||
|
import catIcon from "./assets/cat.png";
|
||||||
|
|
||||||
const AppContainer = () => {
|
const AppContainer = () => {
|
||||||
const [isAuthenticated, setAuthenticated] = useState<boolean>(false);
|
const [isAuthenticated, setAuthenticated] = useState<boolean>(false);
|
||||||
@@ -44,8 +45,15 @@ const AppContainer = () => {
|
|||||||
// Show loading state while checking authentication
|
// Show loading state while checking authentication
|
||||||
if (isChecking) {
|
if (isChecking) {
|
||||||
return (
|
return (
|
||||||
<div className="h-screen flex items-center justify-center bg-white/85">
|
<div className="h-screen flex flex-col items-center justify-center bg-cream gap-4">
|
||||||
<div className="text-xl">Loading...</div>
|
<img
|
||||||
|
src={catIcon}
|
||||||
|
alt="Simba"
|
||||||
|
className="w-16 h-16 animate-bounce"
|
||||||
|
/>
|
||||||
|
<p className="text-warm-gray font-medium text-lg tracking-wide">
|
||||||
|
waking up simba...
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,19 @@
|
|||||||
import { userService } from "./userService";
|
import { userService } from "./userService";
|
||||||
|
|
||||||
|
export type SSEEvent =
|
||||||
|
| { type: "tool_start"; tool: string }
|
||||||
|
| { type: "tool_end"; tool: string }
|
||||||
|
| { type: "response"; message: string }
|
||||||
|
| { type: "error"; message: string };
|
||||||
|
|
||||||
|
export type SSEEventCallback = (event: SSEEvent) => void;
|
||||||
|
|
||||||
interface Message {
|
interface Message {
|
||||||
id: string;
|
id: string;
|
||||||
text: string;
|
text: string;
|
||||||
speaker: "user" | "simba";
|
speaker: "user" | "simba";
|
||||||
created_at: string;
|
created_at: string;
|
||||||
|
image_key?: string | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface Conversation {
|
interface Conversation {
|
||||||
@@ -112,6 +121,94 @@ class ConversationService {
|
|||||||
|
|
||||||
return await response.json();
|
return await response.json();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async uploadImage(
|
||||||
|
file: File,
|
||||||
|
conversationId: string,
|
||||||
|
): Promise<{ image_key: string; image_url: string }> {
|
||||||
|
const formData = new FormData();
|
||||||
|
formData.append("file", file);
|
||||||
|
formData.append("conversation_id", conversationId);
|
||||||
|
|
||||||
|
const response = await userService.fetchWithRefreshToken(
|
||||||
|
`${this.conversationBaseUrl}/upload-image`,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
body: formData,
|
||||||
|
},
|
||||||
|
{ skipContentType: true },
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const data = await response.json();
|
||||||
|
throw new Error(data.error || "Failed to upload image");
|
||||||
|
}
|
||||||
|
|
||||||
|
return await response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
getImageUrl(imageKey: string): string {
|
||||||
|
return `/api/conversation/image/${imageKey}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
async streamQuery(
|
||||||
|
query: string,
|
||||||
|
conversation_id: string,
|
||||||
|
onEvent: SSEEventCallback,
|
||||||
|
signal?: AbortSignal,
|
||||||
|
imageKey?: string,
|
||||||
|
): Promise<void> {
|
||||||
|
const body: Record<string, string> = { query, conversation_id };
|
||||||
|
if (imageKey) {
|
||||||
|
body.image_key = imageKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await userService.fetchWithRefreshToken(
|
||||||
|
`${this.conversationBaseUrl}/stream-query`,
|
||||||
|
{
|
||||||
|
method: "POST",
|
||||||
|
body: JSON.stringify(body),
|
||||||
|
signal,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error("Failed to stream query");
|
||||||
|
}
|
||||||
|
|
||||||
|
await this._readSSEStream(response, onEvent);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async _readSSEStream(
|
||||||
|
response: Response,
|
||||||
|
onEvent: SSEEventCallback,
|
||||||
|
): Promise<void> {
|
||||||
|
const reader = response.body!.getReader();
|
||||||
|
const decoder = new TextDecoder();
|
||||||
|
let buffer = "";
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
const { done, value } = await reader.read();
|
||||||
|
if (done) break;
|
||||||
|
|
||||||
|
buffer += decoder.decode(value, { stream: true });
|
||||||
|
const parts = buffer.split("\n\n");
|
||||||
|
buffer = parts.pop() ?? "";
|
||||||
|
|
||||||
|
for (const part of parts) {
|
||||||
|
const line = part.trim();
|
||||||
|
if (!line.startsWith("data: ")) continue;
|
||||||
|
const data = line.slice(6);
|
||||||
|
if (data === "[DONE]") return;
|
||||||
|
try {
|
||||||
|
const event = JSON.parse(data) as SSEEvent;
|
||||||
|
onEvent(event);
|
||||||
|
} catch {
|
||||||
|
// ignore malformed events
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const conversationService = new ConversationService();
|
export const conversationService = new ConversationService();
|
||||||
|
|||||||
@@ -106,14 +106,15 @@ class UserService {
|
|||||||
async fetchWithRefreshToken(
|
async fetchWithRefreshToken(
|
||||||
url: string,
|
url: string,
|
||||||
options: RequestInit = {},
|
options: RequestInit = {},
|
||||||
|
{ skipContentType = false }: { skipContentType?: boolean } = {},
|
||||||
): Promise<Response> {
|
): Promise<Response> {
|
||||||
const refreshToken = localStorage.getItem("refresh_token");
|
const refreshToken = localStorage.getItem("refresh_token");
|
||||||
|
|
||||||
// Add authorization header
|
// Add authorization header
|
||||||
const headers = {
|
const headers: Record<string, string> = {
|
||||||
"Content-Type": "application/json",
|
...(skipContentType ? {} : { "Content-Type": "application/json" }),
|
||||||
...(options.headers || {}),
|
...((options.headers as Record<string, string>) || {}),
|
||||||
...(refreshToken && { Authorization: `Bearer ${refreshToken}` }),
|
...(refreshToken ? { Authorization: `Bearer ${refreshToken}` } : {}),
|
||||||
};
|
};
|
||||||
|
|
||||||
let response = await fetch(url, { ...options, headers });
|
let response = await fetch(url, { ...options, headers });
|
||||||
@@ -134,6 +135,67 @@ class UserService {
|
|||||||
|
|
||||||
return response;
|
return response;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async getMe(): Promise<{ id: string; username: string; email: string; is_admin: boolean }> {
|
||||||
|
const response = await this.fetchWithRefreshToken(`${this.baseUrl}/me`);
|
||||||
|
if (!response.ok) throw new Error("Failed to fetch user profile");
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
async adminListUsers(): Promise<AdminUserRecord[]> {
|
||||||
|
const response = await this.fetchWithRefreshToken(`${this.baseUrl}/admin/users`);
|
||||||
|
if (!response.ok) throw new Error("Failed to list users");
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
async adminSetWhatsapp(userId: string, number: string): Promise<AdminUserRecord> {
|
||||||
|
const response = await this.fetchWithRefreshToken(
|
||||||
|
`${this.baseUrl}/admin/users/${userId}/whatsapp`,
|
||||||
|
{ method: "PUT", body: JSON.stringify({ whatsapp_number: number }) },
|
||||||
|
);
|
||||||
|
if (response.status === 409) {
|
||||||
|
const data = await response.json();
|
||||||
|
throw new Error(data.error ?? "WhatsApp number already in use");
|
||||||
|
}
|
||||||
|
if (!response.ok) throw new Error("Failed to set WhatsApp number");
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
async adminUnlinkWhatsapp(userId: string): Promise<void> {
|
||||||
|
const response = await this.fetchWithRefreshToken(
|
||||||
|
`${this.baseUrl}/admin/users/${userId}/whatsapp`,
|
||||||
|
{ method: "DELETE" },
|
||||||
|
);
|
||||||
|
if (!response.ok) throw new Error("Failed to unlink WhatsApp number");
|
||||||
|
}
|
||||||
|
|
||||||
|
async adminToggleEmail(userId: string): Promise<AdminUserRecord> {
|
||||||
|
const response = await this.fetchWithRefreshToken(
|
||||||
|
`${this.baseUrl}/admin/users/${userId}/email`,
|
||||||
|
{ method: "PUT" },
|
||||||
|
);
|
||||||
|
if (!response.ok) throw new Error("Failed to enable email");
|
||||||
|
return response.json();
|
||||||
|
}
|
||||||
|
|
||||||
|
async adminDisableEmail(userId: string): Promise<void> {
|
||||||
|
const response = await this.fetchWithRefreshToken(
|
||||||
|
`${this.baseUrl}/admin/users/${userId}/email`,
|
||||||
|
{ method: "DELETE" },
|
||||||
|
);
|
||||||
|
if (!response.ok) throw new Error("Failed to disable email");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface AdminUserRecord {
|
||||||
|
id: string;
|
||||||
|
username: string;
|
||||||
|
email: string;
|
||||||
|
whatsapp_number: string | null;
|
||||||
|
auth_provider: string;
|
||||||
|
email_enabled: boolean;
|
||||||
|
email_address: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export { UserService };
|
||||||
export const userService = new UserService();
|
export const userService = new UserService();
|
||||||
|
|||||||
Binary file not shown.
|
Before Width: | Height: | Size: 5.8 KiB After Width: | Height: | Size: 91 KiB |
312
raggr-frontend/src/components/AdminPanel.tsx
Normal file
312
raggr-frontend/src/components/AdminPanel.tsx
Normal file
@@ -0,0 +1,312 @@
|
|||||||
|
import { useEffect, useState } from "react";
|
||||||
|
import { X, Phone, PhoneOff, Pencil, Check, Mail, Copy } from "lucide-react";
|
||||||
|
import { userService, type AdminUserRecord } from "../api/userService";
|
||||||
|
import { cn } from "../lib/utils";
|
||||||
|
import { Button } from "./ui/button";
|
||||||
|
import { Input } from "./ui/input";
|
||||||
|
import {
|
||||||
|
Table,
|
||||||
|
TableBody,
|
||||||
|
TableCell,
|
||||||
|
TableHead,
|
||||||
|
TableHeader,
|
||||||
|
TableRow,
|
||||||
|
} from "./ui/table";
|
||||||
|
|
||||||
|
type Props = {
|
||||||
|
onClose: () => void;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const AdminPanel = ({ onClose }: Props) => {
|
||||||
|
const [users, setUsers] = useState<AdminUserRecord[]>([]);
|
||||||
|
const [loading, setLoading] = useState(true);
|
||||||
|
const [editingId, setEditingId] = useState<string | null>(null);
|
||||||
|
const [editValue, setEditValue] = useState("");
|
||||||
|
const [rowError, setRowError] = useState<Record<string, string>>({});
|
||||||
|
const [rowSuccess, setRowSuccess] = useState<Record<string, string>>({});
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
userService
|
||||||
|
.adminListUsers()
|
||||||
|
.then(setUsers)
|
||||||
|
.catch(() => {})
|
||||||
|
.finally(() => setLoading(false));
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const startEdit = (user: AdminUserRecord) => {
|
||||||
|
setEditingId(user.id);
|
||||||
|
setEditValue(user.whatsapp_number ?? "");
|
||||||
|
setRowError((p) => ({ ...p, [user.id]: "" }));
|
||||||
|
setRowSuccess((p) => ({ ...p, [user.id]: "" }));
|
||||||
|
};
|
||||||
|
|
||||||
|
const cancelEdit = () => {
|
||||||
|
setEditingId(null);
|
||||||
|
setEditValue("");
|
||||||
|
};
|
||||||
|
|
||||||
|
const saveWhatsapp = async (userId: string) => {
|
||||||
|
setRowError((p) => ({ ...p, [userId]: "" }));
|
||||||
|
try {
|
||||||
|
const updated = await userService.adminSetWhatsapp(userId, editValue);
|
||||||
|
setUsers((p) => p.map((u) => (u.id === userId ? updated : u)));
|
||||||
|
setRowSuccess((p) => ({ ...p, [userId]: "Saved ✓" }));
|
||||||
|
setEditingId(null);
|
||||||
|
setTimeout(() => setRowSuccess((p) => ({ ...p, [userId]: "" })), 2000);
|
||||||
|
} catch (err) {
|
||||||
|
setRowError((p) => ({
|
||||||
|
...p,
|
||||||
|
[userId]: err instanceof Error ? err.message : "Failed to save",
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const unlinkWhatsapp = async (userId: string) => {
|
||||||
|
setRowError((p) => ({ ...p, [userId]: "" }));
|
||||||
|
try {
|
||||||
|
await userService.adminUnlinkWhatsapp(userId);
|
||||||
|
setUsers((p) =>
|
||||||
|
p.map((u) => (u.id === userId ? { ...u, whatsapp_number: null } : u)),
|
||||||
|
);
|
||||||
|
setRowSuccess((p) => ({ ...p, [userId]: "Unlinked ✓" }));
|
||||||
|
setTimeout(() => setRowSuccess((p) => ({ ...p, [userId]: "" })), 2000);
|
||||||
|
} catch (err) {
|
||||||
|
setRowError((p) => ({
|
||||||
|
...p,
|
||||||
|
[userId]: err instanceof Error ? err.message : "Failed to unlink",
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const toggleEmail = async (userId: string) => {
|
||||||
|
setRowError((p) => ({ ...p, [userId]: "" }));
|
||||||
|
try {
|
||||||
|
const updated = await userService.adminToggleEmail(userId);
|
||||||
|
setUsers((p) => p.map((u) => (u.id === userId ? updated : u)));
|
||||||
|
setRowSuccess((p) => ({ ...p, [userId]: "Email enabled ✓" }));
|
||||||
|
setTimeout(() => setRowSuccess((p) => ({ ...p, [userId]: "" })), 2000);
|
||||||
|
} catch (err) {
|
||||||
|
setRowError((p) => ({
|
||||||
|
...p,
|
||||||
|
[userId]: err instanceof Error ? err.message : "Failed to enable email",
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const disableEmail = async (userId: string) => {
|
||||||
|
setRowError((p) => ({ ...p, [userId]: "" }));
|
||||||
|
try {
|
||||||
|
await userService.adminDisableEmail(userId);
|
||||||
|
setUsers((p) =>
|
||||||
|
p.map((u) => (u.id === userId ? { ...u, email_enabled: false, email_address: null } : u)),
|
||||||
|
);
|
||||||
|
setRowSuccess((p) => ({ ...p, [userId]: "Email disabled ✓" }));
|
||||||
|
setTimeout(() => setRowSuccess((p) => ({ ...p, [userId]: "" })), 2000);
|
||||||
|
} catch (err) {
|
||||||
|
setRowError((p) => ({
|
||||||
|
...p,
|
||||||
|
[userId]: err instanceof Error ? err.message : "Failed to disable email",
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const copyToClipboard = (text: string, userId: string) => {
|
||||||
|
navigator.clipboard.writeText(text);
|
||||||
|
setRowSuccess((p) => ({ ...p, [userId]: "Copied ✓" }));
|
||||||
|
setTimeout(() => setRowSuccess((p) => ({ ...p, [userId]: "" })), 2000);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
className="fixed inset-0 z-50 flex items-center justify-center bg-charcoal/40 backdrop-blur-sm"
|
||||||
|
onClick={(e) => e.target === e.currentTarget && onClose()}
|
||||||
|
>
|
||||||
|
<div
|
||||||
|
className={cn(
|
||||||
|
"bg-warm-white rounded-3xl shadow-2xl shadow-charcoal/20",
|
||||||
|
"w-full max-w-3xl mx-4 max-h-[82vh] flex flex-col",
|
||||||
|
"border border-sand-light/60",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex items-center justify-between px-6 py-4 border-b border-sand-light/60">
|
||||||
|
<div className="flex items-center gap-2.5">
|
||||||
|
<div className="w-8 h-8 rounded-xl bg-leaf-pale flex items-center justify-center">
|
||||||
|
<Phone size={14} className="text-leaf-dark" />
|
||||||
|
</div>
|
||||||
|
<h2 className="text-sm font-semibold text-charcoal">
|
||||||
|
Admin · User Integrations
|
||||||
|
</h2>
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
onClick={onClose}
|
||||||
|
className="w-7 h-7 rounded-lg flex items-center justify-center text-warm-gray hover:text-charcoal hover:bg-cream-dark transition-colors cursor-pointer"
|
||||||
|
>
|
||||||
|
<X size={15} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Body */}
|
||||||
|
<div className="overflow-y-auto flex-1 rounded-b-3xl">
|
||||||
|
{loading ? (
|
||||||
|
<div className="px-6 py-12 text-center text-warm-gray text-sm">
|
||||||
|
<div className="flex justify-center gap-1.5 mb-3">
|
||||||
|
<span className="loading-dot w-2 h-2 rounded-full bg-amber-soft inline-block" />
|
||||||
|
<span className="loading-dot w-2 h-2 rounded-full bg-amber-soft inline-block" />
|
||||||
|
<span className="loading-dot w-2 h-2 rounded-full bg-amber-soft inline-block" />
|
||||||
|
</div>
|
||||||
|
Loading users…
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<Table>
|
||||||
|
<TableHeader>
|
||||||
|
<TableRow>
|
||||||
|
<TableHead>Username</TableHead>
|
||||||
|
<TableHead>Email</TableHead>
|
||||||
|
<TableHead>WhatsApp</TableHead>
|
||||||
|
<TableHead>Email</TableHead>
|
||||||
|
<TableHead className="w-28">Actions</TableHead>
|
||||||
|
</TableRow>
|
||||||
|
</TableHeader>
|
||||||
|
<TableBody>
|
||||||
|
{users.map((user) => (
|
||||||
|
<TableRow key={user.id}>
|
||||||
|
<TableCell className="font-medium text-charcoal">
|
||||||
|
{user.username}
|
||||||
|
</TableCell>
|
||||||
|
<TableCell className="text-warm-gray">{user.email}</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
{editingId === user.id ? (
|
||||||
|
<div className="flex flex-col gap-1">
|
||||||
|
<Input
|
||||||
|
value={editValue}
|
||||||
|
onChange={(e) => setEditValue(e.target.value)}
|
||||||
|
placeholder="whatsapp:+15551234567"
|
||||||
|
className="w-52"
|
||||||
|
autoFocus
|
||||||
|
onKeyDown={(e) =>
|
||||||
|
e.key === "Enter" && saveWhatsapp(user.id)
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
{rowError[user.id] && (
|
||||||
|
<span className="text-xs text-red-500">
|
||||||
|
{rowError[user.id]}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="flex flex-col gap-0.5">
|
||||||
|
<span
|
||||||
|
className={cn(
|
||||||
|
"text-sm",
|
||||||
|
user.whatsapp_number
|
||||||
|
? "text-charcoal"
|
||||||
|
: "text-warm-gray/40 italic",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{user.whatsapp_number ?? "—"}
|
||||||
|
</span>
|
||||||
|
{rowSuccess[user.id] && (
|
||||||
|
<span className="text-xs text-leaf-dark">
|
||||||
|
{rowSuccess[user.id]}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
{rowError[user.id] && (
|
||||||
|
<span className="text-xs text-red-500">
|
||||||
|
{rowError[user.id]}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
<div className="flex flex-col gap-0.5">
|
||||||
|
{user.email_enabled && user.email_address ? (
|
||||||
|
<div className="flex items-center gap-1.5">
|
||||||
|
<span className="text-sm text-charcoal truncate max-w-[180px]" title={user.email_address}>
|
||||||
|
{user.email_address}
|
||||||
|
</span>
|
||||||
|
<button
|
||||||
|
onClick={() => copyToClipboard(user.email_address!, user.id)}
|
||||||
|
className="text-warm-gray hover:text-charcoal transition-colors cursor-pointer"
|
||||||
|
title="Copy address"
|
||||||
|
>
|
||||||
|
<Copy size={11} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<span className="text-sm text-warm-gray/40 italic">—</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</TableCell>
|
||||||
|
<TableCell>
|
||||||
|
{editingId === user.id ? (
|
||||||
|
<div className="flex gap-1.5">
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="default"
|
||||||
|
onClick={() => saveWhatsapp(user.id)}
|
||||||
|
>
|
||||||
|
<Check size={12} />
|
||||||
|
Save
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="ghost-dark"
|
||||||
|
onClick={cancelEdit}
|
||||||
|
>
|
||||||
|
Cancel
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="flex gap-1.5">
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="ghost-dark"
|
||||||
|
onClick={() => startEdit(user)}
|
||||||
|
>
|
||||||
|
<Pencil size={11} />
|
||||||
|
Edit
|
||||||
|
</Button>
|
||||||
|
{user.whatsapp_number && (
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="destructive"
|
||||||
|
onClick={() => unlinkWhatsapp(user.id)}
|
||||||
|
>
|
||||||
|
<PhoneOff size={11} />
|
||||||
|
Unlink
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
{user.email_enabled ? (
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="destructive"
|
||||||
|
onClick={() => disableEmail(user.id)}
|
||||||
|
>
|
||||||
|
<Mail size={11} />
|
||||||
|
Email
|
||||||
|
</Button>
|
||||||
|
) : (
|
||||||
|
<Button
|
||||||
|
size="sm"
|
||||||
|
variant="ghost-dark"
|
||||||
|
onClick={() => toggleEmail(user.id)}
|
||||||
|
>
|
||||||
|
<Mail size={11} />
|
||||||
|
Email
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</TableCell>
|
||||||
|
</TableRow>
|
||||||
|
))}
|
||||||
|
</TableBody>
|
||||||
|
</Table>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
import ReactMarkdown from "react-markdown";
|
import ReactMarkdown from "react-markdown";
|
||||||
|
import { cn } from "../lib/utils";
|
||||||
|
|
||||||
type AnswerBubbleProps = {
|
type AnswerBubbleProps = {
|
||||||
text: string;
|
text: string;
|
||||||
@@ -7,25 +8,32 @@ type AnswerBubbleProps = {
|
|||||||
|
|
||||||
export const AnswerBubble = ({ text, loading }: AnswerBubbleProps) => {
|
export const AnswerBubble = ({ text, loading }: AnswerBubbleProps) => {
|
||||||
return (
|
return (
|
||||||
<div className="rounded-md bg-orange-100 p-3 sm:p-4 w-2/3">
|
<div className="flex justify-start message-enter">
|
||||||
{loading ? (
|
<div
|
||||||
<div className="flex flex-col w-full animate-pulse gap-2">
|
className={cn(
|
||||||
<div className="flex flex-row gap-2 w-full">
|
"max-w-[78%] rounded-3xl rounded-bl-md",
|
||||||
<div className="bg-gray-400 w-1/2 p-3 rounded-lg" />
|
"bg-warm-white border border-sand-light/70",
|
||||||
<div className="bg-gray-400 w-1/2 p-3 rounded-lg" />
|
"shadow-sm shadow-sand/30",
|
||||||
</div>
|
"overflow-hidden",
|
||||||
<div className="flex flex-row gap-2 w-full">
|
)}
|
||||||
<div className="bg-gray-400 w-1/3 p-3 rounded-lg" />
|
>
|
||||||
<div className="bg-gray-400 w-2/3 p-3 rounded-lg" />
|
{/* amber accent bar */}
|
||||||
</div>
|
<div className="h-0.5 w-full bg-gradient-to-r from-amber-soft via-amber-glow/50 to-transparent" />
|
||||||
|
|
||||||
|
<div className="px-4 py-3">
|
||||||
|
{loading ? (
|
||||||
|
<div className="flex items-center gap-1.5 py-1 px-1">
|
||||||
|
<span className="loading-dot w-2 h-2 rounded-full bg-amber-soft inline-block" />
|
||||||
|
<span className="loading-dot w-2 h-2 rounded-full bg-amber-soft inline-block" />
|
||||||
|
<span className="loading-dot w-2 h-2 rounded-full bg-amber-soft inline-block" />
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="markdown-content text-sm leading-relaxed text-charcoal">
|
||||||
|
<ReactMarkdown>{text}</ReactMarkdown>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
) : (
|
</div>
|
||||||
<div className=" flex flex-col break-words overflow-wrap-anywhere text-sm sm:text-base [&>*]:break-words">
|
|
||||||
<ReactMarkdown>
|
|
||||||
{"🐈: " + text}
|
|
||||||
</ReactMarkdown>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,19 +1,20 @@
|
|||||||
import { useEffect, useState, useRef } from "react";
|
import { useEffect, useState, useRef } from "react";
|
||||||
|
import { LogOut, Shield, PanelLeftClose, PanelLeftOpen, Menu, X } from "lucide-react";
|
||||||
import { conversationService } from "../api/conversationService";
|
import { conversationService } from "../api/conversationService";
|
||||||
|
import { userService } from "../api/userService";
|
||||||
import { QuestionBubble } from "./QuestionBubble";
|
import { QuestionBubble } from "./QuestionBubble";
|
||||||
import { AnswerBubble } from "./AnswerBubble";
|
import { AnswerBubble } from "./AnswerBubble";
|
||||||
|
import { ToolBubble } from "./ToolBubble";
|
||||||
import { MessageInput } from "./MessageInput";
|
import { MessageInput } from "./MessageInput";
|
||||||
import { ConversationList } from "./ConversationList";
|
import { ConversationList } from "./ConversationList";
|
||||||
|
import { AdminPanel } from "./AdminPanel";
|
||||||
|
import { cn } from "../lib/utils";
|
||||||
import catIcon from "../assets/cat.png";
|
import catIcon from "../assets/cat.png";
|
||||||
|
|
||||||
type Message = {
|
type Message = {
|
||||||
text: string;
|
text: string;
|
||||||
speaker: "simba" | "user";
|
speaker: "simba" | "user" | "tool";
|
||||||
};
|
image_key?: string | null;
|
||||||
|
|
||||||
type QuestionAnswer = {
|
|
||||||
question: string;
|
|
||||||
answer: string;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
type Conversation = {
|
type Conversation = {
|
||||||
@@ -25,22 +26,37 @@ type ChatScreenProps = {
|
|||||||
setAuthenticated: (isAuth: boolean) => void;
|
setAuthenticated: (isAuth: boolean) => void;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const TOOL_MESSAGES: Record<string, string> = {
|
||||||
|
simba_search: "🔍 Searching Simba's records...",
|
||||||
|
web_search: "🌐 Searching the web...",
|
||||||
|
get_current_date: "📅 Checking today's date...",
|
||||||
|
ynab_budget_summary: "💰 Checking budget summary...",
|
||||||
|
ynab_search_transactions: "💳 Looking up transactions...",
|
||||||
|
ynab_category_spending: "📊 Analyzing category spending...",
|
||||||
|
ynab_insights: "📈 Generating budget insights...",
|
||||||
|
obsidian_search_notes: "📝 Searching notes...",
|
||||||
|
obsidian_read_note: "📖 Reading note...",
|
||||||
|
obsidian_create_note: "✏️ Saving note...",
|
||||||
|
obsidian_create_task: "✅ Creating task...",
|
||||||
|
journal_get_today: "📔 Reading today's journal...",
|
||||||
|
journal_get_tasks: "📋 Getting tasks...",
|
||||||
|
journal_add_task: "➕ Adding task...",
|
||||||
|
journal_complete_task: "✔️ Completing task...",
|
||||||
|
};
|
||||||
|
|
||||||
export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
|
export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
|
||||||
const [query, setQuery] = useState<string>("");
|
const [query, setQuery] = useState<string>("");
|
||||||
const [answer, setAnswer] = useState<string>("");
|
|
||||||
const [simbaMode, setSimbaMode] = useState<boolean>(false);
|
const [simbaMode, setSimbaMode] = useState<boolean>(false);
|
||||||
const [questionsAnswers, setQuestionsAnswers] = useState<QuestionAnswer[]>(
|
|
||||||
[],
|
|
||||||
);
|
|
||||||
const [messages, setMessages] = useState<Message[]>([]);
|
const [messages, setMessages] = useState<Message[]>([]);
|
||||||
const [conversations, setConversations] = useState<Conversation[]>([
|
const [conversations, setConversations] = useState<Conversation[]>([]);
|
||||||
{ title: "simba meow meow", id: "uuid" },
|
|
||||||
]);
|
|
||||||
const [showConversations, setShowConversations] = useState<boolean>(false);
|
const [showConversations, setShowConversations] = useState<boolean>(false);
|
||||||
const [selectedConversation, setSelectedConversation] =
|
const [selectedConversation, setSelectedConversation] =
|
||||||
useState<Conversation | null>(null);
|
useState<Conversation | null>(null);
|
||||||
const [sidebarCollapsed, setSidebarCollapsed] = useState<boolean>(false);
|
const [sidebarCollapsed, setSidebarCollapsed] = useState<boolean>(false);
|
||||||
const [isLoading, setIsLoading] = useState<boolean>(false);
|
const [isLoading, setIsLoading] = useState<boolean>(false);
|
||||||
|
const [isAdmin, setIsAdmin] = useState<boolean>(false);
|
||||||
|
const [showAdminPanel, setShowAdminPanel] = useState<boolean>(false);
|
||||||
|
const [pendingImage, setPendingImage] = useState<File | null>(null);
|
||||||
|
|
||||||
const messagesEndRef = useRef<HTMLDivElement>(null);
|
const messagesEndRef = useRef<HTMLDivElement>(null);
|
||||||
const isMountedRef = useRef<boolean>(true);
|
const isMountedRef = useRef<boolean>(true);
|
||||||
@@ -51,67 +67,49 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
|
|||||||
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
|
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
|
||||||
};
|
};
|
||||||
|
|
||||||
// Cleanup effect to handle component unmounting
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
isMountedRef.current = true;
|
isMountedRef.current = true;
|
||||||
return () => {
|
return () => {
|
||||||
isMountedRef.current = false;
|
isMountedRef.current = false;
|
||||||
// Abort any pending requests when component unmounts
|
abortControllerRef.current?.abort();
|
||||||
if (abortControllerRef.current) {
|
|
||||||
abortControllerRef.current.abort();
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const handleSelectConversation = (conversation: Conversation) => {
|
const handleSelectConversation = (conversation: Conversation) => {
|
||||||
setShowConversations(false);
|
setShowConversations(false);
|
||||||
setSelectedConversation(conversation);
|
setSelectedConversation(conversation);
|
||||||
const loadMessages = async () => {
|
const load = async () => {
|
||||||
try {
|
try {
|
||||||
const fetchedConversation = await conversationService.getConversation(
|
const fetched = await conversationService.getConversation(conversation.id);
|
||||||
conversation.id,
|
|
||||||
);
|
|
||||||
setMessages(
|
setMessages(
|
||||||
fetchedConversation.messages.map((message) => ({
|
fetched.messages.map((m) => ({ text: m.text, speaker: m.speaker, image_key: m.image_key })),
|
||||||
text: message.text,
|
|
||||||
speaker: message.speaker,
|
|
||||||
})),
|
|
||||||
);
|
);
|
||||||
} catch (error) {
|
} catch (err) {
|
||||||
console.error("Failed to load messages:", error);
|
console.error("Failed to load messages:", err);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
loadMessages();
|
load();
|
||||||
};
|
};
|
||||||
|
|
||||||
const loadConversations = async () => {
|
const loadConversations = async () => {
|
||||||
try {
|
try {
|
||||||
const fetchedConversations =
|
const fetched = await conversationService.getAllConversations();
|
||||||
await conversationService.getAllConversations();
|
const parsed = fetched.map((c) => ({ id: c.id, title: c.name }));
|
||||||
const parsedConversations = fetchedConversations.map((conversation) => ({
|
setConversations(parsed);
|
||||||
id: conversation.id,
|
} catch (err) {
|
||||||
title: conversation.name,
|
console.error("Failed to load conversations:", err);
|
||||||
}));
|
|
||||||
setConversations(parsedConversations);
|
|
||||||
setSelectedConversation(parsedConversations[0]);
|
|
||||||
console.log(parsedConversations);
|
|
||||||
console.log("JELLYFISH@");
|
|
||||||
} catch (error) {
|
|
||||||
console.error("Failed to load messages:", error);
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleCreateNewConversation = async () => {
|
const handleCreateNewConversation = async () => {
|
||||||
const newConversation = await conversationService.createConversation();
|
const newConv = await conversationService.createConversation();
|
||||||
await loadConversations();
|
await loadConversations();
|
||||||
setSelectedConversation({
|
setSelectedConversation({ title: newConv.name, id: newConv.id });
|
||||||
title: newConversation.name,
|
|
||||||
id: newConversation.id,
|
|
||||||
});
|
|
||||||
};
|
};
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
loadConversations();
|
loadConversations();
|
||||||
|
userService.getMe().then((me) => setIsAdmin(me.is_admin)).catch(() => {});
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -119,90 +117,101 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
|
|||||||
}, [messages]);
|
}, [messages]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const loadMessages = async () => {
|
const load = async () => {
|
||||||
console.log(selectedConversation);
|
if (!selectedConversation) return;
|
||||||
console.log("JELLYFISH");
|
|
||||||
if (selectedConversation == null) return;
|
|
||||||
try {
|
try {
|
||||||
const conversation = await conversationService.getConversation(
|
const conv = await conversationService.getConversation(selectedConversation.id);
|
||||||
selectedConversation.id,
|
setSelectedConversation({ id: conv.id, title: conv.name });
|
||||||
);
|
setMessages(conv.messages.map((m) => ({ text: m.text, speaker: m.speaker, image_key: m.image_key })));
|
||||||
// Update the conversation title in case it changed
|
} catch (err) {
|
||||||
setSelectedConversation({
|
console.error("Failed to load messages:", err);
|
||||||
id: conversation.id,
|
|
||||||
title: conversation.name,
|
|
||||||
});
|
|
||||||
setMessages(
|
|
||||||
conversation.messages.map((message) => ({
|
|
||||||
text: message.text,
|
|
||||||
speaker: message.speaker,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
} catch (error) {
|
|
||||||
console.error("Failed to load messages:", error);
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
loadMessages();
|
load();
|
||||||
}, [selectedConversation?.id]);
|
}, [selectedConversation?.id]);
|
||||||
|
|
||||||
const handleQuestionSubmit = async () => {
|
const handleQuestionSubmit = async () => {
|
||||||
if (!query.trim() || isLoading) return; // Don't submit empty messages or while loading
|
if ((!query.trim() && !pendingImage) || isLoading) return;
|
||||||
|
|
||||||
|
let activeConversation = selectedConversation;
|
||||||
|
if (!activeConversation) {
|
||||||
|
const newConv = await conversationService.createConversation();
|
||||||
|
activeConversation = { title: newConv.name, id: newConv.id };
|
||||||
|
setSelectedConversation(activeConversation);
|
||||||
|
setConversations((prev) => [activeConversation!, ...prev]);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Capture pending image before clearing state
|
||||||
|
const imageFile = pendingImage;
|
||||||
|
|
||||||
const currMessages = messages.concat([{ text: query, speaker: "user" }]);
|
const currMessages = messages.concat([{ text: query, speaker: "user" }]);
|
||||||
setMessages(currMessages);
|
setMessages(currMessages);
|
||||||
setQuery(""); // Clear input immediately after submission
|
setQuery("");
|
||||||
|
setPendingImage(null);
|
||||||
setIsLoading(true);
|
setIsLoading(true);
|
||||||
|
|
||||||
if (simbaMode) {
|
if (simbaMode) {
|
||||||
console.log("simba mode activated");
|
const randomElement = simbaAnswers[Math.floor(Math.random() * simbaAnswers.length)];
|
||||||
const randomIndex = Math.floor(Math.random() * simbaAnswers.length);
|
setMessages((prev) => prev.concat([{ text: randomElement, speaker: "simba" }]));
|
||||||
const randomElement = simbaAnswers[randomIndex];
|
|
||||||
setAnswer(randomElement);
|
|
||||||
setQuestionsAnswers(
|
|
||||||
questionsAnswers.concat([
|
|
||||||
{
|
|
||||||
question: query,
|
|
||||||
answer: randomElement,
|
|
||||||
},
|
|
||||||
]),
|
|
||||||
);
|
|
||||||
setIsLoading(false);
|
setIsLoading(false);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create a new AbortController for this request
|
|
||||||
const abortController = new AbortController();
|
const abortController = new AbortController();
|
||||||
abortControllerRef.current = abortController;
|
abortControllerRef.current = abortController;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const result = await conversationService.sendQuery(
|
// Upload image first if present
|
||||||
|
let imageKey: string | undefined;
|
||||||
|
if (imageFile) {
|
||||||
|
const uploadResult = await conversationService.uploadImage(
|
||||||
|
imageFile,
|
||||||
|
activeConversation.id,
|
||||||
|
);
|
||||||
|
imageKey = uploadResult.image_key;
|
||||||
|
|
||||||
|
// Update the user message with the image key
|
||||||
|
setMessages((prev) => {
|
||||||
|
const updated = [...prev];
|
||||||
|
// Find the last user message we just added
|
||||||
|
for (let i = updated.length - 1; i >= 0; i--) {
|
||||||
|
if (updated[i].speaker === "user") {
|
||||||
|
updated[i] = { ...updated[i], image_key: imageKey };
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return updated;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
await conversationService.streamQuery(
|
||||||
query,
|
query,
|
||||||
selectedConversation.id,
|
activeConversation.id,
|
||||||
|
(event) => {
|
||||||
|
if (!isMountedRef.current) return;
|
||||||
|
if (event.type === "tool_start") {
|
||||||
|
const friendly = TOOL_MESSAGES[event.tool] ?? `🔧 Using ${event.tool}...`;
|
||||||
|
setMessages((prev) => prev.concat([{ text: friendly, speaker: "tool" }]));
|
||||||
|
} else if (event.type === "response") {
|
||||||
|
setMessages((prev) => prev.concat([{ text: event.message, speaker: "simba" }]));
|
||||||
|
} else if (event.type === "error") {
|
||||||
|
console.error("Stream error:", event.message);
|
||||||
|
}
|
||||||
|
},
|
||||||
abortController.signal,
|
abortController.signal,
|
||||||
);
|
imageKey,
|
||||||
setQuestionsAnswers(
|
|
||||||
questionsAnswers.concat([{ question: query, answer: result.response }]),
|
|
||||||
);
|
|
||||||
setMessages(
|
|
||||||
currMessages.concat([{ text: result.response, speaker: "simba" }]),
|
|
||||||
);
|
);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Ignore abort errors (these are intentional cancellations)
|
|
||||||
if (error instanceof Error && error.name === "AbortError") {
|
if (error instanceof Error && error.name === "AbortError") {
|
||||||
console.log("Request was aborted");
|
console.log("Request was aborted");
|
||||||
} else {
|
} else {
|
||||||
console.error("Failed to send query:", error);
|
console.error("Failed to send query:", error);
|
||||||
// If session expired, redirect to login
|
|
||||||
if (error instanceof Error && error.message.includes("Session expired")) {
|
if (error instanceof Error && error.message.includes("Session expired")) {
|
||||||
setAuthenticated(false);
|
setAuthenticated(false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
// Only update loading state if component is still mounted
|
if (isMountedRef.current) setIsLoading(false);
|
||||||
if (isMountedRef.current) {
|
|
||||||
setIsLoading(false);
|
|
||||||
}
|
|
||||||
// Clear the abort controller reference
|
|
||||||
abortControllerRef.current = null;
|
abortControllerRef.current = null;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -211,128 +220,216 @@ export const ChatScreen = ({ setAuthenticated }: ChatScreenProps) => {
|
|||||||
setQuery(event.target.value);
|
setQuery(event.target.value);
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleKeyDown = (event: React.KeyboardEvent<HTMLTextAreaElement>) => {
|
const handleKeyDown = (event: React.ChangeEvent<HTMLTextAreaElement>) => {
|
||||||
// Submit on Enter, but allow Shift+Enter for new line
|
const kev = event as unknown as React.KeyboardEvent<HTMLTextAreaElement>;
|
||||||
if (event.key === "Enter" && !event.shiftKey) {
|
if (kev.key === "Enter" && !kev.shiftKey) {
|
||||||
event.preventDefault();
|
kev.preventDefault();
|
||||||
handleQuestionSubmit();
|
handleQuestionSubmit();
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const handleLogout = () => {
|
||||||
|
localStorage.removeItem("access_token");
|
||||||
|
localStorage.removeItem("refresh_token");
|
||||||
|
setAuthenticated(false);
|
||||||
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="h-screen flex flex-row bg-[#F9F5EB]">
|
<div className="h-screen h-[100dvh] flex flex-row bg-cream overflow-hidden">
|
||||||
{/* Sidebar - Expanded */}
|
{/* ── Desktop Sidebar ─────────────────────────────── */}
|
||||||
<aside
|
<aside
|
||||||
className={`hidden md:flex md:flex-col bg-[#F9F5EB] border-r border-gray-200 p-4 overflow-y-auto transition-all duration-300 ${sidebarCollapsed ? "w-20" : "w-64"}`}
|
className={cn(
|
||||||
|
"hidden md:flex md:flex-col",
|
||||||
|
"bg-sidebar-bg transition-all duration-300 ease-in-out",
|
||||||
|
sidebarCollapsed ? "w-[56px]" : "w-64",
|
||||||
|
)}
|
||||||
>
|
>
|
||||||
{!sidebarCollapsed ? (
|
{sidebarCollapsed ? (
|
||||||
<div className="bg-[#F9F5EB]">
|
/* Collapsed state */
|
||||||
<div className="flex flex-row items-center gap-2 mb-6">
|
<div className="flex flex-col items-center py-4 gap-4 h-full">
|
||||||
<img
|
<button
|
||||||
src={catIcon}
|
onClick={() => setSidebarCollapsed(false)}
|
||||||
alt="Simba"
|
className="w-9 h-9 rounded-xl flex items-center justify-center text-cream/50 hover:text-cream hover:bg-white/10 transition-all cursor-pointer"
|
||||||
className="cursor-pointer hover:opacity-80"
|
>
|
||||||
onClick={() => setSidebarCollapsed(true)}
|
<PanelLeftOpen size={18} />
|
||||||
/>
|
</button>
|
||||||
<h2 className="text-3xl bg-[#F9F5EB] font-semibold">asksimba!</h2>
|
|
||||||
</div>
|
|
||||||
<ConversationList
|
|
||||||
conversations={conversations}
|
|
||||||
onCreateNewConversation={handleCreateNewConversation}
|
|
||||||
onSelectConversation={handleSelectConversation}
|
|
||||||
/>
|
|
||||||
<div className="mt-auto pt-4">
|
|
||||||
<button
|
|
||||||
className="w-full p-2 border border-red-400 bg-red-200 hover:bg-red-400 cursor-pointer rounded-md text-sm"
|
|
||||||
onClick={() => setAuthenticated(false)}
|
|
||||||
>
|
|
||||||
logout
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
) : (
|
|
||||||
<div className="flex flex-col items-center gap-4">
|
|
||||||
<img
|
<img
|
||||||
src={catIcon}
|
src={catIcon}
|
||||||
alt="Simba"
|
alt="Simba"
|
||||||
className="cursor-pointer hover:opacity-80"
|
className="w-12 h-12 opacity-70 mt-1"
|
||||||
onClick={() => setSidebarCollapsed(false)}
|
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
) : (
|
||||||
|
/* Expanded state */
|
||||||
|
<div className="flex flex-col h-full">
|
||||||
|
{/* Header */}
|
||||||
|
<div className="flex items-center justify-between px-4 py-4 border-b border-white/8">
|
||||||
|
<div className="flex items-center gap-2.5">
|
||||||
|
<img src={catIcon} alt="Simba" className="w-12 h-12" />
|
||||||
|
<h2
|
||||||
|
className="text-lg font-bold text-cream tracking-tight"
|
||||||
|
style={{ fontFamily: "var(--font-display)" }}
|
||||||
|
>
|
||||||
|
asksimba
|
||||||
|
</h2>
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
onClick={() => setSidebarCollapsed(true)}
|
||||||
|
className="w-7 h-7 rounded-lg flex items-center justify-center text-cream/40 hover:text-cream hover:bg-white/10 transition-all cursor-pointer"
|
||||||
|
>
|
||||||
|
<PanelLeftClose size={15} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Conversations */}
|
||||||
|
<div className="flex-1 overflow-y-auto px-2 py-3">
|
||||||
|
<ConversationList
|
||||||
|
conversations={conversations}
|
||||||
|
onCreateNewConversation={handleCreateNewConversation}
|
||||||
|
onSelectConversation={handleSelectConversation}
|
||||||
|
selectedId={selectedConversation?.id}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Footer */}
|
||||||
|
<div className="px-2 pb-3 pt-2 border-t border-white/8 flex flex-col gap-0.5">
|
||||||
|
{isAdmin && (
|
||||||
|
<button
|
||||||
|
onClick={() => setShowAdminPanel(true)}
|
||||||
|
className="flex items-center gap-2 w-full px-3 py-2 rounded-xl text-sm text-cream/50 hover:text-cream hover:bg-white/8 transition-all cursor-pointer"
|
||||||
|
>
|
||||||
|
<Shield size={14} />
|
||||||
|
<span>Admin</span>
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
<button
|
||||||
|
onClick={handleLogout}
|
||||||
|
className="flex items-center gap-2 w-full px-3 py-2 rounded-xl text-sm text-cream/50 hover:text-cream hover:bg-white/8 transition-all cursor-pointer"
|
||||||
|
>
|
||||||
|
<LogOut size={14} />
|
||||||
|
<span>Sign out</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
)}
|
)}
|
||||||
</aside>
|
</aside>
|
||||||
|
|
||||||
{/* Main chat area */}
|
{/* Admin Panel modal */}
|
||||||
<div className="flex-1 flex flex-col h-screen overflow-hidden">
|
{showAdminPanel && <AdminPanel onClose={() => setShowAdminPanel(false)} />}
|
||||||
|
|
||||||
|
{/* ── Main chat area ──────────────────────────────── */}
|
||||||
|
<div className="flex-1 flex flex-col h-full overflow-hidden min-w-0">
|
||||||
{/* Mobile header */}
|
{/* Mobile header */}
|
||||||
<header className="md:hidden flex flex-row justify-between items-center gap-3 p-4 border-b border-gray-200 bg-white">
|
<header className="md:hidden flex items-center justify-between px-4 py-3 bg-warm-white border-b border-sand-light/60">
|
||||||
<div className="flex flex-row items-center gap-2">
|
<div className="flex items-center gap-2">
|
||||||
<img src={catIcon} alt="Simba" className="w-10 h-10" />
|
<img src={catIcon} alt="Simba" className="w-12 h-12" />
|
||||||
<h1 className="text-xl">asksimba!</h1>
|
<h1
|
||||||
</div>
|
className="text-base font-bold text-charcoal"
|
||||||
<div className="flex flex-row gap-2">
|
style={{ fontFamily: "var(--font-display)" }}
|
||||||
<button
|
|
||||||
className="p-2 border border-green-400 bg-green-200 hover:bg-green-400 cursor-pointer rounded-md text-sm"
|
|
||||||
onClick={() => setShowConversations(!showConversations)}
|
|
||||||
>
|
>
|
||||||
{showConversations ? "hide" : "show"}
|
asksimba
|
||||||
|
</h1>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<button
|
||||||
|
className="w-8 h-8 rounded-xl flex items-center justify-center text-warm-gray hover:text-charcoal hover:bg-cream-dark transition-all cursor-pointer"
|
||||||
|
onClick={() => setShowConversations((v) => !v)}
|
||||||
|
>
|
||||||
|
{showConversations ? <X size={16} /> : <Menu size={16} />}
|
||||||
</button>
|
</button>
|
||||||
<button
|
<button
|
||||||
className="p-2 border border-red-400 bg-red-200 hover:bg-red-400 cursor-pointer rounded-md text-sm"
|
className="w-8 h-8 rounded-xl flex items-center justify-center text-warm-gray hover:text-charcoal hover:bg-cream-dark transition-all cursor-pointer"
|
||||||
onClick={() => setAuthenticated(false)}
|
onClick={handleLogout}
|
||||||
>
|
>
|
||||||
logout
|
<LogOut size={15} />
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</header>
|
</header>
|
||||||
|
|
||||||
{/* Messages area */}
|
{messages.length === 0 ? (
|
||||||
{selectedConversation && (
|
/* ── Empty / homepage state ── */
|
||||||
<div className="sticky top-0 mx-auto w-full">
|
<div className="flex-1 flex flex-col items-center justify-center px-4 gap-6">
|
||||||
<div className="bg-[#F9F5EB] text-black px-6 w-full py-3">
|
{/* Mobile conversation drawer */}
|
||||||
<h2 className="text-lg font-semibold">
|
|
||||||
{selectedConversation.title || "Untitled Conversation"}
|
|
||||||
</h2>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
<div className="flex-1 overflow-y-auto relative px-4 py-6">
|
|
||||||
{/* Floating conversation name */}
|
|
||||||
|
|
||||||
<div className="max-w-2xl mx-auto flex flex-col gap-4">
|
|
||||||
{showConversations && (
|
{showConversations && (
|
||||||
<div className="md:hidden">
|
<div className="md:hidden w-full max-w-2xl bg-warm-white rounded-2xl border border-sand-light p-3 shadow-sm">
|
||||||
<ConversationList
|
<ConversationList
|
||||||
conversations={conversations}
|
conversations={conversations}
|
||||||
onCreateNewConversation={handleCreateNewConversation}
|
onCreateNewConversation={handleCreateNewConversation}
|
||||||
onSelectConversation={handleSelectConversation}
|
onSelectConversation={handleSelectConversation}
|
||||||
|
selectedId={selectedConversation?.id}
|
||||||
|
variant="light"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
{messages.map((msg, index) => {
|
<div className="relative">
|
||||||
if (msg.speaker === "simba") {
|
<div className="absolute -inset-6 bg-amber-soft/20 rounded-full blur-3xl" />
|
||||||
return <AnswerBubble key={index} text={msg.text} />;
|
<img src={catIcon} alt="Simba" className="relative w-36 h-36" />
|
||||||
}
|
</div>
|
||||||
return <QuestionBubble key={index} text={msg.text} />;
|
<h1
|
||||||
})}
|
className="text-2xl font-bold text-charcoal"
|
||||||
{isLoading && <AnswerBubble text="" loading={true} />}
|
style={{ fontFamily: "var(--font-display)" }}
|
||||||
<div ref={messagesEndRef} />
|
>
|
||||||
|
Ask me anything
|
||||||
|
</h1>
|
||||||
|
<div className="w-full max-w-2xl">
|
||||||
|
<MessageInput
|
||||||
|
query={query}
|
||||||
|
handleQueryChange={handleQueryChange}
|
||||||
|
handleKeyDown={handleKeyDown}
|
||||||
|
handleQuestionSubmit={handleQuestionSubmit}
|
||||||
|
setSimbaMode={setSimbaMode}
|
||||||
|
isLoading={isLoading}
|
||||||
|
pendingImage={pendingImage}
|
||||||
|
onImageSelect={(file) => setPendingImage(file)}
|
||||||
|
onClearImage={() => setPendingImage(null)}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
) : (
|
||||||
|
/* ── Active chat state ── */
|
||||||
|
<>
|
||||||
|
<div className="flex-1 overflow-y-auto px-4 py-6">
|
||||||
|
<div className="max-w-2xl mx-auto flex flex-col gap-3">
|
||||||
|
{/* Mobile conversation drawer */}
|
||||||
|
{showConversations && (
|
||||||
|
<div className="md:hidden mb-3 bg-warm-white rounded-2xl border border-sand-light p-3 shadow-sm">
|
||||||
|
<ConversationList
|
||||||
|
conversations={conversations}
|
||||||
|
onCreateNewConversation={handleCreateNewConversation}
|
||||||
|
onSelectConversation={handleSelectConversation}
|
||||||
|
selectedId={selectedConversation?.id}
|
||||||
|
variant="light"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
{/* Input area */}
|
{messages.map((msg, index) => {
|
||||||
<footer className="p-4 bg-[#F9F5EB]">
|
if (msg.speaker === "tool")
|
||||||
<div className="max-w-2xl mx-auto">
|
return <ToolBubble key={index} text={msg.text} />;
|
||||||
<MessageInput
|
if (msg.speaker === "simba")
|
||||||
query={query}
|
return <AnswerBubble key={index} text={msg.text} />;
|
||||||
handleQueryChange={handleQueryChange}
|
return <QuestionBubble key={index} text={msg.text} image_key={msg.image_key} />;
|
||||||
handleKeyDown={handleKeyDown}
|
})}
|
||||||
handleQuestionSubmit={handleQuestionSubmit}
|
|
||||||
setSimbaMode={setSimbaMode}
|
{isLoading && <AnswerBubble text="" loading={true} />}
|
||||||
isLoading={isLoading}
|
<div ref={messagesEndRef} />
|
||||||
/>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</footer>
|
|
||||||
|
<footer className="border-t border-sand-light/40 bg-cream/80 backdrop-blur-sm">
|
||||||
|
<div className="max-w-2xl mx-auto px-4 py-3">
|
||||||
|
<MessageInput
|
||||||
|
query={query}
|
||||||
|
handleQueryChange={handleQueryChange}
|
||||||
|
handleKeyDown={handleKeyDown}
|
||||||
|
handleQuestionSubmit={handleQuestionSubmit}
|
||||||
|
setSimbaMode={setSimbaMode}
|
||||||
|
isLoading={isLoading}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</footer>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
import { useState, useEffect } from "react";
|
import { useState, useEffect } from "react";
|
||||||
|
import { Plus } from "lucide-react";
|
||||||
|
import { cn } from "../lib/utils";
|
||||||
import { conversationService } from "../api/conversationService";
|
import { conversationService } from "../api/conversationService";
|
||||||
|
|
||||||
type Conversation = {
|
type Conversation = {
|
||||||
title: string;
|
title: string;
|
||||||
id: string;
|
id: string;
|
||||||
@@ -10,60 +12,80 @@ type ConversationProps = {
|
|||||||
conversations: Conversation[];
|
conversations: Conversation[];
|
||||||
onSelectConversation: (conversation: Conversation) => void;
|
onSelectConversation: (conversation: Conversation) => void;
|
||||||
onCreateNewConversation: () => void;
|
onCreateNewConversation: () => void;
|
||||||
|
selectedId?: string;
|
||||||
|
variant?: "dark" | "light";
|
||||||
};
|
};
|
||||||
|
|
||||||
export const ConversationList = ({
|
export const ConversationList = ({
|
||||||
conversations,
|
conversations,
|
||||||
onSelectConversation,
|
onSelectConversation,
|
||||||
onCreateNewConversation,
|
onCreateNewConversation,
|
||||||
|
selectedId,
|
||||||
|
variant = "dark",
|
||||||
}: ConversationProps) => {
|
}: ConversationProps) => {
|
||||||
const [conservations, setConversations] = useState(conversations);
|
const [items, setItems] = useState(conversations);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const loadConversations = async () => {
|
const load = async () => {
|
||||||
try {
|
try {
|
||||||
let fetchedConversations =
|
let fetched = await conversationService.getAllConversations();
|
||||||
await conversationService.getAllConversations();
|
if (fetched.length === 0) {
|
||||||
|
|
||||||
if (conversations.length == 0) {
|
|
||||||
await conversationService.createConversation();
|
await conversationService.createConversation();
|
||||||
fetchedConversations =
|
fetched = await conversationService.getAllConversations();
|
||||||
await conversationService.getAllConversations();
|
|
||||||
}
|
}
|
||||||
setConversations(
|
setItems(fetched.map((c) => ({ id: c.id, title: c.name })));
|
||||||
fetchedConversations.map((conversation) => ({
|
} catch (err) {
|
||||||
id: conversation.id,
|
console.error("Failed to load conversations:", err);
|
||||||
title: conversation.name,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
} catch (error) {
|
|
||||||
console.error("Failed to load messages:", error);
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
loadConversations();
|
load();
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
|
// Keep in sync when parent updates conversations
|
||||||
|
useEffect(() => {
|
||||||
|
setItems(conversations);
|
||||||
|
}, [conversations]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="bg-stone-200 rounded-md p-3 sm:p-4 flex flex-col gap-1">
|
<div className="flex flex-col gap-1">
|
||||||
{conservations.map((conversation) => {
|
{/* New thread button */}
|
||||||
|
<button
|
||||||
|
onClick={onCreateNewConversation}
|
||||||
|
className={cn(
|
||||||
|
"flex items-center gap-2 w-full px-3 py-2 rounded-xl",
|
||||||
|
"text-sm transition-all duration-150 cursor-pointer mb-1",
|
||||||
|
variant === "dark"
|
||||||
|
? "text-cream/60 hover:text-cream hover:bg-white/8"
|
||||||
|
: "text-warm-gray hover:text-charcoal hover:bg-cream-dark",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
<Plus size={14} strokeWidth={2.5} />
|
||||||
|
<span>New thread</span>
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{/* Conversation items */}
|
||||||
|
{items.map((conv) => {
|
||||||
|
const isActive = conv.id === selectedId;
|
||||||
return (
|
return (
|
||||||
<div
|
<button
|
||||||
key={conversation.id}
|
key={conv.id}
|
||||||
className="bg-stone-200 hover:bg-stone-300 cursor-pointer rounded-md p-3 min-h-[44px] flex items-center"
|
onClick={() => onSelectConversation(conv)}
|
||||||
onClick={() => onSelectConversation(conversation)}
|
className={cn(
|
||||||
|
"w-full px-3 py-2 rounded-xl text-left",
|
||||||
|
"text-sm truncate transition-all duration-150 cursor-pointer",
|
||||||
|
variant === "dark"
|
||||||
|
? isActive
|
||||||
|
? "bg-white/12 text-cream font-medium"
|
||||||
|
: "text-cream/60 hover:text-cream hover:bg-white/8"
|
||||||
|
: isActive
|
||||||
|
? "bg-cream-dark text-charcoal font-medium"
|
||||||
|
: "text-warm-gray hover:text-charcoal hover:bg-cream-dark",
|
||||||
|
)}
|
||||||
>
|
>
|
||||||
<p className="text-sm sm:text-base truncate w-full">
|
{conv.title}
|
||||||
{conversation.title}
|
</button>
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
);
|
);
|
||||||
})}
|
})}
|
||||||
<div
|
|
||||||
className="bg-stone-200 hover:bg-stone-300 cursor-pointer rounded-md p-3 min-h-[44px] flex items-center"
|
|
||||||
onClick={() => onCreateNewConversation()}
|
|
||||||
>
|
|
||||||
<p className="text-sm sm:text-base"> + Start a new thread</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
import { useState, useEffect } from "react";
|
import { useState, useEffect } from "react";
|
||||||
import { userService } from "../api/userService";
|
import { userService } from "../api/userService";
|
||||||
import { oidcService } from "../api/oidcService";
|
import { oidcService } from "../api/oidcService";
|
||||||
|
import catIcon from "../assets/cat.png";
|
||||||
|
import { cn } from "../lib/utils";
|
||||||
|
|
||||||
type LoginScreenProps = {
|
type LoginScreenProps = {
|
||||||
setAuthenticated: (isAuth: boolean) => void;
|
setAuthenticated: (isAuth: boolean) => void;
|
||||||
@@ -13,25 +15,17 @@ export const LoginScreen = ({ setAuthenticated }: LoginScreenProps) => {
|
|||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const initAuth = async () => {
|
const initAuth = async () => {
|
||||||
// First, check for OIDC callback parameters
|
|
||||||
const callbackParams = oidcService.getCallbackParamsFromURL();
|
const callbackParams = oidcService.getCallbackParamsFromURL();
|
||||||
|
|
||||||
if (callbackParams) {
|
if (callbackParams) {
|
||||||
// Handle OIDC callback
|
|
||||||
try {
|
try {
|
||||||
setIsLoggingIn(true);
|
setIsLoggingIn(true);
|
||||||
const result = await oidcService.handleCallback(
|
const result = await oidcService.handleCallback(
|
||||||
callbackParams.code,
|
callbackParams.code,
|
||||||
callbackParams.state
|
callbackParams.state,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Store tokens
|
|
||||||
localStorage.setItem("access_token", result.access_token);
|
localStorage.setItem("access_token", result.access_token);
|
||||||
localStorage.setItem("refresh_token", result.refresh_token);
|
localStorage.setItem("refresh_token", result.refresh_token);
|
||||||
|
|
||||||
// Clear URL parameters
|
|
||||||
oidcService.clearCallbackParams();
|
oidcService.clearCallbackParams();
|
||||||
|
|
||||||
setAuthenticated(true);
|
setAuthenticated(true);
|
||||||
setIsChecking(false);
|
setIsChecking(false);
|
||||||
return;
|
return;
|
||||||
@@ -44,15 +38,10 @@ export const LoginScreen = ({ setAuthenticated }: LoginScreenProps) => {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if user is already authenticated
|
|
||||||
const isValid = await userService.validateToken();
|
const isValid = await userService.validateToken();
|
||||||
if (isValid) {
|
if (isValid) setAuthenticated(true);
|
||||||
setAuthenticated(true);
|
|
||||||
}
|
|
||||||
setIsChecking(false);
|
setIsChecking(false);
|
||||||
};
|
};
|
||||||
|
|
||||||
initAuth();
|
initAuth();
|
||||||
}, [setAuthenticated]);
|
}, [setAuthenticated]);
|
||||||
|
|
||||||
@@ -60,70 +49,113 @@ export const LoginScreen = ({ setAuthenticated }: LoginScreenProps) => {
|
|||||||
try {
|
try {
|
||||||
setIsLoggingIn(true);
|
setIsLoggingIn(true);
|
||||||
setError("");
|
setError("");
|
||||||
|
|
||||||
// Get authorization URL from backend
|
|
||||||
const authUrl = await oidcService.initiateLogin();
|
const authUrl = await oidcService.initiateLogin();
|
||||||
|
|
||||||
// Redirect to Authelia
|
|
||||||
window.location.href = authUrl;
|
window.location.href = authUrl;
|
||||||
} catch (err) {
|
} catch {
|
||||||
setError("Failed to initiate login. Please try again.");
|
setError("Failed to initiate login. Please try again.");
|
||||||
console.error("OIDC login error:", err);
|
|
||||||
setIsLoggingIn(false);
|
setIsLoggingIn(false);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Show loading state while checking authentication or processing callback
|
|
||||||
if (isChecking || isLoggingIn) {
|
if (isChecking || isLoggingIn) {
|
||||||
return (
|
return (
|
||||||
<div className="h-screen bg-opacity-20">
|
<div className="h-screen flex flex-col items-center justify-center bg-cream gap-4">
|
||||||
<div className="bg-white/85 h-screen flex items-center justify-center">
|
{/* Subtle dot grid */}
|
||||||
<div className="text-center">
|
<div
|
||||||
<p className="text-lg sm:text-xl">
|
className="fixed inset-0 pointer-events-none opacity-[0.035]"
|
||||||
{isLoggingIn ? "Logging in..." : "Checking authentication..."}
|
style={{
|
||||||
</p>
|
backgroundImage: `radial-gradient(circle, var(--color-charcoal) 1px, transparent 0)`,
|
||||||
</div>
|
backgroundSize: "22px 22px",
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
<div className="relative">
|
||||||
|
<div className="absolute -inset-4 bg-amber-soft/30 rounded-full blur-2xl" />
|
||||||
|
<img
|
||||||
|
src={catIcon}
|
||||||
|
alt="Simba"
|
||||||
|
className="relative w-14 h-14 animate-bounce drop-shadow"
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
|
<p className="text-warm-gray text-sm tracking-wide font-medium">
|
||||||
|
{isLoggingIn ? "letting you in..." : "checking credentials..."}
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="h-screen bg-opacity-20">
|
<div className="h-screen bg-cream flex items-center justify-center p-4 relative overflow-hidden">
|
||||||
<div className="bg-white/85 h-screen">
|
{/* Background dot texture */}
|
||||||
<div className="flex flex-row justify-center py-4">
|
<div
|
||||||
<div className="flex flex-col gap-4 w-full px-4 sm:w-11/12 sm:max-w-2xl lg:max-w-4xl sm:px-0">
|
className="fixed inset-0 pointer-events-none opacity-[0.04]"
|
||||||
<div className="flex flex-col gap-4">
|
style={{
|
||||||
<div className="flex flex-grow justify-center w-full bg-amber-400 p-2">
|
backgroundImage: `radial-gradient(circle, var(--color-charcoal) 1px, transparent 0)`,
|
||||||
<h1 className="text-base sm:text-xl font-bold text-center">
|
backgroundSize: "22px 22px",
|
||||||
I AM LOOKING FOR A DESIGNER. THIS APP WILL REMAIN UGLY UNTIL A
|
}}
|
||||||
DESIGNER COMES.
|
/>
|
||||||
</h1>
|
|
||||||
</div>
|
|
||||||
<header className="flex flex-row justify-center gap-2 grow sticky top-0 z-10 bg-white">
|
|
||||||
<h1 className="text-2xl sm:text-3xl">ask simba!</h1>
|
|
||||||
</header>
|
|
||||||
|
|
||||||
{error && (
|
{/* Decorative background blobs */}
|
||||||
<div className="text-red-600 font-semibold text-sm sm:text-base bg-red-50 p-3 rounded-md">
|
<div className="absolute top-1/4 -left-20 w-72 h-72 rounded-full bg-leaf-pale/60 blur-3xl pointer-events-none" />
|
||||||
{error}
|
<div className="absolute bottom-1/4 -right-20 w-64 h-64 rounded-full bg-amber-pale/70 blur-3xl pointer-events-none" />
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<div className="text-center text-sm sm:text-base text-gray-600 py-2">
|
<div className="relative w-full max-w-sm">
|
||||||
Click below to login with Authelia
|
{/* Branding */}
|
||||||
</div>
|
<div className="flex flex-col items-center mb-8">
|
||||||
</div>
|
<div className="relative mb-5">
|
||||||
|
<div className="absolute -inset-5 bg-amber-soft/30 rounded-full blur-2xl" />
|
||||||
<button
|
<img
|
||||||
className="p-3 sm:p-4 min-h-[44px] border border-blue-400 bg-blue-200 hover:bg-blue-400 cursor-pointer rounded-md flex-grow text-sm sm:text-base font-semibold"
|
src={catIcon}
|
||||||
onClick={handleOIDCLogin}
|
alt="Simba"
|
||||||
disabled={isLoggingIn}
|
className="relative w-20 h-20 drop-shadow-lg"
|
||||||
>
|
/>
|
||||||
{isLoggingIn ? "Redirecting..." : "Login with Authelia"}
|
|
||||||
</button>
|
|
||||||
</div>
|
</div>
|
||||||
|
<h1
|
||||||
|
className="text-4xl font-bold text-charcoal tracking-tight"
|
||||||
|
style={{ fontFamily: "var(--font-display)" }}
|
||||||
|
>
|
||||||
|
asksimba
|
||||||
|
</h1>
|
||||||
|
<p className="text-warm-gray text-sm mt-1.5 tracking-wide">
|
||||||
|
your feline knowledge companion
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{/* Card */}
|
||||||
|
<div
|
||||||
|
className={cn(
|
||||||
|
"bg-warm-white rounded-3xl border border-sand-light",
|
||||||
|
"shadow-xl shadow-sand/30 p-8",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{error && (
|
||||||
|
<div className="mb-5 text-sm bg-red-50 text-red-600 px-4 py-3 rounded-2xl border border-red-200">
|
||||||
|
{error}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<p className="text-center text-warm-gray text-sm mb-6">
|
||||||
|
Sign in to start chatting with Simba
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<button
|
||||||
|
onClick={handleOIDCLogin}
|
||||||
|
disabled={isLoggingIn}
|
||||||
|
className={cn(
|
||||||
|
"w-full py-3.5 px-4 rounded-2xl text-sm font-semibold tracking-wide",
|
||||||
|
"bg-forest text-cream",
|
||||||
|
"shadow-md shadow-forest/20",
|
||||||
|
"hover:bg-forest-mid hover:shadow-lg hover:shadow-forest/30",
|
||||||
|
"active:scale-[0.98] disabled:opacity-50 disabled:cursor-not-allowed",
|
||||||
|
"transition-all duration-200 cursor-pointer",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{isLoggingIn ? "Redirecting..." : "Sign in with Authelia"}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<p className="text-center text-sand mt-5 text-xs tracking-widest select-none">
|
||||||
|
✦ meow ✦
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -1,12 +1,18 @@
|
|||||||
import { useEffect, useState, useRef } from "react";
|
import { useRef, useState } from "react";
|
||||||
|
import { ArrowUp, ImagePlus, X } from "lucide-react";
|
||||||
|
import { cn } from "../lib/utils";
|
||||||
|
import { Textarea } from "./ui/textarea";
|
||||||
|
|
||||||
type MessageInputProps = {
|
type MessageInputProps = {
|
||||||
handleQueryChange: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
|
handleQueryChange: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
|
||||||
handleKeyDown: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
|
handleKeyDown: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
|
||||||
handleQuestionSubmit: () => void;
|
handleQuestionSubmit: () => void;
|
||||||
setSimbaMode: (sdf: boolean) => void;
|
setSimbaMode: (val: boolean) => void;
|
||||||
query: string;
|
query: string;
|
||||||
isLoading: boolean;
|
isLoading: boolean;
|
||||||
|
pendingImage: File | null;
|
||||||
|
onImageSelect: (file: File) => void;
|
||||||
|
onClearImage: () => void;
|
||||||
};
|
};
|
||||||
|
|
||||||
export const MessageInput = ({
|
export const MessageInput = ({
|
||||||
@@ -16,41 +22,127 @@ export const MessageInput = ({
|
|||||||
handleQuestionSubmit,
|
handleQuestionSubmit,
|
||||||
setSimbaMode,
|
setSimbaMode,
|
||||||
isLoading,
|
isLoading,
|
||||||
|
pendingImage,
|
||||||
|
onImageSelect,
|
||||||
|
onClearImage,
|
||||||
}: MessageInputProps) => {
|
}: MessageInputProps) => {
|
||||||
|
const [simbaMode, setLocalSimbaMode] = useState(false);
|
||||||
|
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||||
|
|
||||||
|
const toggleSimbaMode = () => {
|
||||||
|
const next = !simbaMode;
|
||||||
|
setLocalSimbaMode(next);
|
||||||
|
setSimbaMode(next);
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleFileChange = (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||||
|
const file = e.target.files?.[0];
|
||||||
|
if (file) {
|
||||||
|
onImageSelect(file);
|
||||||
|
}
|
||||||
|
// Reset so the same file can be re-selected
|
||||||
|
e.target.value = "";
|
||||||
|
};
|
||||||
|
|
||||||
|
const canSend = !isLoading && (query.trim() || pendingImage);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex flex-col gap-4 sticky bottom-0 bg-[#3D763A] p-6 rounded-xl">
|
<div
|
||||||
<div className="flex flex-row justify-between grow">
|
className={cn(
|
||||||
<textarea
|
"rounded-2xl bg-warm-white border border-sand shadow-md shadow-sand/30",
|
||||||
className="p-3 sm:p-4 border border-blue-200 rounded-md grow bg-[#F9F5EB] min-h-[44px] resize-y"
|
"transition-shadow duration-200 focus-within:shadow-lg focus-within:shadow-amber-soft/20",
|
||||||
onChange={handleQueryChange}
|
"focus-within:border-amber-soft/60",
|
||||||
onKeyDown={handleKeyDown}
|
)}
|
||||||
value={query}
|
>
|
||||||
rows={2}
|
{/* Image preview */}
|
||||||
placeholder="Type your message... (Press Enter to send, Shift+Enter for new line)"
|
{pendingImage && (
|
||||||
/>
|
<div className="px-3 pt-3">
|
||||||
</div>
|
<div className="relative inline-block">
|
||||||
<div className="flex flex-row justify-between gap-2 grow">
|
<img
|
||||||
|
src={URL.createObjectURL(pendingImage)}
|
||||||
|
alt="Pending upload"
|
||||||
|
className="h-20 rounded-lg object-cover border border-sand"
|
||||||
|
/>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={onClearImage}
|
||||||
|
className="absolute -top-1.5 -right-1.5 w-5 h-5 rounded-full bg-charcoal text-white flex items-center justify-center hover:bg-charcoal/80 transition-colors cursor-pointer"
|
||||||
|
>
|
||||||
|
<X size={12} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Textarea */}
|
||||||
|
<Textarea
|
||||||
|
onChange={handleQueryChange}
|
||||||
|
onKeyDown={handleKeyDown}
|
||||||
|
value={query}
|
||||||
|
rows={2}
|
||||||
|
placeholder="Ask Simba anything..."
|
||||||
|
className="min-h-[60px] max-h-40"
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* Hidden file input */}
|
||||||
|
<input
|
||||||
|
ref={fileInputRef}
|
||||||
|
type="file"
|
||||||
|
accept="image/*"
|
||||||
|
onChange={handleFileChange}
|
||||||
|
className="hidden"
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* Bottom toolbar */}
|
||||||
|
<div className="flex items-center justify-between px-3 pb-2.5 pt-1">
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
{/* Simba mode toggle */}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={toggleSimbaMode}
|
||||||
|
className="flex items-center gap-2 group cursor-pointer select-none"
|
||||||
|
>
|
||||||
|
<div className={cn("toggle-track", simbaMode && "checked")}>
|
||||||
|
<div className="toggle-thumb" />
|
||||||
|
</div>
|
||||||
|
<span className="text-xs text-warm-gray group-hover:text-charcoal transition-colors">
|
||||||
|
simba mode
|
||||||
|
</span>
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{/* Image attach button */}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => fileInputRef.current?.click()}
|
||||||
|
disabled={isLoading}
|
||||||
|
className={cn(
|
||||||
|
"w-7 h-7 rounded-lg flex items-center justify-center transition-all cursor-pointer",
|
||||||
|
isLoading
|
||||||
|
? "text-warm-gray/40 cursor-not-allowed"
|
||||||
|
: "text-warm-gray hover:text-charcoal hover:bg-cream-dark",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
<ImagePlus size={16} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Send button */}
|
||||||
<button
|
<button
|
||||||
className={`p-3 sm:p-4 min-h-[44px] border border-blue-400 rounded-md flex-grow text-sm sm:text-base ${
|
|
||||||
isLoading
|
|
||||||
? "bg-gray-400 cursor-not-allowed opacity-50"
|
|
||||||
: "bg-[#EDA541] hover:bg-blue-400 cursor-pointer"
|
|
||||||
}`}
|
|
||||||
onClick={() => handleQuestionSubmit()}
|
|
||||||
type="submit"
|
type="submit"
|
||||||
disabled={isLoading}
|
onClick={handleQuestionSubmit}
|
||||||
|
disabled={!canSend}
|
||||||
|
className={cn(
|
||||||
|
"w-8 h-8 rounded-full flex items-center justify-center",
|
||||||
|
"transition-all duration-200 cursor-pointer",
|
||||||
|
"shadow-sm",
|
||||||
|
!canSend
|
||||||
|
? "bg-sand text-warm-gray/50 cursor-not-allowed shadow-none"
|
||||||
|
: "bg-amber-glow text-white hover:bg-amber-dark hover:shadow-md hover:shadow-amber-glow/30 active:scale-95",
|
||||||
|
)}
|
||||||
>
|
>
|
||||||
{isLoading ? "Sending..." : "Submit"}
|
<ArrowUp size={15} strokeWidth={2.5} />
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
<div className="flex flex-row justify-center gap-2 grow items-center">
|
|
||||||
<input
|
|
||||||
type="checkbox"
|
|
||||||
onChange={(event) => setSimbaMode(event.target.checked)}
|
|
||||||
className="w-5 h-5 cursor-pointer"
|
|
||||||
/>
|
|
||||||
<p className="text-sm sm:text-base">simba mode?</p>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,11 +1,31 @@
|
|||||||
|
import { cn } from "../lib/utils";
|
||||||
|
import { conversationService } from "../api/conversationService";
|
||||||
|
|
||||||
type QuestionBubbleProps = {
|
type QuestionBubbleProps = {
|
||||||
text: string;
|
text: string;
|
||||||
|
image_key?: string | null;
|
||||||
};
|
};
|
||||||
|
|
||||||
export const QuestionBubble = ({ text }: QuestionBubbleProps) => {
|
export const QuestionBubble = ({ text, image_key }: QuestionBubbleProps) => {
|
||||||
return (
|
return (
|
||||||
<div className="w-2/3 rounded-md bg-stone-200 p-3 sm:p-4 break-words overflow-wrap-anywhere text-sm sm:text-base ml-auto">
|
<div className="flex justify-end message-enter">
|
||||||
🤦: {text}
|
<div
|
||||||
|
className={cn(
|
||||||
|
"max-w-[72%] rounded-3xl rounded-br-md",
|
||||||
|
"bg-leaf-pale border border-leaf-light/60",
|
||||||
|
"px-4 py-3 text-sm leading-relaxed text-charcoal",
|
||||||
|
"shadow-sm shadow-leaf/10",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{image_key && (
|
||||||
|
<img
|
||||||
|
src={conversationService.getImageUrl(image_key)}
|
||||||
|
alt="Uploaded image"
|
||||||
|
className="max-w-full rounded-xl mb-2"
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
{text}
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
15
raggr-frontend/src/components/ToolBubble.tsx
Normal file
15
raggr-frontend/src/components/ToolBubble.tsx
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { cn } from "../lib/utils";
|
||||||
|
|
||||||
|
export const ToolBubble = ({ text }: { text: string }) => (
|
||||||
|
<div className="flex justify-center message-enter">
|
||||||
|
<div
|
||||||
|
className={cn(
|
||||||
|
"inline-flex items-center gap-1.5 px-3 py-1 rounded-full",
|
||||||
|
"bg-leaf-pale border border-leaf-light/50",
|
||||||
|
"text-xs text-leaf-dark italic",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{text}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
26
raggr-frontend/src/components/ui/badge.tsx
Normal file
26
raggr-frontend/src/components/ui/badge.tsx
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import { cva, type VariantProps } from "class-variance-authority";
|
||||||
|
import { cn } from "../../lib/utils";
|
||||||
|
|
||||||
|
const badgeVariants = cva(
|
||||||
|
"inline-flex items-center gap-1.5 rounded-full px-3 py-1 text-xs font-medium transition-colors",
|
||||||
|
{
|
||||||
|
variants: {
|
||||||
|
variant: {
|
||||||
|
default: "bg-leaf-pale text-leaf-dark border border-leaf-light/50",
|
||||||
|
amber: "bg-amber-pale text-amber-glow border border-amber-soft/40",
|
||||||
|
muted: "bg-sand-light/60 text-warm-gray border border-sand/40",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
defaultVariants: {
|
||||||
|
variant: "default",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export interface BadgeProps
|
||||||
|
extends React.HTMLAttributes<HTMLDivElement>,
|
||||||
|
VariantProps<typeof badgeVariants> {}
|
||||||
|
|
||||||
|
export const Badge = ({ className, variant, ...props }: BadgeProps) => {
|
||||||
|
return <div className={cn(badgeVariants({ variant }), className)} {...props} />;
|
||||||
|
};
|
||||||
48
raggr-frontend/src/components/ui/button.tsx
Normal file
48
raggr-frontend/src/components/ui/button.tsx
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
import { cva, type VariantProps } from "class-variance-authority";
|
||||||
|
import { cn } from "../../lib/utils";
|
||||||
|
|
||||||
|
const buttonVariants = cva(
|
||||||
|
"inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-xl text-sm font-semibold transition-all duration-200 disabled:pointer-events-none disabled:opacity-50 cursor-pointer select-none",
|
||||||
|
{
|
||||||
|
variants: {
|
||||||
|
variant: {
|
||||||
|
default:
|
||||||
|
"bg-leaf text-white shadow-sm shadow-leaf/20 hover:bg-leaf-dark hover:shadow-md hover:shadow-leaf/30 active:scale-[0.97]",
|
||||||
|
amber:
|
||||||
|
"bg-amber-glow text-white shadow-sm shadow-amber/20 hover:bg-amber-dark hover:shadow-md active:scale-[0.97]",
|
||||||
|
ghost:
|
||||||
|
"text-cream/70 hover:text-cream hover:bg-white/8 active:scale-[0.97]",
|
||||||
|
"ghost-dark":
|
||||||
|
"text-warm-gray hover:text-charcoal hover:bg-sand-light/60 active:scale-[0.97]",
|
||||||
|
outline:
|
||||||
|
"border border-sand bg-transparent text-warm-gray hover:bg-cream-dark hover:text-charcoal active:scale-[0.97]",
|
||||||
|
destructive:
|
||||||
|
"text-red-400 hover:text-red-600 hover:bg-red-50 active:scale-[0.97]",
|
||||||
|
},
|
||||||
|
size: {
|
||||||
|
default: "h-9 px-4 py-2",
|
||||||
|
sm: "h-7 px-3 text-xs",
|
||||||
|
lg: "h-11 px-6 text-base",
|
||||||
|
icon: "h-9 w-9",
|
||||||
|
"icon-sm": "h-7 w-7",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
defaultVariants: {
|
||||||
|
variant: "default",
|
||||||
|
size: "default",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export interface ButtonProps
|
||||||
|
extends React.ButtonHTMLAttributes<HTMLButtonElement>,
|
||||||
|
VariantProps<typeof buttonVariants> {}
|
||||||
|
|
||||||
|
export const Button = ({ className, variant, size, ...props }: ButtonProps) => {
|
||||||
|
return (
|
||||||
|
<button
|
||||||
|
className={cn(buttonVariants({ variant, size }), className)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
};
|
||||||
19
raggr-frontend/src/components/ui/input.tsx
Normal file
19
raggr-frontend/src/components/ui/input.tsx
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import { cn } from "../../lib/utils";
|
||||||
|
|
||||||
|
export interface InputProps
|
||||||
|
extends React.InputHTMLAttributes<HTMLInputElement> {}
|
||||||
|
|
||||||
|
export const Input = ({ className, ...props }: InputProps) => {
|
||||||
|
return (
|
||||||
|
<input
|
||||||
|
className={cn(
|
||||||
|
"flex h-8 w-full rounded-lg border border-sand bg-cream px-3 py-1",
|
||||||
|
"text-sm text-charcoal placeholder:text-warm-gray/50",
|
||||||
|
"focus:outline-none focus:ring-2 focus:ring-amber-soft/60",
|
||||||
|
"disabled:cursor-not-allowed disabled:opacity-50",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
};
|
||||||
37
raggr-frontend/src/components/ui/table.tsx
Normal file
37
raggr-frontend/src/components/ui/table.tsx
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
import { cn } from "../../lib/utils";
|
||||||
|
|
||||||
|
export const Table = ({ className, ...props }: React.HTMLAttributes<HTMLTableElement>) => (
|
||||||
|
<table className={cn("w-full caption-bottom text-sm", className)} {...props} />
|
||||||
|
);
|
||||||
|
|
||||||
|
export const TableHeader = ({ className, ...props }: React.HTMLAttributes<HTMLTableSectionElement>) => (
|
||||||
|
<thead className={cn("[&_tr]:border-b [&_tr]:border-sand-light", className)} {...props} />
|
||||||
|
);
|
||||||
|
|
||||||
|
export const TableBody = ({ className, ...props }: React.HTMLAttributes<HTMLTableSectionElement>) => (
|
||||||
|
<tbody className={cn("[&_tr:last-child]:border-0", className)} {...props} />
|
||||||
|
);
|
||||||
|
|
||||||
|
export const TableRow = ({ className, ...props }: React.HTMLAttributes<HTMLTableRowElement>) => (
|
||||||
|
<tr
|
||||||
|
className={cn(
|
||||||
|
"border-b border-sand-light/50 transition-colors hover:bg-cream-dark/40",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
|
||||||
|
export const TableHead = ({ className, ...props }: React.ThHTMLAttributes<HTMLTableCellElement>) => (
|
||||||
|
<th
|
||||||
|
className={cn(
|
||||||
|
"h-10 px-4 text-left align-middle text-xs font-semibold text-warm-gray uppercase tracking-wider",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
|
||||||
|
export const TableCell = ({ className, ...props }: React.TdHTMLAttributes<HTMLTableCellElement>) => (
|
||||||
|
<td className={cn("px-4 py-3 align-middle", className)} {...props} />
|
||||||
|
);
|
||||||
19
raggr-frontend/src/components/ui/textarea.tsx
Normal file
19
raggr-frontend/src/components/ui/textarea.tsx
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import { cn } from "../../lib/utils";
|
||||||
|
|
||||||
|
export interface TextareaProps
|
||||||
|
extends React.TextareaHTMLAttributes<HTMLTextAreaElement> {}
|
||||||
|
|
||||||
|
export const Textarea = ({ className, ...props }: TextareaProps) => {
|
||||||
|
return (
|
||||||
|
<textarea
|
||||||
|
className={cn(
|
||||||
|
"flex w-full resize-none rounded-xl border-0 bg-transparent px-3 py-2.5",
|
||||||
|
"text-sm text-charcoal placeholder:text-warm-gray/50",
|
||||||
|
"focus:outline-none",
|
||||||
|
"disabled:cursor-not-allowed disabled:opacity-50",
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
{...props}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -11,3 +11,9 @@ if (rootEl) {
|
|||||||
</React.StrictMode>,
|
</React.StrictMode>,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if ('serviceWorker' in navigator) {
|
||||||
|
window.addEventListener('load', () => {
|
||||||
|
navigator.serviceWorker.register('/sw.js').catch(console.warn);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|||||||
6
raggr-frontend/src/lib/utils.ts
Normal file
6
raggr-frontend/src/lib/utils.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import { clsx, type ClassValue } from "clsx";
|
||||||
|
import { twMerge } from "tailwind-merge";
|
||||||
|
|
||||||
|
export function cn(...inputs: ClassValue[]) {
|
||||||
|
return twMerge(clsx(inputs));
|
||||||
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
15
startup.sh
15
startup.sh
@@ -3,8 +3,17 @@
|
|||||||
echo "Running database migrations..."
|
echo "Running database migrations..."
|
||||||
aerich upgrade
|
aerich upgrade
|
||||||
|
|
||||||
echo "Starting reindex process..."
|
# Ensure Obsidian vault directory exists
|
||||||
python main.py "" --reindex
|
mkdir -p /app/data/obsidian
|
||||||
|
|
||||||
echo "Starting Flask application..."
|
# Start continuous Obsidian sync if enabled
|
||||||
|
if [ "${OBSIDIAN_CONTINUOUS_SYNC}" = "true" ]; then
|
||||||
|
echo "Starting Obsidian continuous sync in background..."
|
||||||
|
ob sync --continuous &
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Starting reindex process in background..."
|
||||||
|
python main.py "" --reindex &
|
||||||
|
|
||||||
|
echo "Starting application..."
|
||||||
python app.py
|
python app.py
|
||||||
|
|||||||
189
tickets.md
Normal file
189
tickets.md
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
# Integration: Twilio API for WhatsApp Interface (Multi-User)
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
Integrate Twilio's WhatsApp API to allow users to interact with Simba via WhatsApp. This requires multi-user support, linking WhatsApp numbers to existing or new user accounts.
|
||||||
|
|
||||||
|
## Tasks
|
||||||
|
|
||||||
|
### Phase 1: Infrastructure and Database Changes
|
||||||
|
- [x] **[TICKET-001]** Update `User` model to include `whatsapp_number`.
|
||||||
|
- [x] **[TICKET-002]** Generate and apply migrations for the database changes.
|
||||||
|
|
||||||
|
### Phase 2: Twilio Integration Blueprint
|
||||||
|
- [x] **[TICKET-003]** Create a new blueprint for Twilio/WhatsApp webhook.
|
||||||
|
- [x] **[TICKET-004]** Implement Twilio signature validation for security.
|
||||||
|
- Decorator enabled on webhook. Set `TWILIO_SIGNATURE_VALIDATION=false` to disable in dev. Set `TWILIO_WEBHOOK_URL` if behind a reverse proxy.
|
||||||
|
- [x] **[TICKET-005]** Implement User identification from WhatsApp phone number.
|
||||||
|
|
||||||
|
### Phase 3: Core Messaging Logic
|
||||||
|
- [x] **[TICKET-006]** Integrate `consult_simba_oracle` with the WhatsApp blueprint.
|
||||||
|
- [x] **[TICKET-007]** Implement outgoing WhatsApp message responses.
|
||||||
|
- [x] **[TICKET-008]** Handle conversation context in WhatsApp.
|
||||||
|
|
||||||
|
### Phase 4: Configuration and Deployment
|
||||||
|
- [x] **[TICKET-009]** Add Twilio credentials to environment variables.
|
||||||
|
- Keys: `TWILIO_ACCOUNT_SID`, `TWILIO_AUTH_TOKEN`, `TWILIO_WHATSAPP_NUMBER`.
|
||||||
|
- [ ] **[TICKET-010]** Document the Twilio webhook setup in `docs/whatsapp_integration.md`.
|
||||||
|
- Include: Webhook URL format, Twilio Console setup instructions.
|
||||||
|
|
||||||
|
### Phase 5: Multi-user & Edge Cases
|
||||||
|
- [ ] **[TICKET-011]** Handle first-time users (auto-creation of accounts or invitation system).
|
||||||
|
- [ ] **[TICKET-012]** Handle media messages (optional/future: images, audio).
|
||||||
|
- [x] **[TICKET-013]** Rate limiting and error handling for Twilio requests.
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### Twilio Webhook Payload (POST)
|
||||||
|
- `SmsMessageSid`, `NumMedia`, `Body`, `From`, `To`, `AccountSid`, etc.
|
||||||
|
- We primarily care about `Body` (user message) and `From` (user WhatsApp number).
|
||||||
|
|
||||||
|
### Workflow
|
||||||
|
1. Twilio receives a message -> POST to `/api/whatsapp/webhook`.
|
||||||
|
2. Validate signature.
|
||||||
|
3. Identify `User` by `From` number.
|
||||||
|
4. If not found, create a new `User` or return an error.
|
||||||
|
5. Get/create `Conversation` for this `User`.
|
||||||
|
6. Call `consult_simba_oracle` with the query and context.
|
||||||
|
7. Return response via TwiML `<Message>` tag.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# Integration: Obsidian Bidirectional Data Store
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
Integrate Obsidian as a bidirectional data store using the [`obsidian-headless`](https://github.com/obsidianmd/obsidian-headless) npm package. SimbaRAG will be able to read/search Obsidian notes for RAG context and write new notes, research summaries, and tasks back to the vault via the LangChain agent.
|
||||||
|
|
||||||
|
## Tasks
|
||||||
|
|
||||||
|
### Phase 1: Infrastructure
|
||||||
|
- [ ] **[OBS-001]** Upgrade Node.js from 20 to 22 in `Dockerfile` (required by obsidian-headless).
|
||||||
|
- [ ] **[OBS-002]** Install `obsidian-headless` globally via npm in `Dockerfile`.
|
||||||
|
- [ ] **[OBS-003]** Add `obsidian_vault_data` volume and Obsidian env vars to `docker-compose.yml`.
|
||||||
|
- [ ] **[OBS-004]** Document Obsidian env vars in `.env.example` (`OBSIDIAN_AUTH_TOKEN`, `OBSIDIAN_VAULT_ID`, `OBSIDIAN_E2E_PASSWORD`, `OBSIDIAN_DEVICE_NAME`, `OBSIDIAN_CONTINUOUS_SYNC`).
|
||||||
|
- [ ] **[OBS-005]** Update `startup.sh` to conditionally run `ob sync --continuous` in background when `OBSIDIAN_CONTINUOUS_SYNC=true`.
|
||||||
|
|
||||||
|
### Phase 2: Core Service
|
||||||
|
- [ ] **[OBS-006]** Create `utils/obsidian_service.py` with `ObsidianService` class.
|
||||||
|
- Vault setup via `ob sync-setup` (async subprocess)
|
||||||
|
- One-time sync via `ob sync`
|
||||||
|
- Sync status via `ob sync-status`
|
||||||
|
- Walk vault directory for `.md` files (skip `.obsidian/`)
|
||||||
|
- Parse Obsidian markdown: YAML frontmatter → metadata, wikilink conversion, embed stripping, tag extraction
|
||||||
|
- Read specific note by relative path
|
||||||
|
- Create new note with frontmatter (auto-adds `created_by: simbarag` + timestamp)
|
||||||
|
- Create task note in configurable tasks folder
|
||||||
|
|
||||||
|
### Phase 3: RAG Indexing (Read)
|
||||||
|
- [ ] **[OBS-007]** Add `fetch_obsidian_documents()` to `blueprints/rag/logic.py` — uses `ObsidianService` to parse all vault `.md` files into LangChain `Document` objects with `source=obsidian` metadata.
|
||||||
|
- [ ] **[OBS-008]** Add `index_obsidian_documents()` to `blueprints/rag/logic.py` — deletes existing `source=obsidian` chunks, splits documents with shared `text_splitter`, embeds into shared `vector_store`.
|
||||||
|
- [ ] **[OBS-009]** Add `POST /api/rag/index-obsidian` endpoint (`@admin_required`) to `blueprints/rag/__init__.py`.
|
||||||
|
|
||||||
|
### Phase 4: Agent Tools (Read + Write)
|
||||||
|
- [ ] **[OBS-010]** Add `obsidian_search_notes` tool to `blueprints/conversation/agents.py` — semantic search via ChromaDB with `where={"source": "obsidian"}` filter.
|
||||||
|
- [ ] **[OBS-011]** Add `obsidian_read_note` tool to `blueprints/conversation/agents.py` — reads a specific note by relative path.
|
||||||
|
- [ ] **[OBS-012]** Add `obsidian_create_note` tool to `blueprints/conversation/agents.py` — creates a new markdown note in the vault (title, content, folder, tags).
|
||||||
|
- [ ] **[OBS-013]** Add `obsidian_create_task` tool to `blueprints/conversation/agents.py` — creates a task note with optional due date.
|
||||||
|
- [ ] **[OBS-014]** Register Obsidian tools conditionally (follow YNAB pattern: `obsidian_enabled` flag).
|
||||||
|
- [ ] **[OBS-015]** Update system prompt in `blueprints/conversation/__init__.py` with Obsidian tool usage instructions.
|
||||||
|
|
||||||
|
### Phase 5: Testing & Verification
|
||||||
|
- [ ] **[OBS-016]** Verify Docker image builds with Node.js 22 + obsidian-headless.
|
||||||
|
- [ ] **[OBS-017]** Test vault sync: setup → sync → verify files appear in `/app/data/obsidian`.
|
||||||
|
- [ ] **[OBS-018]** Test indexing: `POST /api/rag/index-obsidian` → verify chunks in ChromaDB with `source=obsidian`.
|
||||||
|
- [ ] **[OBS-019]** Test agent read tools: chat queries trigger `obsidian_search_notes` and `obsidian_read_note`.
|
||||||
|
- [ ] **[OBS-020]** Test agent write tools: chat creates notes/tasks → files appear in vault → sync pushes to Obsidian.
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### Key Files
|
||||||
|
- `utils/obsidian_service.py` — new, core service (follows `utils/ynab_service.py` pattern)
|
||||||
|
- `blueprints/conversation/agents.py` — add tools (follows YNAB tool pattern at lines 101-279)
|
||||||
|
- `blueprints/conversation/__init__.py` — update system prompt (line ~94)
|
||||||
|
- `blueprints/rag/logic.py` — add indexing functions (reuse `vector_store`, `text_splitter`)
|
||||||
|
- `blueprints/rag/__init__.py` — add index endpoint
|
||||||
|
|
||||||
|
### Write-back Model
|
||||||
|
Files written to the vault directory are automatically synced to Obsidian Sync by the `ob sync --continuous` background process. No separate push step needed.
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
| Variable | Required | Description |
|
||||||
|
|----------|----------|-------------|
|
||||||
|
| `OBSIDIAN_AUTH_TOKEN` | Yes | Auth token for Obsidian Sync (non-interactive) |
|
||||||
|
| `OBSIDIAN_VAULT_ID` | Yes | Remote vault ID or name |
|
||||||
|
| `OBSIDIAN_E2E_PASSWORD` | If E2EE | End-to-end encryption password |
|
||||||
|
| `OBSIDIAN_DEVICE_NAME` | No | Client identifier (default: `simbarag-server`) |
|
||||||
|
| `OBSIDIAN_CONTINUOUS_SYNC` | No | Enable background sync (default: `false`) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# Integration: WhatsApp to LangChain Agent Migration
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
Migrate the WhatsApp blueprint from custom LLM logic to the LangChain agent-based system used by the conversation blueprint. This will provide Tavily web search, YNAB integration, and improved message handling capabilities.
|
||||||
|
|
||||||
|
## Tasks
|
||||||
|
|
||||||
|
### Phase 1: Import and Setup Changes
|
||||||
|
- [x] **[WA-001]** Remove dependency on `main.py`'s `consult_simba_oracle` import in `blueprints/whatsapp/__init__.py`.
|
||||||
|
- [x] **[WA-002]** Import `main_agent` from `blueprints.conversation.agents` in `blueprints/whatsapp/__init__.py`.
|
||||||
|
- [ ] **[WA-003]** Add import for `query_vector_store` from `blueprints.rag.logic` (if needed for simba_search tool).
|
||||||
|
- [x] **[WA-004]** Verify `main_agent` is already initialized as a global variable in `agents.py` (it is at line 295).
|
||||||
|
|
||||||
|
### Phase 2: Agent Invocation Adaptation
|
||||||
|
- [x] **[WA-005]** Replace `consult_simba_oracle()` call (lines 171-178) with LangChain agent invocation.
|
||||||
|
- [x] **[WA-006]** Add system prompt with Simba facts, medical conditions, and recent events from `blueprints/conversation/__init__.py` (lines 55-95).
|
||||||
|
- [x] **[WA-007]** Build messages payload with role-based conversation history (last 10 messages).
|
||||||
|
- [x] **[WA-008]** Handle agent response extraction: `response.get("messages", [])[-1].content`.
|
||||||
|
- [x] **[WA-009]** Keep existing error handling around agent invocation (try/except block).
|
||||||
|
|
||||||
|
### Phase 3: Configuration and Logging
|
||||||
|
- [x] **[WA-010]** Add YNAB availability logging (check `os.getenv("YNAB_ACCESS_TOKEN")` is not None) in webhook handler.
|
||||||
|
- [x] **[WA-011]** Ensure `main_agent` tools include `simba_search`, `web_search`, and optionally YNAB tools (already configured in `agents.py`).
|
||||||
|
- [x] **[WA-012]** Verify `simba_search` tool uses `query_vector_store()` which supports `where={"source": "paperless"}` filter (no change needed, works with existing ChromaDB collection).
|
||||||
|
|
||||||
|
### Phase 4: Testing Strategy
|
||||||
|
- [ ] **[WA-013]** Test Simba queries (e.g., "How much does Simba weigh?") — should use `simba_search` tool.
|
||||||
|
- [ ] **[WA-014]** Test general chat queries (e.g., "What's the weather?") — should use LLM directly, no tools.
|
||||||
|
- [ ] **[WA-015]** Test web search capability (e.g., "What's the latest cat health research?") — should use `web_search` tool with Tavily.
|
||||||
|
- [ ] **[WA-016]** Test YNAB integration if configured (e.g., "How much did I spend on food?") — should use appropriate YNAB tool.
|
||||||
|
- [ ] **[WA-017]** Test conversation context preservation (send multiple messages in sequence).
|
||||||
|
- [ ] **[WA-018]** Test rate limiting still works after migration.
|
||||||
|
- [ ] **[WA-019]** Test user creation and allowlist still function correctly.
|
||||||
|
- [ ] **[WA-020]** Test error handling for agent failures (returns "Sorry, I'm having trouble thinking right now. 😿").
|
||||||
|
|
||||||
|
### Phase 5: Cleanup and Documentation
|
||||||
|
- [ ] **[WA-021]** Optionally remove or deprecate the legacy `main.py` functions: `classify_query()`, `consult_oracle()`, `llm_chat()`, `consult_simba_oracle()` (keep for CLI tool usage).
|
||||||
|
- [ ] **[WA-022]** Update code comments in `main.py` to indicate WhatsApp no longer uses these functions.
|
||||||
|
- [ ] **[WA-023]** Document the agent-based approach in `docs/whatsapp_integration.md` (if file exists) or create new documentation.
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### Current WhatsApp Flow
|
||||||
|
1. Twilio webhook → `blueprints/whatsapp/__init__.webhook()`
|
||||||
|
2. Call `consult_simba_oracle(input, transcript)` from `main.py`
|
||||||
|
3. `consult_simba_oracle()` uses custom `QueryGenerator` to classify query
|
||||||
|
4. Routes to `consult_oracle()` (ChromaDB) or `llm_chat()` (simple chat)
|
||||||
|
5. Returns text response
|
||||||
|
|
||||||
|
### Target WhatsApp Flow
|
||||||
|
1. Twilio webhook → `blueprints/whatsapp/__init__.webhook()`
|
||||||
|
2. Build LangChain messages payload with system prompt and conversation history
|
||||||
|
3. Invoke `main_agent.ainvoke({"messages": messages_payload})`
|
||||||
|
4. Agent decides when to use tools (simba_search, web_search, YNAB)
|
||||||
|
5. Returns text response from last message
|
||||||
|
|
||||||
|
### Key Differences
|
||||||
|
1. **No manual query classification** — Agent decides based on LLM reasoning
|
||||||
|
2. **Tavily web_search** now available for current information
|
||||||
|
3. **YNAB integration** available if configured
|
||||||
|
4. **System prompt consistency** with conversation blueprint
|
||||||
|
5. **Message format** — LangChain messages array vs transcript string
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
No new environment variables needed. Uses existing:
|
||||||
|
- `LLAMA_SERVER_URL` — for LLM model
|
||||||
|
- `TAVILY_API_KEY` — for web search
|
||||||
|
- `YNAB_ACCESS_TOKEN` — for budget integration (optional)
|
||||||
|
|
||||||
|
### Files Modified
|
||||||
|
- `blueprints/whatsapp/__init__.py` — Main webhook handler
|
||||||
@@ -76,6 +76,50 @@ def describe_simba_image(input):
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
async def analyze_user_image(file_bytes: bytes) -> str:
    """Analyze an image uploaded by a user and return a text description.

    Uses llama-server (OpenAI-compatible API) with vision support.
    Falls back to OpenAI if llama-server is not configured.

    Args:
        file_bytes: Raw image bytes (sent to the model as base64 JPEG data URL).

    Returns:
        The model's textual description of the image.
    """
    import base64

    from openai import AsyncOpenAI

    # Prefer the self-hosted llama-server endpoint when configured;
    # otherwise fall back to the hosted OpenAI API with a vision model.
    server_url = os.getenv("LLAMA_SERVER_URL")
    if server_url:
        client = AsyncOpenAI(base_url=server_url, api_key="not-needed")
        model_name = os.getenv("LLAMA_MODEL_NAME", "llama-3.1-8b-instruct")
    else:
        client = AsyncOpenAI()
        model_name = "gpt-4o-mini"

    encoded = base64.b64encode(file_bytes).decode("utf-8")

    system_message = {
        "role": "system",
        "content": "You are a helpful image analyst. Describe what you see in the image in detail. Be thorough but concise.",
    }
    user_message = {
        "role": "user",
        "content": [
            {"type": "text", "text": "Please describe this image in detail."},
            {
                "type": "image_url",
                "image_url": {
                    "url": f"data:image/jpeg;base64,{encoded}",
                },
            },
        ],
    }

    completion = await client.chat.completions.create(
        model=model_name,
        messages=[system_message, user_message],
    )
    return completion.choices[0].message.content
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
if args.filepath:
|
if args.filepath:
|
||||||
|
|||||||
62
utils/image_upload.py
Normal file
62
utils/image_upload.py
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
import io
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from PIL import Image
|
||||||
|
from pillow_heif import register_heif_opener
|
||||||
|
|
||||||
|
register_heif_opener()
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
|
||||||
|
ALLOWED_TYPES = {"image/jpeg", "image/png", "image/webp", "image/heic", "image/heif"}
|
||||||
|
MAX_DIMENSION = 1920
|
||||||
|
|
||||||
|
|
||||||
|
class ImageValidationError(Exception):
    """Raised when an uploaded image fails validation (e.g. unsupported type)."""
|
||||||
|
|
||||||
|
|
||||||
|
def process_image(file_bytes: bytes, content_type: str) -> tuple[bytes, str]:
    """Validate, resize, and strip EXIF from an uploaded image.

    Args:
        file_bytes: Raw uploaded image bytes.
        content_type: MIME type reported for the upload.

    Returns:
        Processed bytes and the output content type (always image/jpeg,
        image/png, or image/webp).

    Raises:
        ImageValidationError: If the content type is not in ALLOWED_TYPES.
    """
    if content_type not in ALLOWED_TYPES:
        raise ImageValidationError(
            f"Unsupported image type: {content_type}. "
            f"Allowed: JPEG, PNG, WebP, HEIC"
        )

    img = Image.open(io.BytesIO(file_bytes))

    # Resize if too large, preserving aspect ratio.
    width, height = img.size
    if max(width, height) > MAX_DIMENSION:
        ratio = MAX_DIMENSION / max(width, height)
        new_size = (int(width * ratio), int(height * ratio))
        img = img.resize(new_size, Image.LANCZOS)
        logging.info(
            f"Resized image from {width}x{height} to {new_size[0]}x{new_size[1]}"
        )

    # Decide the output format first so the pixel mode can be normalized
    # before the EXIF-stripping copy below.
    # Convert HEIC/HEIF to JPEG; otherwise keep original format.
    if content_type in {"image/heic", "image/heif"}:
        output_format = "JPEG"
        output_content_type = "image/jpeg"
    elif content_type == "image/png":
        output_format = "PNG"
        output_content_type = "image/png"
    elif content_type == "image/webp":
        output_format = "WEBP"
        output_content_type = "image/webp"
    else:
        output_format = "JPEG"
        output_content_type = "image/jpeg"

    # JPEG cannot store alpha or palette modes; without this conversion
    # Pillow raises "cannot write mode RGBA as JPEG" for such sources.
    if output_format == "JPEG" and img.mode not in ("RGB", "L"):
        img = img.convert("RGB")

    # Strip EXIF by copying pixel data to a new image (metadata lives on
    # the original Image object and is not carried over by putdata).
    clean_img = Image.new(img.mode, img.size)
    clean_img.putdata(list(img.getdata()))
    if img.mode == "P" and img.getpalette() is not None:
        # putdata copies only palette indices; carry the palette over so
        # paletted PNGs keep their colors.
        clean_img.putpalette(img.getpalette())

    buf = io.BytesIO()
    # quality applies to JPEG/WEBP; PNG save ignores it harmlessly.
    clean_img.save(buf, format=output_format, quality=85)
    return buf.getvalue(), output_content_type
|
||||||
446
utils/obsidian_service.py
Normal file
446
utils/obsidian_service.py
Normal file
@@ -0,0 +1,446 @@
|
|||||||
|
"""Obsidian headless sync service for querying and modifying vaults."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import yaml
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Optional
|
||||||
|
from subprocess import run
|
||||||
|
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
|
# Load environment variables
|
||||||
|
load_dotenv()
|
||||||
|
|
||||||
|
|
||||||
|
class ObsidianService:
    """Service for interacting with Obsidian vault via obsidian-headless CLI."""

    def __init__(self):
        """Initialize Obsidian Sync client."""
        # Vault location on disk; defaults to the container data directory.
        self.vault_path = os.getenv("OBSIDIAN_VAULT_PATH", "/app/data/obsidian")

        # Create vault path if it doesn't exist
        Path(self.vault_path).mkdir(parents=True, exist_ok=True)

        # Validate vault has .md files (raises ValueError on an empty vault,
        # so construction fails fast before any sync has populated it)
        self._validate_vault()

    def _validate_vault(self) -> None:
        """Validate that vault directory exists and has .md files."""
        vault_dir = Path(self.vault_path)
        if not vault_dir.exists():
            raise ValueError(
                f"Obsidian vault path '{self.vault_path}' does not exist. "
                "Please ensure the vault is synced to this location."
            )

        md_files = list(vault_dir.rglob("*.md"))
        if not md_files:
            raise ValueError(
                f"Vault at '{self.vault_path}' contains no markdown files. "
                "Please ensure the vault is synced with obsidian-headless."
            )

    def walk_vault(self) -> list[Path]:
        """Walk through vault directory and return paths to .md files.

        Returns:
            List of paths to markdown files, excluding .obsidian directory.
        """
        vault_dir = Path(self.vault_path)
        md_files = []

        # Walk vault, excluding .obsidian directory
        for md_file in vault_dir.rglob("*.md"):
            # Skip .obsidian directory and its contents
            if ".obsidian" in md_file.parts:
                continue
            md_files.append(md_file)

        return md_files

    def parse_markdown(self, content: str, filepath: Optional[Path] = None) -> dict[str, Any]:
        """Parse Obsidian markdown to extract metadata and clean content.

        Args:
            content: Raw markdown content
            filepath: Optional file path for context

        Returns:
            Dictionary containing parsed content:
            - metadata: Parsed YAML frontmatter (or empty dict if none)
            - content: Cleaned body content
            - tags: Extracted tags
            - wikilinks: List of wikilinks found
            - embeds: List of embeds found
        """
        # Split frontmatter from content (frontmatter must start at byte 0)
        frontmatter_pattern = r"^---\n(.*?)\n---"
        match = re.match(frontmatter_pattern, content, re.DOTALL)

        metadata = {}
        body_content = content

        if match:
            frontmatter = match.group(1)
            body_content = content[match.end():].strip()
            try:
                metadata = yaml.safe_load(frontmatter) or {}
            except yaml.YAMLError:
                # Invalid YAML, treat as empty metadata
                metadata = {}

        # Extract tags (#tag format)
        # NOTE(review): this simple pattern also matches "#" sequences inside
        # code or URLs — confirm acceptable for indexing purposes.
        tags = re.findall(r"#(\w+)", content)
        tags = [tag for tag in tags if tag]  # Remove empty strings

        # Extract wikilinks [[wiki link]] (embeds also match this pattern)
        wikilinks = re.findall(r"\[\[([^\]]+)\]\]", content)

        # Extract embeds [[!embed]] or [[!embed:file]]
        embeds = re.findall(r"\[\[!(.*?)\]\]", content)
        embeds = [e.split(":")[0].strip() if ":" in e else e.strip() for e in embeds]

        # Clean body content
        # Remove wikilinks [[...]] and embeds [[!...]]
        cleaned_content = re.sub(r"\[\[.*?\]\]", "", body_content)
        cleaned_content = re.sub(r"\n{3,}", "\n\n", cleaned_content).strip()

        return {
            "metadata": metadata,
            "content": cleaned_content,
            "tags": tags,
            "wikilinks": wikilinks,
            "embeds": embeds,
            "filepath": str(filepath) if filepath else None,
        }

    def read_note(self, relative_path: str) -> dict[str, Any]:
        """Read a specific note from the vault.

        Args:
            relative_path: Path to note relative to vault root (e.g., "My Notes/simba.md")

        Returns:
            Dictionary containing parsed note content and metadata.

        Raises:
            FileNotFoundError: If the note does not exist in the vault.
        """
        vault_dir = Path(self.vault_path)
        note_path = vault_dir / relative_path

        if not note_path.exists():
            raise FileNotFoundError(f"Note not found at '{relative_path}'")

        with open(note_path, "r", encoding="utf-8") as f:
            content = f.read()

        parsed = self.parse_markdown(content, note_path)

        return {
            "content": parsed,
            "path": relative_path,
            "full_path": str(note_path),
        }

    def create_note(
        self,
        title: str,
        content: str,
        folder: str = "notes",
        tags: Optional[list[str]] = None,
        frontmatter: Optional[dict[str, Any]] = None,
    ) -> str:
        """Create a new note in the vault.

        Args:
            title: Note title (will be used as filename)
            content: Note body content
            folder: Folder path (default: "notes")
            tags: List of tags to add
            frontmatter: Optional custom frontmatter to merge with defaults

        Returns:
            Path to created note (relative to vault root).
        """
        vault_dir = Path(self.vault_path)
        note_folder = vault_dir / folder
        note_folder.mkdir(parents=True, exist_ok=True)

        # Sanitize title for filename: lowercase, non [a-z0-9-_] chars
        # collapsed to single dashes, leading/trailing dashes trimmed
        safe_title = re.sub(r"[^a-z0-9-_]", "-", title.lower().strip())
        safe_title = re.sub(r"-+", "-", safe_title).strip("-")

        # NOTE(review): an existing note with the same sanitized title is
        # silently overwritten — confirm this is intended.
        note_path = note_folder / f"{safe_title}.md"

        # Build frontmatter
        default_frontmatter = {
            "created_by": "simbarag",
            "created_at": datetime.now().isoformat(),
        }

        if frontmatter:
            default_frontmatter.update(frontmatter)

        # Add tags to frontmatter if provided
        if tags:
            default_frontmatter.setdefault("tags", []).extend(tags)

        # Write note (yaml.dump ends with a newline, so "---" lands on its own line)
        frontmatter_yaml = yaml.dump(default_frontmatter, allow_unicode=True, default_flow_style=False)
        full_content = f"---\n{frontmatter_yaml}---\n\n{content}"

        with open(note_path, "w", encoding="utf-8") as f:
            f.write(full_content)

        return f"{folder}/{safe_title}.md"

    def create_task(
        self,
        title: str,
        content: str = "",
        folder: str = "tasks",
        due_date: Optional[str] = None,
        tags: Optional[list[str]] = None,
    ) -> str:
        """Create a task note in the vault.

        Args:
            title: Task title
            content: Task description
            folder: Folder to place task (default: "tasks")
            due_date: Optional due date in YYYY-MM-DD format
            tags: Optional list of tags to add

        Returns:
            Path to created task note (relative to vault root).
        """
        task_content = f"# {title}\n\n{content}"

        # Add checkboxes if content is empty (simple task)
        if not content.strip():
            task_content += "\n- [ ]"

        # Add due date if provided
        if due_date:
            task_content += f"\n\n**Due**: {due_date}"

        # Add tags if provided (inline hashtags; create_note also stores
        # the same tags in frontmatter)
        if tags:
            task_content += "\n\n" + " ".join([f"#{tag}" for tag in tags])

        return self.create_note(
            title=title,
            content=task_content,
            folder=folder,
            tags=tags,
        )

    def get_daily_note_path(self, date: Optional[datetime] = None) -> str:
        """Return the relative vault path for a daily note.

        Args:
            date: Date for the note (defaults to today)

        Returns:
            Relative path like "journal/2026/2026-03-03.md"
        """
        if date is None:
            date = datetime.now()
        return f"journal/{date.strftime('%Y')}/{date.strftime('%Y-%m-%d')}.md"

    def get_daily_note(self, date: Optional[datetime] = None) -> dict[str, Any]:
        """Read a daily note from the vault.

        Args:
            date: Date for the note (defaults to today)

        Returns:
            Dictionary with found status, path, raw content, and date string.
        """
        if date is None:
            date = datetime.now()
        relative_path = self.get_daily_note_path(date)
        note_path = Path(self.vault_path) / relative_path

        if not note_path.exists():
            return {"found": False, "path": relative_path, "content": None, "date": date.strftime("%Y-%m-%d")}

        with open(note_path, "r", encoding="utf-8") as f:
            content = f.read()

        return {"found": True, "path": relative_path, "content": content, "date": date.strftime("%Y-%m-%d")}

    def get_daily_tasks(self, date: Optional[datetime] = None) -> dict[str, Any]:
        """Extract tasks from a daily note's tasks section.

        Args:
            date: Date for the note (defaults to today)

        Returns:
            Dictionary with tasks list (each has "text" and "done" keys) and metadata.
        """
        if date is None:
            date = datetime.now()
        note = self.get_daily_note(date)
        if not note["found"]:
            return {"found": False, "tasks": [], "date": note["date"], "path": note["path"]}

        tasks = []
        in_tasks = False
        for line in note["content"].split("\n"):
            # Tasks section starts at a "### tasks" heading (case-insensitive)
            if re.match(r"^###\s+tasks\s*$", line, re.IGNORECASE):
                in_tasks = True
                continue
            # Any subsequent heading (levels 1-3) ends the tasks section
            if in_tasks and re.match(r"^#{1,3}\s", line):
                break
            if in_tasks:
                done_match = re.match(r"^- \[x\] (.+)$", line, re.IGNORECASE)
                todo_match = re.match(r"^- \[ \] (.+)$", line)
                if done_match:
                    tasks.append({"text": done_match.group(1), "done": True})
                elif todo_match:
                    tasks.append({"text": todo_match.group(1), "done": False})

        return {"found": True, "tasks": tasks, "date": note["date"], "path": note["path"]}

    def add_task_to_daily_note(self, task_text: str, date: Optional[datetime] = None) -> dict[str, Any]:
        """Add a task checkbox to a daily note, creating the note if needed.

        Args:
            task_text: The task description text
            date: Date for the note (defaults to today)

        Returns:
            Dictionary with success status, path, and whether note was created.
        """
        if date is None:
            date = datetime.now()
        relative_path = self.get_daily_note_path(date)
        note_path = Path(self.vault_path) / relative_path

        if not note_path.exists():
            # Create a fresh daily note skeleton containing the task
            note_path.parent.mkdir(parents=True, exist_ok=True)
            content = (
                f"---\nmodified: {datetime.now().isoformat()}\n---\n"
                f"### tasks\n\n- [ ] {task_text}\n\n### log\n"
            )
            with open(note_path, "w", encoding="utf-8") as f:
                f.write(content)
            return {"success": True, "created_note": True, "path": relative_path}

        with open(note_path, "r", encoding="utf-8") as f:
            content = f.read()

        # Insert before ### log if present, otherwise append before end
        log_match = re.search(r"\n(### log)", content, re.IGNORECASE)
        if log_match:
            insert_pos = log_match.start()
            content = content[:insert_pos] + f"\n- [ ] {task_text}" + content[insert_pos:]
        else:
            content = content.rstrip() + f"\n- [ ] {task_text}\n"

        with open(note_path, "w", encoding="utf-8") as f:
            f.write(content)

        return {"success": True, "created_note": False, "path": relative_path}

    def complete_task_in_daily_note(self, task_text: str, date: Optional[datetime] = None) -> dict[str, Any]:
        """Mark a task as complete in a daily note by matching task text.

        Searches for a task matching the given text (exact or partial) and
        replaces `- [ ]` with `- [x]`.

        Args:
            task_text: The task text to search for (exact or partial match)
            date: Date for the note (defaults to today)

        Returns:
            Dictionary with success status, matched task text, and path.
        """
        if date is None:
            date = datetime.now()
        relative_path = self.get_daily_note_path(date)
        note_path = Path(self.vault_path) / relative_path

        if not note_path.exists():
            return {"success": False, "error": "Note not found", "path": relative_path}

        with open(note_path, "r", encoding="utf-8") as f:
            content = f.read()

        # Try exact match first, then partial
        exact = f"- [ ] {task_text}"
        if exact in content:
            content = content.replace(exact, f"- [x] {task_text}", 1)
        else:
            match = re.search(r"- \[ \] .*" + re.escape(task_text) + r".*", content, re.IGNORECASE)
            if not match:
                return {"success": False, "error": f"Task '{task_text}' not found", "path": relative_path}
            completed = match.group(0).replace("- [ ]", "- [x]", 1)
            content = content.replace(match.group(0), completed, 1)
            # Report the full matched task line (without the checkbox prefix)
            task_text = match.group(0).replace("- [ ] ", "")

        with open(note_path, "w", encoding="utf-8") as f:
            f.write(content)

        return {"success": True, "completed_task": task_text, "path": relative_path}

    def sync_vault(self) -> dict[str, Any]:
        """Trigger a one-time sync of the vault.

        Runs the external `ob sync` CLI; never raises — failures are
        reported in the returned dictionary.

        Returns:
            Dictionary containing sync result and output.
        """
        try:
            result = run(
                ["ob", "sync"],
                capture_output=True,
                text=True,
                timeout=300,
            )

            if result.returncode != 0:
                return {
                    "success": False,
                    "error": result.stderr or "Sync failed",
                    "stdout": result.stdout,
                }

            return {
                "success": True,
                "message": "Vault synced successfully",
                "stdout": result.stdout,
            }

        except Exception as e:
            return {
                "success": False,
                "error": str(e),
            }

    def sync_status(self) -> dict[str, Any]:
        """Check sync status of the vault.

        Runs the external `ob sync-status` CLI; never raises — failures
        are reported in the returned dictionary.

        Returns:
            Dictionary containing sync status information.
        """
        try:
            result = run(
                ["ob", "sync-status"],
                capture_output=True,
                text=True,
                timeout=60,
            )

            return {
                "success": True,
                "output": result.stdout,
                "stderr": result.stderr,
            }

        except Exception as e:
            return {
                "success": False,
                "error": str(e),
            }
|
||||||
53
utils/s3_client.py
Normal file
53
utils/s3_client.py
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import os
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import aioboto3
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
|
load_dotenv()
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
|
||||||
|
S3_ENDPOINT_URL = os.getenv("S3_ENDPOINT_URL")
|
||||||
|
S3_ACCESS_KEY_ID = os.getenv("S3_ACCESS_KEY_ID")
|
||||||
|
S3_SECRET_ACCESS_KEY = os.getenv("S3_SECRET_ACCESS_KEY")
|
||||||
|
S3_BUCKET_NAME = os.getenv("S3_BUCKET_NAME", "asksimba-images")
|
||||||
|
S3_REGION = os.getenv("S3_REGION", "garage")
|
||||||
|
|
||||||
|
session = aioboto3.Session()
|
||||||
|
|
||||||
|
|
||||||
|
def _get_client():
    """Return an aioboto3 S3 client context manager built from module config.

    Credentials and endpoint come from the S3_* environment variables read
    at import time; callers use it as `async with _get_client() as client:`.
    """
    return session.client(
        "s3",
        endpoint_url=S3_ENDPOINT_URL,
        aws_access_key_id=S3_ACCESS_KEY_ID,
        aws_secret_access_key=S3_SECRET_ACCESS_KEY,
        region_name=S3_REGION,
    )
|
||||||
|
|
||||||
|
|
||||||
|
async def upload_image(file_bytes: bytes, key: str, content_type: str) -> str:
    """Store an image object in the configured S3 bucket.

    Args:
        file_bytes: Raw image payload.
        key: Object key under which to store the image.
        content_type: MIME type recorded on the object.

    Returns:
        The object key that was written.
    """
    async with _get_client() as s3:
        put_kwargs = {
            "Bucket": S3_BUCKET_NAME,
            "Key": key,
            "Body": file_bytes,
            "ContentType": content_type,
        }
        await s3.put_object(**put_kwargs)
        logging.info(f"Uploaded image to S3: {key}")
        return key
|
||||||
|
|
||||||
|
|
||||||
|
async def get_image(key: str) -> tuple[bytes, str]:
    """Fetch an image object from the configured S3 bucket.

    Args:
        key: Object key to read.

    Returns:
        Tuple of (object bytes, content type); content type defaults to
        "image/jpeg" when the object carries none.
    """
    async with _get_client() as s3:
        obj = await s3.get_object(Bucket=S3_BUCKET_NAME, Key=key)
        data = await obj["Body"].read()
        mime = obj.get("ContentType", "image/jpeg")
        return data, mime
|
||||||
|
|
||||||
|
|
||||||
|
async def delete_image(key: str) -> None:
    """Remove an image object from the configured S3 bucket.

    Args:
        key: Object key to delete.
    """
    async with _get_client() as s3:
        await s3.delete_object(Bucket=S3_BUCKET_NAME, Key=key)
        logging.info(f"Deleted image from S3: {key}")
|
||||||
@@ -27,18 +27,14 @@ class YNABService:
|
|||||||
self.api_client = ynab.ApiClient(configuration)
|
self.api_client = ynab.ApiClient(configuration)
|
||||||
|
|
||||||
# Initialize API endpoints
|
# Initialize API endpoints
|
||||||
self.budgets_api = ynab.BudgetsApi(self.api_client)
|
self.plans_api = ynab.PlansApi(self.api_client)
|
||||||
self.transactions_api = ynab.TransactionsApi(self.api_client)
|
self.transactions_api = ynab.TransactionsApi(self.api_client)
|
||||||
self.months_api = ynab.MonthsApi(self.api_client)
|
self.months_api = ynab.MonthsApi(self.api_client)
|
||||||
self.categories_api = ynab.CategoriesApi(self.api_client)
|
self.categories_api = ynab.CategoriesApi(self.api_client)
|
||||||
|
|
||||||
# Get budget ID if not provided
|
# Get budget ID if not provided, fall back to last-used
|
||||||
if not self.budget_id:
|
if not self.budget_id:
|
||||||
budgets_response = self.budgets_api.get_budgets()
|
self.budget_id = "last-used"
|
||||||
if budgets_response.data and budgets_response.data.budgets:
|
|
||||||
self.budget_id = budgets_response.data.budgets[0].id
|
|
||||||
else:
|
|
||||||
raise ValueError("No YNAB budgets found")
|
|
||||||
|
|
||||||
def get_budget_summary(self) -> dict[str, Any]:
|
def get_budget_summary(self) -> dict[str, Any]:
|
||||||
"""Get overall budget summary and health status.
|
"""Get overall budget summary and health status.
|
||||||
@@ -47,7 +43,7 @@ class YNABService:
|
|||||||
Dictionary containing budget summary with to-be-budgeted amount,
|
Dictionary containing budget summary with to-be-budgeted amount,
|
||||||
total budgeted, total activity, and overall budget health.
|
total budgeted, total activity, and overall budget health.
|
||||||
"""
|
"""
|
||||||
budget_response = self.budgets_api.get_budget_by_id(self.budget_id)
|
budget_response = self.plans_api.get_plan_by_id(self.budget_id)
|
||||||
budget_data = budget_response.data.budget
|
budget_data = budget_response.data.budget
|
||||||
|
|
||||||
# Calculate totals from categories
|
# Calculate totals from categories
|
||||||
@@ -59,15 +55,12 @@ class YNABService:
|
|||||||
total_activity = 0
|
total_activity = 0
|
||||||
total_available = 0
|
total_available = 0
|
||||||
|
|
||||||
for category_group in budget_data.category_groups or []:
|
for category in budget_data.categories or []:
|
||||||
if category_group.deleted or category_group.hidden:
|
if category.deleted or category.hidden:
|
||||||
continue
|
continue
|
||||||
for category in category_group.categories or []:
|
total_budgeted += category.budgeted / 1000
|
||||||
if category.deleted or category.hidden:
|
total_activity += category.activity / 1000
|
||||||
continue
|
total_available += category.balance / 1000
|
||||||
total_budgeted += category.budgeted / 1000
|
|
||||||
total_activity += category.activity / 1000
|
|
||||||
total_available += category.balance / 1000
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"budget_name": budget_data.name,
|
"budget_name": budget_data.name,
|
||||||
@@ -89,7 +82,6 @@ class YNABService:
|
|||||||
end_date: Optional[str] = None,
|
end_date: Optional[str] = None,
|
||||||
category_name: Optional[str] = None,
|
category_name: Optional[str] = None,
|
||||||
payee_name: Optional[str] = None,
|
payee_name: Optional[str] = None,
|
||||||
limit: int = 50,
|
|
||||||
) -> dict[str, Any]:
|
) -> dict[str, Any]:
|
||||||
"""Get transactions filtered by date range, category, or payee.
|
"""Get transactions filtered by date range, category, or payee.
|
||||||
|
|
||||||
@@ -98,7 +90,6 @@ class YNABService:
|
|||||||
end_date: End date in YYYY-MM-DD format (defaults to today)
|
end_date: End date in YYYY-MM-DD format (defaults to today)
|
||||||
category_name: Filter by category name (case-insensitive partial match)
|
category_name: Filter by category name (case-insensitive partial match)
|
||||||
payee_name: Filter by payee name (case-insensitive partial match)
|
payee_name: Filter by payee name (case-insensitive partial match)
|
||||||
limit: Maximum number of transactions to return (default 50)
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Dictionary containing matching transactions and summary statistics.
|
Dictionary containing matching transactions and summary statistics.
|
||||||
@@ -109,9 +100,10 @@ class YNABService:
|
|||||||
if not end_date:
|
if not end_date:
|
||||||
end_date = datetime.now().strftime("%Y-%m-%d")
|
end_date = datetime.now().strftime("%Y-%m-%d")
|
||||||
|
|
||||||
# Get transactions
|
# Get transactions (SDK v2 requires datetime.date, not string)
|
||||||
|
since_date_obj = datetime.strptime(start_date, "%Y-%m-%d").date()
|
||||||
transactions_response = self.transactions_api.get_transactions(
|
transactions_response = self.transactions_api.get_transactions(
|
||||||
self.budget_id, since_date=start_date
|
self.budget_id, since_date=since_date_obj
|
||||||
)
|
)
|
||||||
|
|
||||||
transactions = transactions_response.data.transactions or []
|
transactions = transactions_response.data.transactions or []
|
||||||
@@ -124,7 +116,7 @@ class YNABService:
|
|||||||
# Skip if deleted or before start date or after end date
|
# Skip if deleted or before start date or after end date
|
||||||
if txn.deleted:
|
if txn.deleted:
|
||||||
continue
|
continue
|
||||||
txn_date = str(txn.date)
|
txn_date = str(txn.var_date)
|
||||||
if txn_date < start_date or txn_date > end_date:
|
if txn_date < start_date or txn_date > end_date:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -141,7 +133,7 @@ class YNABService:
|
|||||||
amount = txn.amount / 1000 # Convert milliunits to dollars
|
amount = txn.amount / 1000 # Convert milliunits to dollars
|
||||||
filtered_transactions.append(
|
filtered_transactions.append(
|
||||||
{
|
{
|
||||||
"date": txn_date,
|
"date": str(txn.var_date),
|
||||||
"payee": txn.payee_name,
|
"payee": txn.payee_name,
|
||||||
"category": txn.category_name,
|
"category": txn.category_name,
|
||||||
"memo": txn.memo,
|
"memo": txn.memo,
|
||||||
@@ -151,9 +143,8 @@ class YNABService:
|
|||||||
)
|
)
|
||||||
total_amount += amount
|
total_amount += amount
|
||||||
|
|
||||||
# Sort by date (most recent first) and limit
|
# Sort by date (most recent first)
|
||||||
filtered_transactions.sort(key=lambda x: x["date"], reverse=True)
|
filtered_transactions.sort(key=lambda x: x["date"], reverse=True)
|
||||||
filtered_transactions = filtered_transactions[:limit]
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"transactions": filtered_transactions,
|
"transactions": filtered_transactions,
|
||||||
@@ -180,8 +171,9 @@ class YNABService:
|
|||||||
if len(month) == 7: # YYYY-MM
|
if len(month) == 7: # YYYY-MM
|
||||||
month = f"{month}-01"
|
month = f"{month}-01"
|
||||||
|
|
||||||
# Get budget month
|
# Get budget month (SDK v2 requires datetime.date, not string)
|
||||||
month_response = self.months_api.get_budget_month(self.budget_id, month)
|
month_date_obj = datetime.strptime(month, "%Y-%m-%d").date()
|
||||||
|
month_response = self.months_api.get_plan_month(self.budget_id, month_date_obj)
|
||||||
|
|
||||||
month_data = month_response.data.month
|
month_data = month_response.data.month
|
||||||
|
|
||||||
|
|||||||
54
uv.lock
generated
54
uv.lock
generated
@@ -103,6 +103,18 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" },
|
{ url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "aiohttp-retry"
|
||||||
|
version = "2.9.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "aiohttp" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/9d/61/ebda4d8e3d8cfa1fd3db0fb428db2dd7461d5742cea35178277ad180b033/aiohttp_retry-2.9.1.tar.gz", hash = "sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1", size = 13608, upload-time = "2024-11-06T10:44:54.574Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1a/99/84ba7273339d0f3dfa57901b846489d2e5c2cd731470167757f1935fffbd/aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54", size = 9981, upload-time = "2024-11-06T10:44:52.917Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "aiosignal"
|
name = "aiosignal"
|
||||||
version = "1.4.0"
|
version = "1.4.0"
|
||||||
@@ -1281,19 +1293,6 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/83/bd/9df897cbc98290bf71140104ee5b9777cf5291afb80333aa7da5a497339b/langchain_core-1.2.5-py3-none-any.whl", hash = "sha256:3255944ef4e21b2551facb319bfc426057a40247c0a05de5bd6f2fc021fbfa34", size = 484851, upload-time = "2025-12-22T23:45:30.525Z" },
|
{ url = "https://files.pythonhosted.org/packages/83/bd/9df897cbc98290bf71140104ee5b9777cf5291afb80333aa7da5a497339b/langchain_core-1.2.5-py3-none-any.whl", hash = "sha256:3255944ef4e21b2551facb319bfc426057a40247c0a05de5bd6f2fc021fbfa34", size = 484851, upload-time = "2025-12-22T23:45:30.525Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "langchain-ollama"
|
|
||||||
version = "1.0.1"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "langchain-core" },
|
|
||||||
{ name = "ollama" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/73/51/72cd04d74278f3575f921084f34280e2f837211dc008c9671c268c578afe/langchain_ollama-1.0.1.tar.gz", hash = "sha256:e37880c2f41cdb0895e863b1cfd0c2c840a117868b3f32e44fef42569e367443", size = 153850, upload-time = "2025-12-12T21:48:28.68Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/e3/46/f2907da16dc5a5a6c679f83b7de21176178afad8d2ca635a581429580ef6/langchain_ollama-1.0.1-py3-none-any.whl", hash = "sha256:37eb939a4718a0255fe31e19fbb0def044746c717b01b97d397606ebc3e9b440", size = 29207, upload-time = "2025-12-12T21:48:27.832Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "langchain-openai"
|
name = "langchain-openai"
|
||||||
version = "1.1.6"
|
version = "1.1.6"
|
||||||
@@ -1715,15 +1714,15 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ollama"
|
name = "ollama"
|
||||||
version = "0.6.0"
|
version = "0.6.1"
|
||||||
source = { registry = "https://pypi.org/simple" }
|
source = { registry = "https://pypi.org/simple" }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "httpx" },
|
{ name = "httpx" },
|
||||||
{ name = "pydantic" },
|
{ name = "pydantic" },
|
||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/d6/47/f9ee32467fe92744474a8c72e138113f3b529fc266eea76abfdec9a33f3b/ollama-0.6.0.tar.gz", hash = "sha256:da2b2d846b5944cfbcee1ca1e6ee0585f6c9d45a2fe9467cbcd096a37383da2f", size = 50811, upload-time = "2025-09-24T22:46:02.417Z" }
|
sdist = { url = "https://files.pythonhosted.org/packages/9d/5a/652dac4b7affc2b37b95386f8ae78f22808af09d720689e3d7a86b6ed98e/ollama-0.6.1.tar.gz", hash = "sha256:478c67546836430034b415ed64fa890fd3d1ff91781a9d548b3325274e69d7c6", size = 51620, upload-time = "2025-11-13T23:02:17.416Z" }
|
||||||
wheels = [
|
wheels = [
|
||||||
{ url = "https://files.pythonhosted.org/packages/b5/c1/edc9f41b425ca40b26b7c104c5f6841a4537bb2552bfa6ca66e81405bb95/ollama-0.6.0-py3-none-any.whl", hash = "sha256:534511b3ccea2dff419ae06c3b58d7f217c55be7897c8ce5868dfb6b219cf7a0", size = 14130, upload-time = "2025-09-24T22:46:01.19Z" },
|
{ url = "https://files.pythonhosted.org/packages/47/4f/4a617ee93d8208d2bcf26b2d8b9402ceaed03e3853c754940e2290fed063/ollama-0.6.1-py3-none-any.whl", hash = "sha256:fc4c984b345735c5486faeee67d8a265214a31cbb828167782dc642ce0a2bf8c", size = 14354, upload-time = "2025-11-13T23:02:16.292Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -2534,7 +2533,6 @@ dependencies = [
|
|||||||
{ name = "langchain" },
|
{ name = "langchain" },
|
||||||
{ name = "langchain-chroma" },
|
{ name = "langchain-chroma" },
|
||||||
{ name = "langchain-community" },
|
{ name = "langchain-community" },
|
||||||
{ name = "langchain-ollama" },
|
|
||||||
{ name = "langchain-openai" },
|
{ name = "langchain-openai" },
|
||||||
{ name = "ollama" },
|
{ name = "ollama" },
|
||||||
{ name = "openai" },
|
{ name = "openai" },
|
||||||
@@ -2551,6 +2549,7 @@ dependencies = [
|
|||||||
{ name = "tomlkit" },
|
{ name = "tomlkit" },
|
||||||
{ name = "tortoise-orm" },
|
{ name = "tortoise-orm" },
|
||||||
{ name = "tortoise-orm-stubs" },
|
{ name = "tortoise-orm-stubs" },
|
||||||
|
{ name = "twilio" },
|
||||||
{ name = "ynab" },
|
{ name = "ynab" },
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -2570,9 +2569,8 @@ requires-dist = [
|
|||||||
{ name = "langchain", specifier = ">=1.2.0" },
|
{ name = "langchain", specifier = ">=1.2.0" },
|
||||||
{ name = "langchain-chroma", specifier = ">=1.0.0" },
|
{ name = "langchain-chroma", specifier = ">=1.0.0" },
|
||||||
{ name = "langchain-community", specifier = ">=0.4.1" },
|
{ name = "langchain-community", specifier = ">=0.4.1" },
|
||||||
{ name = "langchain-ollama", specifier = ">=1.0.1" },
|
|
||||||
{ name = "langchain-openai", specifier = ">=1.1.6" },
|
{ name = "langchain-openai", specifier = ">=1.1.6" },
|
||||||
{ name = "ollama", specifier = ">=0.6.0" },
|
{ name = "ollama", specifier = ">=0.6.1" },
|
||||||
{ name = "openai", specifier = ">=2.0.1" },
|
{ name = "openai", specifier = ">=2.0.1" },
|
||||||
{ name = "pillow", specifier = ">=10.0.0" },
|
{ name = "pillow", specifier = ">=10.0.0" },
|
||||||
{ name = "pillow-heif", specifier = ">=1.1.1" },
|
{ name = "pillow-heif", specifier = ">=1.1.1" },
|
||||||
@@ -2585,8 +2583,9 @@ requires-dist = [
|
|||||||
{ name = "quart-jwt-extended", specifier = ">=0.1.0" },
|
{ name = "quart-jwt-extended", specifier = ">=0.1.0" },
|
||||||
{ name = "tavily-python", specifier = ">=0.7.17" },
|
{ name = "tavily-python", specifier = ">=0.7.17" },
|
||||||
{ name = "tomlkit", specifier = ">=0.13.3" },
|
{ name = "tomlkit", specifier = ">=0.13.3" },
|
||||||
{ name = "tortoise-orm", specifier = ">=0.25.1" },
|
{ name = "tortoise-orm", specifier = ">=0.25.1,<1.0.0" },
|
||||||
{ name = "tortoise-orm-stubs", specifier = ">=1.0.2" },
|
{ name = "tortoise-orm-stubs", specifier = ">=1.0.2" },
|
||||||
|
{ name = "twilio", specifier = ">=9.10.2" },
|
||||||
{ name = "ynab", specifier = ">=1.3.0" },
|
{ name = "ynab", specifier = ">=1.3.0" },
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -3002,6 +3001,21 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
|
{ url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "twilio"
|
||||||
|
version = "9.10.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "aiohttp" },
|
||||||
|
{ name = "aiohttp-retry" },
|
||||||
|
{ name = "pyjwt" },
|
||||||
|
{ name = "requests" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/1c/a1/44cd8604eb69b1c5e7c0f07f0e4305b1884a3b75e23eb8d89350fe7bb982/twilio-9.10.2.tar.gz", hash = "sha256:f17d778870a7419a7278d5747b0e80a1c89e6f5ab14acf5456a004f8f2016bfa", size = 1618748, upload-time = "2026-02-18T04:40:44.279Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6c/ac/e1937f70544075f896bfcd6b23fa7c15cad945e4598bcfa7017b7c120ad8/twilio-9.10.2-py2.py3-none-any.whl", hash = "sha256:8722bb59bacf31fab5725d6f5d3fac2224265c669d38f653f53179165533da43", size = 2256481, upload-time = "2026-02-18T04:40:42.226Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "typer"
|
name = "typer"
|
||||||
version = "0.19.2"
|
version = "0.19.2"
|
||||||
|
|||||||
Reference in New Issue
Block a user