- Added SQLAlchemy 2.0 and Alembic 1.13 dependencies
- Created models.py with Channel and VideoEntry ORM models
- Created database.py for database configuration and session management
- Initialized Alembic migration system with initial migration
- Updated feed_parser.py with save_to_db() method for persistence
- Updated main.py with database initialization and new API routes:
  - /api/feed now saves to database by default
  - /api/channels lists all tracked channels
  - /api/history/<channel_id> returns video history
- Updated .gitignore to exclude database files
- Updated CLAUDE.md with comprehensive ORM and migration documentation

Database uses SQLite (yottob.db) with upsert logic to avoid duplicates.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
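The commit references models.py, which is not included in this view. A minimal sketch of what Channel and VideoEntry might look like, inferred from the fields main.py reads (channel_id, title, link, last_fetched, videos, created_at, to_dict); the table names, column types, defaults, and the extra title/link columns on VideoEntry are assumptions:

```python
# Hypothetical models.py sketch (SQLAlchemy 2.0 declarative style).
# Only the attributes accessed by main.py are certain; everything else is assumed.
from datetime import datetime

from sqlalchemy import DateTime, ForeignKey, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Channel(Base):
    __tablename__ = "channels"  # assumed table name

    id: Mapped[int] = mapped_column(primary_key=True)
    channel_id: Mapped[str] = mapped_column(String(64), unique=True, index=True)
    title: Mapped[str] = mapped_column(String(255))
    link: Mapped[str] = mapped_column(String(512))
    last_fetched: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)

    videos: Mapped[list["VideoEntry"]] = relationship(back_populates="channel")


class VideoEntry(Base):
    __tablename__ = "video_entries"  # assumed table name

    id: Mapped[int] = mapped_column(primary_key=True)
    channel_id: Mapped[int] = mapped_column(ForeignKey("channels.id"))
    title: Mapped[str] = mapped_column(String(255))   # assumed column
    link: Mapped[str] = mapped_column(String(512))    # assumed column
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)

    channel: Mapped["Channel"] = relationship(back_populates="videos")

    def to_dict(self) -> dict:
        # main.py calls to_dict(); the exact keys returned are an assumption.
        return {
            "title": self.title,
            "link": self.link,
            "created_at": self.created_at.isoformat(),
        }
```

The upsert mentioned above would then live in feed_parser.save_to_db(), presumably as a get-or-create keyed on the channel and video link so repeated fetches do not insert duplicate rows.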
151 lines · 4.4 KiB · Python
"""Flask web application for YouTube RSS feed parsing."""
|
|
|
|
from flask import Flask, render_template, request, jsonify
|
|
from feed_parser import YouTubeFeedParser
|
|
from database import init_db, get_db_session
|
|
from models import Channel, VideoEntry
|
|
|
|
|
|
app = Flask(__name__)
|
|
|
|
# Default channel ID for demonstration
|
|
DEFAULT_CHANNEL_ID = "UCtTWOND3uyl4tVc_FarDmpw"
|
|
|
|
|
|
# Initialize database on app startup
|
|
with app.app_context():
|
|
init_db()
|
|
|
|
|
|
@app.route("/", methods=["GET"])
|
|
def index():
|
|
"""Render the main page."""
|
|
return render_template("index.html")
|
|
|
|
|
|
@app.route("/api/feed", methods=["GET"])
|
|
def get_feed():
|
|
"""API endpoint to fetch YouTube channel feed and save to database.
|
|
|
|
Query parameters:
|
|
channel_id: YouTube channel ID (optional, uses default if not provided)
|
|
filter_shorts: Whether to filter out Shorts (default: true)
|
|
save: Whether to save to database (default: true)
|
|
|
|
Returns:
|
|
JSON response with feed data or error message
|
|
"""
|
|
channel_id = request.args.get("channel_id", DEFAULT_CHANNEL_ID)
|
|
filter_shorts = request.args.get("filter_shorts", "true").lower() == "true"
|
|
save_to_db = request.args.get("save", "true").lower() == "true"
|
|
|
|
parser = YouTubeFeedParser(channel_id)
|
|
result = parser.fetch_feed(filter_shorts=filter_shorts)
|
|
|
|
if result is None:
|
|
return jsonify({"error": "Failed to fetch feed"}), 500
|
|
|
|
# Save to database if requested
|
|
if save_to_db:
|
|
try:
|
|
with get_db_session() as session:
|
|
parser.save_to_db(session, result)
|
|
except Exception as e:
|
|
return jsonify({"error": f"Failed to save to database: {str(e)}"}), 500
|
|
|
|
return jsonify(result)
|
|
|
|
|
|
@app.route("/api/channels", methods=["GET"])
|
|
def get_channels():
|
|
"""API endpoint to list all tracked channels.
|
|
|
|
Returns:
|
|
JSON response with list of channels
|
|
"""
|
|
try:
|
|
with get_db_session() as session:
|
|
channels = session.query(Channel).all()
|
|
return jsonify({
|
|
"channels": [
|
|
{
|
|
"id": ch.id,
|
|
"channel_id": ch.channel_id,
|
|
"title": ch.title,
|
|
"link": ch.link,
|
|
"last_fetched": ch.last_fetched.isoformat(),
|
|
"video_count": len(ch.videos)
|
|
}
|
|
for ch in channels
|
|
]
|
|
})
|
|
except Exception as e:
|
|
return jsonify({"error": f"Failed to fetch channels: {str(e)}"}), 500
|
|
|
|
|
|
@app.route("/api/history/<channel_id>", methods=["GET"])
|
|
def get_history(channel_id: str):
|
|
"""API endpoint to get video history for a specific channel.
|
|
|
|
Args:
|
|
channel_id: YouTube channel ID
|
|
|
|
Query parameters:
|
|
limit: Maximum number of videos to return (default: 50)
|
|
|
|
Returns:
|
|
JSON response with channel info and video history
|
|
"""
|
|
limit = request.args.get("limit", "50")
|
|
try:
|
|
limit = int(limit)
|
|
except ValueError:
|
|
limit = 50
|
|
|
|
try:
|
|
with get_db_session() as session:
|
|
channel = session.query(Channel).filter_by(
|
|
channel_id=channel_id
|
|
).first()
|
|
|
|
if not channel:
|
|
return jsonify({"error": "Channel not found"}), 404
|
|
|
|
videos = session.query(VideoEntry).filter_by(
|
|
channel_id=channel.id
|
|
).order_by(VideoEntry.created_at.desc()).limit(limit).all()
|
|
|
|
return jsonify({
|
|
"channel": {
|
|
"channel_id": channel.channel_id,
|
|
"title": channel.title,
|
|
"link": channel.link,
|
|
"last_fetched": channel.last_fetched.isoformat()
|
|
},
|
|
"videos": [video.to_dict() for video in videos],
|
|
"total_videos": len(channel.videos)
|
|
})
|
|
except Exception as e:
|
|
return jsonify({"error": f"Failed to fetch history: {str(e)}"}), 500
|
|
|
|
|
|
def main():
|
|
"""CLI entry point for testing feed parser."""
|
|
parser = YouTubeFeedParser(DEFAULT_CHANNEL_ID)
|
|
result = parser.fetch_feed()
|
|
|
|
if result is None:
|
|
print("Failed to retrieve RSS feed")
|
|
return
|
|
|
|
print(f"Feed Title: {result['feed_title']}")
|
|
print(f"Feed Link: {result['feed_link']}")
|
|
|
|
for entry in result['entries']:
|
|
print(f"\nEntry Title: {entry['title']}")
|
|
print(f"Entry Link: {entry['link']}")
|
|
|
|
|
|
if __name__ == "__main__":
|
|
main()
|
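For a quick smoke test of the three routes described in the commit message, Flask's built-in test client can exercise them without starting a server. A sketch, assuming the file above is importable as main and that the feed fetch can reach YouTube:

```python
# Smoke test of the new API routes using Flask's test client.
# Assumes the module above is saved as main.py (per the commit message).
from main import app

with app.test_client() as client:
    # Fetch the default channel's feed; save=true is the default, so this also persists it.
    feed = client.get("/api/feed?filter_shorts=true").get_json()
    print(feed["feed_title"])

    # List every channel that has been saved so far.
    print(client.get("/api/channels").get_json())

    # Newest-first video history for a specific channel, capped at 10 entries.
    history = client.get("/api/history/UCtTWOND3uyl4tVc_FarDmpw?limit=10").get_json()
    print(history["total_videos"])
```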