Compare commits

..

46 Commits

Author SHA1 Message Date
JSC
b1f9667edd feat: Replace pydub with ffmpeg for audio duration and metadata extraction in sound services 2025-07-19 09:40:31 +02:00
JSC
4cfc2ec0a2 feat: Remove unused get_user_credit_info method from CreditService 2025-07-18 23:32:14 +02:00
JSC
39b7e14ae9 feat: Add stream URL generation and service URL retrieval for sounds in music player service 2025-07-18 22:38:40 +02:00
JSC
d0bda6c930 feat: Add sorting by name for soundboard sounds and improve socket emission logging 2025-07-18 21:10:08 +02:00
JSC
010f18bff4 feat: Add referential routes for listing available plans and remove plans endpoint from admin routes 2025-07-16 15:44:57 +02:00
JSC
e874d0665f feat: Add user management endpoints for listing, updating, activating, and deactivating users 2025-07-16 15:24:20 +02:00
JSC
ae238d3d18 feat: Emit sound play count change event to connected clients after playing a sound 2025-07-16 13:54:50 +02:00
JSC
7226d87a77 refactor: Comment out play_sound event handler and related logic for future use 2025-07-13 17:39:17 +02:00
JSC
b17e0db2b0 feat: Emit credits required event via SocketIO when user lacks sufficient credits 2025-07-13 01:46:23 +02:00
JSC
64074685a3 refactor: Simplify sound file path retrieval by consolidating stream sound handling 2025-07-12 22:53:07 +02:00
JSC
688b95b6af refactor: Remove unused playlist routes and related logic; clean up sound and stream models 2025-07-12 22:00:04 +02:00
JSC
627b95c961 feat: Add 'single' play mode to music player and update related logic 2025-07-12 20:49:20 +02:00
JSC
fc734e2581 feat: Enhance play tracking to accumulate play time and trigger at 20% of cumulative listening 2025-07-12 16:26:53 +02:00
JSC
4e96c3538c feat: Update play tracking to trigger at 20% completion instead of start 2025-07-12 16:13:13 +02:00
JSC
6bbf3dce66 feat: Update SoundPlayed model to accept nullable user_id and enhance sound tracking in MusicPlayerService 2025-07-12 15:56:13 +02:00
JSC
842e1dff13 feat: Implement playlist management routes and integrate with music player service 2025-07-12 15:17:45 +02:00
JSC
93897921fb feat: Update playlist loading method to use current playlist on startup 2025-07-11 23:26:28 +02:00
JSC
4f702d3302 feat: Update SoundPlayed model to allow nullable user_id 2025-07-09 14:00:55 +02:00
JSC
7d224d1db7 feat: Restrict JWT access cookie path and update Socket.IO CORS path 2025-07-08 22:35:47 +02:00
JSC
2e464dc977 feat: Enhance track ending detection and handling in MusicPlayerService 2025-07-08 13:36:51 +02:00
JSC
193bd5ebf4 feat: Add sounds routes for serving audio and thumbnail files 2025-07-08 12:57:17 +02:00
JSC
96ab2bdf77 feat: Remove load_main_playlist endpoint and implement automatic loading on startup 2025-07-07 21:34:29 +02:00
JSC
bcd6ca8104 Merge branch 'player' 2025-07-07 21:19:28 +02:00
JSC
9ac55f8904 feat: Enhance stream processing and SocketIO services with app context management 2025-07-07 21:17:51 +02:00
JSC
e7d958eb39 feat: Implement Music Player Service with VLC integration
- Added MusicPlayerService for managing VLC music playback with playlist support.
- Implemented methods for loading playlists, controlling playback (play, pause, stop, next, previous), and managing volume and play modes.
- Integrated real-time synchronization with VLC state using a background thread.
- Added SocketIO event emissions for player state updates.
- Enhanced logging for better debugging and tracking of player state changes.

fix: Improve SocketIO service logging and event handling

- Added detailed logging for SocketIO events and user authentication.
- Implemented a test event handler to verify SocketIO functionality.
- Enhanced error handling and logging for better traceability.

chore: Update dependencies and logging configuration

- Added python-vlc dependency for VLC integration.
- Configured logging to show INFO and DEBUG messages for better visibility during development.
- Updated main application entry point to allow unsafe Werkzeug for debugging purposes.
2025-07-07 20:51:53 +02:00
JSC
c44b055f83 feat(stream): integrate sound normalization into stream processing service 2025-07-07 15:29:50 +02:00
JSC
fe628b99d4 feat(stream): update stream route to /api/stream and add sound to main playlist upon creation 2025-07-07 15:22:45 +02:00
JSC
d7c6efcd0e refactor(admin): streamline sound scanning route to use scheduler service and require admin role 2025-07-06 17:31:04 +02:00
JSC
4f18f3e64e feat(stream): implement stream processing service with routes for managing streaming URLs; add support for concurrent processing and metadata extraction 2025-07-06 16:57:33 +02:00
JSC
61db6c56dc fix(stream): update service and sound_id fields to be nullable; adjust create_stream method parameters for optional values 2025-07-05 18:47:55 +02:00
JSC
fac4fdf212 feat(stream): add Stream model for managing streaming service links to sounds; update Sound model to include relationship with Stream 2025-07-05 18:31:47 +02:00
JSC
024c58f013 refactor(models): unify table names to singular form for consistency across models 2025-07-05 18:11:19 +02:00
JSC
21541c8184 feat(playlists): implement Playlist and PlaylistSound models; add seeding for default Main playlist 2025-07-05 18:05:59 +02:00
JSC
f68d046653 fix(admin_sounds): import jsonify to enable JSON responses in admin sound management routes 2025-07-05 17:49:05 +02:00
JSC
e2fe451e5a Refactor OAuth provider linking and unlinking logic into a dedicated service; enhance error handling and logging throughout the application; improve sound management and scanning services with better file handling and unique naming; implement centralized error and logging services for consistent API responses and application-wide logging configuration. 2025-07-05 13:07:06 +02:00
JSC
41fc197f4c refactor(soundboard): remove unused client information from play_sound method 2025-07-05 12:43:22 +02:00
JSC
2f7ffbbfe4 refactor(vlc_service): remove unused parameters ip_address and user_agent from play_sound method 2025-07-05 08:36:59 +02:00
JSC
5876b247f4 refactor(main): remove allow_unsafe_werkzeug option from SocketIO run configuration 2025-07-04 20:30:58 +02:00
JSC
ccc5ee38e2 feat(socketio): integrate SocketIO service for real-time communication and emit credits change events 2025-07-04 20:18:46 +02:00
JSC
1cd43a670d refactor: update timestamp handling to use timezone-aware datetime 2025-07-04 19:20:56 +02:00
JSC
4375718c2f refactor(decorators): simplify require_admin decorator by reusing require_role 2025-07-04 19:13:33 +02:00
JSC
5c29fa1a4c refactor(sound_played): remove unused fields ip_address and user_agent from SoundPlayed model 2025-07-04 18:55:26 +02:00
JSC
c3b8205f83 feat(sound_played): add sound play tracking and user statistics endpoints; enhance VLC service to record play events 2025-07-03 21:50:17 +02:00
JSC
97b998fd9e feat(vlc_service): refactor VLC service to use subprocess for sound playback and management; update process tracking 2025-07-03 21:36:42 +02:00
JSC
7455811860 feat: Add VLC service for sound playback and management
- Implemented VLCService to handle sound playback using VLC.
- Added routes for soundboard management including play, stop, and status.
- Introduced admin routes for sound normalization and scanning.
- Updated user model and services to accommodate new functionalities.
- Enhanced error handling and logging throughout the application.
- Updated dependencies to include python-vlc for sound playback capabilities.
2025-07-03 21:25:50 +02:00
JSC
8f17dd730a feat(admin_routes): add admin routes for scheduler and sound management; refactor main routes 2025-07-03 20:24:13 +02:00
42 changed files with 4658 additions and 548 deletions

View File

@@ -17,3 +17,6 @@ GOOGLE_CLIENT_SECRET=your_google_client_secret_here
 # GitHub OAuth
 GITHUB_CLIENT_ID=your_github_client_id_here
 GITHUB_CLIENT_SECRET=your_github_client_secret_here
+
+# Stream Processing Configuration
+STREAM_MAX_CONCURRENT=2

View File

@@ -4,13 +4,15 @@ from datetime import timedelta
 from flask import Flask
 from flask_cors import CORS
 from flask_jwt_extended import JWTManager
+from flask_socketio import SocketIO

 from app.database import init_db
 from app.services.auth_service import AuthService
 from app.services.scheduler_service import scheduler_service

-# Global auth service instance
+# Global service instances
 auth_service = AuthService()
+socketio = SocketIO()

 def create_app():
@@ -27,14 +29,16 @@ def create_app():
     # Configure Flask-JWT-Extended
     app.config["JWT_SECRET_KEY"] = os.environ.get(
-        "JWT_SECRET_KEY", "jwt-secret-key",
+        "JWT_SECRET_KEY",
+        "jwt-secret-key",
     )
     app.config["JWT_ACCESS_TOKEN_EXPIRES"] = timedelta(minutes=15)
     app.config["JWT_REFRESH_TOKEN_EXPIRES"] = timedelta(days=7)
     app.config["JWT_TOKEN_LOCATION"] = ["cookies"]
     app.config["JWT_COOKIE_SECURE"] = False  # Set to True in production
     app.config["JWT_COOKIE_CSRF_PROTECT"] = False
-    app.config["JWT_ACCESS_COOKIE_PATH"] = "/api/"
+    app.config["JWT_COOKIE_SAMESITE"] = "Lax"  # Allow cross-origin requests
+    app.config["JWT_ACCESS_COOKIE_PATH"] = "/api/"  # Restrict to API paths only
     app.config["JWT_REFRESH_COOKIE_PATH"] = "/api/auth/refresh"

@@ -46,6 +50,14 @@ def create_app():
         methods=["GET", "POST", "PATCH", "PUT", "DELETE", "OPTIONS"],
     )

+    # Initialize SocketIO
+    socketio.init_app(
+        app,
+        cors_allowed_origins="http://localhost:3000",
+        cors_credentials=True,
+        path="/api/socket.io/",  # Use /api prefix for Socket.IO
+    )

     # Initialize JWT manager
     jwt = JWTManager(app)
@@ -61,23 +73,55 @@ def create_app():
     # Initialize authentication service with app
     auth_service.init_app(app)

+    # Initialize SocketIO service (import after socketio is initialized)
+    from app.services.socketio_service import socketio_service  # noqa: F401

     # Initialize scheduler service with app
     scheduler_service.app = app

     # Start scheduler for background tasks
     scheduler_service.start()

+    # Initialize stream processing service
+    from app.services.stream_processing_service import StreamProcessingService
+    StreamProcessingService.initialize(app)

+    # Initialize music player service
+    from app.services.music_player_service import music_player_service
+    music_player_service.app = app  # Store app instance for Flask context
+    music_player_service.start_vlc_instance()

     # Register blueprints
-    from app.routes import auth, main
+    from app.routes import (
+        admin,
+        admin_sounds,
+        auth,
+        main,
+        player,
+        referential,
+        soundboard,
+        sounds,
+        stream,
+    )

     app.register_blueprint(main.bp, url_prefix="/api")
     app.register_blueprint(auth.bp, url_prefix="/api/auth")
+    app.register_blueprint(admin.bp, url_prefix="/api/admin")
+    app.register_blueprint(admin_sounds.bp, url_prefix="/api/admin/sounds")
+    app.register_blueprint(referential.bp, url_prefix="/api/referential")
+    app.register_blueprint(soundboard.bp, url_prefix="/api/soundboard")
+    app.register_blueprint(sounds.bp, url_prefix="/api/sounds")
+    app.register_blueprint(stream.bp, url_prefix="/api/stream")
+    app.register_blueprint(player.bp, url_prefix="/api/player")

-    # Shutdown scheduler when app is torn down
+    # Shutdown services when app is torn down
     @app.teardown_appcontext
-    def shutdown_scheduler(exception):
-        """Stop scheduler when app context is torn down."""
+    def shutdown_services(exception):
+        """Stop services when app context is torn down."""
         if exception:
             scheduler_service.stop()
+            # music_player_service.stop_vlc_instance()

     return app

View File

@@ -13,6 +13,6 @@ def init_db(app):
     migrate.init_app(app, db)

     # Import models here to ensure they are registered with SQLAlchemy
-    from app.models import user, user_oauth  # noqa: F401
+    from app.models import sound_played, user, user_oauth  # noqa: F401

     return db

View File

@@ -12,8 +12,8 @@ def init_database():
     # Seed plans if they don't exist
     seed_plans()

-    # Migrate existing users to have plans
-    migrate_users_to_plans()
+    # Create default Main playlist if it doesn't exist
+    seed_main_playlist()

 def seed_plans():
@@ -55,68 +55,35 @@ def seed_plans():
     print(f"Seeded {len(plans_data)} plans into database")

-def migrate_users_to_plans():
-    """Assign plans to existing users who don't have one."""
-    from app.models.user import User
-
-    try:
-        # Find users without plans
-        users_without_plans = User.query.filter(User.plan_id.is_(None)).all()
-
-        # Find users with plans but NULL credits (only if credits column exists)
-        # Note: We only migrate users with NULL credits, not 0 credits
-        # 0 credits means they spent them, NULL means they never got assigned
-        try:
-            users_without_credits = User.query.filter(
-                User.plan_id.isnot(None), User.credits.is_(None),
-            ).all()
-        except Exception:
-            # Credits column doesn't exist yet, will be handled by create_all
-            users_without_credits = []
-
-        if not users_without_plans and not users_without_credits:
-            return
-
-        # Get default and pro plans
-        default_plan = Plan.get_default_plan()
-        pro_plan = Plan.get_pro_plan()
-
-        # Get the first user (admin) from all users ordered by ID
-        first_user = User.query.order_by(User.id).first()
-
-        updated_count = 0
-
-        # Assign plans to users without plans
-        for user in users_without_plans:
-            # First user gets pro plan, others get free plan
-            if user.id == first_user.id:
-                user.plan_id = pro_plan.id
-                # Only set credits if the column exists
-                try:
-                    user.credits = pro_plan.credits
-                except Exception:
-                    pass
-            else:
-                user.plan_id = default_plan.id
-                # Only set credits if the column exists
-                try:
-                    user.credits = default_plan.credits
-                except Exception:
-                    pass
-            updated_count += 1
-
-        # Assign credits to users with plans but no credits
-        for user in users_without_credits:
-            user.credits = user.plan.credits
-            updated_count += 1
-
-        if updated_count > 0:
-            db.session.commit()
-            print(
-                f"Updated {updated_count} existing users with plans and credits",
-            )
-    except Exception:
-        # If there's any error (like missing columns), just skip migration
-        # The database will be properly created by create_all()
-        pass
+def seed_main_playlist():
+    """Create the default Main playlist if it doesn't exist."""
+    from app.models.playlist import Playlist
+
+    # Check if Main playlist already exists
+    main_playlist = Playlist.query.filter_by(name="Main", user_id=None).first()
+
+    if main_playlist is None:
+        # Create the Main playlist
+        main_playlist = Playlist.create_playlist(
+            name="Main",
+            description="Default main playlist for all sounds",
+            genre=None,
+            user_id=None,  # System playlist
+            is_main=True,
+            is_deletable=False,
+            is_current=True,
+            commit=True,
+        )
+        print("Created default Main playlist")
+    else:
+        # Ensure the existing Main playlist has correct properties
+        if (
+            not main_playlist.is_main
+            or main_playlist.is_deletable
+            or not main_playlist.is_current
+        ):
+            main_playlist.is_main = True
+            main_playlist.is_deletable = False
+            main_playlist.is_current = True
+            db.session.commit()
+            print("Updated existing Main playlist properties")

View File

@@ -1,8 +1,11 @@
"""Database models.""" """Database models."""
from .plan import Plan from .plan import Plan
from .playlist import Playlist
from .playlist_sound import PlaylistSound
from .sound import Sound from .sound import Sound
from .stream import Stream
from .user import User from .user import User
from .user_oauth import UserOAuth from .user_oauth import UserOAuth
__all__ = ["Plan", "Sound", "User", "UserOAuth"] __all__ = ["Plan", "Playlist", "PlaylistSound", "Sound", "Stream", "User", "UserOAuth"]

View File

@@ -9,7 +9,7 @@ from app.database import db
 class Plan(db.Model):
     """Plan model for user subscription plans."""

-    __tablename__ = "plans"
+    __tablename__ = "plan"

     id = Column(Integer, primary_key=True)
     code = Column(String(50), unique=True, nullable=False, index=True)

app/models/playlist.py (new file, 156 lines)
View File

@@ -0,0 +1,156 @@
"""Playlist model for managing sound playlists."""
from datetime import datetime
from typing import TYPE_CHECKING, Optional
from zoneinfo import ZoneInfo
from sqlalchemy import Boolean, DateTime, ForeignKey, Integer, String, Text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.database import db
if TYPE_CHECKING:
from app.models.playlist_sound import PlaylistSound
from app.models.user import User
class Playlist(db.Model):
"""Model for playlists containing sounds."""
__tablename__ = "playlist"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
name: Mapped[str] = mapped_column(String(255), nullable=False)
description: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
genre: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
user_id: Mapped[Optional[int]] = mapped_column(
Integer, ForeignKey("user.id"), nullable=True
)
is_main: Mapped[bool] = mapped_column(
Boolean, default=False, nullable=False
)
is_deletable: Mapped[bool] = mapped_column(
Boolean, default=True, nullable=True
)
is_current: Mapped[bool] = mapped_column(
Boolean, default=False, nullable=False
)
created_at: Mapped[datetime] = mapped_column(
DateTime,
default=lambda: datetime.now(tz=ZoneInfo("UTC")),
nullable=False,
)
updated_at: Mapped[datetime] = mapped_column(
DateTime,
default=lambda: datetime.now(tz=ZoneInfo("UTC")),
onupdate=lambda: datetime.now(tz=ZoneInfo("UTC")),
nullable=False,
)
# Relationships
user: Mapped[Optional["User"]] = relationship(
"User", back_populates="playlists"
)
playlist_sounds: Mapped[list["PlaylistSound"]] = relationship(
"PlaylistSound", back_populates="playlist", cascade="all, delete-orphan"
)
def __repr__(self) -> str:
"""String representation of the playlist."""
return f"<Playlist(id={self.id}, name='{self.name}', user_id={self.user_id})>"
def to_dict(self) -> dict:
"""Convert playlist to dictionary representation."""
return {
"id": self.id,
"name": self.name,
"description": self.description,
"genre": self.genre,
"user_id": self.user_id,
"is_main": self.is_main,
"is_deletable": self.is_deletable,
"is_current": self.is_current,
"created_at": (
self.created_at.isoformat() if self.created_at else None
),
"updated_at": (
self.updated_at.isoformat() if self.updated_at else None
),
"sound_count": (
len(self.playlist_sounds) if self.playlist_sounds else 0
),
}
@classmethod
def create_playlist(
cls,
name: str,
description: Optional[str] = None,
genre: Optional[str] = None,
user_id: Optional[int] = None,
is_main: bool = False,
is_deletable: bool = True,
is_current: bool = False,
commit: bool = True,
) -> "Playlist":
"""Create a new playlist."""
playlist = cls(
name=name,
description=description,
genre=genre,
user_id=user_id,
is_main=is_main,
is_deletable=is_deletable,
is_current=is_current,
)
db.session.add(playlist)
if commit:
db.session.commit()
return playlist
@classmethod
def find_current_playlist(
cls, user_id: Optional[int] = None
) -> Optional["Playlist"]:
"""Find the current active playlist."""
query = cls.query.filter_by(is_current=True)
if user_id is not None:
query = query.filter_by(user_id=user_id)
return query.first()
@classmethod
def find_main_playlist(
cls, user_id: Optional[int] = None
) -> Optional["Playlist"]:
"""Find the main playlist."""
query = cls.query.filter_by(is_main=True)
if user_id is not None:
query = query.filter_by(user_id=user_id)
return query.first()
def add_sound(
self, sound_id: int, order: Optional[int] = None, commit: bool = True
) -> "PlaylistSound":
"""Add a sound to the playlist."""
from app.models.playlist_sound import PlaylistSound
if order is None:
# Get the next order number
max_order = (
db.session.query(db.func.max(PlaylistSound.order))
.filter_by(playlist_id=self.id)
.scalar()
)
order = (max_order or 0) + 1
playlist_sound = PlaylistSound(
playlist_id=self.id, sound_id=sound_id, order=order
)
db.session.add(playlist_sound)
if commit:
db.session.commit()
return playlist_sound

View File

@@ -0,0 +1,65 @@
"""Playlist-Sound relationship model for managing sound order in playlists."""
from datetime import datetime
from typing import TYPE_CHECKING, Optional
from zoneinfo import ZoneInfo
from sqlalchemy import DateTime, ForeignKey, Integer, UniqueConstraint
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.database import db
if TYPE_CHECKING:
from app.models.playlist import Playlist
from app.models.sound import Sound
class PlaylistSound(db.Model):
"""Model for playlist-sound relationships with ordering."""
__tablename__ = "playlist_sound"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
playlist_id: Mapped[int] = mapped_column(
Integer, ForeignKey("playlist.id"), nullable=False
)
sound_id: Mapped[int] = mapped_column(
Integer, ForeignKey("sound.id"), nullable=False
)
order: Mapped[int] = mapped_column(Integer, nullable=False)
added_at: Mapped[datetime] = mapped_column(
DateTime,
default=lambda: datetime.now(tz=ZoneInfo("UTC")),
nullable=False,
)
# Relationships
playlist: Mapped["Playlist"] = relationship(
"Playlist", back_populates="playlist_sounds"
)
sound: Mapped["Sound"] = relationship(
"Sound", back_populates="playlist_sounds"
)
# Constraints
__table_args__ = (
UniqueConstraint(
"playlist_id", "sound_id", name="unique_playlist_sound"
),
UniqueConstraint("playlist_id", "order", name="unique_playlist_order"),
)
def __repr__(self) -> str:
"""String representation of the playlist-sound relationship."""
return f"<PlaylistSound(playlist_id={self.playlist_id}, sound_id={self.sound_id}, order={self.order})>"
def to_dict(self) -> dict:
"""Convert playlist-sound relationship to dictionary representation."""
return {
"id": self.id,
"playlist_id": self.playlist_id,
"sound_id": self.sound_id,
"order": self.order,
"added_at": self.added_at.isoformat() if self.added_at else None,
"sound": self.sound.to_dict() if self.sound else None,
}

View File

@@ -2,11 +2,17 @@
 from datetime import datetime
 from enum import Enum
-from typing import Optional
+from typing import TYPE_CHECKING, Optional
+from zoneinfo import ZoneInfo
+
+from sqlalchemy import Boolean, DateTime, Integer, String
+from sqlalchemy.orm import Mapped, mapped_column, relationship

 from app.database import db
-from sqlalchemy import Boolean, DateTime, Integer, String
-from sqlalchemy.orm import Mapped, mapped_column
+
+if TYPE_CHECKING:
+    from app.models.playlist_sound import PlaylistSound
+    from app.models.stream import Stream

 class SoundType(Enum):
@@ -20,7 +26,7 @@ class SoundType(Enum):
 class Sound(db.Model):
     """Sound model for storing sound file information."""

-    __tablename__ = "sounds"
+    __tablename__ = "sound"

     id: Mapped[int] = mapped_column(primary_key=True)
@@ -30,6 +36,9 @@ class Sound(db.Model):
     # Basic sound information
     name: Mapped[str] = mapped_column(String(255), nullable=False)
     filename: Mapped[str] = mapped_column(String(500), nullable=False)
+    thumbnail: Mapped[str | None] = mapped_column(
+        String(500), nullable=True
+    )  # Thumbnail filename
     duration: Mapped[int] = mapped_column(Integer, nullable=False)
     size: Mapped[int] = mapped_column(Integer, nullable=False)  # Size in bytes
     hash: Mapped[str] = mapped_column(String(64), nullable=False)  # SHA256 hash
@@ -75,16 +84,28 @@ class Sound(db.Model):
     # Timestamps
     created_at: Mapped[datetime] = mapped_column(
         DateTime,
-        default=datetime.utcnow,
+        default=lambda: datetime.now(tz=ZoneInfo("UTC")),
         nullable=False,
     )
     updated_at: Mapped[datetime] = mapped_column(
         DateTime,
-        default=datetime.utcnow,
-        onupdate=datetime.utcnow,
+        default=lambda: datetime.now(tz=ZoneInfo("UTC")),
+        onupdate=lambda: datetime.now(tz=ZoneInfo("UTC")),
         nullable=False,
     )

+    # Relationships
+    playlist_sounds: Mapped[list["PlaylistSound"]] = relationship(
+        "PlaylistSound",
+        back_populates="sound",
+        cascade="all, delete-orphan",
+    )
+    streams: Mapped[list["Stream"]] = relationship(
+        "Stream",
+        back_populates="sound",
+        cascade="all, delete-orphan",
+    )

     def __repr__(self) -> str:
         """String representation of Sound."""
         return f"<Sound {self.name} ({self.type}) - {self.play_count} plays>"
@@ -96,6 +117,7 @@ class Sound(db.Model):
             "type": self.type,
             "name": self.name,
             "filename": self.filename,
+            "thumbnail": self.thumbnail,
             "duration": self.duration,
             "size": self.size,
             "hash": self.hash,
@@ -114,7 +136,7 @@ class Sound(db.Model):
     def increment_play_count(self) -> None:
         """Increment the play count for this sound."""
         self.play_count += 1
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))
         db.session.commit()

     def set_normalized_info(
@@ -130,7 +152,7 @@ class Sound(db.Model):
         self.normalized_size = normalized_size
         self.normalized_hash = normalized_hash
         self.is_normalized = True
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))

     def clear_normalized_info(self) -> None:
         """Clear normalized sound information."""
@@ -139,7 +161,7 @@ class Sound(db.Model):
         self.normalized_hash = None
         self.normalized_size = None
         self.is_normalized = False
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))

     def update_file_info(
         self,
@@ -153,7 +175,7 @@ class Sound(db.Model):
         self.duration = duration
         self.size = size
         self.hash = hash_value
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))

     @classmethod
     def find_by_hash(cls, hash_value: str) -> Optional["Sound"]:
@@ -175,21 +197,6 @@ class Sound(db.Model):
         """Find all sounds by type."""
         return cls.query.filter_by(type=sound_type).all()

-    @classmethod
-    def get_most_played(cls, limit: int = 10) -> list["Sound"]:
-        """Get the most played sounds."""
-        return cls.query.order_by(cls.play_count.desc()).limit(limit).all()
-
-    @classmethod
-    def get_music_sounds(cls) -> list["Sound"]:
-        """Get all music sounds."""
-        return cls.query.filter_by(is_music=True).all()
-
-    @classmethod
-    def get_deletable_sounds(cls) -> list["Sound"]:
-        """Get all deletable sounds."""
-        return cls.query.filter_by(is_deletable=True).all()
-
     @classmethod
     def create_sound(
         cls,
@@ -199,6 +206,7 @@ class Sound(db.Model):
         duration: float,
         size: int,
         hash_value: str,
+        thumbnail: Optional[str] = None,
         is_music: bool = False,
         is_deletable: bool = True,
         commit: bool = True,
@@ -212,6 +220,7 @@ class Sound(db.Model):
             type=sound_type,
             name=name,
             filename=filename,
+            thumbnail=thumbnail,
             duration=duration,
             size=size,
             hash=hash_value,

View File

@@ -0,0 +1,94 @@
"""Sound played tracking model."""
from datetime import datetime
from zoneinfo import ZoneInfo
from sqlalchemy import DateTime, ForeignKey, Integer, func, text
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.database import db
class SoundPlayed(db.Model):
"""Model to track when users play sounds."""
__tablename__ = "sound_played"
id: Mapped[int] = mapped_column(primary_key=True)
# Foreign keys
user_id: Mapped[int] = mapped_column(
Integer,
ForeignKey("user.id"),
nullable=True,
)
sound_id: Mapped[int] = mapped_column(
Integer,
ForeignKey("sound.id"),
nullable=False,
)
# Timestamp
played_at: Mapped[datetime] = mapped_column(
DateTime,
default=lambda: datetime.now(tz=ZoneInfo("UTC")),
nullable=False,
)
# Relationships
user: Mapped["User"] = relationship("User", backref="sounds_played")
sound: Mapped["Sound"] = relationship("Sound", backref="play_history")
def __repr__(self) -> str:
"""Return string representation of SoundPlayed."""
return (
f"<SoundPlayed user_id={self.user_id} sound_id={self.sound_id} "
f"at={self.played_at}>"
)
def to_dict(self) -> dict:
"""Convert sound played record to dictionary."""
return {
"id": self.id,
"user_id": self.user_id,
"sound_id": self.sound_id,
"played_at": self.played_at.isoformat(),
"user": (
{
"id": self.user.id,
"name": self.user.name,
"email": self.user.email,
}
if self.user
else None
),
"sound": (
{
"id": self.sound.id,
"name": self.sound.name,
"filename": self.sound.filename,
"type": self.sound.type,
}
if self.sound
else None
),
}
@classmethod
def create_play_record(
cls,
user_id: int | None,
sound_id: int,
*,
commit: bool = True,
) -> "SoundPlayed":
"""Create a new sound played record."""
play_record = cls(
user_id=user_id,
sound_id=sound_id,
)
db.session.add(play_record)
if commit:
db.session.commit()
return play_record

app/models/stream.py (new file, 124 lines)
View File

@@ -0,0 +1,124 @@
"""Stream model for storing streaming service links to sounds."""
from datetime import datetime
from typing import TYPE_CHECKING, Optional
from zoneinfo import ZoneInfo
from sqlalchemy import (
DateTime,
ForeignKey,
Integer,
String,
Text,
UniqueConstraint,
)
from sqlalchemy.orm import Mapped, mapped_column, relationship
from app.database import db
if TYPE_CHECKING:
from app.models.sound import Sound
class Stream(db.Model):
"""Model for storing streaming service information linked to sounds."""
__tablename__ = "stream"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
service: Mapped[str] = mapped_column(String(50), nullable=True)
service_id: Mapped[str] = mapped_column(String(255), nullable=True)
sound_id: Mapped[int] = mapped_column(
Integer, ForeignKey("sound.id"), nullable=True
)
url: Mapped[str] = mapped_column(Text, nullable=False)
title: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
track: Mapped[Optional[str]] = mapped_column(String(500), nullable=True)
artist: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
album: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
genre: Mapped[Optional[str]] = mapped_column(String(100), nullable=True)
status: Mapped[str] = mapped_column(
String(50), nullable=False, default="pending"
)
error: Mapped[str | None] = mapped_column(Text, nullable=True)
created_at: Mapped[datetime] = mapped_column(
DateTime,
default=lambda: datetime.now(tz=ZoneInfo("UTC")),
nullable=False,
)
updated_at: Mapped[datetime] = mapped_column(
DateTime,
default=lambda: datetime.now(tz=ZoneInfo("UTC")),
onupdate=lambda: datetime.now(tz=ZoneInfo("UTC")),
nullable=False,
)
# Relationships
sound: Mapped["Sound"] = relationship("Sound", back_populates="streams")
# Constraints
__table_args__ = (
UniqueConstraint("service", "service_id", name="unique_service_stream"),
)
def __repr__(self) -> str:
"""String representation of the stream."""
return f"<Stream(id={self.id}, service='{self.service}', service_id='{self.service_id}', sound_id={self.sound_id})>"
def to_dict(self) -> dict:
"""Convert stream to dictionary representation."""
return {
"id": self.id,
"service": self.service,
"service_id": self.service_id,
"sound_id": self.sound_id,
"url": self.url,
"title": self.title,
"track": self.track,
"artist": self.artist,
"album": self.album,
"genre": self.genre,
"status": self.status,
"error": self.error,
"created_at": (
self.created_at.isoformat() if self.created_at else None
),
"updated_at": (
self.updated_at.isoformat() if self.updated_at else None
),
}
@classmethod
def create_stream(
cls,
url: str,
service: Optional[str] = None,
service_id: Optional[str] = None,
sound_id: Optional[int] = None,
title: Optional[str] = None,
track: Optional[str] = None,
artist: Optional[str] = None,
album: Optional[str] = None,
genre: Optional[str] = None,
status: str = "active",
commit: bool = True,
) -> "Stream":
"""Create a new stream record."""
stream = cls(
service=service,
service_id=service_id,
sound_id=sound_id,
url=url,
title=title,
track=track,
artist=artist,
album=album,
genre=genre,
status=status,
)
db.session.add(stream)
if commit:
db.session.commit()
return stream

View File

@@ -3,6 +3,7 @@
 import secrets
 from datetime import datetime
 from typing import TYPE_CHECKING, Optional
+from zoneinfo import ZoneInfo

 from sqlalchemy import DateTime, ForeignKey, Integer, String
 from sqlalchemy.orm import Mapped, mapped_column, relationship
@@ -12,13 +13,14 @@ from app.database import db
 if TYPE_CHECKING:
     from app.models.plan import Plan
+    from app.models.playlist import Playlist
     from app.models.user_oauth import UserOAuth

 class User(db.Model):
     """User model for storing user information."""

-    __tablename__ = "users"
+    __tablename__ = "user"

     id: Mapped[int] = mapped_column(primary_key=True)
@@ -29,12 +31,15 @@ class User(db.Model):
     # Password authentication (optional - users can use OAuth instead)
     password_hash: Mapped[str | None] = mapped_column(
-        String(255), nullable=True,
+        String(255),
+        nullable=True,
     )

     # Role-based access control
     role: Mapped[str] = mapped_column(
-        String(50), nullable=False, default="user",
+        String(50),
+        nullable=False,
+        default="user",
     )

     # User status
@@ -42,7 +47,9 @@ class User(db.Model):
     # Plan relationship
     plan_id: Mapped[int] = mapped_column(
-        Integer, ForeignKey("plans.id"), nullable=False,
+        Integer,
+        ForeignKey("plan.id"),
+        nullable=False,
     )

     # User credits (populated from plan credits on creation)
@@ -51,25 +58,35 @@ class User(db.Model):
     # API token for programmatic access
     api_token: Mapped[str | None] = mapped_column(String(255), nullable=True)
     api_token_expires_at: Mapped[datetime | None] = mapped_column(
-        DateTime, nullable=True,
+        DateTime,
+        nullable=True,
     )

     # Timestamps
     created_at: Mapped[datetime] = mapped_column(
-        DateTime, default=datetime.utcnow, nullable=False,
+        DateTime,
+        default=lambda: datetime.now(tz=ZoneInfo("UTC")),
+        nullable=False,
     )
     updated_at: Mapped[datetime] = mapped_column(
         DateTime,
-        default=datetime.utcnow,
-        onupdate=datetime.utcnow,
+        default=lambda: datetime.now(tz=ZoneInfo("UTC")),
+        onupdate=lambda: datetime.now(tz=ZoneInfo("UTC")),
         nullable=False,
     )

     # Relationships
     oauth_providers: Mapped[list["UserOAuth"]] = relationship(
-        "UserOAuth", back_populates="user", cascade="all, delete-orphan",
+        "UserOAuth",
+        back_populates="user",
+        cascade="all, delete-orphan",
     )
     plan: Mapped["Plan"] = relationship("Plan", back_populates="users")
+    playlists: Mapped[list["Playlist"]] = relationship(
+        "Playlist",
+        back_populates="user",
+        cascade="all, delete-orphan",
+    )

     def __repr__(self) -> str:
         """String representation of User."""
@@ -93,9 +110,11 @@ class User(db.Model):
             "role": self.role,
             "is_active": self.is_active,
             "api_token": self.api_token,
-            "api_token_expires_at": self.api_token_expires_at.isoformat()
-            if self.api_token_expires_at
-            else None,
+            "api_token_expires_at": (
+                self.api_token_expires_at.isoformat()
+                if self.api_token_expires_at
+                else None
+            ),
             "providers": providers,
             "plan": self.plan.to_dict() if self.plan else None,
             "credits": self.credits,
@@ -119,13 +138,13 @@ class User(db.Model):
         self.email = provider_data.get("email", self.email)
         self.name = provider_data.get("name", self.name)
         self.picture = provider_data.get("picture", self.picture)
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))
         db.session.commit()

     def set_password(self, password: str) -> None:
         """Hash and set user password."""
         self.password_hash = generate_password_hash(password)
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))

     def check_password(self, password: str) -> bool:
         """Check if provided password matches user's password."""
@@ -141,7 +160,7 @@ class User(db.Model):
         """Generate a new API token for the user."""
         self.api_token = secrets.token_urlsafe(32)
         self.api_token_expires_at = None  # No expiration by default
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))
         return self.api_token

     def is_api_token_valid(self) -> bool:
@@ -152,23 +171,23 @@ class User(db.Model):
         if self.api_token_expires_at is None:
             return True  # No expiration
-        return datetime.utcnow() < self.api_token_expires_at
+        return datetime.now(tz=ZoneInfo("UTC")) < self.api_token_expires_at

     def revoke_api_token(self) -> None:
         """Revoke the user's API token."""
         self.api_token = None
         self.api_token_expires_at = None
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))

     def activate(self) -> None:
         """Activate the user account."""
         self.is_active = True
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))

     def deactivate(self) -> None:
         """Deactivate the user account."""
         self.is_active = False
-        self.updated_at = datetime.utcnow()
+        self.updated_at = datetime.now(tz=ZoneInfo("UTC"))

     @classmethod
     def find_by_email(cls, email: str) -> Optional["User"]:
@@ -198,7 +217,8 @@ class User(db.Model):
         # First, try to find existing OAuth provider
         oauth_provider = UserOAuth.find_by_provider_and_id(
-            provider, provider_id,
+            provider,
+            provider_id,
         )

         if oauth_provider:
@@ -207,7 +227,7 @@ class User(db.Model):
             oauth_provider.email = email
             oauth_provider.name = name
             oauth_provider.picture = picture
-            oauth_provider.updated_at = datetime.utcnow()
+            oauth_provider.updated_at = datetime.now(tz=ZoneInfo("UTC"))

             # Update user info with latest data
             user.update_from_provider(
@@ -256,7 +276,10 @@ class User(db.Model):
     @classmethod
     def create_with_password(
-        cls, email: str, password: str, name: str,
+        cls,
+        email: str,
+        password: str,
+        name: str,
     ) -> "User":
         """Create new user with email and password."""
         from app.models.plan import Plan
@@ -293,7 +316,9 @@ class User(db.Model):
     @classmethod
     def authenticate_with_password(
-        cls, email: str, password: str,
+        cls,
+        email: str,
+        password: str,
     ) -> Optional["User"]:
         """Authenticate user with email and password."""
         user = cls.find_by_email(email)

View File

@@ -2,6 +2,7 @@
 from datetime import datetime
 from typing import TYPE_CHECKING, Optional
+from zoneinfo import ZoneInfo

 from sqlalchemy import DateTime, ForeignKey, String, Text
 from sqlalchemy.orm import Mapped, mapped_column, relationship
@@ -20,7 +21,7 @@ class UserOAuth(db.Model):
     id: Mapped[int] = mapped_column(primary_key=True)

     # User relationship
-    user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), nullable=False)
+    user_id: Mapped[int] = mapped_column(ForeignKey("user.id"), nullable=False)

     # OAuth provider information
     provider: Mapped[str] = mapped_column(String(50), nullable=False)
@@ -33,25 +34,30 @@ class UserOAuth(db.Model):
     # Timestamps
     created_at: Mapped[datetime] = mapped_column(
-        DateTime, default=datetime.utcnow, nullable=False,
+        DateTime,
+        default=lambda: datetime.now(tz=ZoneInfo("UTC")),
+        nullable=False,
     )
     updated_at: Mapped[datetime] = mapped_column(
         DateTime,
-        default=datetime.utcnow,
-        onupdate=datetime.utcnow,
+        default=lambda: datetime.now(tz=ZoneInfo("UTC")),
+        onupdate=lambda: datetime.now(tz=ZoneInfo("UTC")),
         nullable=False,
     )

     # Unique constraint on provider + provider_id combination
     __table_args__ = (
         db.UniqueConstraint(
-            "provider", "provider_id", name="unique_provider_user",
+            "provider",
+            "provider_id",
+            name="unique_provider_user",
         ),
     )

     # Relationships
     user: Mapped["User"] = relationship(
-        "User", back_populates="oauth_providers",
+        "User",
+        back_populates="oauth_providers",
     )

     def __repr__(self) -> str:
@@ -73,11 +79,14 @@ class UserOAuth(db.Model):
     @classmethod
     def find_by_provider_and_id(
-        cls, provider: str, provider_id: str,
+        cls,
+        provider: str,
+        provider_id: str,
     ) -> Optional["UserOAuth"]:
         """Find OAuth provider by provider name and provider ID."""
         return cls.query.filter_by(
-            provider=provider, provider_id=provider_id,
+            provider=provider,
+            provider_id=provider_id,
         ).first()

     @classmethod
@@ -99,7 +108,7 @@ class UserOAuth(db.Model):
             oauth_provider.email = email
             oauth_provider.name = name
             oauth_provider.picture = picture
-            oauth_provider.updated_at = datetime.utcnow()
+            oauth_provider.updated_at = datetime.now(tz=ZoneInfo("UTC"))
         else:
             # Create new provider
             oauth_provider = cls(

app/routes/admin.py (new file, 175 lines)
View File

@@ -0,0 +1,175 @@
"""Admin routes for the application."""
from flask import Blueprint
from app.services.decorators import get_current_user, require_auth, require_role
from app.services.scheduler_service import scheduler_service
bp = Blueprint("admin", __name__)
@bp.route("/")
@require_auth
@require_role("admin")
def admin_only() -> dict[str, str]:
"""Admin-only endpoint to demonstrate role-based access."""
user = get_current_user()
return {
"message": f"Hello admin {user['name']}, you have admin access!",
"user": user,
"admin_info": "This endpoint is only accessible to admin users",
}
@bp.route("/scheduler/status")
@require_auth
@require_role("admin")
def scheduler_status() -> dict:
"""Get scheduler status (admin only)."""
return scheduler_service.get_scheduler_status()
@bp.route("/credits/refill", methods=["POST"])
@require_auth
@require_role("admin")
def manual_credit_refill() -> dict:
"""Manually trigger credit refill for all users (admin only)."""
return scheduler_service.trigger_credit_refill_now()
@bp.route("/users")
@require_auth
@require_role("admin")
def list_users() -> dict:
"""List all users (admin only)."""
from app.models.user import User
users = User.query.order_by(User.created_at.desc()).all()
return {
"users": [user.to_dict() for user in users],
"total": len(users)
}
@bp.route("/users/<int:user_id>", methods=["PATCH"])
@require_auth
@require_role("admin")
def update_user(user_id: int) -> dict:
"""Update user information (admin only)."""
from flask import request
from app.database import db
from app.models.user import User
from app.models.plan import Plan
data = request.get_json()
if not data:
return {"error": "No data provided"}, 400
user = User.query.get(user_id)
if not user:
return {"error": "User not found"}, 404
# Validate and update fields
try:
if "name" in data:
name = data["name"].strip()
if not name:
return {"error": "Name cannot be empty"}, 400
if len(name) > 100:
return {"error": "Name too long (max 100 characters)"}, 400
user.name = name
if "credits" in data:
credits = data["credits"]
if not isinstance(credits, int) or credits < 0:
return {"error": "Credits must be a non-negative integer"}, 400
user.credits = credits
if "plan_id" in data:
plan_id = data["plan_id"]
if not isinstance(plan_id, int):
return {"error": "Plan ID must be an integer"}, 400
plan = Plan.query.get(plan_id)
if not plan:
return {"error": "Plan not found"}, 404
user.plan_id = plan_id
if "is_active" in data:
is_active = data["is_active"]
if not isinstance(is_active, bool):
return {"error": "is_active must be a boolean"}, 400
user.is_active = is_active
db.session.commit()
return {
"message": "User updated successfully",
"user": user.to_dict()
}
except Exception as e:
db.session.rollback()
return {"error": f"Failed to update user: {str(e)}"}, 500
@bp.route("/users/<int:user_id>/deactivate", methods=["POST"])
@require_auth
@require_role("admin")
def deactivate_user(user_id: int) -> dict:
"""Deactivate a user (admin only)."""
from app.database import db
from app.models.user import User
user = User.query.get(user_id)
if not user:
return {"error": "User not found"}, 404
# Prevent admin from deactivating themselves
current_user = get_current_user()
if str(user.id) == current_user["id"]:
return {"error": "Cannot deactivate your own account"}, 400
try:
user.deactivate()
db.session.commit()
return {
"message": "User deactivated successfully",
"user": user.to_dict()
}
except Exception as e:
db.session.rollback()
return {"error": f"Failed to deactivate user: {str(e)}"}, 500
@bp.route("/users/<int:user_id>/activate", methods=["POST"])
@require_auth
@require_role("admin")
def activate_user(user_id: int) -> dict:
"""Activate a user (admin only)."""
from app.database import db
from app.models.user import User
user = User.query.get(user_id)
if not user:
return {"error": "User not found"}, 404
try:
user.activate()
db.session.commit()
return {
"message": "User activated successfully",
"user": user.to_dict()
}
except Exception as e:
db.session.rollback()
return {"error": f"Failed to activate user: {str(e)}"}, 500

View File

@@ -0,0 +1,84 @@
"""Admin sound management routes."""
from flask import Blueprint, jsonify, request
from app.services.decorators import require_admin, require_auth, require_role
from app.services.error_handling_service import ErrorHandlingService
from app.services.scheduler_service import scheduler_service
from app.services.sound_normalizer_service import SoundNormalizerService
from app.services.sound_scanner_service import SoundScannerService
bp = Blueprint("admin_sounds", __name__)
@bp.route("/scan", methods=["POST"])
@require_admin
def scan_sounds():
"""Manually trigger sound scanning."""
return ErrorHandlingService.handle_service_result(
scheduler_service.trigger_sound_scan_now()
)
@bp.route("/scan/status", methods=["GET"])
@require_admin
def get_scan_status():
"""Get current scan statistics and status."""
return ErrorHandlingService.wrap_service_call(
SoundScannerService.get_scan_statistics,
)
@bp.route("/normalize", methods=["POST"])
@require_admin
def normalize_sounds():
"""Normalize sounds (all or specific)."""
try:
data = request.get_json() or {}
sound_id = data.get("sound_id")
overwrite = data.get("overwrite", False)
two_pass = data.get("two_pass", True)
limit = data.get("limit")
if sound_id:
# Normalize specific sound
result = SoundNormalizerService.normalize_sound(
sound_id,
overwrite,
two_pass,
)
else:
# Normalize all sounds
result = SoundNormalizerService.normalize_all_sounds(
overwrite,
limit,
two_pass,
)
if result["success"]:
return jsonify(result), 200
return jsonify(result), 400
except Exception as e:
return jsonify({"error": str(e)}), 500
@bp.route("/normalize/status", methods=["GET"])
@require_admin
def get_normalization_status():
"""Get normalization statistics and status."""
try:
status = SoundNormalizerService.get_normalization_status()
return jsonify(status), 200
except Exception as e:
return jsonify({"error": str(e)}), 500
@bp.route("/ffmpeg/check", methods=["GET"])
@require_admin
def check_ffmpeg():
"""Check ffmpeg availability and capabilities."""
try:
ffmpeg_status = SoundNormalizerService.check_ffmpeg_availability()
return jsonify(ffmpeg_status), 200
except Exception as e:
return jsonify({"error": str(e)}), 500

View File

@@ -128,7 +128,9 @@ def refresh():
 def link_provider(provider):
     """Link a new OAuth provider to current user account."""
     redirect_uri = url_for(
-        "auth.link_callback", provider=provider, _external=True,
+        "auth.link_callback",
+        provider=provider,
+        _external=True,
     )
     return auth_service.redirect_to_login(provider, redirect_uri)
@@ -138,62 +140,27 @@ def link_provider(provider):
 def link_callback(provider):
     """Handle OAuth callback for linking new provider."""
     try:
+        from app.services.oauth_linking_service import OAuthLinkingService
+
         current_user_id = get_jwt_identity()
         if not current_user_id:
             return {"error": "User not authenticated"}, 401

-        # Get current user from database
-        from app.models.user import User
-
-        user = User.query.get(current_user_id)
-        if not user:
-            return {"error": "User not found"}, 404
-
-        # Process OAuth callback but link to existing user
-        from authlib.integrations.flask_client import OAuth
-
-        from app.services.oauth_providers.registry import OAuthProviderRegistry
-
-        oauth = OAuth()
-        registry = OAuthProviderRegistry(oauth)
-        oauth_provider = registry.get_provider(provider)
-
-        if not oauth_provider:
-            return {"error": f"OAuth provider '{provider}' not configured"}, 400
-
-        token = oauth_provider.exchange_code_for_token(None, None)
-        raw_user_info = oauth_provider.get_user_info(token)
-        provider_data = oauth_provider.normalize_user_data(raw_user_info)
-
-        if not provider_data.get("id"):
-            return {
-                "error": "Failed to get user information from provider",
-            }, 400
-
-        # Check if this provider is already linked to another user
-        from app.models.user_oauth import UserOAuth
-
-        existing_provider = UserOAuth.find_by_provider_and_id(
-            provider, provider_data["id"],
-        )
-
-        if existing_provider and existing_provider.user_id != user.id:
-            return {
-                "error": "This provider account is already linked to another user",
-            }, 409
-
-        # Link the provider to current user
-        UserOAuth.create_or_update(
-            user_id=user.id,
-            provider=provider,
-            provider_id=provider_data["id"],
-            email=provider_data["email"],
-            name=provider_data["name"],
-            picture=provider_data.get("picture"),
-        )
-
-        return {"message": f"{provider.title()} account linked successfully"}
+        result = OAuthLinkingService.link_provider_to_user(
+            provider,
+            current_user_id,
+        )
+        return result
+    except ValueError as e:
+        error_str = str(e)
+        if "not found" in error_str:
+            return {"error": error_str}, 404
+        if "not configured" in error_str:
+            return {"error": error_str}, 400
+        if "already linked" in error_str:
+            return {"error": error_str}, 409
+        return {"error": error_str}, 400
     except Exception as e:
         return {"error": str(e)}, 400
@@ -203,33 +170,27 @@ def link_callback(provider):
 def unlink_provider(provider):
     """Unlink an OAuth provider from current user account."""
     try:
+        from app.services.oauth_linking_service import OAuthLinkingService
+
         current_user_id = get_jwt_identity()
         if not current_user_id:
             return {"error": "User not authenticated"}, 401

-        from app.database import db
-        from app.models.user import User
-
-        user = User.query.get(current_user_id)
-        if not user:
-            return {"error": "User not found"}, 404
-
-        # Check if user has more than one provider (prevent locking out)
-        if len(user.oauth_providers) <= 1:
-            return {"error": "Cannot unlink last authentication provider"}, 400
-
-        # Find and remove the provider
-        oauth_provider = user.get_provider(provider)
-        if not oauth_provider:
-            return {
-                "error": f"Provider '{provider}' not linked to this account",
-            }, 404
-
-        db.session.delete(oauth_provider)
-        db.session.commit()
-
-        return {"message": f"{provider.title()} account unlinked successfully"}
+        result = OAuthLinkingService.unlink_provider_from_user(
+            provider,
+            current_user_id,
+        )
+        return result
+    except ValueError as e:
+        error_str = str(e)
+        if "not found" in error_str:
+            return {"error": error_str}, 404
+        if "Cannot unlink" in error_str:
+            return {"error": error_str}, 400
+        if "not linked" in error_str:
+            return {"error": error_str}, 404
+        return {"error": error_str}, 400
     except Exception as e:
         return {"error": str(e)}, 400

View File

@@ -1,157 +1,232 @@
"""Main routes for the application.""" """Main routes for the application."""
from flask import Blueprint, request from datetime import datetime, timedelta
from zoneinfo import ZoneInfo
from app.services.decorators import ( from flask import Blueprint, request
get_current_user, from sqlalchemy import desc, func
require_auth,
require_credits, from app.database import db
require_role, from app.models.playlist import Playlist
) from app.models.sound import Sound
from app.services.scheduler_service import scheduler_service from app.models.sound_played import SoundPlayed
from app.services.sound_normalizer_service import SoundNormalizerService from app.models.user import User
from app.services.sound_scanner_service import SoundScannerService from app.services.decorators import require_auth
bp = Blueprint("main", __name__) bp = Blueprint("main", __name__)
@bp.route("/")
def index() -> dict[str, str]:
"""Root endpoint that returns API status."""
return {"message": "API is running", "status": "ok"}
@bp.route("/protected")
@require_auth
def protected() -> dict[str, str]:
"""Protected endpoint that requires authentication."""
user = get_current_user()
return {
"message": f"Hello {user['name']}, this is a protected endpoint!",
"user": user,
}
@bp.route("/api-protected")
@require_auth
def api_protected() -> dict[str, str]:
"""Protected endpoint that accepts JWT or API token authentication."""
user = get_current_user()
return {
"message": f"Hello {user['name']}, you accessed this via {user['provider']}!",
"user": user,
}
@bp.route("/admin")
@require_auth
@require_role("admin")
def admin_only() -> dict[str, str]:
"""Admin-only endpoint to demonstrate role-based access."""
user = get_current_user()
return {
"message": f"Hello admin {user['name']}, you have admin access!",
"user": user,
"admin_info": "This endpoint is only accessible to admin users",
}
@bp.route("/health") @bp.route("/health")
def health() -> dict[str, str]: def health() -> dict[str, str]:
"""Health check endpoint.""" """Health check endpoint."""
return {"status": "ok"} return {"status": "ok"}
@bp.route("/use-credits/<int:amount>") def get_period_filter(period: str) -> datetime | None:
"""Get the start date for the specified period."""
now = datetime.now(tz=ZoneInfo("UTC"))
if period == "today":
return now.replace(hour=0, minute=0, second=0, microsecond=0)
if period == "week":
return now - timedelta(days=7)
if period == "month":
return now - timedelta(days=30)
if period == "year":
return now - timedelta(days=365)
if period == "all":
return None
# Default to all time
return None
@bp.route("/dashboard/stats")
@require_auth @require_auth
@require_credits(5) def dashboard_stats() -> dict:
def use_credits(amount: int) -> dict[str, str]: """Get dashboard statistics."""
"""Test endpoint that costs 5 credits to use.""" # Count soundboard sounds (type = SDB)
user = get_current_user() soundboard_count = Sound.query.filter_by(type="SDB").count()
# Count tracks (type = STR)
track_count = Sound.query.filter_by(type="STR").count()
# Count playlists
playlist_count = Playlist.query.count()
# Calculate total size of all sounds (original + normalized)
total_size_result = db.session.query(
func.sum(Sound.size).label("original_size"),
func.sum(Sound.normalized_size).label("normalized_size"),
).first()
original_size = getattr(total_size_result, "original_size", 0) or 0
normalized_size = getattr(total_size_result, "normalized_size", 0) or 0
total_size = original_size + normalized_size
return { return {
"message": f"Successfully used endpoint! You requested amount: {amount}", "soundboard_sounds": soundboard_count,
"user": user["email"], "tracks": track_count,
"remaining_credits": user["credits"] "playlists": playlist_count,
- 5, # Note: credits already deducted by decorator "total_size": total_size,
"original_size": original_size,
"normalized_size": normalized_size,
} }
@bp.route("/expensive-operation") @bp.route("/dashboard/top-sounds")
@require_auth @require_auth
@require_credits(10) def top_sounds() -> dict:
def expensive_operation() -> dict[str, str]: """Get top played sounds for a specific period."""
"""Test endpoint that costs 10 credits to use.""" period = request.args.get("period", "all")
user = get_current_user() limit = int(request.args.get("limit", 5))
period_start = get_period_filter(period)
# Base query for soundboard sounds with play counts
query = (
db.session.query(
Sound.id,
Sound.name,
Sound.filename,
Sound.thumbnail,
Sound.type,
func.count(SoundPlayed.id).label("play_count"),
)
.outerjoin(SoundPlayed, Sound.id == SoundPlayed.sound_id)
.filter(Sound.type == "SDB") # Only soundboard sounds
.group_by(
Sound.id,
Sound.name,
Sound.filename,
Sound.thumbnail,
Sound.type,
)
)
# Apply period filter if specified
if period_start:
query = query.filter(SoundPlayed.played_at >= period_start)
# Order by play count and limit results
results = query.order_by(desc("play_count")).limit(limit).all()
# Convert to list of dictionaries
top_sounds_list = [
{
"id": result.id,
"name": result.name,
"filename": result.filename,
"thumbnail": result.thumbnail,
"type": result.type,
"play_count": result.play_count,
}
for result in results
]
return { return {
"message": "Expensive operation completed successfully!", "period": period,
"user": user["email"], "sounds": top_sounds_list,
"operation_cost": 10,
} }
@bp.route("/admin/scheduler/status") @bp.route("/dashboard/top-tracks")
@require_auth @require_auth
@require_role("admin") def top_tracks() -> dict:
def scheduler_status() -> dict: """Get top played tracks for a specific period."""
"""Get scheduler status (admin only).""" period = request.args.get("period", "all")
return scheduler_service.get_scheduler_status() limit = int(request.args.get("limit", 10))
period_start = get_period_filter(period)
# Base query for tracks with play counts
query = (
db.session.query(
Sound.id,
Sound.name,
Sound.filename,
Sound.thumbnail,
Sound.type,
func.count(SoundPlayed.id).label("play_count"),
)
.outerjoin(SoundPlayed, Sound.id == SoundPlayed.sound_id)
.filter(Sound.type == "STR") # Only tracks
.group_by(
Sound.id,
Sound.name,
Sound.filename,
Sound.thumbnail,
Sound.type,
)
)
# Apply period filter if specified
if period_start:
query = query.filter(SoundPlayed.played_at >= period_start)
# Order by play count and limit results
results = query.order_by(desc("play_count")).limit(limit).all()
# Convert to list of dictionaries
top_tracks_list = [
{
"id": result.id,
"name": result.name,
"filename": result.filename,
"thumbnail": result.thumbnail,
"type": result.type,
"play_count": result.play_count,
}
for result in results
]
return {
"period": period,
"tracks": top_tracks_list,
}
@bp.route("/admin/credits/refill", methods=["POST"]) @bp.route("/dashboard/top-users")
@require_auth @require_auth
@require_role("admin") def top_users() -> dict:
def manual_credit_refill() -> dict: """Get top users by play count for a specific period."""
"""Manually trigger credit refill for all users (admin only).""" period = request.args.get("period", "all")
return scheduler_service.trigger_credit_refill_now() limit = int(request.args.get("limit", 10))
period_start = get_period_filter(period)
@bp.route("/admin/sounds/scan", methods=["POST"]) # Base query for users with play counts
@require_auth query = (
@require_role("admin") db.session.query(
def manual_sound_scan() -> dict: User.id,
"""Manually trigger sound directory scan (admin only).""" User.name,
return scheduler_service.trigger_sound_scan_now() User.email,
User.picture,
func.count(SoundPlayed.id).label("play_count"),
)
.outerjoin(SoundPlayed, User.id == SoundPlayed.user_id)
.group_by(User.id, User.name, User.email, User.picture)
)
# Apply period filter if specified
if period_start:
query = query.filter(SoundPlayed.played_at >= period_start)
@bp.route("/admin/sounds/stats") # Order by play count and limit results
@require_auth results = query.order_by(desc("play_count")).limit(limit).all()
@require_role("admin")
def sound_statistics() -> dict:
"""Get sound database statistics (admin only)."""
return SoundScannerService.get_scan_statistics()
# Convert to list of dictionaries
top_users_list = [
{
"id": result.id,
"name": result.name,
"email": result.email,
"picture": result.picture,
"play_count": result.play_count,
}
for result in results
]
@bp.route("/admin/sounds/normalize/<int:sound_id>", methods=["POST"]) return {
@require_auth "period": period,
@require_role("admin") "users": top_users_list,
def normalize_sound(sound_id: int) -> dict: }
"""Normalize a specific sound file (admin only)."""
overwrite = request.args.get("overwrite", "false").lower() == "true"
return SoundNormalizerService.normalize_sound(sound_id, overwrite)
@bp.route("/admin/sounds/normalize-all", methods=["POST"])
@require_auth
@require_role("admin")
def normalize_all_sounds() -> dict:
"""Normalize all soundboard files (admin only)."""
overwrite = request.args.get("overwrite", "false").lower() == "true"
limit_str = request.args.get("limit")
limit = int(limit_str) if limit_str else None
return SoundNormalizerService.normalize_all_sounds(overwrite, limit)
@bp.route("/admin/sounds/normalization-status")
@require_auth
@require_role("admin")
def normalization_status() -> dict:
"""Get normalization status statistics (admin only)."""
return SoundNormalizerService.get_normalization_status()
@bp.route("/admin/sounds/ffmpeg-check")
@require_auth
@require_role("admin")
def ffmpeg_check() -> dict:
"""Check ffmpeg availability and capabilities (admin only)."""
return SoundNormalizerService.check_ffmpeg_availability()

app/routes/player.py (new file, 207 lines)

@@ -0,0 +1,207 @@
"""Music player API routes."""
from flask import Blueprint, jsonify, request
from app.services.decorators import require_auth
from app.services.error_handling_service import ErrorHandlingService
from app.services.music_player_service import music_player_service
bp = Blueprint("player", __name__)
@bp.route("/state", methods=["GET"])
@require_auth
def get_player_state():
"""Get current player state."""
try:
state = music_player_service.get_player_state()
return jsonify(state), 200
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/play", methods=["POST"])
@require_auth
def play():
"""Start playback."""
try:
success = music_player_service.play()
if success:
return jsonify({"message": "Playback started"}), 200
return jsonify({"error": "Failed to start playback"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/pause", methods=["POST"])
@require_auth
def pause():
"""Pause playback."""
try:
success = music_player_service.pause()
if success:
return jsonify({"message": "Playback paused"}), 200
return jsonify({"error": "Failed to pause playback"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/stop", methods=["POST"])
@require_auth
def stop():
"""Stop playback."""
try:
success = music_player_service.stop()
if success:
return jsonify({"message": "Playback stopped"}), 200
return jsonify({"error": "Failed to stop playback"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/next", methods=["POST"])
@require_auth
def next_track():
"""Skip to next track."""
try:
success = music_player_service.next_track()
if success:
return jsonify({"message": "Skipped to next track"}), 200
return jsonify({"error": "Failed to skip to next track"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/previous", methods=["POST"])
@require_auth
def previous_track():
"""Skip to previous track."""
try:
success = music_player_service.previous_track()
if success:
return jsonify({"message": "Skipped to previous track"}), 200
return jsonify({"error": "Failed to skip to previous track"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/seek", methods=["POST"])
@require_auth
def seek():
"""Seek to position."""
try:
data = request.get_json()
if not data or "position" not in data:
return jsonify({"error": "Position required"}), 400
position = float(data["position"])
if not 0.0 <= position <= 1.0:
return (
jsonify({"error": "Position must be between 0.0 and 1.0"}),
400,
)
success = music_player_service.seek(position)
if success:
return jsonify({"message": "Seek successful"}), 200
return jsonify({"error": "Failed to seek"}), 400
except (ValueError, TypeError):
return jsonify({"error": "Invalid position value"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/volume", methods=["POST"])
@require_auth
def set_volume():
"""Set volume."""
try:
data = request.get_json()
if not data or "volume" not in data:
return jsonify({"error": "Volume required"}), 400
volume = int(data["volume"])
if not 0 <= volume <= 100:
return jsonify({"error": "Volume must be between 0 and 100"}), 400
success = music_player_service.set_volume(volume)
if success:
return jsonify({"message": "Volume set successfully"}), 200
return jsonify({"error": "Failed to set volume"}), 400
except (ValueError, TypeError):
return jsonify({"error": "Invalid volume value"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/mode", methods=["POST"])
@require_auth
def set_play_mode():
"""Set play mode."""
try:
data = request.get_json()
if not data or "mode" not in data:
return jsonify({"error": "Mode required"}), 400
mode = data["mode"]
valid_modes = [
"continuous",
"loop-playlist",
"loop-one",
"random",
"single",
]
if mode not in valid_modes:
return (
jsonify(
{"error": f"Mode must be one of: {', '.join(valid_modes)}"}
),
400,
)
success = music_player_service.set_play_mode(mode)
if success:
return jsonify({"message": "Play mode set successfully"}), 200
return jsonify({"error": "Failed to set play mode"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/playlist", methods=["POST"])
@require_auth
def load_playlist():
"""Load a playlist into the player."""
try:
data = request.get_json()
if not data or "playlist_id" not in data:
return jsonify({"error": "Playlist ID required"}), 400
playlist_id = int(data["playlist_id"])
success = music_player_service.load_playlist(playlist_id)
if success:
return jsonify({"message": "Playlist loaded successfully"}), 200
return jsonify({"error": "Failed to load playlist"}), 400
except (ValueError, TypeError):
return jsonify({"error": "Invalid playlist ID"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
@bp.route("/play-track", methods=["POST"])
@require_auth
def play_track():
"""Play track at specific index."""
try:
data = request.get_json()
if not data or "index" not in data:
return jsonify({"error": "Track index required"}), 400
index = int(data["index"])
success = music_player_service.play_track_at_index(index)
if success:
return jsonify({"message": "Track playing"}), 200
return jsonify({"error": "Failed to play track"}), 400
except (ValueError, TypeError):
return jsonify({"error": "Invalid track index"}), 400
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
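These endpoints are straightforward to smoke-test from Python; the sketch below assumes the blueprint is mounted under /api/player on localhost:5000 and that a flask-jwt-extended access cookie is available. Both the prefix and the cookie name are assumptions, not something fixed by this diff.
# Sketch: exercising the player endpoints with requests (prefix and cookie name assumed).
import requests

BASE = "http://localhost:5000/api/player"
cookies = {"access_token_cookie": "<jwt>"}  # flask-jwt-extended default cookie name

print(requests.get(f"{BASE}/state", cookies=cookies).json())
requests.post(f"{BASE}/volume", json={"volume": 60}, cookies=cookies)
requests.post(f"{BASE}/seek", json={"position": 0.5}, cookies=cookies)
requests.post(f"{BASE}/mode", json={"mode": "loop-playlist"}, cookies=cookies)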

app/routes/referential.py (new file, 17 lines)

@@ -0,0 +1,17 @@
"""Referential routes for reference data."""
from flask import Blueprint
bp = Blueprint("referential", __name__)
@bp.route("/plans")
def list_plans() -> dict:
"""List all available plans."""
from app.models.plan import Plan
plans = Plan.query.order_by(Plan.id).all()
return {
"plans": [plan.to_dict() for plan in plans],
"total": len(plans)
}
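Since /plans carries no @require_auth, a plain GET is enough to verify it; the /api/referential prefix in this sketch is an assumption about how the blueprint gets registered.
# Sketch: /plans is public, so no auth is needed (URL prefix is an assumption).
import requests

data = requests.get("http://localhost:5000/api/referential/plans").json()
print(data["total"], "plans:", data["plans"])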

app/routes/soundboard.py (new file, 164 lines)

@@ -0,0 +1,164 @@
"""Soundboard routes."""
from flask import Blueprint, jsonify, request
from app.models.sound import Sound, SoundType
from app.models.sound_played import SoundPlayed
from app.services.decorators import (
get_current_user,
require_auth,
require_credits,
)
from app.services.vlc_service import vlc_service
bp = Blueprint("soundboard", __name__)
@bp.route("/sounds", methods=["GET"])
@require_auth
def get_sounds():
"""Get all soundboard sounds."""
try:
# Get filter parameters
sound_type = request.args.get("type", "SDB")
# Validate sound type
if sound_type not in [t.value for t in SoundType]:
return jsonify({"error": "Invalid sound type"}), 400
# Get sounds from database
sounds = Sound.find_by_type(sound_type)
# Order by name
sounds = sorted(sounds, key=lambda s: s.name.lower())
# Convert to dict format
sounds_data = [sound.to_dict() for sound in sounds]
return jsonify(
{
"sounds": sounds_data,
"total": len(sounds_data),
"type": sound_type,
},
)
except Exception as e:
return jsonify({"error": str(e)}), 500
@bp.route("/sounds/<int:sound_id>/play", methods=["POST"])
@require_auth
@require_credits(1)
def play_sound(sound_id: int):
"""Play a specific sound."""
try:
# Get current user for tracking
user = get_current_user()
user_id = int(user["id"]) if user else None
success = vlc_service.play_sound(
sound_id=sound_id,
user_id=user_id,
)
if success:
# Get updated sound data to emit the new play count
sound = Sound.query.get(sound_id)
if sound:
# Emit sound_changed event to all connected clients
try:
from app.services.socketio_service import SocketIOService
SocketIOService.emit_sound_play_count_changed(
sound_id, sound.play_count
)
except Exception as e:
# Don't fail the request if socket emission fails
import logging
logger = logging.getLogger(__name__)
logger.warning(
f"Failed to emit sound_play_count_changed event: {e}"
)
return jsonify({"message": "Sound playing", "sound_id": sound_id})
return (
jsonify({"error": "Sound not found or cannot be played"}),
404,
)
except Exception as e:
return jsonify({"error": str(e)}), 500
@bp.route("/stop-all", methods=["POST"])
@require_auth
def stop_all_sounds():
"""Stop all currently playing sounds."""
try:
# Try normal stop first
vlc_service.stop_all()
# Wait a moment and check if any are still playing
import time
time.sleep(0.2)
# If there are still instances, force stop them
if vlc_service.get_playing_count() > 0:
stopped_count = vlc_service.force_stop_all()
return jsonify(
{
"message": f"Force stopped {stopped_count} sounds",
"forced": True,
},
)
return jsonify({"message": "All sounds stopped"})
except Exception as e:
return jsonify({"error": str(e)}), 500
@bp.route("/force-stop", methods=["POST"])
@require_auth
def force_stop_all_sounds():
"""Force stop all sounds with aggressive cleanup."""
try:
stopped_count = vlc_service.force_stop_all()
return jsonify(
{
"message": f"Force stopped {stopped_count} sound instances",
"stopped_count": stopped_count,
},
)
except Exception as e:
return jsonify({"error": str(e)}), 500
@bp.route("/status", methods=["GET"])
@require_auth
def get_status():
"""Get current playback status."""
try:
playing_count = vlc_service.get_playing_count()
# Get detailed process information
with vlc_service.lock:
processes = []
for process_id, process in vlc_service.processes.items():
processes.append(
{
"id": process_id,
"pid": process.pid,
"running": process.poll() is None,
},
)
return jsonify(
{
"playing_count": playing_count,
"is_playing": playing_count > 0,
"processes": processes,
},
)
except Exception as e:
return jsonify({"error": str(e)}), 500
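A minimal sketch of driving the soundboard from a script; the /api/soundboard prefix, the cookie name, and the sound id are assumptions for illustration only.
# Sketch: playing soundboard sound 42, then stopping everything (prefix/cookie assumed).
import requests

BASE = "http://localhost:5000/api/soundboard"
cookies = {"access_token_cookie": "<jwt>"}

r = requests.post(f"{BASE}/sounds/42/play", cookies=cookies)
print(r.status_code, r.json())  # rejected by @require_credits(1) when credits run out
requests.post(f"{BASE}/stop-all", cookies=cookies)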

app/routes/sounds.py (new file, 99 lines)

@@ -0,0 +1,99 @@
"""Routes for serving sound files and thumbnails."""
import os
from flask import Blueprint, send_from_directory, abort
from app.services.decorators import require_auth
bp = Blueprint("sounds", __name__)
@bp.route("/<sound_type>/thumbnails/<path:filename>")
def serve_thumbnail(sound_type, filename):
"""Serve thumbnail files for sounds."""
try:
# Map sound type codes to directory names
type_mapping = {
"str": "stream",
"sdb": "soundboard",
"say": "say"
}
# Security: validate sound type
if sound_type not in type_mapping:
abort(404)
# Basic filename validation (no path traversal)
if ".." in filename or "/" in filename or "\\" in filename:
abort(404)
if not filename or not filename.strip():
abort(404)
# Get the actual directory name
directory_name = type_mapping[sound_type]
# Construct the thumbnail directory path
sounds_dir = os.path.join(os.getcwd(), "sounds")
thumbnail_dir = os.path.join(sounds_dir, directory_name, "thumbnails")
# Check if thumbnail directory exists
if not os.path.exists(thumbnail_dir):
abort(404)
# Check if file exists
file_path = os.path.join(thumbnail_dir, filename)
if not os.path.exists(file_path):
abort(404)
# Serve the file
return send_from_directory(thumbnail_dir, filename)
except Exception:
abort(404)
@bp.route("/<sound_type>/audio/<path:filename>")
@require_auth
def serve_audio(sound_type, filename):
"""Serve audio files for sounds."""
try:
# Map sound type codes to directory names
type_mapping = {
"str": "stream",
"sdb": "soundboard",
"say": "say"
}
# Security: validate sound type
if sound_type not in type_mapping:
abort(404)
# Basic filename validation (no path traversal)
if ".." in filename or "/" in filename or "\\" in filename:
abort(404)
if not filename or not filename.strip():
abort(404)
# Get the actual directory name
directory_name = type_mapping[sound_type]
# Construct the audio directory path
sounds_dir = os.path.join(os.getcwd(), "sounds")
audio_dir = os.path.join(sounds_dir, directory_name)
# Check if audio directory exists
if not os.path.exists(audio_dir):
abort(404)
# Check if file exists
file_path = os.path.join(audio_dir, filename)
if not os.path.exists(file_path):
abort(404)
# Serve the file
return send_from_directory(audio_dir, filename)
except Exception:
abort(404)
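Both routes repeat the same manual traversal checks; werkzeug's safe_join can express that guard in a single call. The following is a sketch of the equivalent check, offered as an alternative rather than what the routes above actually use.
# Sketch: the same traversal guard via werkzeug's safe_join (alternative, not the route's code).
import os
from werkzeug.utils import safe_join

def resolve_thumbnail(directory_name: str, filename: str) -> str | None:
    thumbnail_dir = os.path.join(os.getcwd(), "sounds", directory_name, "thumbnails")
    full_path = safe_join(thumbnail_dir, filename)  # None if filename escapes the directory
    if full_path and os.path.isfile(full_path):
        return full_path
    return None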

app/routes/stream.py (new file, 92 lines)

@@ -0,0 +1,92 @@
"""Stream routes for managing streaming service links."""
from flask import Blueprint, jsonify, request
from app.database import db
from app.models.stream import Stream
from app.services.decorators import require_auth
bp = Blueprint("stream", __name__)
@bp.route("/add-url", methods=["POST"])
@require_auth
def add_url():
"""Add a URL to the stream processing queue."""
try:
data = request.get_json()
if not data or "url" not in data:
return jsonify({"error": "URL is required"}), 400
url = data["url"].strip()
if not url:
return jsonify({"error": "URL cannot be empty"}), 400
# Check if URL already exists
existing_stream = Stream.query.filter_by(url=url).first()
if existing_stream:
return (
jsonify(
{
"error": "URL already exists in stream",
"stream": existing_stream.to_dict(),
}
),
409,
)
# Try to extract basic metadata to check for service/service_id duplicates
from app.services.stream_processing_service import (
StreamProcessingService,
)
try:
metadata, _ = StreamProcessingService._extract_metadata(url)
if metadata:
service = metadata.get("service")
service_id = metadata.get("service_id")
if service and service_id:
existing_service_stream = Stream.query.filter_by(
service=service, service_id=service_id
).first()
if existing_service_stream:
return (
jsonify(
{
"error": f"Stream already exists with {service} ID: {service_id}",
"existing_stream": existing_service_stream.to_dict(),
}
),
409,
)
except Exception as e:
# If metadata extraction fails here, we'll let the background process handle it
# This is just an early check to prevent obvious duplicates
pass
# Create stream entry with pending status
stream = Stream.create_stream(url=url, status="pending", commit=True)
# Add to processing queue (will be implemented next)
from app.services.stream_processing_service import (
StreamProcessingService,
)
StreamProcessingService.add_to_queue(stream.id)
return (
jsonify(
{
"message": "URL added to processing queue",
"stream": stream.to_dict(),
}
),
201,
)
except Exception as e:
db.session.rollback()
return jsonify({"error": str(e)}), 500
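A sketch of queueing a URL from a script; the /api/stream prefix, the cookie name, and the example URL are assumptions.
# Sketch: queueing a URL for background processing (prefix and cookie name assumed).
import requests

resp = requests.post(
    "http://localhost:5000/api/stream/add-url",
    json={"url": "https://www.youtube.com/watch?v=example"},
    cookies={"access_token_cookie": "<jwt>"},
)
print(resp.status_code, resp.json())  # 201 on success, 409 for duplicates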

View File

@@ -149,7 +149,10 @@ class AuthService:
        return None

    def register_with_password(
-       self, email: str, password: str, name: str,
+       self,
+       email: str,
+       password: str,
+       name: str,
    ) -> Any:
        """Register new user with email and password."""
        try:

View File

@@ -2,6 +2,7 @@
import logging
from datetime import datetime
+from zoneinfo import ZoneInfo
from app.database import db
from app.models.user import User
@@ -44,7 +45,9 @@ class CreditService:
        for user in users:
            if not user.plan:
-               logger.warning(f"User {user.email} has no plan assigned, skipping")
+               logger.warning(
+                   f"User {user.email} has no plan assigned, skipping",
+               )
                continue
            # Calculate new credit amount, capped at plan max
@@ -53,12 +56,15 @@ class CreditService:
            max_credits = user.plan.max_credits
            # Add daily credits but don't exceed maximum
-           new_credits = min(current_credits + plan_daily_credits, max_credits)
+           new_credits = min(
+               current_credits + plan_daily_credits,
+               max_credits,
+           )
            credits_added = new_credits - current_credits
            if credits_added > 0:
                user.credits = new_credits
-               user.updated_at = datetime.utcnow()
+               user.updated_at = datetime.now(tz=ZoneInfo("UTC"))
                total_credits_added += credits_added
                logger.debug(
@@ -100,34 +106,3 @@ class CreditService:
"error": str(e), "error": str(e),
"message": "Credit refill failed", "message": "Credit refill failed",
} }
@staticmethod
def get_user_credit_info(user_id: int) -> dict:
"""Get detailed credit information for a specific user.
Args:
user_id: The user's ID
Returns:
dict: User's credit information
"""
user = User.query.get(user_id)
if not user:
return {"error": "User not found"}
if not user.plan:
return {"error": "User has no plan assigned"}
return {
"user_id": user.id,
"email": user.email,
"current_credits": user.credits,
"plan": {
"code": user.plan.code,
"name": user.plan.name,
"daily_credits": user.plan.credits,
"max_credits": user.plan.max_credits,
},
"is_active": user.is_active,
}

View File

@@ -146,6 +146,11 @@ def require_role(required_role: str):
    return decorator
+def require_admin(f):
+    """Decorator to require admin role for routes."""
+    return require_role("admin")(f)
def require_credits(credits_needed: int):
    """Decorator to require and deduct credits for routes."""
@@ -167,6 +172,22 @@ def require_credits(credits_needed: int):
            # Check if user has enough credits
            if user.credits < credits_needed:
+               # Emit credits required event via SocketIO
+               try:
+                   from app.services.socketio_service import socketio_service
+                   socketio_service.emit_credits_required(
+                       user.id, credits_needed
+                   )
+               except Exception as e:
+                   # Don't fail the request if SocketIO emission fails
+                   import logging
+                   logger = logging.getLogger(__name__)
+                   logger.warning(
+                       f"Failed to emit credits_required event: {e}"
+                   )
                return (
                    jsonify(
                        {
@@ -180,6 +201,18 @@ def require_credits(credits_needed: int):
            user.credits -= credits_needed
            db.session.commit()
+           # Emit credits changed event via SocketIO
+           try:
+               from app.services.socketio_service import socketio_service
+               socketio_service.emit_credits_changed(user.id, user.credits)
+           except Exception as e:
+               # Don't fail the request if SocketIO emission fails
+               import logging
+               logger = logging.getLogger(__name__)
+               logger.warning(f"Failed to emit credits_changed event: {e}")
            # Execute the function
            result = f(*args, **kwargs)
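On the client side, these emissions could be observed with a python-socketio client. The event names below are only inferred from the emit_* helper names and the Socket.IO path is an assumption, so treat this purely as a sketch.
# Sketch: observing the credit events from a client (event names and path are assumptions).
import socketio

sio = socketio.Client()

@sio.on("credits_changed")
def on_credits_changed(data):
    print("credits now:", data)

@sio.on("credits_required")
def on_credits_required(data):
    print("more credits needed:", data)

sio.connect("http://localhost:5000", socketio_path="/api/socket.io")
sio.wait()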

View File

@@ -0,0 +1,133 @@
"""Centralized error handling service for consistent API responses."""
from typing import Any
from flask import jsonify
class ErrorHandlingService:
"""Service for standardized error handling and responses."""
@staticmethod
def handle_validation_error(error: ValueError) -> tuple[Any, int]:
"""Handle validation errors consistently."""
error_str = str(error)
# Map common validation errors to appropriate HTTP status codes
status_code = 400
if "not found" in error_str.lower():
status_code = 404
elif (
"not authorized" in error_str.lower()
or "permission" in error_str.lower()
):
status_code = 403
elif (
"already exists" in error_str.lower()
or "already linked" in error_str.lower()
):
status_code = 409
elif (
"not configured" in error_str.lower()
or "cannot unlink" in error_str.lower()
):
status_code = 400
elif "not deletable" in error_str.lower():
status_code = 403
return jsonify({"error": error_str}), status_code
@staticmethod
def handle_generic_error(error: Exception) -> tuple[Any, int]:
"""Handle generic exceptions with 500 status."""
return jsonify({"error": str(error)}), 500
@staticmethod
def handle_service_result(result: dict) -> tuple[Any, int]:
"""Handle service method results that return success/error dictionaries."""
if result.get("success"):
return jsonify(result), 200
return jsonify(result), 400
@staticmethod
def create_success_response(
message: str,
data: dict = None,
status_code: int = 200,
) -> tuple[Any, int]:
"""Create a standardized success response."""
response = {"message": message}
if data:
response.update(data)
return jsonify(response), status_code
@staticmethod
def create_error_response(
message: str,
status_code: int = 400,
details: dict = None,
) -> tuple[Any, int]:
"""Create a standardized error response."""
response = {"error": message}
if details:
response.update(details)
return jsonify(response), status_code
@staticmethod
def handle_auth_error(error_type: str) -> tuple[Any, int]:
"""Handle common authentication errors."""
auth_errors = {
"user_not_authenticated": ("User not authenticated", 401),
"user_not_found": ("User not found", 404),
"invalid_credentials": ("Invalid credentials", 401),
"account_disabled": ("Account is disabled", 401),
"insufficient_credits": ("Insufficient credits", 402),
"admin_required": ("Admin privileges required", 403),
}
if error_type in auth_errors:
message, status = auth_errors[error_type]
return jsonify({"error": message}), status
return jsonify({"error": "Authentication error"}), 401
@staticmethod
def handle_file_operation_error(
operation: str, error: Exception
) -> tuple[Any, int]:
"""Handle file operation errors consistently."""
error_message = f"Failed to {operation}: {error!s}"
# Check for specific file operation errors
if (
"not found" in str(error).lower()
or "no such file" in str(error).lower()
):
return jsonify({"error": f"File not found during {operation}"}), 404
if "permission" in str(error).lower():
return jsonify(
{"error": f"Permission denied during {operation}"}
), 403
return jsonify({"error": error_message}), 500
@staticmethod
def wrap_service_call(service_func, *args, **kwargs) -> tuple[Any, int]:
"""Wrap service calls with standardized error handling."""
try:
result = service_func(*args, **kwargs)
# If result is a dictionary with success/error structure
if isinstance(result, dict) and "success" in result:
return ErrorHandlingService.handle_service_result(result)
# If result is a simple dictionary (like user data)
if isinstance(result, dict):
return jsonify(result), 200
# For other types, assume success
return jsonify({"result": result}), 200
except ValueError as e:
return ErrorHandlingService.handle_validation_error(e)
except Exception as e:
return ErrorHandlingService.handle_generic_error(e)
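A sketch of how a route could lean on wrap_service_call instead of hand-rolled try/except blocks; the route, blueprint name, and the hard-coded user id are hypothetical.
# Sketch: a hypothetical route delegating error mapping to the service.
from flask import Blueprint
from app.services.decorators import require_auth
from app.services.error_handling_service import ErrorHandlingService
from app.services.oauth_linking_service import OAuthLinkingService

bp = Blueprint("example", __name__)

@bp.route("/me/providers")
@require_auth
def my_providers():
    # ValueError maps to 400/403/404/409 via handle_validation_error; anything else to 500.
    return ErrorHandlingService.wrap_service_call(
        OAuthLinkingService.get_user_providers,
        1,  # user id hard-coded for the sketch
    )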

View File

@@ -0,0 +1,136 @@
"""Centralized logging service for the application."""
import logging
import sys
class LoggingService:
"""Service for configuring and managing application logging."""
@staticmethod
def setup_logging(
level: str = "INFO",
format_string: str | None = None,
) -> None:
"""Setup application-wide logging configuration."""
if format_string is None:
format_string = (
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
# Configure root logger
logging.basicConfig(
level=getattr(logging, level.upper()),
format=format_string,
handlers=[
logging.StreamHandler(sys.stdout),
],
)
# Set specific logger levels for third-party libraries
logging.getLogger("werkzeug").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("requests").setLevel(logging.WARNING)
@staticmethod
def get_logger(name: str) -> logging.Logger:
"""Get a logger instance for a specific module."""
return logging.getLogger(name)
@staticmethod
def log_operation_start(logger: logging.Logger, operation: str) -> None:
"""Log the start of an operation."""
logger.info(f"Starting {operation}")
@staticmethod
def log_operation_success(
logger: logging.Logger,
operation: str,
details: str | None = None,
) -> None:
"""Log successful completion of an operation."""
message = f"Successfully completed {operation}"
if details:
message += f" - {details}"
logger.info(message)
@staticmethod
def log_operation_error(
logger: logging.Logger,
operation: str,
error: Exception,
) -> None:
"""Log an error during an operation."""
logger.error(f"Error during {operation}: {error}")
@staticmethod
def log_validation_error(
logger: logging.Logger,
field: str,
value: str,
reason: str,
) -> None:
"""Log validation errors consistently."""
logger.warning(f"Validation failed for {field}='{value}': {reason}")
@staticmethod
def log_resource_not_found(
logger: logging.Logger,
resource_type: str,
identifier: str,
) -> None:
"""Log when a resource is not found."""
logger.warning(f"{resource_type} not found: {identifier}")
@staticmethod
def log_resource_created(
logger: logging.Logger,
resource_type: str,
identifier: str,
) -> None:
"""Log when a resource is created."""
logger.info(f"Created {resource_type}: {identifier}")
@staticmethod
def log_resource_updated(
logger: logging.Logger,
resource_type: str,
identifier: str,
) -> None:
"""Log when a resource is updated."""
logger.info(f"Updated {resource_type}: {identifier}")
@staticmethod
def log_resource_deleted(
logger: logging.Logger,
resource_type: str,
identifier: str,
) -> None:
"""Log when a resource is deleted."""
logger.info(f"Deleted {resource_type}: {identifier}")
@staticmethod
def log_user_action(
logger: logging.Logger,
user_id: str,
action: str,
resource: str | None = None,
) -> None:
"""Log user actions for auditing."""
message = f"User {user_id} performed action: {action}"
if resource:
message += f" on {resource}"
logger.info(message)
@staticmethod
def log_security_event(
logger: logging.Logger,
event_type: str,
details: str,
user_id: str | None = None,
) -> None:
"""Log security-related events."""
message = f"Security event [{event_type}]: {details}"
if user_id:
message += f" (User: {user_id})"
logger.warning(message)
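A sketch of the intended call pattern for these helpers; the operation name and details are illustrative.
# Sketch: typical module-level usage of the logging helpers above.
from app.services.logging_service import LoggingService

logger = LoggingService.get_logger(__name__)

LoggingService.log_operation_start(logger, "playlist reload")
try:
    # ... perform the work ...
    LoggingService.log_operation_success(logger, "playlist reload", details="12 tracks")
except Exception as exc:
    LoggingService.log_operation_error(logger, "playlist reload", exc)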

View File

@@ -0,0 +1,878 @@
"""Music player service using VLC Python bindings with playlist management and real-time sync."""
import os
import threading
import time
from datetime import datetime
from typing import Any, Optional
from zoneinfo import ZoneInfo
import vlc
from flask import current_app, request
from app.models.playlist import Playlist
from app.models.sound import Sound
from app.models.sound_played import SoundPlayed
from app.services.logging_service import LoggingService
from app.services.socketio_service import socketio_service
logger = LoggingService.get_logger(__name__)
# Constants
TRACK_START_THRESHOLD_MS = 500 # 500 milliseconds - threshold for considering a track as "starting fresh"
STATE_CHANGE_THRESHOLD_MS = (
1000 # 1 second threshold for state change detection
)
PLAY_COMPLETION_THRESHOLD = 0.20 # 20% completion threshold to count as a play
class MusicPlayerService:
"""Service for managing a VLC music player with playlist support."""
def __init__(self):
"""Initialize the music player service."""
self.instance: Optional[vlc.Instance] = None
self.player: Optional[vlc.MediaPlayer] = None
self.app: Optional[Any] = None # Store Flask app instance for context
self.current_playlist_id: Optional[int] = None
self.current_track_index = 0
self.playlist_files: list[str] = (
[]
) # Store file paths for manual playlist management
self.volume = 80
self.play_mode = (
"continuous" # single, continuous, loop-playlist, loop-one, random
)
self.is_playing = False
self.current_time = 0
self.duration = 0
self.last_sync_time = 0
self.sync_interval = (
0.5 # seconds (increased frequency to catch track endings)
)
self.lock = threading.Lock()
self._sync_thread = None
self._stop_sync = False
self._track_ending_handled = (
False # Flag to prevent duplicate ending triggers
)
self._track_play_tracked = (
False # Flag to track if current track play has been logged
)
self._cumulative_play_time = (
0 # Cumulative time actually played for current track
)
self._last_position_update = (
0 # Last position for calculating continuous play time
)
def start_vlc_instance(self) -> bool:
"""Start a VLC instance with Python bindings."""
try:
# Create VLC instance with audio output enabled
vlc_args = [
"--intf=dummy", # No interface
"--no-video", # Audio only
]
self.instance = vlc.Instance(vlc_args)
if not self.instance:
logger.error("Failed to create VLC instance")
return False
# Create media player
self.player = self.instance.media_player_new()
if not self.player:
logger.error("Failed to create VLC media player")
return False
# Set initial volume
self.player.audio_set_volume(self.volume)
logger.info("VLC music player started successfully")
# Automatically load the current playlist
self._load_current_playlist_on_startup()
self._start_sync_thread()
return True
except Exception as e:
logger.error(f"Error starting VLC instance: {e}")
return False
def stop_vlc_instance(self) -> bool:
"""Stop the VLC instance."""
try:
self._stop_sync = True
if self._sync_thread:
self._sync_thread.join(timeout=2)
if self.player:
self.player.stop()
# Release VLC objects
self.player = None
self.instance = None
logger.info("VLC music player stopped")
return True
except Exception as e:
logger.error(f"Error stopping VLC instance: {e}")
return False
def load_playlist(self, playlist_id: int, reload: bool = False) -> bool:
"""Load a playlist into VLC."""
try:
if not self.instance or not self.player:
logger.error("VLC not initialized")
return False
with self.lock:
# Ensure we have Flask app context for database queries
if current_app:
with current_app.app_context():
playlist = Playlist.query.get(playlist_id)
if not playlist:
return False
return self._load_playlist_with_context(
playlist, reload
)
else:
# Fallback for when no Flask context is available
logger.warning(
"No Flask context available for loading playlist"
)
return False
except Exception as e:
logger.error(f"Error loading playlist {playlist_id}: {e}")
return False
def _build_thumbnail_url(
self, sound_type: str, thumbnail_filename: str
) -> str:
"""Build absolute thumbnail URL."""
try:
# Try to get base URL from current request context
if request:
base_url = request.url_root.rstrip("/")
else:
# Fallback to localhost if no request context
base_url = "http://localhost:5000"
return f"{base_url}/api/sounds/{sound_type.lower()}/thumbnails/{thumbnail_filename}"
except Exception:
# Fallback if request context is not available
return f"http://localhost:5000/api/sounds/{sound_type.lower()}/thumbnails/{thumbnail_filename}"
def _build_stream_url(self, sound_type: str, filename: str) -> str:
"""Build absolute stream URL."""
try:
# Try to get base URL from current request context
if request:
base_url = request.url_root.rstrip("/")
else:
# Fallback to localhost if no request context
base_url = "http://localhost:5000"
return f"{base_url}/api/sounds/{sound_type.lower()}/audio/{filename}"
except Exception:
# Fallback if request context is not available
return f"http://localhost:5000/api/sounds/{sound_type.lower()}/audio/{filename}"
def _load_playlist_with_context(
self, playlist, reload: bool = False
) -> bool:
"""Load playlist with database context already established."""
try:
# Clear current playlist
self.playlist_files = []
# Add tracks to our internal playlist
for playlist_sound in sorted(
playlist.playlist_sounds, key=lambda x: x.order
):
sound = playlist_sound.sound
if sound:
file_path = self._get_sound_file_path(sound)
if file_path and os.path.exists(file_path):
self.playlist_files.append(file_path)
deleted = False
if reload:
# Set current track index to the real index of the current track
# in case the order has changed or the track has been deleted
current_track = self.get_current_track()
current_track_id = (
current_track["id"] if current_track else None
)
sound_ids = [
ps.sound.id
for ps in sorted(
playlist.playlist_sounds, key=lambda x: x.order
)
]
if current_track_id in sound_ids:
self.current_track_index = sound_ids.index(current_track_id)
else:
deleted = True
if not reload or deleted:
self.current_playlist_id = playlist.id
self.current_track_index = 0
# Load first track if available
if self.playlist_files:
self._load_track_at_index(0)
# Emit playlist loaded event
self._emit_player_state()
logger.info(
f"Loaded playlist '{playlist.name}' with {len(self.playlist_files)} tracks"
)
return True
except Exception as e:
logger.error(f"Error in _load_playlist_with_context: {e}")
return False
def _load_track_at_index(self, index: int) -> bool:
"""Load a specific track by index."""
try:
if 0 <= index < len(self.playlist_files):
file_path = self.playlist_files[index]
media = self.instance.media_new(file_path)
if media:
self.player.set_media(media)
self.current_track_index = index
# Reset track ending flag when loading a new track
self._track_ending_handled = False
self._track_play_tracked = (
False # Reset play tracking for new track
)
# Reset cumulative play time tracking for new track
self._cumulative_play_time = 0
self._last_position_update = 0
return True
return False
except Exception as e:
logger.error(f"Error loading track at index {index}: {e}")
return False
def _track_sound_play(self, sound_id: int) -> None:
"""Track that a sound has been played."""
try:
# Use stored app instance or current_app
app_to_use = self.app or current_app
if app_to_use:
with app_to_use.app_context():
# Get the sound and increment its play count
sound = Sound.query.get(sound_id)
if sound:
sound.play_count += 1
sound.updated_at = datetime.now(tz=ZoneInfo("UTC"))
logger.info(
f"Incremented play count for sound '{sound.name}' (ID: {sound_id})"
)
# Create a sound played record without user_id (anonymous play)
SoundPlayed.create_play_record(
user_id=None, sound_id=sound_id, commit=True
)
logger.info(
f"Created anonymous play record for sound ID: {sound_id}"
)
except Exception as e:
logger.error(f"Error tracking sound play for sound {sound_id}: {e}")
def _get_sound_file_path(self, sound: Sound) -> Optional[str]:
"""Get the file path for a sound, preferring normalized version."""
try:
base_path = "sounds/stream"
base_normalized_path = "sounds/normalized/stream"
# Check for normalized version first
if sound.is_normalized and sound.normalized_filename:
normalized_path = os.path.join(
base_normalized_path,
sound.normalized_filename,
)
if os.path.exists(normalized_path):
return os.path.abspath(normalized_path)
# Fall back to original file
original_path = os.path.join(base_path, sound.filename)
if os.path.exists(original_path):
return os.path.abspath(original_path)
return None
except Exception as e:
logger.error(f"Error getting file path for sound {sound.id}: {e}")
return None
def play(self) -> bool:
"""Start playback."""
try:
if not self.player:
return False
# Reset track ending flag when starting playback
self._track_ending_handled = False
result = self.player.play()
if result == 0: # Success
self.is_playing = True
self._track_play_tracked = (
False # Track when we first start playing
)
self._emit_player_state()
return True
return False
except Exception as e:
logger.error(f"Error starting playback: {e}")
return False
def pause(self) -> bool:
"""Pause playback."""
try:
if not self.player:
return False
self.player.pause()
self.is_playing = False
self._emit_player_state()
return True
except Exception as e:
logger.error(f"Error pausing playback: {e}")
return False
def stop(self) -> bool:
"""Stop playback."""
try:
if not self.player:
return False
self.player.stop()
self.is_playing = False
self.current_time = 0
self._emit_player_state()
return True
except Exception as e:
logger.error(f"Error stopping playback: {e}")
return False
def next_track(self, force_play: bool = False) -> bool:
"""Skip to next track."""
try:
if not self.playlist_files:
return False
next_index = self.current_track_index + 1
# Handle different play modes
if self.play_mode == "loop-playlist" and next_index >= len(
self.playlist_files
):
next_index = 0
elif self.play_mode == "random":
import random
next_index = random.randint(0, len(self.playlist_files) - 1)
elif next_index >= len(self.playlist_files):
# End of playlist in continuous mode
self.stop()
return True
if self._load_track_at_index(next_index):
if self.is_playing or force_play:
self.play()
self._emit_player_state()
return True
return False
except Exception as e:
logger.error(f"Error skipping to next track: {e}")
return False
def previous_track(self) -> bool:
"""Skip to previous track."""
try:
if not self.playlist_files:
return False
prev_index = self.current_track_index - 1
# Handle different play modes
if self.play_mode == "loop-playlist" and prev_index < 0:
prev_index = len(self.playlist_files) - 1
elif self.play_mode == "random":
import random
prev_index = random.randint(0, len(self.playlist_files) - 1)
elif prev_index < 0:
prev_index = 0
if self._load_track_at_index(prev_index):
if self.is_playing:
self.play()
self._emit_player_state()
return True
return False
except Exception as e:
logger.error(f"Error skipping to previous track: {e}")
return False
def seek(self, position: float) -> bool:
"""Seek to position (0.0 to 1.0)."""
try:
if not self.player:
return False
# Set position as percentage
self.player.set_position(position)
self.current_time = position * self.duration
self._emit_player_state()
return True
except Exception as e:
logger.error(f"Error seeking: {e}")
return False
def set_volume(self, volume: int) -> bool:
"""Set volume (0-100)."""
try:
if not self.player:
return False
volume = max(0, min(100, volume))
result = self.player.audio_set_volume(volume)
if result == 0: # Success
self.volume = volume
self._emit_player_state()
return True
return False
except Exception as e:
logger.error(f"Error setting volume: {e}")
return False
def set_play_mode(self, mode: str) -> bool:
"""Set play mode."""
try:
if mode in [
"continuous",
"loop-playlist",
"loop-one",
"random",
"single",
]:
self.play_mode = mode
self._emit_player_state()
return True
return False
except Exception as e:
logger.error(f"Error setting play mode: {e}")
return False
def play_track_at_index(self, index: int) -> bool:
"""Play track at specific playlist index."""
try:
if self._load_track_at_index(index):
result = self.play()
self._emit_player_state()
return result
return False
except Exception as e:
logger.error(f"Error playing track at index {index}: {e}")
return False
def _get_playlist_length(self) -> int:
"""Get current playlist length."""
return len(self.playlist_files)
def get_current_track(self) -> Optional[dict]:
"""Get current track information."""
try:
if not self.current_playlist_id:
return None
# Use stored app instance or current_app
app_to_use = self.app or current_app
if app_to_use:
with app_to_use.app_context():
playlist = Playlist.query.get(self.current_playlist_id)
if playlist and 0 <= self.current_track_index < len(
playlist.playlist_sounds
):
playlist_sounds = sorted(
playlist.playlist_sounds, key=lambda x: x.order
)
current_playlist_sound = playlist_sounds[
self.current_track_index
]
sound = current_playlist_sound.sound
if sound:
# Get the service URL from the associated stream
service_url = None
if sound.streams:
# Get the first stream's URL if available
service_url = sound.streams[0].url
return {
"id": sound.id,
"title": sound.name,
"artist": None, # Could be extracted from metadata
"duration": sound.duration or 0,
"thumbnail": (
self._build_thumbnail_url(
sound.type, sound.thumbnail
)
if sound.thumbnail
else None
),
"file_url": self._build_stream_url(sound.type, sound.filename),
"service_url": service_url,
"type": sound.type,
}
return None
except Exception as e:
logger.error(f"Error getting current track: {e}")
return None
def get_playlist_tracks(self) -> list[dict]:
"""Get all tracks in current playlist."""
try:
tracks = []
if not self.current_playlist_id:
return tracks
# Ensure we have Flask app context
if current_app:
with current_app.app_context():
playlist = Playlist.query.get(self.current_playlist_id)
if playlist:
for playlist_sound in sorted(
playlist.playlist_sounds, key=lambda x: x.order
):
sound = playlist_sound.sound
if sound:
# Get the service URL from the associated stream
service_url = None
if sound.streams:
# Get the first stream's URL if available
service_url = sound.streams[0].url
tracks.append(
{
"id": sound.id,
"title": sound.name,
"artist": None,
"duration": sound.duration or 0,
"thumbnail": (
self._build_thumbnail_url(
sound.type, sound.thumbnail
)
if sound.thumbnail
else None
),
"file_url": self._build_stream_url(sound.type, sound.filename),
"service_url": service_url,
"type": sound.type,
}
)
return tracks
except Exception as e:
logger.error(f"Error getting playlist tracks: {e}")
return []
def get_player_state(self) -> dict[str, Any]:
"""Get complete player state."""
current_track = self.get_current_track()
return {
"is_playing": self.is_playing,
"current_time": self.current_time,
"duration": self.duration,
"volume": self.volume,
"play_mode": self.play_mode,
"current_track": current_track,
"current_track_id": current_track["id"] if current_track else None,
"current_track_index": self.current_track_index,
"playlist": self.get_playlist_tracks(),
"playlist_id": self.current_playlist_id,
}
def _start_sync_thread(self):
"""Start background thread to sync with VLC state."""
self._stop_sync = False
self._sync_thread = threading.Thread(
target=self._sync_loop, daemon=True
)
self._sync_thread.start()
def _sync_loop(self):
"""Background loop to sync player state with VLC."""
while not self._stop_sync:
try:
current_time = time.time()
if current_time - self.last_sync_time >= self.sync_interval:
self._sync_with_vlc()
self.last_sync_time = current_time
time.sleep(0.1) # Small sleep to prevent busy waiting
except Exception as e:
logger.debug(f"Error in sync loop: {e}")
time.sleep(1) # Longer sleep on error
def _sync_with_vlc(self):
"""Sync internal state with VLC."""
try:
if not self.player:
return
# Update playback state
old_playing = self.is_playing
old_time = self.current_time
# Get current state from VLC
state = self.player.get_state()
self.is_playing = state == vlc.State.Playing
# Get time and duration (in milliseconds)
self.current_time = self.player.get_time()
self.duration = self.player.get_length()
# Get volume
self.volume = self.player.audio_get_volume()
# Enhanced track ending detection
track_ended = False
# Check for ended state
if state == vlc.State.Ended:
track_ended = True
logger.info(
f"Track ended via VLC State.Ended, mode: {self.play_mode}"
)
# Also check if we're very close to the end (within 500ms) and not playing
elif (
self.duration > 0
and self.current_time > 0
and self.current_time >= (self.duration - 500)
and not self.is_playing
and old_playing
):
track_ended = True
logger.info(
f"Track ended via time check, mode: {self.play_mode}"
)
# Handle track ending based on play mode (only if not already handled)
if track_ended and not self._track_ending_handled:
self._track_ending_handled = True
if self.play_mode == "loop-one":
logger.info("Restarting track for loop-one mode")
self.play_track_at_index(self.current_track_index)
elif self.play_mode == "single":
logger.info(
"Track ended in single mode - stopping playback"
)
self.stop()
elif self.play_mode in [
"continuous",
"loop-playlist",
"random",
]:
logger.info(
f"Advancing to next track for {self.play_mode} mode"
)
self.next_track(True)
# Reset the flag after track change
self._track_ending_handled = False
# Reset the flag if we're playing again (new track started)
elif self.is_playing and not old_playing:
self._track_ending_handled = False
# Update cumulative play time for continuous listening tracking
if self.is_playing and old_playing and self.current_time > 0:
# Calculate time elapsed since last update (but cap it to prevent huge jumps from seeking)
if self._last_position_update > 0:
time_diff = self.current_time - self._last_position_update
# Only add time if it's a reasonable progression (not a big jump from seeking)
if (
0 <= time_diff <= (self.sync_interval * 1000 * 2)
): # Max 2x sync interval
self._cumulative_play_time += time_diff
self._last_position_update = self.current_time
elif self.is_playing and not old_playing:
# Just started playing, initialize position tracking
self._last_position_update = (
self.current_time if self.current_time > 0 else 0
)
# Track play event when cumulative listening reaches 20% of track duration
if (
self.is_playing
and not self._track_play_tracked
and self.duration > 0
and self._cumulative_play_time
>= (self.duration * PLAY_COMPLETION_THRESHOLD)
):
current_track = self.get_current_track()
if current_track:
self._track_sound_play(current_track["id"])
self._track_play_tracked = True
logger.info(
f"Tracked play for '{current_track['title']}' after {self._cumulative_play_time}ms "
f"cumulative listening ({(self._cumulative_play_time/self.duration)*100:.1f}% of track)"
)
# Emit updates if state changed significantly or periodically
state_changed = (
old_playing != self.is_playing
or abs(old_time - self.current_time)
> STATE_CHANGE_THRESHOLD_MS # More than 1 second difference
)
# Always emit if playing to keep frontend updated
if state_changed or self.is_playing:
self._emit_player_state()
except Exception as e:
logger.debug(f"Error syncing with VLC: {e}")
def _emit_player_state(self):
"""Emit current player state via SocketIO."""
try:
# Update state from VLC before emitting
self._sync_vlc_state_only()
# Try to use Flask context for database queries
app_to_use = self.app or current_app
if app_to_use:
with app_to_use.app_context():
state = self.get_player_state()
socketio_service.emit_to_all("player_state_update", state)
logger.info(
f"Emitted player state: playing={state['is_playing']}, time={state['current_time']}, track={state.get('current_track', {}).get('title', 'None')}"
)
else:
# Fallback when no Flask context - emit basic state without database queries
basic_state = {
"is_playing": self.is_playing,
"current_time": self.current_time,
"duration": self.duration,
"volume": self.volume,
"play_mode": self.play_mode,
"current_track": None,
"current_track_id": None,
"current_track_index": self.current_track_index,
"playlist": [],
"playlist_id": self.current_playlist_id,
}
socketio_service.emit_to_all("player_state_update", basic_state)
logger.info(
f"Emitted basic player state: playing={basic_state['is_playing']}, time={basic_state['current_time']}"
)
except Exception as e:
logger.debug(f"Error emitting player state: {e}")
def _sync_vlc_state_only(self):
"""Sync only the VLC state without auto-advance logic."""
try:
if not self.player:
return
# Get current state from VLC
state = self.player.get_state()
self.is_playing = state == vlc.State.Playing
# Get time and duration (in milliseconds)
self.current_time = self.player.get_time()
self.duration = self.player.get_length()
# Get volume
self.volume = self.player.audio_get_volume()
except Exception as e:
logger.debug(f"Error syncing VLC state: {e}")
def _load_current_playlist_on_startup(self):
"""Load the current playlist automatically on startup."""
try:
if not self.app:
logger.warning(
"No Flask app context available, skipping current playlist load"
)
return
with self.app.app_context():
# Find the current playlist
current_playlist = Playlist.find_current_playlist()
if current_playlist:
success = self.load_playlist(current_playlist.id)
if success:
logger.info(
f"Automatically loaded current playlist '{current_playlist.name}' with {len(self.playlist_files)} tracks"
)
else:
logger.warning(
"Failed to load current playlist on startup"
)
else:
logger.info("No current playlist found to load on startup")
except Exception as e:
logger.error(f"Error loading current playlist on startup: {e}")
def reload_current_playlist_if_modified(
self, modified_playlist_id: int
) -> bool:
"""Reload the current playlist if it's the one that was modified."""
try:
if not self.app:
logger.warning(
"No Flask app context available, skipping playlist reload"
)
return False
with self.app.app_context():
# Find the current playlist
current_playlist = Playlist.find_current_playlist()
if (
current_playlist
and current_playlist.id == modified_playlist_id
):
# Reload the playlist
success = self.load_playlist(current_playlist.id, True)
if success:
logger.info(
f"Reloaded current playlist '{current_playlist.name}' after modification"
)
return True
else:
logger.warning(
"Failed to reload current playlist after modification"
)
return False
else:
# Not the current playlist, no need to reload
return True
except Exception as e:
logger.error(f"Error reloading current playlist: {e}")
return False
# Global music player service instance
music_player_service = MusicPlayerService()
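
As a hedged usage sketch (not part of this changeset): a playlist-edit endpoint could call reload_current_playlist_if_modified right after mutating a playlist, mirroring the call made from the stream-processing service later in this compare. The route path and the add_sound call are assumptions.

# Hypothetical playlist-edit endpoint showing where the reload hook fits in
from flask import Blueprint, jsonify, request

from app.models.playlist import Playlist
from app.services.music_player_service import music_player_service

playlists_bp = Blueprint("playlists", __name__)


@playlists_bp.post("/playlists/<int:playlist_id>/sounds")
def add_sound_to_playlist(playlist_id: int):
    playlist = Playlist.query.get_or_404(playlist_id)
    playlist.add_sound(request.json["sound_id"], commit=True)  # same helper the stream service uses
    # If the edited playlist is the one currently loaded, the running player reloads it
    music_player_service.reload_current_playlist_if_modified(playlist.id)
    return jsonify({"reloaded": True})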

View File

@@ -0,0 +1,108 @@
"""OAuth provider linking service."""
from authlib.integrations.flask_client import OAuth
from app.models.user import User
from app.models.user_oauth import UserOAuth
from app.services.oauth_providers.registry import OAuthProviderRegistry
class OAuthLinkingService:
"""Service for linking and unlinking OAuth providers."""
@staticmethod
def link_provider_to_user(
provider: str,
current_user_id: int,
) -> dict:
"""Link a new OAuth provider to existing user account."""
# Get current user from database
user = User.query.get(current_user_id)
if not user:
raise ValueError("User not found")
# Get OAuth provider and process callback
oauth = OAuth()
registry = OAuthProviderRegistry(oauth)
oauth_provider = registry.get_provider(provider)
if not oauth_provider:
raise ValueError(f"OAuth provider '{provider}' not configured")
# Exchange code for token and get user info
token = oauth_provider.exchange_code_for_token(None, None)
raw_user_info = oauth_provider.get_user_info(token)
provider_data = oauth_provider.normalize_user_data(raw_user_info)
if not provider_data.get("id"):
raise ValueError("Failed to get user information from provider")
# Check if this provider is already linked to another user
existing_provider = UserOAuth.find_by_provider_and_id(
provider,
provider_data["id"],
)
if existing_provider and existing_provider.user_id != user.id:
raise ValueError(
"This provider account is already linked to another user",
)
# Link the provider to current user
UserOAuth.create_or_update(
user_id=user.id,
provider=provider,
provider_id=provider_data["id"],
email=provider_data["email"],
name=provider_data["name"],
picture=provider_data.get("picture"),
)
return {"message": f"{provider.title()} account linked successfully"}
@staticmethod
def unlink_provider_from_user(
provider: str,
current_user_id: int,
) -> dict:
"""Unlink an OAuth provider from user account."""
from app.database import db
user = User.query.get(current_user_id)
if not user:
raise ValueError("User not found")
# Check if user has more than one provider (prevent locking out)
if len(user.oauth_providers) <= 1:
raise ValueError("Cannot unlink last authentication provider")
# Find and remove the provider
oauth_provider = user.get_provider(provider)
if not oauth_provider:
raise ValueError(
f"Provider '{provider}' not linked to this account",
)
db.session.delete(oauth_provider)
db.session.commit()
return {"message": f"{provider.title()} account unlinked successfully"}
@staticmethod
def get_user_providers(user_id: int) -> dict:
"""Get all OAuth providers linked to a user."""
user = User.query.get(user_id)
if not user:
raise ValueError("User not found")
return {
"providers": [
{
"provider": oauth.provider,
"email": oauth.email,
"name": oauth.name,
"picture": oauth.picture,
}
for oauth in user.oauth_providers
],
}
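
A minimal sketch of how these helpers might be exposed over HTTP. The blueprint, route paths, module path, and JWT identity handling are assumptions, not part of this changeset.

# Hypothetical routes built on OAuthLinkingService
from flask import Blueprint, jsonify
from flask_jwt_extended import get_jwt_identity, jwt_required

from app.services.oauth_linking_service import OAuthLinkingService  # assumed module path

oauth_links_bp = Blueprint("oauth_links", __name__)


@oauth_links_bp.get("/oauth/providers")
@jwt_required()
def list_linked_providers():
    try:
        return jsonify(OAuthLinkingService.get_user_providers(int(get_jwt_identity())))
    except ValueError as e:
        return jsonify({"error": str(e)}), 404


@oauth_links_bp.delete("/oauth/<provider>")
@jwt_required()
def unlink_provider(provider: str):
    try:
        return jsonify(OAuthLinkingService.unlink_provider_from_user(provider, int(get_jwt_identity())))
    except ValueError as e:
        return jsonify({"error": str(e)}), 400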

View File

@@ -49,7 +49,9 @@ class OAuthProvider(ABC):
        return client.authorize_redirect(redirect_uri).location

    def exchange_code_for_token(
-        self, code: str = None, redirect_uri: str = None,
+        self,
+        code: str = None,
+        redirect_uri: str = None,
    ) -> dict[str, Any]:
        """Exchange authorization code for access token."""
        client = self.get_client()

View File

@@ -22,7 +22,9 @@ class OAuthProviderRegistry:
        google_client_secret = os.getenv("GOOGLE_CLIENT_SECRET")

        if google_client_id and google_client_secret:
            self._providers["google"] = GoogleOAuthProvider(
-                self.oauth, google_client_id, google_client_secret,
+                self.oauth,
+                google_client_id,
+                google_client_secret,
            )

        # GitHub OAuth
@@ -30,7 +32,9 @@ class OAuthProviderRegistry:
        github_client_secret = os.getenv("GITHUB_CLIENT_SECRET")

        if github_client_id and github_client_secret:
            self._providers["github"] = GitHubOAuthProvider(
-                self.oauth, github_client_id, github_client_secret,
+                self.oauth,
+                github_client_id,
+                github_client_secret,
            )

    def get_provider(self, name: str) -> OAuthProvider | None:

View File

@@ -97,7 +97,9 @@ class SchedulerService:
                    f"{result['credits_added']} credits added",
                )
            else:
-                logger.error(f"Daily credit refill failed: {result['message']}")
+                logger.error(
+                    f"Daily credit refill failed: {result['message']}",
+                )
        except Exception as e:
            logger.exception(f"Error during daily credit refill: {e}")
@@ -119,7 +121,9 @@ class SchedulerService:
                else:
                    logger.debug("Sound scan completed: no new files found")
            else:
-                logger.error(f"Sound scan failed: {result.get('error', 'Unknown error')}")
+                logger.error(
+                    f"Sound scan failed: {result.get('error', 'Unknown error')}",
+                )
        except Exception as e:
            logger.exception(f"Error during sound scan: {e}")
@@ -148,7 +152,8 @@ class SchedulerService:
                    "id": job.id,
                    "name": job.name,
                    "next_run": job.next_run_time.isoformat()
-                    if job.next_run_time else None,
+                    if job.next_run_time
+                    else None,
                    "trigger": str(job.trigger),
                }
                for job in self.scheduler.get_jobs()

View File

@@ -0,0 +1,223 @@
"""SocketIO service for real-time communication."""
import logging
from flask import request
from flask_socketio import disconnect, emit, join_room, leave_room
from app import socketio
from app.services.decorators import require_credits
logger = logging.getLogger(__name__)
class SocketIOService:
"""Service for managing SocketIO connections and user rooms."""
@staticmethod
def get_user_room(user_id: int) -> str:
"""Get the room name for a specific user."""
return f"user_{user_id}"
@staticmethod
def emit_to_user(user_id: int, event: str, data: dict) -> None:
"""Emit an event to a specific user's room."""
room = SocketIOService.get_user_room(user_id)
socketio.emit(event, data, room=room)
logger.debug(f"Emitted {event} to user {user_id} in room {room}")
@staticmethod
def emit_to_all(event: str, data: dict) -> None:
"""Emit an event to all connected clients."""
try:
socketio.emit(event, data)
logger.info(
f"Successfully emitted {event} to all clients with data keys: {list(data.keys())}"
)
except Exception as e:
logger.error(f"Failed to emit {event}: {e}")
@staticmethod
def emit_credits_changed(user_id: int, new_credits: int) -> None:
"""Emit credits_changed event to a user."""
SocketIOService.emit_to_user(
user_id,
"credits_changed",
{"credits": new_credits},
)
@staticmethod
def emit_sound_play_count_changed(sound_id: int, new_play_count: int) -> None:
"""Emit sound_play_count_changed event to all connected clients."""
SocketIOService.emit_to_all(
"sound_play_count_changed",
{"sound_id": sound_id, "play_count": new_play_count},
)
@staticmethod
def emit_credits_required(user_id: int, credits_needed: int) -> None:
"""Emit an event when credits are required."""
SocketIOService.emit_to_user(
user_id,
"credits_required",
{"credits_needed": credits_needed},
)
@staticmethod
def get_user_from_socketio() -> dict | None:
"""Get user from SocketIO connection using cookies."""
try:
from flask import current_app
from flask_jwt_extended import decode_token
# Check if we have the access_token cookie
access_token = request.cookies.get("access_token_cookie")
logger.debug(
f"Access token from cookies: {access_token[:20] if access_token else None}..."
)
if not access_token:
logger.debug("No access token found in cookies")
return None
# Decode the JWT token manually
with current_app.app_context():
try:
decoded_token = decode_token(access_token)
current_user_id = decoded_token["sub"]
logger.debug(f"Decoded user ID: {current_user_id}")
if not current_user_id:
logger.debug("No user ID in token")
return None
except Exception as e:
logger.debug(f"Token decode error: {e}")
return None
# Query database for user data
from app.models.user import User
user = User.query.get(int(current_user_id))
if not user or not user.is_active:
logger.debug(
f"User not found or inactive: {current_user_id}"
)
return None
logger.debug(f"Successfully found user: {user.email}")
return {
"id": str(user.id),
"email": user.email,
"name": user.name,
"role": user.role,
"credits": user.credits,
}
except Exception as e:
logger.debug(f"Exception in get_user_from_socketio: {e}")
return None
@socketio.on("connect")
def handle_connect(auth=None):
"""Handle client connection."""
try:
logger.info(
f"SocketIO connection established from {request.remote_addr}"
)
logger.info(f"Session ID: {request.sid}")
except Exception:
logger.exception("Error handling SocketIO connection")
disconnect()
@socketio.on("authenticate")
def handle_authenticate(data):
"""Handle authentication after connection."""
try:
user = SocketIOService.get_user_from_socketio()
if not user:
logger.warning("SocketIO authentication failed - no user found")
# emit("auth_error", {"error": "Authentication failed"})
disconnect()
return
user_id = int(user["id"])
user_room = SocketIOService.get_user_room(user_id)
# Join user-specific room
join_room(user_room)
logger.info(f"User {user_id} authenticated and joined room {user_room}")
# Send current credits on authentication
SocketIOService.emit_to_user(user_id, "auth_success", {"user": user})
SocketIOService.emit_to_user(
user_id, "credits_changed", {"credits": user["credits"]}
)
except Exception:
logger.exception("Error handling SocketIO authentication")
# emit("auth_error", {"error": "Authentication failed"})
disconnect()
# @socketio.on("play_sound")
# @require_credits(1)
# def handle_play_sound(data):
# """Handle play_sound event from client."""
# try:
# user = SocketIOService.get_user_from_socketio()
# if not user:
# logger.warning("SocketIO play_sound failed - no authenticated user")
# # emit("error", {"message": "Authentication required"})
# return
# user_id = int(user["id"])
# sound_id = data.get("soundId")
# if not sound_id:
# logger.warning("SocketIO play_sound failed - no soundId provided")
# SocketIOService.emit_to_user(
# user_id, "error", {"message": "Sound ID required"}
# )
# return
# # Import and use the VLC service to play the sound
# from app.services.vlc_service import vlc_service
# logger.info(f"User {user_id} playing sound {sound_id} via SocketIO")
# # Play the sound using the VLC service
# success = vlc_service.play_sound(sound_id, user_id)
# if not success:
# SocketIOService.emit_to_user(
# user_id,
# "error",
# {"message": f"Failed to play sound {sound_id}"},
# )
# except Exception as e:
# logger.exception(f"Error handling play_sound event: {e}")
# # emit("error", {"message": "Failed to play sound"})
@socketio.on("disconnect")
def handle_disconnect():
"""Handle client disconnection."""
try:
user = SocketIOService.get_user_from_socketio()
if user:
user_id = int(user["id"])
user_room = SocketIOService.get_user_room(user_id)
leave_room(user_room)
logger.info(f"User {user_id} disconnected from SocketIO")
except Exception:
logger.exception("Error handling SocketIO disconnection")
# Export the service instance
socketio_service = SocketIOService()
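
For reference, a client-side sketch of the authenticate flow above using the python-socketio client. The server URL and the JWT value are placeholders; the cookie name matches what get_user_from_socketio reads.

# Client sketch: connect with the JWT cookie, authenticate, then listen for events
import socketio

sio = socketio.Client()


@sio.on("auth_success")
def on_auth(data):
    print("authenticated as", data["user"]["email"])


@sio.on("credits_changed")
def on_credits(data):
    print("credits:", data["credits"])


@sio.on("player_state_update")
def on_player_state(state):
    print("playing:", state["is_playing"], "at", state["current_time"], "ms")


sio.connect(
    "http://localhost:5000",  # placeholder backend URL
    headers={"Cookie": "access_token_cookie=<jwt>"},  # same cookie the server decodes
)
sio.emit("authenticate", {})
sio.wait()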

View File

@@ -0,0 +1,137 @@
"""Sound management service for admin operations."""
import os
from app.database import db
from app.models.sound import Sound
from app.services.sound_normalizer_service import SoundNormalizerService
class SoundManagementService:
"""Service for managing sound files and database operations."""
@staticmethod
def get_sounds_with_file_status(
sound_type: str = "SDB",
page: int = 1,
per_page: int = 50,
) -> dict:
"""Get paginated sounds with file existence status."""
# Validate sound type
if sound_type not in ["SDB", "SAY", "STR"]:
raise ValueError("Invalid sound type")
# Get paginated results
sounds_query = Sound.query.filter_by(type=sound_type)
total = sounds_query.count()
sounds = (
sounds_query.offset((page - 1) * per_page).limit(per_page).all()
)
# Convert to detailed dict format with file status
sounds_data = []
for sound in sounds:
sound_dict = sound.to_dict()
sound_dict.update(
SoundManagementService._get_file_status(sound),
)
sounds_data.append(sound_dict)
return {
"sounds": sounds_data,
"pagination": {
"page": page,
"per_page": per_page,
"total": total,
"pages": (total + per_page - 1) // per_page,
},
"type": sound_type,
}
@staticmethod
def _get_file_status(sound: Sound) -> dict:
"""Get file existence status for a sound."""
original_path = os.path.join(
"sounds",
sound.type.lower(),
sound.filename,
)
status = {"original_exists": os.path.exists(original_path)}
if sound.is_normalized and sound.normalized_filename:
normalized_path = os.path.join(
"sounds",
"normalized",
sound.type.lower(),
sound.normalized_filename,
)
status["normalized_exists"] = os.path.exists(normalized_path)
else:
status["normalized_exists"] = False
return status
@staticmethod
def delete_sound_with_files(sound_id: int) -> dict:
"""Delete a sound and its associated files."""
sound = Sound.query.get(sound_id)
if not sound:
raise ValueError("Sound not found")
if not sound.is_deletable:
raise ValueError("Sound is not deletable")
errors = []
# Delete normalized file if exists
if sound.is_normalized and sound.normalized_filename:
normalized_path = os.path.join(
"sounds",
"normalized",
sound.type.lower(),
sound.normalized_filename,
)
if os.path.exists(normalized_path):
try:
os.remove(normalized_path)
except Exception as e:
errors.append(f"Failed to delete normalized file: {e}")
# Delete original file
original_path = os.path.join(
"sounds",
sound.type.lower(),
sound.filename,
)
if os.path.exists(original_path):
try:
os.remove(original_path)
except Exception as e:
errors.append(f"Failed to delete original file: {e}")
if errors:
raise Exception("; ".join(errors))
# Delete database record
sound_name = sound.name
db.session.delete(sound)
db.session.commit()
return {
"message": f"Sound '{sound_name}' deleted successfully",
"sound_id": sound_id,
}
@staticmethod
def normalize_sound(
sound_id: int,
overwrite: bool = False,
two_pass: bool = True,
) -> dict:
"""Normalize a specific sound."""
return SoundNormalizerService.normalize_sound(
sound_id,
overwrite,
two_pass,
)
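
A hedged sketch of an admin endpoint built on this service; the blueprint, route path, module path, and query-parameter names are assumptions.

# Hypothetical admin listing endpoint using SoundManagementService
from flask import Blueprint, jsonify, request

from app.services.sound_management_service import SoundManagementService  # assumed module path

admin_sounds_bp = Blueprint("admin_sounds", __name__)


@admin_sounds_bp.get("/admin/sounds")
def list_sounds():
    try:
        result = SoundManagementService.get_sounds_with_file_status(
            sound_type=request.args.get("type", "SDB"),
            page=request.args.get("page", 1, type=int),
            per_page=request.args.get("per_page", 50, type=int),
        )
        return jsonify(result)
    except ValueError as e:
        return jsonify({"error": str(e)}), 400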

View File

@@ -7,7 +7,6 @@ import re
from pathlib import Path

import ffmpeg
-from pydub import AudioSegment

from app.database import db
from app.models.sound import Sound
@@ -27,8 +26,17 @@ class SoundNormalizerService:
        ".aac",
        ".opus",
    }

-    SOUNDS_DIR = "sounds/soundboard"
-    NORMALIZED_DIR = "sounds/normalized/soundboard"
+    # Sound directories by type
+    SOUND_DIRS = {
+        "SDB": "sounds/soundboard",
+        "SAY": "sounds/say",
+        "STR": "sounds/stream"
+    }
+    NORMALIZED_DIRS = {
+        "SDB": "sounds/normalized/soundboard",
+        "SAY": "sounds/normalized/say",
+        "STR": "sounds/normalized/stream"
+    }

    LOUDNORM_PARAMS = {
@@ -38,7 +46,11 @@ class SoundNormalizerService:
    }

    @staticmethod
-    def normalize_sound(sound_id: int, overwrite: bool = False, two_pass: bool = True) -> dict:
+    def normalize_sound(
+        sound_id: int,
+        overwrite: bool = False,
+        two_pass: bool = True,
+    ) -> dict:
        """Normalize a specific sound file using ffmpeg loudnorm.

        Args:
@@ -58,7 +70,17 @@ class SoundNormalizerService:
                "error": f"Sound with ID {sound_id} not found",
            }

-        source_path = Path(SoundNormalizerService.SOUNDS_DIR) / sound.filename
+        # Get directories based on sound type
+        sound_dir = SoundNormalizerService.SOUND_DIRS.get(sound.type)
+        normalized_dir = SoundNormalizerService.NORMALIZED_DIRS.get(sound.type)
+
+        if not sound_dir or not normalized_dir:
+            return {
+                "success": False,
+                "error": f"Unsupported sound type: {sound.type}",
+            }
+
+        source_path = Path(sound_dir) / sound.filename

        if not source_path.exists():
            return {
                "success": False,
@@ -68,7 +90,7 @@ class SoundNormalizerService:
        # Always output as WAV regardless of input format
        filename_without_ext = Path(sound.filename).stem
        normalized_filename = f"{filename_without_ext}.wav"
-        normalized_path = Path(SoundNormalizerService.NORMALIZED_DIR) / normalized_filename
+        normalized_path = Path(normalized_dir) / normalized_filename

        normalized_path.parent.mkdir(parents=True, exist_ok=True)
@@ -84,11 +106,15 @@ class SoundNormalizerService:
        if two_pass:
            result = SoundNormalizerService._normalize_with_ffmpeg(
-                str(source_path), str(normalized_path),
+                str(source_path),
+                str(normalized_path),
            )
        else:
-            result = SoundNormalizerService._normalize_with_ffmpeg_single_pass(
-                str(source_path), str(normalized_path),
+            result = (
+                SoundNormalizerService._normalize_with_ffmpeg_single_pass(
+                    str(source_path),
+                    str(normalized_path),
+                )
            )

        if result["success"]:
@@ -131,7 +157,9 @@ class SoundNormalizerService:
    @staticmethod
    def normalize_all_sounds(
-        overwrite: bool = False, limit: int = None, two_pass: bool = True,
+        overwrite: bool = False,
+        limit: int = None,
+        two_pass: bool = True,
    ) -> dict:
        """Normalize all soundboard files.
@@ -171,7 +199,9 @@ class SoundNormalizerService:
        for sound in sounds:
            result = SoundNormalizerService.normalize_sound(
-                sound.id, overwrite, two_pass,
+                sound.id,
+                overwrite,
+                two_pass,
            )

            processed += 1
@@ -235,7 +265,8 @@ class SoundNormalizerService:
            logger.debug("Starting first pass (analysis)")
            first_pass_result = SoundNormalizerService._run_first_pass(
-                source_path, params
+                source_path,
+                params,
            )

            if not first_pass_result["success"]:
@@ -247,7 +278,10 @@ class SoundNormalizerService:
            logger.debug("Starting second pass (normalization)")
            second_pass_result = SoundNormalizerService._run_second_pass(
-                source_path, output_path, params, measured_params
+                source_path,
+                output_path,
+                params,
+                measured_params,
            )

            if not second_pass_result["success"]:
@@ -281,7 +315,10 @@ class SoundNormalizerService:
            return {"success": False, "error": str(e)}

    @staticmethod
-    def _normalize_with_ffmpeg_single_pass(source_path: str, output_path: str) -> dict:
+    def _normalize_with_ffmpeg_single_pass(
+        source_path: str,
+        output_path: str,
+    ) -> dict:
        """Run ffmpeg loudnorm on a single file using single-pass normalization.

        This is the legacy single-pass method for backward compatibility.
@@ -319,7 +356,9 @@ class SoundNormalizerService:
            # Run the ffmpeg process
            out, err = ffmpeg.run(
-                output_stream, capture_stdout=True, capture_stderr=True,
+                output_stream,
+                capture_stdout=True,
+                capture_stderr=True,
            )

            # Parse loudnorm statistics from stderr
@@ -355,6 +394,7 @@ class SoundNormalizerService:
        Returns:
            dict: Result with measured parameters and analysis stats
        """
+
        try:
            # Create ffmpeg input stream
@@ -373,23 +413,27 @@ class SoundNormalizerService:
                input_stream,
                "/dev/null",
                af=loudnorm_filter,
-                f="null"
+                f="null",
            )

            # Run the first pass
            out, err = ffmpeg.run(
-                output_stream, capture_stdout=True, capture_stderr=True,
+                output_stream,
+                capture_stdout=True,
+                capture_stderr=True,
            )

            stderr_text = err.decode() if err else ""

            # Parse measured parameters from JSON output
-            measured_params = SoundNormalizerService._parse_measured_params(stderr_text)
+            measured_params = SoundNormalizerService._parse_measured_params(
+                stderr_text,
+            )

            if not measured_params:
                return {
                    "success": False,
-                    "error": "Failed to parse measured parameters from first pass"
+                    "error": "Failed to parse measured parameters from first pass",
                }

            # Parse basic stats
@@ -398,7 +442,7 @@ class SoundNormalizerService:
            return {
                "success": True,
                "measured_params": measured_params,
-                "stats": stats
+                "stats": stats,
            }

        except ffmpeg.Error as e:
@@ -410,7 +454,12 @@ class SoundNormalizerService:
            return {"success": False, "error": str(e)}

    @staticmethod
-    def _run_second_pass(source_path: str, output_path: str, target_params: dict, measured_params: dict) -> dict:
+    def _run_second_pass(
+        source_path: str,
+        output_path: str,
+        target_params: dict,
+        measured_params: dict,
+    ) -> dict:
        """Run second pass of loudnorm using measured parameters.

        Args:
@@ -421,6 +470,7 @@ class SoundNormalizerService:
        Returns:
            dict: Result with normalization stats
        """
+
        try:
            # Create ffmpeg input stream
@@ -452,7 +502,9 @@ class SoundNormalizerService:
            # Run the second pass
            out, err = ffmpeg.run(
-                output_stream, capture_stdout=True, capture_stderr=True,
+                output_stream,
+                capture_stdout=True,
+                capture_stderr=True,
            )

            stderr_text = err.decode() if err else ""
@@ -460,10 +512,7 @@ class SoundNormalizerService:
            # Parse final statistics
            stats = SoundNormalizerService._parse_loudnorm_stats(stderr_text)

-            return {
-                "success": True,
-                "stats": stats
-            }
+            return {"success": True, "stats": stats}

        except ffmpeg.Error as e:
            error_msg = f"Second pass FFmpeg error: {e.stderr.decode() if e.stderr else str(e)}"
@@ -482,10 +531,15 @@ class SoundNormalizerService:
        Returns:
            dict: Parsed measured parameters, empty if parsing fails
        """
+
        try:
            # Find JSON block in stderr output
-            json_match = re.search(r'\{[^}]*"input_i"[^}]*\}', stderr_output, re.DOTALL)
+            json_match = re.search(
+                r'\{[^}]*"input_i"[^}]*\}',
+                stderr_output,
+                re.DOTALL,
+            )
            if not json_match:
                logger.warning("No JSON block found in first pass output")
                return {}
@@ -577,9 +631,17 @@ class SoundNormalizerService:
            # Calculate file hash
            file_hash = SoundNormalizerService._calculate_file_hash(file_path)

-            # Get duration using pydub
-            audio = AudioSegment.from_wav(file_path)
-            duration = len(audio)  # Duration in milliseconds
+            # Get duration using ffmpeg
+            probe = ffmpeg.probe(file_path)
+            audio_stream = next(
+                (s for s in probe['streams'] if s['codec_type'] == 'audio'),
+                None
+            )
+            if audio_stream and 'duration' in audio_stream:
+                duration = int(float(audio_stream['duration']) * 1000)  # Convert to milliseconds
+            else:
+                duration = 0

            return {
                "duration": duration,
@@ -625,7 +687,9 @@ class SoundNormalizerService:
        sounds = Sound.query.filter_by(type="SDB").all()

        for sound in sounds:
-            original_path = Path(SoundNormalizerService.SOUNDS_DIR) / sound.filename
+            original_path = (
+                Path(SoundNormalizerService.SOUNDS_DIR) / sound.filename
+            )
            if original_path.exists():
                total_original_size += original_path.stat().st_size
@@ -633,7 +697,10 @@ class SoundNormalizerService:
            # Use database field to check if normalized, not file existence
            if sound.is_normalized and sound.normalized_filename:
                normalized_count += 1
-                normalized_path = Path(SoundNormalizerService.NORMALIZED_DIR) / sound.normalized_filename
+                normalized_path = (
+                    Path(SoundNormalizerService.NORMALIZED_DIR)
+                    / sound.normalized_filename
+                )
                if normalized_path.exists():
                    total_normalized_size += normalized_path.stat().st_size
@@ -676,7 +743,8 @@ class SoundNormalizerService:
            import tempfile

            with tempfile.NamedTemporaryFile(
-                suffix=".wav", delete=False,
+                suffix=".wav",
+                delete=False,
            ) as temp_file:
                temp_path = temp_file.name
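
For orientation, the analysis pass above amounts to running loudnorm with print_format=json and scraping the JSON block from stderr. A standalone sketch with ffmpeg-python follows; the TP and LRA targets are illustrative, only the -16 LUFS integrated target is visible in this hunk.

# Minimal first-pass loudness analysis sketch (target values partly assumed)
import json
import re

import ffmpeg


def analyze_loudness(path: str) -> dict:
    """Measure loudness without writing an output file."""
    stream = ffmpeg.output(
        ffmpeg.input(path),
        "/dev/null",
        af="loudnorm=I=-16:TP=-1.5:LRA=11:print_format=json",
        f="null",
    )
    _, err = ffmpeg.run(stream, capture_stdout=True, capture_stderr=True)
    match = re.search(r'\{[^}]*"input_i"[^}]*\}', err.decode(), re.DOTALL)
    return json.loads(match.group(0)) if match else {}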

View File

@@ -4,8 +4,7 @@ import hashlib
import logging
from pathlib import Path

-from pydub import AudioSegment
-from pydub.utils import mediainfo
+import ffmpeg

from app.database import db
from app.models.sound import Sound
@@ -83,7 +82,9 @@ class SoundScannerService:
                        files_added += 1
                        logger.debug(f"Added sound: {filename}")
                    elif result.get("updated"):
-                        files_added += 1  # Count updates as additions for reporting
+                        files_added += (
+                            1  # Count updates as additions for reporting
+                        )
                        logger.debug(f"Updated sound: {filename}")
                    else:
                        files_skipped += 1
@@ -138,76 +139,91 @@ class SoundScannerService:
    @staticmethod
    def _process_audio_file(file_path: str, base_dir: str) -> dict:
-        """Process a single audio file and add it to database if new.
-
-        Args:
-            file_path: Full path to the audio file
-            base_dir: Base directory for relative path calculation
-
-        Returns:
-            dict: Processing result with added flag and reason
-        """
-        # Calculate file hash for deduplication
+        """Process a single audio file and add it to database if new."""
        file_hash = SoundScannerService._calculate_file_hash(file_path)
-
-        # Get file metadata
        metadata = SoundScannerService._extract_audio_metadata(file_path)
-
-        # Calculate relative filename from base directory
        relative_path = Path(file_path).relative_to(Path(base_dir))

-        # Check if file already exists in database by hash
-        existing_sound = Sound.find_by_hash(file_hash)
-        if existing_sound:
-            return {
-                "added": False,
-                "reason": f"File already exists as '{existing_sound.name}'",
-            }
+        # Check for existing file by hash (duplicate content)
+        if existing_sound := Sound.find_by_hash(file_hash):
+            return SoundScannerService._handle_duplicate_file(existing_sound)

-        # Check if filename already exists in database
-        existing_filename_sound = Sound.find_by_filename(str(relative_path))
-        if existing_filename_sound:
-            # Remove normalized files and clear normalized info
-            SoundScannerService._clear_normalized_files(existing_filename_sound)
-            existing_filename_sound.clear_normalized_info()
-
-            # Update existing sound with new file information
-            existing_filename_sound.update_file_info(
-                filename=str(relative_path),
-                duration=metadata["duration"],
-                size=metadata["size"],
-                hash_value=file_hash,
-            )
-
-            return {
-                "added": False,
-                "updated": True,
-                "sound_id": existing_filename_sound.id,
-                "reason": f"Updated existing sound '{existing_filename_sound.name}' with new file data",
-            }
-
-        # Generate sound name from filename (without extension)
-        sound_name = Path(file_path).stem
-
-        # Check if name already exists and make it unique if needed
-        counter = 1
-        original_name = sound_name
-        while Sound.find_by_name(sound_name):
-            sound_name = f"{original_name}_{counter}"
-            counter += 1
+        # Check for existing filename (file replacement)
+        if existing_filename_sound := Sound.find_by_filename(
+            str(relative_path)
+        ):
+            return SoundScannerService._handle_file_replacement(
+                existing_filename_sound,
+                str(relative_path),
+                metadata,
+                file_hash,
+            )

        # Create new sound record
+        return SoundScannerService._create_new_sound(
+            file_path,
+            str(relative_path),
+            metadata,
+            file_hash,
+        )
+
+    @staticmethod
+    def _handle_duplicate_file(existing_sound: Sound) -> dict:
+        """Handle case where file content already exists in database."""
+        return {
+            "added": False,
+            "reason": f"File already exists as '{existing_sound.name}'",
+        }
+
+    @staticmethod
+    def _handle_file_replacement(
+        existing_sound: Sound,
+        relative_path: str,
+        metadata: dict,
+        file_hash: str,
+    ) -> dict:
+        """Handle case where filename exists but content may be different."""
+        # Remove normalized files and clear normalized info
+        SoundScannerService._clear_normalized_files(existing_sound)
+        existing_sound.clear_normalized_info()
+
+        # Update existing sound with new file information
+        existing_sound.update_file_info(
+            filename=relative_path,
+            duration=metadata["duration"],
+            size=metadata["size"],
+            hash_value=file_hash,
+        )
+
+        return {
+            "added": False,
+            "updated": True,
+            "sound_id": existing_sound.id,
+            "reason": f"Updated existing sound '{existing_sound.name}' with new file data",
+        }
+
+    @staticmethod
+    def _create_new_sound(
+        file_path: str,
+        relative_path: str,
+        metadata: dict,
+        file_hash: str,
+    ) -> dict:
+        """Create a new sound record in the database."""
+        sound_name = SoundScannerService._generate_unique_sound_name(
+            Path(file_path).stem,
+        )
+
        sound = Sound.create_sound(
-            sound_type="SDB",  # Soundboard type
+            sound_type="SDB",
            name=sound_name,
-            filename=str(relative_path),
+            filename=relative_path,
            duration=metadata["duration"],
            size=metadata["size"],
            hash_value=file_hash,
            is_music=False,
            is_deletable=False,
-            commit=False,  # Don't commit individually, let scanner handle transaction
+            commit=False,
        )

        return {
@@ -216,6 +232,18 @@ class SoundScannerService:
            "reason": "New file added successfully",
        }

+    @staticmethod
+    def _generate_unique_sound_name(base_name: str) -> str:
+        """Generate a unique sound name by appending numbers if needed."""
+        sound_name = base_name
+        counter = 1
+
+        while Sound.find_by_name(sound_name):
+            sound_name = f"{base_name}_{counter}"
+            counter += 1
+
+        return sound_name
+
    @staticmethod
    def _calculate_file_hash(file_path: str) -> str:
        """Calculate SHA256 hash of file contents."""
@@ -233,44 +261,50 @@ class SoundScannerService:
        """Remove normalized files for a sound if they exist."""
        if sound.is_normalized and sound.normalized_filename:
            # Import here to avoid circular imports
-            from app.services.sound_normalizer_service import SoundNormalizerService
+            from app.services.sound_normalizer_service import (
+                SoundNormalizerService,
+            )

-            normalized_path = Path(SoundNormalizerService.NORMALIZED_DIR) / sound.normalized_filename
+            normalized_path = (
+                Path(SoundNormalizerService.NORMALIZED_DIR)
+                / sound.normalized_filename
+            )
            if normalized_path.exists():
                try:
                    normalized_path.unlink()
                    logger.info(f"Removed normalized file: {normalized_path}")
                except Exception as e:
-                    logger.warning(f"Could not remove normalized file {normalized_path}: {e}")
+                    logger.warning(
+                        f"Could not remove normalized file {normalized_path}: {e}",
+                    )

    @staticmethod
    def _extract_audio_metadata(file_path: str) -> dict:
-        """Extract metadata from audio file using pydub and mediainfo."""
+        """Extract metadata from audio file using ffmpeg-python."""
        try:
            # Get file size
            file_size = Path(file_path).stat().st_size

-            # Load audio file with pydub for basic info
-            audio = AudioSegment.from_file(file_path)
+            # Use ffmpeg to probe audio metadata
+            probe = ffmpeg.probe(file_path)
+            audio_stream = next(
+                (s for s in probe['streams'] if s['codec_type'] == 'audio'),
+                None
+            )

-            # Extract basic metadata from AudioSegment
-            duration = len(audio)
-            channels = audio.channels
-            sample_rate = audio.frame_rate
+            if not audio_stream:
+                raise ValueError("No audio stream found in file")

-            # Use mediainfo for more accurate bitrate information
-            bitrate = None
-            try:
-                info = mediainfo(file_path)
-                if info and "bit_rate" in info:
-                    bitrate = int(info["bit_rate"])
-                elif info and "bitrate" in info:
-                    bitrate = int(info["bitrate"])
-            except (ValueError, KeyError, TypeError):
-                # Fallback to calculated bitrate if mediainfo fails
-                if duration > 0:
-                    file_size_bits = file_size * 8
-                    bitrate = int(file_size_bits / duration / 1000)
+            # Extract metadata from ffmpeg probe
+            duration = int(float(audio_stream.get('duration', 0)) * 1000)  # Convert to milliseconds
+            channels = int(audio_stream.get('channels', 0))
+            sample_rate = int(audio_stream.get('sample_rate', 0))
+            bitrate = int(audio_stream.get('bit_rate', 0)) if audio_stream.get('bit_rate') else None
+
+            # Fallback bitrate calculation if not available
+            if not bitrate and duration > 0:
+                file_size_bits = file_size * 8
+                bitrate = int(file_size_bits / (duration / 1000))

            return {
                "duration": duration,

View File

@@ -0,0 +1,590 @@
"""Stream processing service with queue management and yt-dlp integration."""
import hashlib
import os
import re
import shutil
import threading
import time
from queue import Empty, Queue
from typing import Dict, List, Optional
from urllib.parse import parse_qs, urlparse
from app.database import db
from app.models.sound import Sound
from app.models.stream import Stream
from app.services.logging_service import LoggingService
# Configure logging
logger = LoggingService.get_logger(__name__)
class StreamProcessingService:
"""Service for processing streaming URLs with yt-dlp."""
# Class variables for queue management
_processing_queue: Queue = Queue()
_processing_threads: List[threading.Thread] = []
_is_running: bool = False
_max_concurrent_downloads: int = int(
os.getenv("STREAM_MAX_CONCURRENT", "2")
)
_downloads_dir: str = "sounds/temp"
_app_instance = None # Store the Flask app instance
@classmethod
def initialize(cls, app=None) -> None:
"""Initialize the stream processing service."""
if cls._is_running:
return
# Store the Flask app instance if provided
if app:
cls._app_instance = app
# Create necessary directories
os.makedirs(cls._downloads_dir, exist_ok=True)
os.makedirs("sounds/stream", exist_ok=True)
os.makedirs("sounds/stream/thumbnails", exist_ok=True)
# Start processing threads
for i in range(cls._max_concurrent_downloads):
thread = threading.Thread(
target=cls._worker_thread,
name=f"StreamProcessor-{i + 1}",
daemon=True,
)
thread.start()
cls._processing_threads.append(thread)
cls._is_running = True
logger.info(
f"StreamProcessingService initialized with {cls._max_concurrent_downloads} workers"
)
@classmethod
def add_to_queue(cls, stream_id: int) -> None:
"""Add a stream to the processing queue."""
if not cls._is_running:
cls.initialize()
cls._processing_queue.put(stream_id)
logger.info(f"Added stream {stream_id} to processing queue")
@classmethod
def get_queue_status(cls) -> Dict:
"""Get the current queue status."""
pending_count = Stream.query.filter_by(status="pending").count()
processing_count = Stream.query.filter_by(status="processing").count()
return {
"queue_size": cls._processing_queue.qsize(),
"pending_streams": pending_count,
"processing_streams": processing_count,
"max_concurrent": cls._max_concurrent_downloads,
"is_running": cls._is_running,
}
@classmethod
def _worker_thread(cls) -> None:
"""Worker thread for processing streams."""
while True:
try:
# Get stream ID from queue with timeout
stream_id = cls._processing_queue.get(timeout=1)
# Use the stored app instance for database operations
if cls._app_instance:
with cls._app_instance.app_context():
cls._process_stream(stream_id)
else:
# Fallback: import create_app if no app instance stored
from app import create_app
app = create_app()
with app.app_context():
cls._process_stream(stream_id)
cls._processing_queue.task_done()
except Empty:
# No items in queue, continue
continue
except Exception as e:
logger.error(f"Error in worker thread: {e}")
continue
@classmethod
def _process_stream(cls, stream_id: int) -> None:
"""Process a single stream."""
try:
stream = Stream.query.get(stream_id)
if not stream:
logger.error(f"Stream {stream_id} not found")
return
if stream.status == "cancelled":
logger.info(f"Stream {stream_id} was cancelled")
return
# Update status to processing
stream.status = "processing"
db.session.commit()
logger.info(
f"Starting processing of stream {stream_id}: {stream.url}"
)
# Extract metadata and download audio
metadata, error_msg = cls._extract_metadata(stream.url)
if not metadata:
if not error_msg:
error_msg = "Failed to extract metadata from URL"
stream.status = "failed"
stream.error = error_msg
db.session.commit()
logger.error(
f"Failed to extract metadata for stream {stream_id}: {error_msg}"
)
return
# Check for duplicate streams based on service and service_id
service = metadata.get("service")
service_id = metadata.get("service_id")
if service and service_id:
existing_stream = (
Stream.query.filter_by(
service=service, service_id=service_id
)
.filter(Stream.id != stream.id)
.first()
)
if existing_stream:
error_msg = f"Stream already exists with {service} ID: {service_id} (stream #{existing_stream.id})"
stream.status = "failed"
stream.error = error_msg
db.session.commit()
logger.error(
f"Duplicate stream detected for {stream_id}: {error_msg}"
)
return
# Update stream with metadata
cls._update_stream_metadata(stream, metadata)
# Download audio
audio_path, error_msg = cls._download_audio(stream.url, metadata)
if not audio_path:
if not error_msg:
error_msg = "Failed to download audio from URL"
stream.status = "failed"
stream.error = error_msg
db.session.commit()
logger.error(
f"Failed to download audio for stream {stream_id}: {error_msg}"
)
return
# Move files to final locations
final_audio_path, thumbnail_path, error_msg = (
cls._move_files_to_final_location(audio_path, metadata)
)
if not final_audio_path:
if not error_msg:
error_msg = "Failed to move files to final location"
stream.status = "failed"
stream.error = error_msg
db.session.commit()
logger.error(
f"Failed to move files for stream {stream_id}: {error_msg}"
)
return
# Create sound entry with final path
sound, error_msg = cls._create_sound_entry(
final_audio_path, metadata, thumbnail_path
)
if not sound:
if not error_msg:
error_msg = "Failed to create sound entry in database"
stream.status = "failed"
stream.error = error_msg
db.session.commit()
logger.error(
f"Failed to create sound entry for stream {stream_id}: {error_msg}"
)
return
# Update stream with sound_id and mark as completed
stream.sound_id = sound.id
stream.status = "completed"
stream.error = None # Clear any previous errors
db.session.commit()
logger.info(
f"Successfully processed stream {stream_id} -> sound {sound.id}"
)
except Exception as e:
error_msg = f"Unexpected error during processing: {str(e)}"
logger.error(f"Error processing stream {stream_id}: {error_msg}")
try:
stream = Stream.query.get(stream_id)
if stream:
stream.status = "failed"
stream.error = error_msg
db.session.commit()
except Exception as db_error:
logger.error(
f"Failed to update stream error in database: {db_error}"
)
@classmethod
def _extract_metadata(
cls, url: str
) -> tuple[Optional[Dict], Optional[str]]:
"""Extract metadata from URL using yt-dlp."""
try:
import yt_dlp
ydl_opts = {
"quiet": True,
"no_warnings": True,
"extract_flat": False,
}
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
info = ydl.extract_info(url, download=False)
# Extract service information
service = cls._detect_service(url)
service_id = cls._extract_service_id(url, info)
metadata = {
"service": info.get("extractor", service),
"service_id": info.get("id", service_id),
"title": info.get("title", ""),
"track": info.get("track", ""),
"artist": info.get("artist", "")
or info.get("uploader", ""),
"album": info.get("album", ""),
"genre": info.get("genre", ""),
"duration": info.get("duration", 0),
"description": info.get("description", ""),
}
return metadata, None
except Exception as e:
error_msg = f"yt-dlp extraction failed: {str(e)}"
logger.error(f"Error extracting metadata from {url}: {error_msg}")
return None, error_msg
@classmethod
def _download_audio(
cls, url: str, metadata: Dict
) -> tuple[Optional[str], Optional[str]]:
"""Download audio from URL using yt-dlp."""
try:
import yt_dlp
# Generate filename
title = metadata.get("title", "unknown")
safe_title = re.sub(r"[^\w\s-]", "", title)[:50]
filename = f"{safe_title}_{metadata.get('service_id', 'unknown')}"
output_path = os.path.join(
cls._downloads_dir, f"{filename}.%(ext)s"
)
ydl_opts = {
"format": "bestaudio/best",
"outtmpl": output_path,
"extractaudio": True,
"audioformat": "opus",
"audioquality": "192",
"postprocessors": [
{
"key": "FFmpegExtractAudio",
"preferredcodec": "opus",
"preferredquality": "192",
}
],
"writethumbnail": True,
"quiet": True,
"no_warnings": True,
}
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
ydl.download([url])
# Find the downloaded file
final_path = os.path.join(cls._downloads_dir, f"{filename}.opus")
if os.path.exists(final_path):
return final_path, None
# If opus doesn't exist, look for other formats
for ext in ["mp3", "wav", "m4a", "webm", "ogg"]:
alt_path = os.path.join(cls._downloads_dir, f"{filename}.{ext}")
if os.path.exists(alt_path):
return alt_path, None
error_msg = f"Downloaded file not found after yt-dlp processing"
logger.error(f"Downloaded file not found for {url}")
return None, error_msg
except Exception as e:
error_msg = f"yt-dlp download failed: {str(e)}"
logger.error(f"Error downloading audio from {url}: {error_msg}")
return None, error_msg
@classmethod
def _move_files_to_final_location(
cls, audio_path: str, metadata: dict
) -> tuple[Optional[str], Optional[str], Optional[str]]:
"""Move downloaded files to their final locations.
Returns:
tuple: (final_audio_path, thumbnail_path, error_message)
"""
try:
# Create target directories
stream_dir = "sounds/stream"
thumbnail_dir = "sounds/stream/thumbnails"
os.makedirs(stream_dir, exist_ok=True)
os.makedirs(thumbnail_dir, exist_ok=True)
# Generate safe filename
title = metadata.get("title", "unknown")
safe_title = re.sub(r"[^\w\s-]", "", title)[:50]
service_id = metadata.get("service_id", "unknown")
base_filename = f"{safe_title}_{service_id}"
# Move audio file
audio_extension = os.path.splitext(audio_path)[1]
final_audio_filename = f"{base_filename}{audio_extension}"
final_audio_path = os.path.join(stream_dir, final_audio_filename)
# If file already exists, add a counter
counter = 1
while os.path.exists(final_audio_path):
final_audio_filename = (
f"{base_filename}_{counter}{audio_extension}"
)
final_audio_path = os.path.join(
stream_dir, final_audio_filename
)
counter += 1
shutil.move(audio_path, final_audio_path)
logger.info(f"Moved audio file to: {final_audio_path}")
# Look for and move thumbnail
thumbnail_path = None
temp_dir = os.path.dirname(audio_path)
# Common thumbnail extensions
for thumb_ext in [".jpg", ".jpeg", ".png", ".webp", ".gif"]:
temp_thumb_path = os.path.join(
temp_dir,
f"{os.path.splitext(os.path.basename(audio_path))[0]}{thumb_ext}",
)
if os.path.exists(temp_thumb_path):
final_thumb_filename = f"{base_filename}{thumb_ext}"
final_thumb_path = os.path.join(
thumbnail_dir, final_thumb_filename
)
# Handle duplicate thumbnail names
thumb_counter = 1
while os.path.exists(final_thumb_path):
final_thumb_filename = (
f"{base_filename}_{thumb_counter}{thumb_ext}"
)
final_thumb_path = os.path.join(
thumbnail_dir, final_thumb_filename
)
thumb_counter += 1
shutil.move(temp_thumb_path, final_thumb_path)
thumbnail_path = final_thumb_path
logger.info(f"Moved thumbnail to: {final_thumb_path}")
break
return final_audio_path, thumbnail_path, None
except Exception as e:
error_msg = f"File move operation failed: {str(e)}"
logger.error(f"Error moving files: {error_msg}")
return None, None, error_msg
@classmethod
def _create_sound_entry(
cls,
audio_path: str,
metadata: dict,
thumbnail_path: str | None = None,
) -> tuple[Sound | None, str | None]:
"""Create a sound entry from the downloaded audio."""
try:
# Get file info
file_size = os.path.getsize(audio_path)
# Generate hash
file_hash = cls._calculate_file_hash(audio_path)
# Get duration (use metadata duration or calculate from file)
duration_ms = int((metadata.get("duration", 0) or 0) * 1000)
# Get thumbnail filename if available
thumbnail_filename = None
if thumbnail_path:
thumbnail_filename = os.path.basename(thumbnail_path)
# Create sound entry
sound = Sound(
type="STR", # Stream type
name=metadata.get("title", "Unknown Title"),
filename=os.path.basename(audio_path),
thumbnail=thumbnail_filename,
duration=duration_ms,
size=file_size,
hash=file_hash,
is_music=True, # Streams are typically music
is_deletable=True,
)
db.session.add(sound)
db.session.commit()
# Add sound to main playlist
cls._add_sound_to_main_playlist(sound)
# Normalize the sound
cls._normalize_sound(sound)
return sound, None
except Exception as e:
error_msg = f"Database error while creating sound entry: {str(e)}"
logger.error(
f"Error creating sound entry for {audio_path}: {error_msg}"
)
return None, error_msg
@classmethod
def _update_stream_metadata(cls, stream: Stream, metadata: Dict) -> None:
"""Update stream with extracted metadata."""
stream.service = metadata.get("service")
stream.service_id = metadata.get("service_id")
stream.title = metadata.get("title")
stream.track = metadata.get("track")
stream.artist = metadata.get("artist")
stream.album = metadata.get("album")
stream.genre = metadata.get("genre")
db.session.commit()
@classmethod
def _detect_service(cls, url: str) -> str:
"""Detect the streaming service from URL."""
domain = urlparse(url).netloc.lower()
if "youtube.com" in domain or "youtu.be" in domain:
return "youtube"
elif "soundcloud.com" in domain:
return "soundcloud"
elif "dailymotion.com" in domain:
return "dailymotion"
elif "spotify.com" in domain:
return "spotify"
elif "vimeo.com" in domain:
return "vimeo"
elif "twitch.tv" in domain:
return "twitch"
else:
return "unknown"
@classmethod
def _extract_service_id(cls, url: str, info: Dict) -> str:
"""Extract service-specific ID from URL or info."""
service = cls._detect_service(url)
if service == "youtube":
# Try to get from info first
if "id" in info:
return info["id"]
# Parse from URL
parsed = urlparse(url)
if "youtu.be" in parsed.netloc:
return parsed.path[1:] # Remove leading slash
elif "youtube.com" in parsed.netloc:
query_params = parse_qs(parsed.query)
return query_params.get("v", [""])[0]
elif service == "soundcloud":
if "id" in info:
return str(info["id"])
# Fallback to using info ID or last part of URL
if "id" in info:
return str(info["id"])
return urlparse(url).path.split("/")[-1] or "unknown"
@classmethod
def _add_sound_to_main_playlist(cls, sound: Sound) -> None:
"""Add a sound to the main playlist."""
try:
from app.models.playlist import Playlist
from app.services.music_player_service import music_player_service
# Find the main playlist
main_playlist = Playlist.find_main_playlist()
if main_playlist:
# Add sound to the main playlist
main_playlist.add_sound(sound.id, commit=True)
logger.info(f"Added sound {sound.id} to main playlist")
# Reload the playlist in music player if it's the current one
music_player_service.reload_current_playlist_if_modified(main_playlist.id)
else:
logger.warning("Main playlist not found - sound not added to any playlist")
except Exception as e:
logger.error(f"Failed to add sound {sound.id} to main playlist: {e}")
@classmethod
def _normalize_sound(cls, sound: Sound) -> None:
"""Normalize a stream sound using the sound normalizer service."""
try:
from app.services.sound_normalizer_service import SoundNormalizerService
logger.info(f"Starting normalization of stream sound {sound.id}: {sound.name}")
# Normalize the sound (overwrite=True since it's a new sound)
result = SoundNormalizerService.normalize_sound(
sound.id,
overwrite=True,
two_pass=True
)
if result.get("success"):
logger.info(f"Successfully normalized stream sound {sound.id}")
else:
error_msg = result.get("error", "Unknown normalization error")
logger.warning(f"Failed to normalize stream sound {sound.id}: {error_msg}")
except Exception as e:
logger.error(f"Error normalizing stream sound {sound.id}: {e}")
@classmethod
def _calculate_file_hash(cls, file_path: str) -> str:
"""Calculate SHA256 hash of file."""
sha256_hash = hashlib.sha256()
with open(file_path, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
sha256_hash.update(chunk)
return sha256_hash.hexdigest()
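
A small sketch of how a route could hand a URL to this service. The Stream constructor keywords are assumptions based on the fields read back during processing, and the code is expected to run inside a Flask app context.

# Hypothetical submission helper for StreamProcessingService
def submit_stream(url: str) -> int:
    """Queue a URL for background download; returns the new stream id."""
    stream = Stream(url=url, status="pending")  # field names assumed from usage above
    db.session.add(stream)
    db.session.commit()
    StreamProcessingService.add_to_queue(stream.id)  # a worker thread picks it up
    return stream.id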

223
app/services/vlc_service.py Normal file
View File

@@ -0,0 +1,223 @@
"""VLC service for playing sounds using subprocess."""
import os
import subprocess
import threading
import time
from app.models.sound import Sound
from app.models.sound_played import SoundPlayed
from app.services.logging_service import LoggingService
logger = LoggingService.get_logger(__name__)
class VLCService:
"""Service for playing sounds using VLC subprocess."""
def __init__(self) -> None:
"""Initialize VLC service."""
self.processes: dict[str, subprocess.Popen] = {}
self.lock = threading.Lock()
def play_sound(self, sound_id: int, user_id: int | None = None) -> bool:
"""Play a sound by ID using VLC subprocess."""
try:
# Get sound from database
sound = Sound.query.get(sound_id)
if not sound:
return False
# Use normalized file if available, otherwise use original
if sound.is_normalized and sound.normalized_filename:
sound_path = os.path.join(
"sounds",
"normalized",
"soundboard",
sound.normalized_filename,
)
else:
sound_path = os.path.join(
"sounds",
"soundboard",
sound.filename,
)
# Check if file exists
if not os.path.exists(sound_path):
return False
# Convert to absolute path
sound_path = os.path.abspath(sound_path)
# Create unique process ID
process_id = f"sound_{sound_id}_{int(time.time() * 1000000)}"
# Start VLC process
vlc_cmd = [
"vlc",
sound_path,
"--intf",
"dummy", # No interface
"--play-and-exit", # Exit after playing
"--no-video", # Audio only
"--quiet", # Reduce output
]
process = subprocess.Popen(
vlc_cmd,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
preexec_fn=os.setsid, # Create new process group
)
# Store process for tracking
with self.lock:
self.processes[process_id] = process
logger.info(
f"Started VLC process {process.pid} for sound '{sound.name}'. "
f"Total active processes: {len(self.processes)}",
)
# Increment play count
sound.increment_play_count()
# Record play event if user is provided
if user_id:
try:
SoundPlayed.create_play_record(
user_id=user_id,
sound_id=sound_id,
commit=True,
)
except Exception as e:
logger.error(f"Error recording play event: {e}")
# Schedule cleanup after sound duration
threading.Thread(
target=self._cleanup_after_playback,
args=(process_id, sound.duration if sound.duration else 10000),
daemon=True,
).start()
return True
except Exception as e:
logger.error(
f"Error starting VLC process for sound {sound_id}: {e}"
)
return False
def _cleanup_after_playback(self, process_id: str, duration: int) -> None:
"""Clean up VLC process after playback."""
# Wait for playback to finish (duration + 1 second buffer)
time.sleep(duration / 1000 + 1) # Convert ms to seconds
with self.lock:
if process_id in self.processes:
logger.debug(f"Cleaning up process {process_id} after playback")
process = self.processes[process_id]
try:
# Check if process is still running
if process.poll() is None:
logger.debug(
f"Process {process.pid} still running, terminating"
)
process.terminate()
# Give it a moment to terminate gracefully
try:
process.wait(timeout=2)
except subprocess.TimeoutExpired:
logger.debug(
f"Process {process.pid} didn't terminate, killing"
)
process.kill()
logger.debug(
f"Successfully cleaned up process {process_id}"
)
except Exception as e:
logger.warning(f"Error during cleanup of {process_id}: {e}")
finally:
# Always remove from tracking
del self.processes[process_id]
logger.debug(
f"Removed process {process_id}. Remaining processes: {len(self.processes)}",
)
def stop_all(self) -> None:
"""Stop all playing sounds by killing VLC processes."""
with self.lock:
processes_copy = dict(self.processes)
if processes_copy:
logger.info(f"Stopping {len(processes_copy)} VLC processes")
for process_id, process in processes_copy.items():
try:
if process.poll() is None: # Process is still running
logger.debug(f"Terminating process {process.pid}")
process.terminate()
# Give it a moment to terminate gracefully
try:
process.wait(timeout=1)
logger.debug(
f"Process {process.pid} terminated gracefully"
)
except subprocess.TimeoutExpired:
logger.debug(
f"Process {process.pid} didn't terminate, killing forcefully"
)
process.kill()
process.wait() # Wait for it to be killed
else:
logger.debug(f"Process {process.pid} already finished")
except Exception as e:
logger.warning(f"Error stopping process {process_id}: {e}")
# Clear all processes
self.processes.clear()
if processes_copy:
logger.info("All VLC processes stopped")
def get_playing_count(self) -> int:
"""Get number of currently playing sounds."""
with self.lock:
# Clean up finished processes and return count
finished_processes = []
for process_id, process in self.processes.items():
if process.poll() is not None: # Process has finished
finished_processes.append(process_id)
# Remove finished processes
for process_id in finished_processes:
del self.processes[process_id]
return len(self.processes)
def force_stop_all(self) -> int:
"""Force stop all sounds by killing VLC processes aggressively."""
with self.lock:
stopped_count = len(self.processes)
if stopped_count > 0:
logger.warning(f"Force stopping {stopped_count} VLC processes")
# Also try to kill any remaining VLC processes system-wide
try:
subprocess.run(["pkill", "-f", "vlc"], check=False)
logger.info("Killed any remaining VLC processes system-wide")
except Exception as e:
logger.error(f"Error killing system VLC processes: {e}")
# Clear all processes
self.processes.clear()
if stopped_count > 0:
logger.info("Force stop completed")
return stopped_count
# Global VLC service instance
vlc_service = VLCService()
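
Usage sketch, assuming it runs inside a Flask app context (Sound.query needs one); the sound and user ids are illustrative.

# Fire-and-forget playback plus a quick status check
with app.app_context():  # `app` is the Flask application instance
    if vlc_service.play_sound(sound_id=42, user_id=7):
        print(f"{vlc_service.get_playing_count()} sound(s) currently playing")
    else:
        print("Sound not found or audio file missing")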

82
backend.log Normal file
View File

@@ -0,0 +1,82 @@
19:40:31 - apscheduler.scheduler - INFO - Adding job tentatively -- it will be properly scheduled when the scheduler starts
19:40:31 - app.services.scheduler_service - INFO - Daily credit refill job scheduled for 00:00 UTC
19:40:31 - apscheduler.scheduler - INFO - Adding job tentatively -- it will be properly scheduled when the scheduler starts
19:40:31 - app.services.scheduler_service - INFO - Sound scanning job scheduled every 5 minutes
19:40:31 - apscheduler.scheduler - INFO - Added job "Daily Credit Refill" to job store "default"
19:40:31 - apscheduler.scheduler - INFO - Added job "Sound Directory Scan" to job store "default"
19:40:31 - apscheduler.scheduler - INFO - Scheduler started
19:40:31 - app.services.scheduler_service - INFO - Scheduler started successfully
19:40:31 - app.services.stream_processing_service - INFO - StreamProcessingService initialized with 2 workers
19:40:31 - app.services.scheduler_service - WARNING - Scheduler is already running
19:40:31 - app.services.scheduler_service - WARNING - Scheduler is already running
19:40:31 - app.services.music_player_service - INFO - VLC music player started successfully
19:40:31 - app.services.music_player_service - INFO - VLC music player started successfully
19:40:31 - app.services.music_player_service - INFO - VLC music player started successfully
19:40:31 - werkzeug - WARNING - Werkzeug appears to be used in a production deployment. Consider switching to a production web server instead.
🔧 SocketIO Service: Module loaded, logger level: 10
🔧 SocketIO Service: Effective logger level: 10
🔧 SocketIO Service: Parent logger handlers: [<StreamHandler <stderr> (NOTSET)>]
🔧 SocketIO Service: Logger handlers: []
🔧 SocketIO Service: Registered event handlers: ['/']
* Serving Flask app 'app'
* Debug mode: on
19:40:31 - werkzeug - INFO - WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.
* Running on all addresses (0.0.0.0)
* Running on http://127.0.0.1:5000
* Running on http://10.8.0.2:5000
19:40:31 - werkzeug - INFO - Press CTRL+C to quit
19:40:31 - werkzeug - INFO - * Restarting with stat
19:40:32 - apscheduler.scheduler - INFO - Adding job tentatively -- it will be properly scheduled when the scheduler starts
19:40:32 - app.services.scheduler_service - INFO - Daily credit refill job scheduled for 00:00 UTC
19:40:32 - apscheduler.scheduler - INFO - Adding job tentatively -- it will be properly scheduled when the scheduler starts
19:40:32 - app.services.scheduler_service - INFO - Sound scanning job scheduled every 5 minutes
19:40:32 - apscheduler.scheduler - INFO - Added job "Daily Credit Refill" to job store "default"
19:40:32 - apscheduler.scheduler - INFO - Added job "Sound Directory Scan" to job store "default"
19:40:32 - apscheduler.scheduler - INFO - Scheduler started
19:40:32 - app.services.scheduler_service - INFO - Scheduler started successfully
19:40:32 - app.services.stream_processing_service - INFO - StreamProcessingService initialized with 2 workers
19:40:32 - app.services.scheduler_service - WARNING - Scheduler is already running
19:40:32 - app.services.scheduler_service - WARNING - Scheduler is already running
19:40:32 - app.services.music_player_service - INFO - VLC music player started successfully
19:40:32 - app.services.music_player_service - INFO - VLC music player started successfully
19:40:32 - app.services.music_player_service - INFO - VLC music player started successfully
19:40:32 - werkzeug - WARNING - Werkzeug appears to be used in a production deployment. Consider switching to a production web server instead.
19:40:32 - werkzeug - WARNING - * Debugger is active!
19:40:32 - werkzeug - INFO - * Debugger PIN: 138-440-685
19:40:32 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:32] "GET /socket.io/?EIO=4&transport=polling&t=e00ab8wz HTTP/1.1" 200 -
19:40:32 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:32] "POST /socket.io/?EIO=4&transport=polling&t=e00dbx25&sid=3ANQFsbixyerJ988AAAA HTTP/1.1" 200 -
19:40:32 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:32] "GET /socket.io/?EIO=4&transport=polling&t=e00dc4kv&sid=3ANQFsbixyerJ988AAAA HTTP/1.1" 200 -
19:40:32 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:32] "POST /socket.io/?EIO=4&transport=polling&t=e00dltvr&sid=3ANQFsbixyerJ988AAAA HTTP/1.1" 200 -
19:40:32 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:32] "POST /socket.io/?EIO=4&transport=polling&t=e00dryur&sid=3ANQFsbixyerJ988AAAA HTTP/1.1" 200 -
19:40:49 - werkzeug - INFO - * Detected change in '/home/jschoisy/Dev/perso/sdb-claude/backend/app/services/socketio_service.py', reloading
🔧 SocketIO Service: Module loaded, logger level: 10
🔧 SocketIO Service: Effective logger level: 10
🔧 SocketIO Service: Parent logger handlers: [<StreamHandler <stderr> (NOTSET)>]
🔧 SocketIO Service: Logger handlers: []
🔧 SocketIO Service: Registered event handlers: ['/']
19:40:49 - werkzeug - INFO - * Restarting with stat
19:40:49 - apscheduler.scheduler - INFO - Adding job tentatively -- it will be properly scheduled when the scheduler starts
19:40:49 - app.services.scheduler_service - INFO - Daily credit refill job scheduled for 00:00 UTC
19:40:49 - apscheduler.scheduler - INFO - Adding job tentatively -- it will be properly scheduled when the scheduler starts
19:40:49 - app.services.scheduler_service - INFO - Sound scanning job scheduled every 5 minutes
19:40:49 - apscheduler.scheduler - INFO - Added job "Daily Credit Refill" to job store "default"
19:40:49 - apscheduler.scheduler - INFO - Added job "Sound Directory Scan" to job store "default"
19:40:49 - apscheduler.scheduler - INFO - Scheduler started
19:40:49 - app.services.scheduler_service - INFO - Scheduler started successfully
19:40:50 - app.services.stream_processing_service - INFO - StreamProcessingService initialized with 2 workers
19:40:50 - app.services.scheduler_service - WARNING - Scheduler is already running
19:40:50 - app.services.scheduler_service - WARNING - Scheduler is already running
19:40:50 - app.services.music_player_service - INFO - VLC music player started successfully
19:40:50 - werkzeug - WARNING - Werkzeug appears to be used in a production deployment. Consider switching to a production web server instead.
19:40:50 - app.services.music_player_service - INFO - VLC music player started successfully
19:40:50 - app.services.music_player_service - INFO - VLC music player started successfully
19:40:50 - werkzeug - WARNING - * Debugger is active!
19:40:50 - werkzeug - INFO - * Debugger PIN: 220-239-682
Invalid session 3ANQFsbixyerJ988AAAA (further occurrences of this error will be logged with level INFO)
19:40:50 - engineio.server - ERROR - Invalid session 3ANQFsbixyerJ988AAAA (further occurrences of this error will be logged with level INFO)
19:40:50 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:50] "POST /socket.io/?EIO=4&transport=polling&t=e0do0065&sid=3ANQFsbixyerJ988AAAA HTTP/1.1" 400 -
19:40:51 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:51] "GET /socket.io/?EIO=4&transport=polling&t=e0esh2w2 HTTP/1.1" 200 -
19:40:51 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:51] "POST /socket.io/?EIO=4&transport=polling&t=e0esr2m9&sid=C18CrSifHGP8BpkeAAAE HTTP/1.1" 200 -
19:40:51 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:51] "GET /socket.io/?EIO=4&transport=polling&t=e0ess9wp&sid=C18CrSifHGP8BpkeAAAE HTTP/1.1" 200 -
19:40:51 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:51] "POST /socket.io/?EIO=4&transport=polling&t=e0et0qa7&sid=C18CrSifHGP8BpkeAAAE HTTP/1.1" 200 -
19:40:51 - werkzeug - INFO - 127.0.0.1 - - [07/Jul/2025 19:40:51] "POST /socket.io/?EIO=4&transport=polling&t=e0et3xhl&sid=C18CrSifHGP8BpkeAAAE HTTP/1.1" 200 -
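One detail worth noting in this log: the scheduler and the VLC music player report starting several times, and the scheduler warns that it is already running. That pattern usually comes from create_app() side effects executing in both the Werkzeug reloader's watcher process and the serving child. A common guard, sketched below under that assumption (not code from this changeset):

# Sketch: start background services only in the process that serves requests,
# so the Werkzeug reloader does not initialize them twice in debug mode.
import os


def should_start_services(debug: bool) -> bool:
    if not debug:
        return True
    # Werkzeug sets WERKZEUG_RUN_MAIN in the reloaded child, not in the watcher parent.
    return os.environ.get("WERKZEUG_RUN_MAIN") == "true"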

main.py (17 lines changed)

@@ -1,15 +1,26 @@
+import logging
+
 from dotenv import load_dotenv
 
-from app import create_app
+from app import create_app, socketio
 
 # Load environment variables from .env file
 load_dotenv()
 
+# Configure logging
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+    datefmt="%H:%M:%S",
+)
+
 
 def main() -> None:
-    """Run the Flask application."""
+    """Run the Flask application with SocketIO."""
     app = create_app()
-    app.run(debug=True, host="0.0.0.0", port=5000)
+    socketio.run(
+        app, debug=True, host="127.0.0.1", port=5000, allow_unsafe_werkzeug=True
+    )
 
 
 if __name__ == "__main__":
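The switch from app.run() to socketio.run() hands the development server over to Flask-SocketIO so the Socket.IO polling traffic seen in backend.log is handled, and allow_unsafe_werkzeug=True is what lets it keep using Werkzeug in this debug setup. This relies on app/__init__.py exposing a module-level socketio object; a sketch of that assumed layout (app/__init__.py is not shown in this diff):

# Assumed shape of app/__init__.py - not part of this diff.
from flask import Flask
from flask_socketio import SocketIO

socketio = SocketIO()  # created unbound so main.py can import it


def create_app() -> Flask:
    app = Flask(__name__)
    # ... config, blueprints, other extensions ...
    socketio.init_app(app)  # bind Socket.IO to this app instance
    return app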

pyproject.toml

@@ -13,15 +13,17 @@ dependencies = [
"flask-cors==6.0.1", "flask-cors==6.0.1",
"flask-jwt-extended==4.7.1", "flask-jwt-extended==4.7.1",
"flask-migrate==4.1.0", "flask-migrate==4.1.0",
"flask-socketio==5.5.1",
"flask-sqlalchemy==3.1.1", "flask-sqlalchemy==3.1.1",
"pydub==0.25.1",
"python-dotenv==1.1.1", "python-dotenv==1.1.1",
"python-vlc>=3.0.21203",
"requests==2.32.4", "requests==2.32.4",
"werkzeug==3.1.3", "werkzeug==3.1.3",
"yt-dlp>=2025.6.30",
] ]
[dependency-groups] [dependency-groups]
dev = ["black==25.1.0", "pytest==8.4.1", "ruff==0.12.1"] dev = ["black==25.1.0", "pytest==8.4.1", "ruff==0.12.2"]
[tool.black] [tool.black]
line-length = 80 line-length = 80
@@ -29,7 +31,4 @@ line-length = 80
[tool.ruff] [tool.ruff]
line-length = 80 line-length = 80
lint.select = ["ALL"] lint.select = ["ALL"]
lint.ignore = [ lint.ignore = ["D100", "D104"]
"D100", # Missing docstring in public module
"D104", # Missing docstring in public package
]

uv.lock (generated file, 155 lines changed)

@@ -40,6 +40,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/84/29/587c189bbab1ccc8c86a03a5d0e13873df916380ef1be461ebe6acebf48d/authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d", size = 239981 }, { url = "https://files.pythonhosted.org/packages/84/29/587c189bbab1ccc8c86a03a5d0e13873df916380ef1be461ebe6acebf48d/authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d", size = 239981 },
] ]
[[package]]
name = "bidict"
version = "0.23.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764 },
]
[[package]]
name = "black"
version = "25.1.0"
@@ -276,6 +285,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/c4/3f329b23d769fe7628a5fc57ad36956f1fb7132cf8837be6da762b197327/Flask_Migrate-4.1.0-py3-none-any.whl", hash = "sha256:24d8051af161782e0743af1b04a152d007bad9772b2bca67b7ec1e8ceeb3910d", size = 21237 }, { url = "https://files.pythonhosted.org/packages/d2/c4/3f329b23d769fe7628a5fc57ad36956f1fb7132cf8837be6da762b197327/Flask_Migrate-4.1.0-py3-none-any.whl", hash = "sha256:24d8051af161782e0743af1b04a152d007bad9772b2bca67b7ec1e8ceeb3910d", size = 21237 },
] ]
[[package]]
name = "flask-socketio"
version = "5.5.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "flask" },
{ name = "python-socketio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d1/1f/54d3de4982df695682af99c65d4b89f8a46fe6739780c5a68690195835a0/flask_socketio-5.5.1.tar.gz", hash = "sha256:d946c944a1074ccad8e99485a6f5c79bc5789e3ea4df0bb9c864939586c51ec4", size = 37401 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/47/38/1b75b3ba3452860211ec87710f9854112911a436ee4d155533e0b83b5cd9/Flask_SocketIO-5.5.1-py3-none-any.whl", hash = "sha256:35a50166db44d055f68021d6ec32cb96f1f925cd82de4504314be79139ea846f", size = 18259 },
]
[[package]]
name = "flask-sqlalchemy"
version = "3.1.1"
@@ -331,6 +353,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236 }, { url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236 },
] ]
[[package]]
name = "h11"
version = "0.16.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 },
]
[[package]]
name = "idna"
version = "3.10"
@@ -474,15 +505,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 },
] ]
[[package]]
name = "pydub"
version = "0.25.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fe/9a/e6bca0eed82db26562c73b5076539a4a08d3cffd19c3cc5913a3e61145fd/pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f", size = 38326 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a6/53/d78dc063216e62fc55f6b2eebb447f6a4b0a59f55c8406376f76bf959b08/pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6", size = 32327 },
]
[[package]]
name = "pygments"
version = "2.19.2"
@@ -526,6 +548,40 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556 }, { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556 },
] ]
[[package]]
name = "python-engineio"
version = "4.12.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "simple-websocket" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/0b/67295279b66835f9fa7a491650efcd78b20321c127036eef62c11a31e028/python_engineio-4.12.2.tar.gz", hash = "sha256:e7e712ffe1be1f6a05ee5f951e72d434854a32fcfc7f6e4d9d3cae24ec70defa", size = 91677 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0c/fa/df59acedf7bbb937f69174d00f921a7b93aa5a5f5c17d05296c814fff6fc/python_engineio-4.12.2-py3-none-any.whl", hash = "sha256:8218ab66950e179dfec4b4bbb30aecf3f5d86f5e58e6fc1aa7fde2c698b2804f", size = 59536 },
]
[[package]]
name = "python-socketio"
version = "5.13.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "bidict" },
{ name = "python-engineio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/21/1a/396d50ccf06ee539fa758ce5623b59a9cb27637fc4b2dc07ed08bf495e77/python_socketio-5.13.0.tar.gz", hash = "sha256:ac4e19a0302ae812e23b712ec8b6427ca0521f7c582d6abb096e36e24a263029", size = 121125 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/32/b4fb8585d1be0f68bde7e110dffbcf354915f77ad8c778563f0ad9655c02/python_socketio-5.13.0-py3-none-any.whl", hash = "sha256:51f68d6499f2df8524668c24bcec13ba1414117cfb3a90115c559b601ab10caf", size = 77800 },
]
[[package]]
name = "python-vlc"
version = "3.0.21203"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4b/5b/f9ce6f0c9877b6fe5eafbade55e0dcb6b2b30f1c2c95837aef40e390d63b/python_vlc-3.0.21203.tar.gz", hash = "sha256:52d0544b276b11e58b6c0b748c3e0518f94f74b1b4cd328c83a59eacabead1ec", size = 162211 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5b/ee/7d76eb3b50ccb1397621f32ede0fb4d17aa55a9aa2251bc34e6b9929fdce/python_vlc-3.0.21203-py3-none-any.whl", hash = "sha256:1613451a31b692ec276296ceeae0c0ba82bfc2d094dabf9aceb70f58944a6320", size = 87651 },
]
[[package]]
name = "requests"
version = "2.32.4"
@@ -543,27 +599,27 @@ wheels = [
[[package]]
name = "ruff"
-version = "0.12.1"
+version = "0.12.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/97/38/796a101608a90494440856ccfb52b1edae90de0b817e76bfade66b12d320/ruff-0.12.1.tar.gz", hash = "sha256:806bbc17f1104fd57451a98a58df35388ee3ab422e029e8f5cf30aa4af2c138c", size = 4413426 }
+sdist = { url = "https://files.pythonhosted.org/packages/6c/3d/d9a195676f25d00dbfcf3cf95fdd4c685c497fcfa7e862a44ac5e4e96480/ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e", size = 4432239 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/06/bf/3dba52c1d12ab5e78d75bd78ad52fb85a6a1f29cc447c2423037b82bed0d/ruff-0.12.1-py3-none-linux_armv6l.whl", hash = "sha256:6013a46d865111e2edb71ad692fbb8262e6c172587a57c0669332a449384a36b", size = 10305649 }, { url = "https://files.pythonhosted.org/packages/74/b6/2098d0126d2d3318fd5bec3ad40d06c25d377d95749f7a0c5af17129b3b1/ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be", size = 10369761 },
{ url = "https://files.pythonhosted.org/packages/8c/65/dab1ba90269bc8c81ce1d499a6517e28fe6f87b2119ec449257d0983cceb/ruff-0.12.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b3f75a19e03a4b0757d1412edb7f27cffb0c700365e9d6b60bc1b68d35bc89e0", size = 11120201 }, { url = "https://files.pythonhosted.org/packages/b1/4b/5da0142033dbe155dc598cfb99262d8ee2449d76920ea92c4eeb9547c208/ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e", size = 11155659 },
{ url = "https://files.pythonhosted.org/packages/3f/3e/2d819ffda01defe857fa2dd4cba4d19109713df4034cc36f06bbf582d62a/ruff-0.12.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9a256522893cb7e92bb1e1153283927f842dea2e48619c803243dccc8437b8be", size = 10466769 }, { url = "https://files.pythonhosted.org/packages/3e/21/967b82550a503d7c5c5c127d11c935344b35e8c521f52915fc858fb3e473/ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc", size = 10537769 },
{ url = "https://files.pythonhosted.org/packages/63/37/bde4cf84dbd7821c8de56ec4ccc2816bce8125684f7b9e22fe4ad92364de/ruff-0.12.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:069052605fe74c765a5b4272eb89880e0ff7a31e6c0dbf8767203c1fbd31c7ff", size = 10660902 }, { url = "https://files.pythonhosted.org/packages/33/91/00cff7102e2ec71a4890fb7ba1803f2cdb122d82787c7d7cf8041fe8cbc1/ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922", size = 10717602 },
{ url = "https://files.pythonhosted.org/packages/0e/3a/390782a9ed1358c95e78ccc745eed1a9d657a537e5c4c4812fce06c8d1a0/ruff-0.12.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a684f125a4fec2d5a6501a466be3841113ba6847827be4573fddf8308b83477d", size = 10167002 }, { url = "https://files.pythonhosted.org/packages/9b/eb/928814daec4e1ba9115858adcda44a637fb9010618721937491e4e2283b8/ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b", size = 10198772 },
{ url = "https://files.pythonhosted.org/packages/6d/05/f2d4c965009634830e97ffe733201ec59e4addc5b1c0efa035645baa9e5f/ruff-0.12.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdecdef753bf1e95797593007569d8e1697a54fca843d78f6862f7dc279e23bd", size = 11751522 }, { url = "https://files.pythonhosted.org/packages/50/fa/f15089bc20c40f4f72334f9145dde55ab2b680e51afb3b55422effbf2fb6/ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d", size = 11845173 },
{ url = "https://files.pythonhosted.org/packages/35/4e/4bfc519b5fcd462233f82fc20ef8b1e5ecce476c283b355af92c0935d5d9/ruff-0.12.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:70d52a058c0e7b88b602f575d23596e89bd7d8196437a4148381a3f73fcd5010", size = 12520264 }, { url = "https://files.pythonhosted.org/packages/43/9f/1f6f98f39f2b9302acc161a4a2187b1e3a97634fe918a8e731e591841cf4/ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1", size = 12553002 },
{ url = "https://files.pythonhosted.org/packages/85/b2/7756a6925da236b3a31f234b4167397c3e5f91edb861028a631546bad719/ruff-0.12.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84d0a69d1e8d716dfeab22d8d5e7c786b73f2106429a933cee51d7b09f861d4e", size = 12133882 }, { url = "https://files.pythonhosted.org/packages/d8/70/08991ac46e38ddd231c8f4fd05ef189b1b94be8883e8c0c146a025c20a19/ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4", size = 12171330 },
{ url = "https://files.pythonhosted.org/packages/dd/00/40da9c66d4a4d51291e619be6757fa65c91b92456ff4f01101593f3a1170/ruff-0.12.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6cc32e863adcf9e71690248607ccdf25252eeeab5193768e6873b901fd441fed", size = 11608941 }, { url = "https://files.pythonhosted.org/packages/88/a9/5a55266fec474acfd0a1c73285f19dd22461d95a538f29bba02edd07a5d9/ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9", size = 11774717 },
{ url = "https://files.pythonhosted.org/packages/91/e7/f898391cc026a77fbe68dfea5940f8213622474cb848eb30215538a2dadf/ruff-0.12.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fd49a4619f90d5afc65cf42e07b6ae98bb454fd5029d03b306bd9e2273d44cc", size = 11602887 }, { url = "https://files.pythonhosted.org/packages/87/e5/0c270e458fc73c46c0d0f7cf970bb14786e5fdb88c87b5e423a4bd65232b/ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da", size = 11646659 },
{ url = "https://files.pythonhosted.org/packages/f6/02/0891872fc6aab8678084f4cf8826f85c5d2d24aa9114092139a38123f94b/ruff-0.12.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ed5af6aaaea20710e77698e2055b9ff9b3494891e1b24d26c07055459bb717e9", size = 10521742 }, { url = "https://files.pythonhosted.org/packages/b7/b6/45ab96070c9752af37f0be364d849ed70e9ccede07675b0ec4e3ef76b63b/ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce", size = 10604012 },
{ url = "https://files.pythonhosted.org/packages/2a/98/d6534322c74a7d47b0f33b036b2498ccac99d8d8c40edadb552c038cecf1/ruff-0.12.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:801d626de15e6bf988fbe7ce59b303a914ff9c616d5866f8c79eb5012720ae13", size = 10149909 }, { url = "https://files.pythonhosted.org/packages/86/91/26a6e6a424eb147cc7627eebae095cfa0b4b337a7c1c413c447c9ebb72fd/ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d", size = 10176799 },
{ url = "https://files.pythonhosted.org/packages/34/5c/9b7ba8c19a31e2b6bd5e31aa1e65b533208a30512f118805371dbbbdf6a9/ruff-0.12.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2be9d32a147f98a1972c1e4df9a6956d612ca5f5578536814372113d09a27a6c", size = 11136005 }, { url = "https://files.pythonhosted.org/packages/f5/0c/9f344583465a61c8918a7cda604226e77b2c548daf8ef7c2bfccf2b37200/ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04", size = 11241507 },
{ url = "https://files.pythonhosted.org/packages/dc/34/9bbefa4d0ff2c000e4e533f591499f6b834346025e11da97f4ded21cb23e/ruff-0.12.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:49b7ce354eed2a322fbaea80168c902de9504e6e174fd501e9447cad0232f9e6", size = 11648579 }, { url = "https://files.pythonhosted.org/packages/1c/b7/99c34ded8fb5f86c0280278fa89a0066c3760edc326e935ce0b1550d315d/ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342", size = 11717609 },
{ url = "https://files.pythonhosted.org/packages/6f/1c/20cdb593783f8f411839ce749ec9ae9e4298c2b2079b40295c3e6e2089e1/ruff-0.12.1-py3-none-win32.whl", hash = "sha256:d973fa626d4c8267848755bd0414211a456e99e125dcab147f24daa9e991a245", size = 10519495 }, { url = "https://files.pythonhosted.org/packages/51/de/8589fa724590faa057e5a6d171e7f2f6cffe3287406ef40e49c682c07d89/ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a", size = 10523823 },
{ url = "https://files.pythonhosted.org/packages/cf/56/7158bd8d3cf16394928f47c637d39a7d532268cd45220bdb6cd622985760/ruff-0.12.1-py3-none-win_amd64.whl", hash = "sha256:9e1123b1c033f77bd2590e4c1fe7e8ea72ef990a85d2484351d408224d603013", size = 11547485 }, { url = "https://files.pythonhosted.org/packages/94/47/8abf129102ae4c90cba0c2199a1a9b0fa896f6f806238d6f8c14448cc748/ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639", size = 11629831 },
{ url = "https://files.pythonhosted.org/packages/91/d0/6902c0d017259439d6fd2fd9393cea1cfe30169940118b007d5e0ea7e954/ruff-0.12.1-py3-none-win_arm64.whl", hash = "sha256:78ad09a022c64c13cc6077707f036bab0fac8cd7088772dcd1e5be21c5002efc", size = 10691209 }, { url = "https://files.pythonhosted.org/packages/e2/1f/72d2946e3cc7456bb837e88000eb3437e55f80db339c840c04015a11115d/ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12", size = 10735334 },
]
[[package]]
@@ -578,11 +634,13 @@ dependencies = [
{ name = "flask-cors" }, { name = "flask-cors" },
{ name = "flask-jwt-extended" }, { name = "flask-jwt-extended" },
{ name = "flask-migrate" }, { name = "flask-migrate" },
{ name = "flask-socketio" },
{ name = "flask-sqlalchemy" }, { name = "flask-sqlalchemy" },
{ name = "pydub" },
{ name = "python-dotenv" }, { name = "python-dotenv" },
{ name = "python-vlc" },
{ name = "requests" }, { name = "requests" },
{ name = "werkzeug" }, { name = "werkzeug" },
{ name = "yt-dlp" },
]
[package.dev-dependencies]
@@ -601,18 +659,32 @@ requires-dist = [
{ name = "flask-cors", specifier = "==6.0.1" }, { name = "flask-cors", specifier = "==6.0.1" },
{ name = "flask-jwt-extended", specifier = "==4.7.1" }, { name = "flask-jwt-extended", specifier = "==4.7.1" },
{ name = "flask-migrate", specifier = "==4.1.0" }, { name = "flask-migrate", specifier = "==4.1.0" },
{ name = "flask-socketio", specifier = "==5.5.1" },
{ name = "flask-sqlalchemy", specifier = "==3.1.1" }, { name = "flask-sqlalchemy", specifier = "==3.1.1" },
{ name = "pydub", specifier = "==0.25.1" },
{ name = "python-dotenv", specifier = "==1.1.1" }, { name = "python-dotenv", specifier = "==1.1.1" },
{ name = "python-vlc", specifier = ">=3.0.21203" },
{ name = "requests", specifier = "==2.32.4" }, { name = "requests", specifier = "==2.32.4" },
{ name = "werkzeug", specifier = "==3.1.3" }, { name = "werkzeug", specifier = "==3.1.3" },
{ name = "yt-dlp", specifier = ">=2025.6.30" },
]
[package.metadata.requires-dev]
dev = [
{ name = "black", specifier = "==25.1.0" },
{ name = "pytest", specifier = "==8.4.1" },
-{ name = "ruff", specifier = "==0.12.1" },
+{ name = "ruff", specifier = "==0.12.2" },
]
[[package]]
name = "simple-websocket"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "wsproto" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b0/d4/bfa032f961103eba93de583b161f0e6a5b63cebb8f2c7d0c6e6efe1e3d2e/simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4", size = 17300 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/52/59/0782e51887ac6b07ffd1570e0364cf901ebc36345fea669969d2084baebb/simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c", size = 13842 },
] ]
[[package]]
@@ -694,3 +766,24 @@ sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2
wheels = [
{ url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498 },
]
[[package]]
name = "wsproto"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226 },
]
[[package]]
name = "yt-dlp"
version = "2025.6.30"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/23/9c/ff64c2fed7909f43a9a0aedb7395c65404e71c2439198764685a6e3b3059/yt_dlp-2025.6.30.tar.gz", hash = "sha256:6d0ae855c0a55bfcc28dffba804ec8525b9b955d34a41191a1561a4cec03d8bd", size = 3034364 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/41/2f048ae3f6d0fa2e59223f08ba5049dbcdac628b0a9f9deac722dd9260a5/yt_dlp-2025.6.30-py3-none-any.whl", hash = "sha256:541becc29ed7b7b3a08751c0a66da4b7f8ee95cb81066221c78e83598bc3d1f3", size = 3279333 },
]