Refactor OAuth provider linking and unlinking logic into a dedicated service; enhance error handling and logging throughout the application; improve sound management and scanning services with better file handling and unique naming; implement centralized error and logging services for consistent API responses and application-wide logging configuration.
This commit is contained in:
@@ -46,7 +46,7 @@ class CreditService:
|
||||
for user in users:
|
||||
if not user.plan:
|
||||
logger.warning(
|
||||
f"User {user.email} has no plan assigned, skipping"
|
||||
f"User {user.email} has no plan assigned, skipping",
|
||||
)
|
||||
continue
|
||||
|
||||
@@ -57,7 +57,8 @@ class CreditService:
|
||||
|
||||
# Add daily credits but don't exceed maximum
|
||||
new_credits = min(
|
||||
current_credits + plan_daily_credits, max_credits
|
||||
current_credits + plan_daily_credits,
|
||||
max_credits,
|
||||
)
|
||||
credits_added = new_credits - current_credits
|
||||
|
||||
|
||||
@@ -188,10 +188,12 @@ def require_credits(credits_needed: int):
|
||||
# Emit credits changed event via SocketIO
|
||||
try:
|
||||
from app.services.socketio_service import socketio_service
|
||||
|
||||
socketio_service.emit_credits_changed(user.id, user.credits)
|
||||
except Exception as e:
|
||||
# Don't fail the request if SocketIO emission fails
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.warning(f"Failed to emit credits_changed event: {e}")
|
||||
|
||||
|
||||
133
app/services/error_handling_service.py
Normal file
133
app/services/error_handling_service.py
Normal file
@@ -0,0 +1,133 @@
|
||||
"""Centralized error handling service for consistent API responses."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from flask import jsonify
|
||||
|
||||
|
||||
class ErrorHandlingService:
|
||||
"""Service for standardized error handling and responses."""
|
||||
|
||||
@staticmethod
|
||||
def handle_validation_error(error: ValueError) -> tuple[Any, int]:
|
||||
"""Handle validation errors consistently."""
|
||||
error_str = str(error)
|
||||
|
||||
# Map common validation errors to appropriate HTTP status codes
|
||||
status_code = 400
|
||||
if "not found" in error_str.lower():
|
||||
status_code = 404
|
||||
elif (
|
||||
"not authorized" in error_str.lower()
|
||||
or "permission" in error_str.lower()
|
||||
):
|
||||
status_code = 403
|
||||
elif (
|
||||
"already exists" in error_str.lower()
|
||||
or "already linked" in error_str.lower()
|
||||
):
|
||||
status_code = 409
|
||||
elif (
|
||||
"not configured" in error_str.lower()
|
||||
or "cannot unlink" in error_str.lower()
|
||||
):
|
||||
status_code = 400
|
||||
elif "not deletable" in error_str.lower():
|
||||
status_code = 403
|
||||
|
||||
return jsonify({"error": error_str}), status_code
|
||||
|
||||
@staticmethod
|
||||
def handle_generic_error(error: Exception) -> tuple[Any, int]:
|
||||
"""Handle generic exceptions with 500 status."""
|
||||
return jsonify({"error": str(error)}), 500
|
||||
|
||||
@staticmethod
|
||||
def handle_service_result(result: dict) -> tuple[Any, int]:
|
||||
"""Handle service method results that return success/error dictionaries."""
|
||||
if result.get("success"):
|
||||
return jsonify(result), 200
|
||||
return jsonify(result), 400
|
||||
|
||||
@staticmethod
|
||||
def create_success_response(
|
||||
message: str,
|
||||
data: dict = None,
|
||||
status_code: int = 200,
|
||||
) -> tuple[Any, int]:
|
||||
"""Create a standardized success response."""
|
||||
response = {"message": message}
|
||||
if data:
|
||||
response.update(data)
|
||||
return jsonify(response), status_code
|
||||
|
||||
@staticmethod
|
||||
def create_error_response(
|
||||
message: str,
|
||||
status_code: int = 400,
|
||||
details: dict = None,
|
||||
) -> tuple[Any, int]:
|
||||
"""Create a standardized error response."""
|
||||
response = {"error": message}
|
||||
if details:
|
||||
response.update(details)
|
||||
return jsonify(response), status_code
|
||||
|
||||
@staticmethod
|
||||
def handle_auth_error(error_type: str) -> tuple[Any, int]:
|
||||
"""Handle common authentication errors."""
|
||||
auth_errors = {
|
||||
"user_not_authenticated": ("User not authenticated", 401),
|
||||
"user_not_found": ("User not found", 404),
|
||||
"invalid_credentials": ("Invalid credentials", 401),
|
||||
"account_disabled": ("Account is disabled", 401),
|
||||
"insufficient_credits": ("Insufficient credits", 402),
|
||||
"admin_required": ("Admin privileges required", 403),
|
||||
}
|
||||
|
||||
if error_type in auth_errors:
|
||||
message, status = auth_errors[error_type]
|
||||
return jsonify({"error": message}), status
|
||||
|
||||
return jsonify({"error": "Authentication error"}), 401
|
||||
|
||||
@staticmethod
|
||||
def handle_file_operation_error(
|
||||
operation: str, error: Exception
|
||||
) -> tuple[Any, int]:
|
||||
"""Handle file operation errors consistently."""
|
||||
error_message = f"Failed to {operation}: {error!s}"
|
||||
|
||||
# Check for specific file operation errors
|
||||
if (
|
||||
"not found" in str(error).lower()
|
||||
or "no such file" in str(error).lower()
|
||||
):
|
||||
return jsonify({"error": f"File not found during {operation}"}), 404
|
||||
if "permission" in str(error).lower():
|
||||
return jsonify(
|
||||
{"error": f"Permission denied during {operation}"}
|
||||
), 403
|
||||
return jsonify({"error": error_message}), 500
|
||||
|
||||
@staticmethod
|
||||
def wrap_service_call(service_func, *args, **kwargs) -> tuple[Any, int]:
|
||||
"""Wrap service calls with standardized error handling."""
|
||||
try:
|
||||
result = service_func(*args, **kwargs)
|
||||
|
||||
# If result is a dictionary with success/error structure
|
||||
if isinstance(result, dict) and "success" in result:
|
||||
return ErrorHandlingService.handle_service_result(result)
|
||||
|
||||
# If result is a simple dictionary (like user data)
|
||||
if isinstance(result, dict):
|
||||
return jsonify(result), 200
|
||||
|
||||
# For other types, assume success
|
||||
return jsonify({"result": result}), 200
|
||||
|
||||
except ValueError as e:
|
||||
return ErrorHandlingService.handle_validation_error(e)
|
||||
except Exception as e:
|
||||
return ErrorHandlingService.handle_generic_error(e)
|
||||
136
app/services/logging_service.py
Normal file
136
app/services/logging_service.py
Normal file
@@ -0,0 +1,136 @@
|
||||
"""Centralized logging service for the application."""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
|
||||
class LoggingService:
|
||||
"""Service for configuring and managing application logging."""
|
||||
|
||||
@staticmethod
|
||||
def setup_logging(
|
||||
level: str = "INFO",
|
||||
format_string: str | None = None,
|
||||
) -> None:
|
||||
"""Setup application-wide logging configuration."""
|
||||
if format_string is None:
|
||||
format_string = (
|
||||
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||||
)
|
||||
|
||||
# Configure root logger
|
||||
logging.basicConfig(
|
||||
level=getattr(logging, level.upper()),
|
||||
format=format_string,
|
||||
handlers=[
|
||||
logging.StreamHandler(sys.stdout),
|
||||
],
|
||||
)
|
||||
|
||||
# Set specific logger levels for third-party libraries
|
||||
logging.getLogger("werkzeug").setLevel(logging.WARNING)
|
||||
logging.getLogger("urllib3").setLevel(logging.WARNING)
|
||||
logging.getLogger("requests").setLevel(logging.WARNING)
|
||||
|
||||
@staticmethod
|
||||
def get_logger(name: str) -> logging.Logger:
|
||||
"""Get a logger instance for a specific module."""
|
||||
return logging.getLogger(name)
|
||||
|
||||
@staticmethod
|
||||
def log_operation_start(logger: logging.Logger, operation: str) -> None:
|
||||
"""Log the start of an operation."""
|
||||
logger.info(f"Starting {operation}")
|
||||
|
||||
@staticmethod
|
||||
def log_operation_success(
|
||||
logger: logging.Logger,
|
||||
operation: str,
|
||||
details: str | None = None,
|
||||
) -> None:
|
||||
"""Log successful completion of an operation."""
|
||||
message = f"Successfully completed {operation}"
|
||||
if details:
|
||||
message += f" - {details}"
|
||||
logger.info(message)
|
||||
|
||||
@staticmethod
|
||||
def log_operation_error(
|
||||
logger: logging.Logger,
|
||||
operation: str,
|
||||
error: Exception,
|
||||
) -> None:
|
||||
"""Log an error during an operation."""
|
||||
logger.error(f"Error during {operation}: {error}")
|
||||
|
||||
@staticmethod
|
||||
def log_validation_error(
|
||||
logger: logging.Logger,
|
||||
field: str,
|
||||
value: str,
|
||||
reason: str,
|
||||
) -> None:
|
||||
"""Log validation errors consistently."""
|
||||
logger.warning(f"Validation failed for {field}='{value}': {reason}")
|
||||
|
||||
@staticmethod
|
||||
def log_resource_not_found(
|
||||
logger: logging.Logger,
|
||||
resource_type: str,
|
||||
identifier: str,
|
||||
) -> None:
|
||||
"""Log when a resource is not found."""
|
||||
logger.warning(f"{resource_type} not found: {identifier}")
|
||||
|
||||
@staticmethod
|
||||
def log_resource_created(
|
||||
logger: logging.Logger,
|
||||
resource_type: str,
|
||||
identifier: str,
|
||||
) -> None:
|
||||
"""Log when a resource is created."""
|
||||
logger.info(f"Created {resource_type}: {identifier}")
|
||||
|
||||
@staticmethod
|
||||
def log_resource_updated(
|
||||
logger: logging.Logger,
|
||||
resource_type: str,
|
||||
identifier: str,
|
||||
) -> None:
|
||||
"""Log when a resource is updated."""
|
||||
logger.info(f"Updated {resource_type}: {identifier}")
|
||||
|
||||
@staticmethod
|
||||
def log_resource_deleted(
|
||||
logger: logging.Logger,
|
||||
resource_type: str,
|
||||
identifier: str,
|
||||
) -> None:
|
||||
"""Log when a resource is deleted."""
|
||||
logger.info(f"Deleted {resource_type}: {identifier}")
|
||||
|
||||
@staticmethod
|
||||
def log_user_action(
|
||||
logger: logging.Logger,
|
||||
user_id: str,
|
||||
action: str,
|
||||
resource: str | None = None,
|
||||
) -> None:
|
||||
"""Log user actions for auditing."""
|
||||
message = f"User {user_id} performed action: {action}"
|
||||
if resource:
|
||||
message += f" on {resource}"
|
||||
logger.info(message)
|
||||
|
||||
@staticmethod
|
||||
def log_security_event(
|
||||
logger: logging.Logger,
|
||||
event_type: str,
|
||||
details: str,
|
||||
user_id: str | None = None,
|
||||
) -> None:
|
||||
"""Log security-related events."""
|
||||
message = f"Security event [{event_type}]: {details}"
|
||||
if user_id:
|
||||
message += f" (User: {user_id})"
|
||||
logger.warning(message)
|
||||
108
app/services/oauth_linking_service.py
Normal file
108
app/services/oauth_linking_service.py
Normal file
@@ -0,0 +1,108 @@
|
||||
"""OAuth provider linking service."""
|
||||
|
||||
from authlib.integrations.flask_client import OAuth
|
||||
|
||||
from app.models.user import User
|
||||
from app.models.user_oauth import UserOAuth
|
||||
from app.services.oauth_providers.registry import OAuthProviderRegistry
|
||||
|
||||
|
||||
class OAuthLinkingService:
|
||||
"""Service for linking and unlinking OAuth providers."""
|
||||
|
||||
@staticmethod
|
||||
def link_provider_to_user(
|
||||
provider: str,
|
||||
current_user_id: int,
|
||||
) -> dict:
|
||||
"""Link a new OAuth provider to existing user account."""
|
||||
# Get current user from database
|
||||
user = User.query.get(current_user_id)
|
||||
if not user:
|
||||
raise ValueError("User not found")
|
||||
|
||||
# Get OAuth provider and process callback
|
||||
oauth = OAuth()
|
||||
registry = OAuthProviderRegistry(oauth)
|
||||
oauth_provider = registry.get_provider(provider)
|
||||
|
||||
if not oauth_provider:
|
||||
raise ValueError(f"OAuth provider '{provider}' not configured")
|
||||
|
||||
# Exchange code for token and get user info
|
||||
token = oauth_provider.exchange_code_for_token(None, None)
|
||||
raw_user_info = oauth_provider.get_user_info(token)
|
||||
provider_data = oauth_provider.normalize_user_data(raw_user_info)
|
||||
|
||||
if not provider_data.get("id"):
|
||||
raise ValueError("Failed to get user information from provider")
|
||||
|
||||
# Check if this provider is already linked to another user
|
||||
existing_provider = UserOAuth.find_by_provider_and_id(
|
||||
provider,
|
||||
provider_data["id"],
|
||||
)
|
||||
|
||||
if existing_provider and existing_provider.user_id != user.id:
|
||||
raise ValueError(
|
||||
"This provider account is already linked to another user",
|
||||
)
|
||||
|
||||
# Link the provider to current user
|
||||
UserOAuth.create_or_update(
|
||||
user_id=user.id,
|
||||
provider=provider,
|
||||
provider_id=provider_data["id"],
|
||||
email=provider_data["email"],
|
||||
name=provider_data["name"],
|
||||
picture=provider_data.get("picture"),
|
||||
)
|
||||
|
||||
return {"message": f"{provider.title()} account linked successfully"}
|
||||
|
||||
@staticmethod
|
||||
def unlink_provider_from_user(
|
||||
provider: str,
|
||||
current_user_id: int,
|
||||
) -> dict:
|
||||
"""Unlink an OAuth provider from user account."""
|
||||
from app.database import db
|
||||
|
||||
user = User.query.get(current_user_id)
|
||||
if not user:
|
||||
raise ValueError("User not found")
|
||||
|
||||
# Check if user has more than one provider (prevent locking out)
|
||||
if len(user.oauth_providers) <= 1:
|
||||
raise ValueError("Cannot unlink last authentication provider")
|
||||
|
||||
# Find and remove the provider
|
||||
oauth_provider = user.get_provider(provider)
|
||||
if not oauth_provider:
|
||||
raise ValueError(
|
||||
f"Provider '{provider}' not linked to this account",
|
||||
)
|
||||
|
||||
db.session.delete(oauth_provider)
|
||||
db.session.commit()
|
||||
|
||||
return {"message": f"{provider.title()} account unlinked successfully"}
|
||||
|
||||
@staticmethod
|
||||
def get_user_providers(user_id: int) -> dict:
|
||||
"""Get all OAuth providers linked to a user."""
|
||||
user = User.query.get(user_id)
|
||||
if not user:
|
||||
raise ValueError("User not found")
|
||||
|
||||
return {
|
||||
"providers": [
|
||||
{
|
||||
"provider": oauth.provider,
|
||||
"email": oauth.email,
|
||||
"name": oauth.name,
|
||||
"picture": oauth.picture,
|
||||
}
|
||||
for oauth in user.oauth_providers
|
||||
],
|
||||
}
|
||||
@@ -98,7 +98,7 @@ class SchedulerService:
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"Daily credit refill failed: {result['message']}"
|
||||
f"Daily credit refill failed: {result['message']}",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -122,7 +122,7 @@ class SchedulerService:
|
||||
logger.debug("Sound scan completed: no new files found")
|
||||
else:
|
||||
logger.error(
|
||||
f"Sound scan failed: {result.get('error', 'Unknown error')}"
|
||||
f"Sound scan failed: {result.get('error', 'Unknown error')}",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import logging
|
||||
|
||||
from flask import request
|
||||
from flask_jwt_extended import decode_token
|
||||
from flask_socketio import disconnect, emit, join_room, leave_room
|
||||
|
||||
from app import socketio
|
||||
@@ -41,12 +40,12 @@ class SocketIOService:
|
||||
try:
|
||||
from flask import current_app
|
||||
from flask_jwt_extended import decode_token
|
||||
|
||||
|
||||
# Check if we have the access_token cookie
|
||||
access_token = request.cookies.get("access_token_cookie")
|
||||
if not access_token:
|
||||
return None
|
||||
|
||||
|
||||
# Decode the JWT token manually
|
||||
with current_app.app_context():
|
||||
try:
|
||||
@@ -59,6 +58,7 @@ class SocketIOService:
|
||||
|
||||
# Query database for user data
|
||||
from app.models.user import User
|
||||
|
||||
user = User.query.get(int(current_user_id))
|
||||
if not user or not user.is_active:
|
||||
return None
|
||||
@@ -131,4 +131,4 @@ def handle_disconnect() -> None:
|
||||
|
||||
|
||||
# Export the service instance
|
||||
socketio_service = SocketIOService()
|
||||
socketio_service = SocketIOService()
|
||||
|
||||
137
app/services/sound_management_service.py
Normal file
137
app/services/sound_management_service.py
Normal file
@@ -0,0 +1,137 @@
|
||||
"""Sound management service for admin operations."""
|
||||
|
||||
import os
|
||||
|
||||
from app.database import db
|
||||
from app.models.sound import Sound
|
||||
from app.services.sound_normalizer_service import SoundNormalizerService
|
||||
|
||||
|
||||
class SoundManagementService:
|
||||
"""Service for managing sound files and database operations."""
|
||||
|
||||
@staticmethod
|
||||
def get_sounds_with_file_status(
|
||||
sound_type: str = "SDB",
|
||||
page: int = 1,
|
||||
per_page: int = 50,
|
||||
) -> dict:
|
||||
"""Get paginated sounds with file existence status."""
|
||||
# Validate sound type
|
||||
if sound_type not in ["SDB", "SAY", "STR"]:
|
||||
raise ValueError("Invalid sound type")
|
||||
|
||||
# Get paginated results
|
||||
sounds_query = Sound.query.filter_by(type=sound_type)
|
||||
total = sounds_query.count()
|
||||
|
||||
sounds = (
|
||||
sounds_query.offset((page - 1) * per_page).limit(per_page).all()
|
||||
)
|
||||
|
||||
# Convert to detailed dict format with file status
|
||||
sounds_data = []
|
||||
for sound in sounds:
|
||||
sound_dict = sound.to_dict()
|
||||
sound_dict.update(
|
||||
SoundManagementService._get_file_status(sound),
|
||||
)
|
||||
sounds_data.append(sound_dict)
|
||||
|
||||
return {
|
||||
"sounds": sounds_data,
|
||||
"pagination": {
|
||||
"page": page,
|
||||
"per_page": per_page,
|
||||
"total": total,
|
||||
"pages": (total + per_page - 1) // per_page,
|
||||
},
|
||||
"type": sound_type,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _get_file_status(sound: Sound) -> dict:
|
||||
"""Get file existence status for a sound."""
|
||||
original_path = os.path.join(
|
||||
"sounds",
|
||||
sound.type.lower(),
|
||||
sound.filename,
|
||||
)
|
||||
status = {"original_exists": os.path.exists(original_path)}
|
||||
|
||||
if sound.is_normalized and sound.normalized_filename:
|
||||
normalized_path = os.path.join(
|
||||
"sounds",
|
||||
"normalized",
|
||||
sound.type.lower(),
|
||||
sound.normalized_filename,
|
||||
)
|
||||
status["normalized_exists"] = os.path.exists(normalized_path)
|
||||
else:
|
||||
status["normalized_exists"] = False
|
||||
|
||||
return status
|
||||
|
||||
@staticmethod
|
||||
def delete_sound_with_files(sound_id: int) -> dict:
|
||||
"""Delete a sound and its associated files."""
|
||||
sound = Sound.query.get(sound_id)
|
||||
if not sound:
|
||||
raise ValueError("Sound not found")
|
||||
|
||||
if not sound.is_deletable:
|
||||
raise ValueError("Sound is not deletable")
|
||||
|
||||
errors = []
|
||||
|
||||
# Delete normalized file if exists
|
||||
if sound.is_normalized and sound.normalized_filename:
|
||||
normalized_path = os.path.join(
|
||||
"sounds",
|
||||
"normalized",
|
||||
sound.type.lower(),
|
||||
sound.normalized_filename,
|
||||
)
|
||||
if os.path.exists(normalized_path):
|
||||
try:
|
||||
os.remove(normalized_path)
|
||||
except Exception as e:
|
||||
errors.append(f"Failed to delete normalized file: {e}")
|
||||
|
||||
# Delete original file
|
||||
original_path = os.path.join(
|
||||
"sounds",
|
||||
sound.type.lower(),
|
||||
sound.filename,
|
||||
)
|
||||
if os.path.exists(original_path):
|
||||
try:
|
||||
os.remove(original_path)
|
||||
except Exception as e:
|
||||
errors.append(f"Failed to delete original file: {e}")
|
||||
|
||||
if errors:
|
||||
raise Exception("; ".join(errors))
|
||||
|
||||
# Delete database record
|
||||
sound_name = sound.name
|
||||
db.session.delete(sound)
|
||||
db.session.commit()
|
||||
|
||||
return {
|
||||
"message": f"Sound '{sound_name}' deleted successfully",
|
||||
"sound_id": sound_id,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def normalize_sound(
|
||||
sound_id: int,
|
||||
overwrite: bool = False,
|
||||
two_pass: bool = True,
|
||||
) -> dict:
|
||||
"""Normalize a specific sound."""
|
||||
return SoundNormalizerService.normalize_sound(
|
||||
sound_id,
|
||||
overwrite,
|
||||
two_pass,
|
||||
)
|
||||
@@ -39,7 +39,9 @@ class SoundNormalizerService:
|
||||
|
||||
@staticmethod
|
||||
def normalize_sound(
|
||||
sound_id: int, overwrite: bool = False, two_pass: bool = True
|
||||
sound_id: int,
|
||||
overwrite: bool = False,
|
||||
two_pass: bool = True,
|
||||
) -> dict:
|
||||
"""Normalize a specific sound file using ffmpeg loudnorm.
|
||||
|
||||
@@ -250,7 +252,8 @@ class SoundNormalizerService:
|
||||
logger.debug("Starting first pass (analysis)")
|
||||
|
||||
first_pass_result = SoundNormalizerService._run_first_pass(
|
||||
source_path, params
|
||||
source_path,
|
||||
params,
|
||||
)
|
||||
|
||||
if not first_pass_result["success"]:
|
||||
@@ -262,7 +265,10 @@ class SoundNormalizerService:
|
||||
logger.debug("Starting second pass (normalization)")
|
||||
|
||||
second_pass_result = SoundNormalizerService._run_second_pass(
|
||||
source_path, output_path, params, measured_params
|
||||
source_path,
|
||||
output_path,
|
||||
params,
|
||||
measured_params,
|
||||
)
|
||||
|
||||
if not second_pass_result["success"]:
|
||||
@@ -297,7 +303,8 @@ class SoundNormalizerService:
|
||||
|
||||
@staticmethod
|
||||
def _normalize_with_ffmpeg_single_pass(
|
||||
source_path: str, output_path: str
|
||||
source_path: str,
|
||||
output_path: str,
|
||||
) -> dict:
|
||||
"""Run ffmpeg loudnorm on a single file using single-pass normalization.
|
||||
|
||||
@@ -374,6 +381,7 @@ class SoundNormalizerService:
|
||||
|
||||
Returns:
|
||||
dict: Result with measured parameters and analysis stats
|
||||
|
||||
"""
|
||||
try:
|
||||
# Create ffmpeg input stream
|
||||
@@ -389,7 +397,10 @@ class SoundNormalizerService:
|
||||
|
||||
# Output to null device for analysis
|
||||
output_stream = ffmpeg.output(
|
||||
input_stream, "/dev/null", af=loudnorm_filter, f="null"
|
||||
input_stream,
|
||||
"/dev/null",
|
||||
af=loudnorm_filter,
|
||||
f="null",
|
||||
)
|
||||
|
||||
# Run the first pass
|
||||
@@ -403,7 +414,7 @@ class SoundNormalizerService:
|
||||
|
||||
# Parse measured parameters from JSON output
|
||||
measured_params = SoundNormalizerService._parse_measured_params(
|
||||
stderr_text
|
||||
stderr_text,
|
||||
)
|
||||
|
||||
if not measured_params:
|
||||
@@ -446,6 +457,7 @@ class SoundNormalizerService:
|
||||
|
||||
Returns:
|
||||
dict: Result with normalization stats
|
||||
|
||||
"""
|
||||
try:
|
||||
# Create ffmpeg input stream
|
||||
@@ -506,11 +518,14 @@ class SoundNormalizerService:
|
||||
|
||||
Returns:
|
||||
dict: Parsed measured parameters, empty if parsing fails
|
||||
|
||||
"""
|
||||
try:
|
||||
# Find JSON block in stderr output
|
||||
json_match = re.search(
|
||||
r'\{[^}]*"input_i"[^}]*\}', stderr_output, re.DOTALL
|
||||
r'\{[^}]*"input_i"[^}]*\}',
|
||||
stderr_output,
|
||||
re.DOTALL,
|
||||
)
|
||||
if not json_match:
|
||||
logger.warning("No JSON block found in first pass output")
|
||||
|
||||
@@ -140,76 +140,91 @@ class SoundScannerService:
|
||||
|
||||
@staticmethod
|
||||
def _process_audio_file(file_path: str, base_dir: str) -> dict:
|
||||
"""Process a single audio file and add it to database if new.
|
||||
|
||||
Args:
|
||||
file_path: Full path to the audio file
|
||||
base_dir: Base directory for relative path calculation
|
||||
|
||||
Returns:
|
||||
dict: Processing result with added flag and reason
|
||||
|
||||
"""
|
||||
# Calculate file hash for deduplication
|
||||
"""Process a single audio file and add it to database if new."""
|
||||
file_hash = SoundScannerService._calculate_file_hash(file_path)
|
||||
|
||||
# Get file metadata
|
||||
metadata = SoundScannerService._extract_audio_metadata(file_path)
|
||||
|
||||
# Calculate relative filename from base directory
|
||||
relative_path = Path(file_path).relative_to(Path(base_dir))
|
||||
|
||||
# Check if file already exists in database by hash
|
||||
existing_sound = Sound.find_by_hash(file_hash)
|
||||
if existing_sound:
|
||||
return {
|
||||
"added": False,
|
||||
"reason": f"File already exists as '{existing_sound.name}'",
|
||||
}
|
||||
# Check for existing file by hash (duplicate content)
|
||||
if existing_sound := Sound.find_by_hash(file_hash):
|
||||
return SoundScannerService._handle_duplicate_file(existing_sound)
|
||||
|
||||
# Check if filename already exists in database
|
||||
existing_filename_sound = Sound.find_by_filename(str(relative_path))
|
||||
if existing_filename_sound:
|
||||
# Remove normalized files and clear normalized info
|
||||
SoundScannerService._clear_normalized_files(existing_filename_sound)
|
||||
existing_filename_sound.clear_normalized_info()
|
||||
|
||||
# Update existing sound with new file information
|
||||
existing_filename_sound.update_file_info(
|
||||
filename=str(relative_path),
|
||||
duration=metadata["duration"],
|
||||
size=metadata["size"],
|
||||
hash_value=file_hash,
|
||||
# Check for existing filename (file replacement)
|
||||
if existing_filename_sound := Sound.find_by_filename(
|
||||
str(relative_path)
|
||||
):
|
||||
return SoundScannerService._handle_file_replacement(
|
||||
existing_filename_sound,
|
||||
str(relative_path),
|
||||
metadata,
|
||||
file_hash,
|
||||
)
|
||||
|
||||
return {
|
||||
"added": False,
|
||||
"updated": True,
|
||||
"sound_id": existing_filename_sound.id,
|
||||
"reason": f"Updated existing sound '{existing_filename_sound.name}' with new file data",
|
||||
}
|
||||
|
||||
# Generate sound name from filename (without extension)
|
||||
sound_name = Path(file_path).stem
|
||||
|
||||
# Check if name already exists and make it unique if needed
|
||||
counter = 1
|
||||
original_name = sound_name
|
||||
while Sound.find_by_name(sound_name):
|
||||
sound_name = f"{original_name}_{counter}"
|
||||
counter += 1
|
||||
|
||||
# Create new sound record
|
||||
return SoundScannerService._create_new_sound(
|
||||
file_path,
|
||||
str(relative_path),
|
||||
metadata,
|
||||
file_hash,
|
||||
)
|
||||
|
||||
@staticmethod
def _handle_duplicate_file(existing_sound: "Sound") -> dict:
    """Build the scan result for a file whose content already exists in DB."""
    duplicate_of = existing_sound.name
    return {
        "added": False,
        "reason": f"File already exists as '{duplicate_of}'",
    }
|
||||
|
||||
@staticmethod
def _handle_file_replacement(
    existing_sound: "Sound",
    relative_path: str,
    metadata: dict,
    file_hash: str,
) -> dict:
    """Replace the stored file data for a sound whose filename already exists."""
    # The old normalized output is stale once the source file changes:
    # drop it from disk and from the record.
    SoundScannerService._clear_normalized_files(existing_sound)
    existing_sound.clear_normalized_info()

    # Refresh the record with the replacement file's properties.
    existing_sound.update_file_info(
        filename=relative_path,
        duration=metadata["duration"],
        size=metadata["size"],
        hash_value=file_hash,
    )

    result = {
        "added": False,
        "updated": True,
        "sound_id": existing_sound.id,
    }
    result["reason"] = (
        f"Updated existing sound '{existing_sound.name}' with new file data"
    )
    return result
|
||||
|
||||
@staticmethod
|
||||
def _create_new_sound(
|
||||
file_path: str,
|
||||
relative_path: str,
|
||||
metadata: dict,
|
||||
file_hash: str,
|
||||
) -> dict:
|
||||
"""Create a new sound record in the database."""
|
||||
sound_name = SoundScannerService._generate_unique_sound_name(
|
||||
Path(file_path).stem,
|
||||
)
|
||||
|
||||
sound = Sound.create_sound(
|
||||
sound_type="SDB", # Soundboard type
|
||||
sound_type="SDB",
|
||||
name=sound_name,
|
||||
filename=str(relative_path),
|
||||
filename=relative_path,
|
||||
duration=metadata["duration"],
|
||||
size=metadata["size"],
|
||||
hash_value=file_hash,
|
||||
is_music=False,
|
||||
is_deletable=False,
|
||||
commit=False, # Don't commit individually, let scanner handle transaction
|
||||
commit=False,
|
||||
)
|
||||
|
||||
return {
|
||||
@@ -218,6 +233,18 @@ class SoundScannerService:
|
||||
"reason": "New file added successfully",
|
||||
}
|
||||
|
||||
@staticmethod
def _generate_unique_sound_name(base_name: str) -> str:
    """Return base_name, suffixed with _N when that name is already taken."""
    candidate = base_name
    suffix = 0

    # Probe the database until a free name is found: base, base_1, base_2, ...
    while Sound.find_by_name(candidate):
        suffix += 1
        candidate = f"{base_name}_{suffix}"

    return candidate
|
||||
|
||||
@staticmethod
|
||||
def _calculate_file_hash(file_path: str) -> str:
|
||||
"""Calculate SHA256 hash of file contents."""
|
||||
@@ -249,7 +276,7 @@ class SoundScannerService:
|
||||
logger.info(f"Removed normalized file: {normalized_path}")
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f"Could not remove normalized file {normalized_path}: {e}"
|
||||
f"Could not remove normalized file {normalized_path}: {e}",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
"""VLC service for playing sounds using subprocess."""
|
||||
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from app.database import db
|
||||
from app.models.sound import Sound
|
||||
from app.models.sound_played import SoundPlayed
|
||||
from app.services.logging_service import LoggingService
|
||||
|
||||
logger = LoggingService.get_logger(__name__)
|
||||
|
||||
|
||||
class VLCService:
|
||||
@@ -17,7 +17,7 @@ class VLCService:
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize VLC service."""
|
||||
self.processes: Dict[str, subprocess.Popen] = {}
|
||||
self.processes: dict[str, subprocess.Popen] = {}
|
||||
self.lock = threading.Lock()
|
||||
|
||||
def play_sound(self, sound_id: int, user_id: int | None = None) -> bool:
|
||||
@@ -38,7 +38,9 @@ class VLCService:
|
||||
)
|
||||
else:
|
||||
sound_path = os.path.join(
|
||||
"sounds", "soundboard", sound.filename
|
||||
"sounds",
|
||||
"soundboard",
|
||||
sound.filename,
|
||||
)
|
||||
|
||||
# Check if file exists
|
||||
@@ -73,8 +75,9 @@ class VLCService:
|
||||
with self.lock:
|
||||
self.processes[process_id] = process
|
||||
|
||||
print(
|
||||
f"Started VLC process {process.pid} ({process_id}) for sound {sound.name}. Total processes: {len(self.processes)}"
|
||||
logger.info(
|
||||
f"Started VLC process {process.pid} for sound '{sound.name}'. "
|
||||
f"Total active processes: {len(self.processes)}",
|
||||
)
|
||||
|
||||
# Increment play count
|
||||
@@ -89,7 +92,7 @@ class VLCService:
|
||||
commit=True,
|
||||
)
|
||||
except Exception as e:
|
||||
print(f"Error recording play event: {e}")
|
||||
logger.error(f"Error recording play event: {e}")
|
||||
|
||||
# Schedule cleanup after sound duration
|
||||
threading.Thread(
|
||||
@@ -101,7 +104,9 @@ class VLCService:
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error starting VLC process for sound {sound_id}: {e}")
|
||||
logger.error(
|
||||
f"Error starting VLC process for sound {sound_id}: {e}"
|
||||
)
|
||||
return False
|
||||
|
||||
def _cleanup_after_playback(self, process_id: str, duration: int) -> None:
|
||||
@@ -111,13 +116,13 @@ class VLCService:
|
||||
|
||||
with self.lock:
|
||||
if process_id in self.processes:
|
||||
print(f"Cleaning up process {process_id} after playback")
|
||||
logger.debug(f"Cleaning up process {process_id} after playback")
|
||||
process = self.processes[process_id]
|
||||
|
||||
try:
|
||||
# Check if process is still running
|
||||
if process.poll() is None:
|
||||
print(
|
||||
logger.debug(
|
||||
f"Process {process.pid} still running, terminating"
|
||||
)
|
||||
process.terminate()
|
||||
@@ -125,62 +130,58 @@ class VLCService:
|
||||
try:
|
||||
process.wait(timeout=2)
|
||||
except subprocess.TimeoutExpired:
|
||||
print(
|
||||
logger.debug(
|
||||
f"Process {process.pid} didn't terminate, killing"
|
||||
)
|
||||
process.kill()
|
||||
|
||||
print(f"Successfully cleaned up process {process_id}")
|
||||
logger.debug(
|
||||
f"Successfully cleaned up process {process_id}"
|
||||
)
|
||||
except Exception as e:
|
||||
print(f"Error during cleanup of {process_id}: {e}")
|
||||
logger.warning(f"Error during cleanup of {process_id}: {e}")
|
||||
finally:
|
||||
# Always remove from tracking
|
||||
del self.processes[process_id]
|
||||
print(
|
||||
f"Removed process {process_id}. Remaining processes: {len(self.processes)}"
|
||||
logger.debug(
|
||||
f"Removed process {process_id}. Remaining processes: {len(self.processes)}",
|
||||
)
|
||||
else:
|
||||
print(f"Process {process_id} not found during cleanup")
|
||||
|
||||
def stop_all(self) -> None:
|
||||
"""Stop all playing sounds by killing VLC processes."""
|
||||
with self.lock:
|
||||
processes_copy = dict(self.processes)
|
||||
print(
|
||||
f"Stopping {len(processes_copy)} VLC processes: {list(processes_copy.keys())}"
|
||||
)
|
||||
if processes_copy:
|
||||
logger.info(f"Stopping {len(processes_copy)} VLC processes")
|
||||
|
||||
for process_id, process in processes_copy.items():
|
||||
try:
|
||||
if process.poll() is None: # Process is still running
|
||||
print(
|
||||
f"Terminating process {process.pid} ({process_id})"
|
||||
)
|
||||
logger.debug(f"Terminating process {process.pid}")
|
||||
process.terminate()
|
||||
|
||||
# Give it a moment to terminate gracefully
|
||||
try:
|
||||
process.wait(timeout=1)
|
||||
print(
|
||||
logger.debug(
|
||||
f"Process {process.pid} terminated gracefully"
|
||||
)
|
||||
except subprocess.TimeoutExpired:
|
||||
print(
|
||||
logger.debug(
|
||||
f"Process {process.pid} didn't terminate, killing forcefully"
|
||||
)
|
||||
process.kill()
|
||||
process.wait() # Wait for it to be killed
|
||||
else:
|
||||
print(
|
||||
f"Process {process.pid} ({process_id}) already finished"
|
||||
)
|
||||
logger.debug(f"Process {process.pid} already finished")
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error stopping process {process_id}: {e}")
|
||||
logger.warning(f"Error stopping process {process_id}: {e}")
|
||||
|
||||
# Clear all processes
|
||||
self.processes.clear()
|
||||
print(f"Cleared all processes. Remaining: {len(self.processes)}")
|
||||
if processes_copy:
|
||||
logger.info("All VLC processes stopped")
|
||||
|
||||
def get_playing_count(self) -> int:
|
||||
"""Get number of currently playing sounds."""
|
||||
@@ -201,34 +202,20 @@ class VLCService:
|
||||
"""Force stop all sounds by killing VLC processes aggressively."""
|
||||
with self.lock:
|
||||
stopped_count = len(self.processes)
|
||||
print(f"Force stopping {stopped_count} VLC processes")
|
||||
|
||||
# # Kill all VLC processes aggressively
|
||||
# for process_id, process in list(self.processes.items()):
|
||||
# try:
|
||||
# if process.poll() is None: # Process is still running
|
||||
# print(f"Force killing process {process.pid} ({process_id})")
|
||||
# process.kill()
|
||||
# process.wait() # Wait for it to be killed
|
||||
# print(f"Process {process.pid} killed")
|
||||
# else:
|
||||
# print(f"Process {process.pid} ({process_id}) already finished")
|
||||
|
||||
# except Exception as e:
|
||||
# print(f"Error force-stopping process {process_id}: {e}")
|
||||
if stopped_count > 0:
|
||||
logger.warning(f"Force stopping {stopped_count} VLC processes")
|
||||
|
||||
# Also try to kill any remaining VLC processes system-wide
|
||||
try:
|
||||
subprocess.run(["pkill", "-f", "vlc"], check=False)
|
||||
print("Killed any remaining VLC processes system-wide")
|
||||
logger.info("Killed any remaining VLC processes system-wide")
|
||||
except Exception as e:
|
||||
print(f"Error killing system VLC processes: {e}")
|
||||
logger.error(f"Error killing system VLC processes: {e}")
|
||||
|
||||
# Clear all processes
|
||||
self.processes.clear()
|
||||
print(
|
||||
f"Force stop completed. Processes remaining: {len(self.processes)}"
|
||||
)
|
||||
if stopped_count > 0:
|
||||
logger.info("Force stop completed")
|
||||
return stopped_count
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user