feat: Requests / Add to Library Rework - Unmonitored album default + … (#25)

* feat: Requests / Add to Library Rework - Unmonitored album default + Resilience

* checking for source + refresh album logic

* artist monitoring + auto downloading + various request fixes

* synchronous album requests

* format
This commit is contained in:
Harvey 2026-04-06 23:08:58 +01:00 committed by GitHub
parent a3e0b2f65a
commit 343bafd7f4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
44 changed files with 2360 additions and 271 deletions

View file

@ -112,6 +112,27 @@ backend-test-sync-resume: $(BACKEND_VENV_STAMP) ## Run sync resume-on-failure te
backend-test-audiodb-parallel: $(BACKEND_VENV_STAMP) ## Run AudioDB parallel prewarm tests
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/test_audiodb_parallel.py -v
backend-test-request-queue: $(BACKEND_VENV_STAMP) ## Run MUS-14 request queue tests (dedup, cancel, concurrency)
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/infrastructure/test_request_queue_mus14.py tests/infrastructure/test_queue_persistence.py -v
backend-test-artist-lock: $(BACKEND_VENV_STAMP) ## Run MUS-14 per-artist lock tests
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/repositories/test_album_artist_lock.py -v
backend-test-request-service: $(BACKEND_VENV_STAMP) ## Run request service tests
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/services/test_request_service.py -v
test-mus14-all: backend-test-request-queue backend-test-artist-lock backend-test-request-service ## Run all MUS-14 request system tests
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/repositories/test_lidarr_library_cache.py -v
backend-test-mus15-status-race: $(BACKEND_VENV_STAMP) ## Run MUS-15 status race condition tests
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/test_mus15_status_race.py -v
backend-test-artist-monitoring: $(BACKEND_VENV_STAMP) ## Run MUS-15B artist monitoring tests
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/test_artist_monitoring.py -v
backend-test-album-refresh: $(BACKEND_VENV_STAMP) ## Run album refresh endpoint tests
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/routes/test_album_refresh.py tests/services/test_navidrome_cache_invalidation.py -v
test-audiodb-all: backend-test-audiodb backend-test-audiodb-prewarm backend-test-audiodb-settings backend-test-coverart-audiodb backend-test-audiodb-phase8 backend-test-audiodb-phase9 frontend-test-audiodb-images ## Run every AudioDB test target
test-sync-all: backend-test-sync-watchdog backend-test-sync-resume backend-test-audiodb-parallel ## Run all sync robustness tests

View file

@ -5,10 +5,11 @@ from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, Query, R
from core.exceptions import ClientDisconnectedError
from api.v1.schemas.album import AlbumInfo, AlbumBasicInfo, AlbumTracksInfo, LastFmAlbumEnrichment
from api.v1.schemas.discovery import SimilarAlbumsResponse, MoreByArtistResponse
from core.dependencies import get_album_service, get_album_discovery_service, get_album_enrichment_service
from core.dependencies import get_album_service, get_album_discovery_service, get_album_enrichment_service, get_navidrome_library_service
from services.album_service import AlbumService
from services.album_discovery_service import AlbumDiscoveryService
from services.album_enrichment_service import AlbumEnrichmentService
from services.navidrome_library_service import NavidromeLibraryService
from infrastructure.validators import is_unknown_mbid
from infrastructure.degradation import try_get_degradation_context
from infrastructure.msgspec_fastapi import MsgSpecRoute
@ -44,6 +45,30 @@ async def get_album(
)
@router.post("/{album_id}/refresh", response_model=AlbumBasicInfo)
async def refresh_album(
    album_id: str,
    album_service: AlbumService = Depends(get_album_service),
    navidrome_service: NavidromeLibraryService = Depends(get_navidrome_library_service),
):
    """Drop every cached view of the album, then re-fetch and return basic info.

    Raises 400 for placeholder/unknown MBIDs and for requests the album
    service rejects with ``ValueError``.
    """
    # Reject placeholder IDs up front before touching any cache.
    if is_unknown_mbid(album_id):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid or unknown album ID: {album_id}"
        )
    try:
        # Invalidate the Navidrome-side cache first, then the album service's
        # own caches, so the follow-up read is guaranteed fresh.
        navidrome_service.invalidate_album_cache(album_id)
        await album_service.refresh_album(album_id)
        fresh = await album_service.get_album_basic_info(album_id)
    except ValueError:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid album request"
        )
    return fresh
@router.get("/{album_id}/basic", response_model=AlbumBasicInfo)
async def get_album_basic(
album_id: str,

View file

@ -2,15 +2,15 @@ import logging
from typing import Literal, Optional
from fastapi import APIRouter, Depends, HTTPException, Query, Request, status
from core.exceptions import ClientDisconnectedError
from api.v1.schemas.artist import ArtistInfo, ArtistExtendedInfo, ArtistReleases, LastFmArtistEnrichment
from core.exceptions import ClientDisconnectedError, ExternalServiceError
from api.v1.schemas.artist import ArtistInfo, ArtistExtendedInfo, ArtistReleases, LastFmArtistEnrichment, ArtistMonitoringRequest, ArtistMonitoringResponse, ArtistMonitoringStatus
from api.v1.schemas.discovery import SimilarArtistsResponse, TopSongsResponse, TopAlbumsResponse
from core.dependencies import get_artist_service, get_artist_discovery_service, get_artist_enrichment_service
from services.artist_service import ArtistService
from services.artist_discovery_service import ArtistDiscoveryService
from services.artist_enrichment_service import ArtistEnrichmentService
from infrastructure.validators import is_unknown_mbid
from infrastructure.msgspec_fastapi import MsgSpecRoute
from infrastructure.validators import is_unknown_mbid, validate_mbid
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
from infrastructure.degradation import try_get_degradation_context
import msgspec.structs
@ -150,3 +150,57 @@ async def get_artist_lastfm_enrichment(
if result is None:
return LastFmArtistEnrichment()
return result
@router.get("/{artist_id}/monitoring", response_model=ArtistMonitoringStatus)
async def get_artist_monitoring_status(
    artist_id: str,
    artist_service: ArtistService = Depends(get_artist_service),
):
    """Report whether the artist is in Lidarr and its monitoring flags.

    Best-effort read: backend failures are logged at debug level and reported
    as "not in Lidarr" instead of surfacing an error to the client.
    """
    id_is_valid = True
    try:
        validate_mbid(artist_id)
    except ValueError:
        id_is_valid = False
    if not id_is_valid:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid artist ID",
        )
    try:
        monitoring = await artist_service.get_artist_monitoring_status(artist_id)
    except Exception:
        logger.debug("Failed to fetch monitoring status for %s", artist_id, exc_info=True)
        # Degrade gracefully: treat an unreachable backend as "not monitored".
        monitoring = ArtistMonitoringStatus(in_lidarr=False, monitored=False, auto_download=False)
    return monitoring
@router.put("/{artist_id}/monitoring", response_model=ArtistMonitoringResponse)
async def update_artist_monitoring(
    artist_id: str,
    body: ArtistMonitoringRequest = MsgSpecBody(ArtistMonitoringRequest),
    artist_service: ArtistService = Depends(get_artist_service),
):
    """Set the Lidarr monitoring flags for an artist.

    Returns the flags actually applied — the service's reported values win
    over the requested ones. Maps upstream failures to 502 and any other
    error to 500.
    """
    try:
        validate_mbid(artist_id)
    except ValueError as exc:
        # Chain the original error (PEP 3134) so logs show why validation failed.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid artist MBID format",
        ) from exc
    try:
        result = await artist_service.set_artist_monitoring(
            artist_id, monitored=body.monitored, auto_download=body.auto_download,
        )
        return ArtistMonitoringResponse(
            success=True,
            # Prefer what the service reports; fall back to the requested value.
            monitored=result.get("monitored", body.monitored),
            auto_download=result.get("auto_download", False),
        )
    except ExternalServiceError as exc:
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail="Could not update monitoring. The music server returned an error.",
        ) from exc
    except Exception as exc:
        logger.exception("Failed to update artist monitoring for %s", artist_id)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to update monitoring status",
        ) from exc

View file

@ -1,6 +1,6 @@
import logging
from fastapi import APIRouter, Depends
from api.v1.schemas.request import AlbumRequest, RequestResponse, QueueStatusResponse
from api.v1.schemas.request import AlbumRequest, RequestAcceptedResponse, QueueStatusResponse
from core.dependencies import get_request_service
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
from services.request_service import RequestService
@ -10,16 +10,19 @@ logger = logging.getLogger(__name__)
router = APIRouter(route_class=MsgSpecRoute, prefix="/requests", tags=["requests"])
@router.post("/new", response_model=RequestResponse)
@router.post("/new", response_model=RequestAcceptedResponse, status_code=202)
async def request_album(
album_request: AlbumRequest = MsgSpecBody(AlbumRequest),
request_service: RequestService = Depends(get_request_service)
request_service: RequestService = Depends(get_request_service),
):
return await request_service.request_album(
album_request.musicbrainz_id,
artist=album_request.artist,
album=album_request.album,
year=album_request.year,
artist_mbid=album_request.artist_mbid,
monitor_artist=album_request.monitor_artist,
auto_download_artist=album_request.auto_download_artist,
)

View file

@ -106,6 +106,7 @@ class AdvancedSettings(AppStruct):
audiodb_prewarm_concurrency: int = 4
audiodb_prewarm_delay: float = 0.3
genre_section_ttl: int = 21600
request_concurrency: int = 2
request_history_retention_days: int = 180
ignored_releases_retention_days: int = 365
orphan_cover_demote_interval_hours: int = 24
@ -158,6 +159,7 @@ class AdvancedSettings(AppStruct):
"sync_stall_timeout_minutes": (2, 30),
"sync_max_timeout_hours": (1, 48),
"audiodb_prewarm_concurrency": (1, 8),
"request_concurrency": (1, 5),
"audiodb_prewarm_delay": (0.0, 5.0),
"discover_queue_size": (1, 20),
"discover_queue_ttl": (3600, 604800),
@ -282,6 +284,7 @@ class AdvancedSettingsFrontend(AppStruct):
sync_max_timeout_hours: int = 8
audiodb_prewarm_concurrency: int = 4
audiodb_prewarm_delay: float = 0.3
request_concurrency: int = 2
artist_discovery_precache_concurrency: int = 3
def __post_init__(self) -> None:
@ -389,6 +392,7 @@ class AdvancedSettingsFrontend(AppStruct):
"sync_stall_timeout_minutes": (2, 30),
"sync_max_timeout_hours": (1, 48),
"audiodb_prewarm_concurrency": (1, 8),
"request_concurrency": (1, 5),
"audiodb_prewarm_delay": (0.0, 5.0),
"artist_discovery_precache_concurrency": (1, 8),
}
@ -474,6 +478,7 @@ class AdvancedSettingsFrontend(AppStruct):
sync_max_timeout_hours=settings.sync_max_timeout_hours,
audiodb_prewarm_concurrency=settings.audiodb_prewarm_concurrency,
audiodb_prewarm_delay=settings.audiodb_prewarm_delay,
request_concurrency=settings.request_concurrency,
artist_discovery_precache_concurrency=settings.artist_discovery_precache_concurrency,
)
@ -555,5 +560,6 @@ class AdvancedSettingsFrontend(AppStruct):
sync_max_timeout_hours=self.sync_max_timeout_hours,
audiodb_prewarm_concurrency=self.audiodb_prewarm_concurrency,
audiodb_prewarm_delay=self.audiodb_prewarm_delay,
request_concurrency=self.request_concurrency,
artist_discovery_precache_concurrency=self.artist_discovery_precache_concurrency,
)

View file

@ -34,3 +34,20 @@ class LastFmArtistEnrichment(AppStruct):
playcount: int = 0
similar_artists: list[LastFmSimilarArtistSchema] = []
url: str | None = None
class ArtistMonitoringRequest(AppStruct):
    """Request body for PUT /artists/{artist_id}/monitoring."""
    # Whether the artist should be monitored.
    monitored: bool
    # When True, new releases are also auto-downloaded.
    auto_download: bool = False
class ArtistMonitoringResponse(AppStruct):
    """Response for PUT /artists/{artist_id}/monitoring: flags actually applied."""
    # True when the update was accepted.
    success: bool
    # Monitoring flag after the update.
    monitored: bool
    # Auto-download flag after the update.
    auto_download: bool
class ArtistMonitoringStatus(AppStruct):
    """Current monitoring state for an artist, as reported by the backend."""
    # Whether the artist exists in Lidarr at all.
    in_lidarr: bool
    # Whether the artist is currently monitored.
    monitored: bool
    # Whether new releases are auto-downloaded.
    auto_download: bool

View file

@ -7,14 +7,20 @@ class AlbumRequest(AppStruct):
artist: str | None = None
album: str | None = None
year: int | None = None
artist_mbid: str | None = None
monitor_artist: bool = False
auto_download_artist: bool = False
class RequestResponse(AppStruct):
class RequestAcceptedResponse(AppStruct):
success: bool
message: str
lidarr_response: dict | None = None
musicbrainz_id: str
status: str = "pending"
class QueueStatusResponse(AppStruct):
    """Snapshot of the album-request queue's current load."""
    # Number of jobs currently waiting in the queue.
    queue_size: int
    # True while at least one worker is busy.
    processing: bool
    # Workers actively processing a job right now.
    active_workers: int = 0
    # Configured size of the worker pool.
    max_workers: int = 1

View file

@ -38,7 +38,8 @@ def get_lidarr_repository() -> "LidarrRepository":
settings = get_settings()
cache = get_cache()
http_client = _get_configured_http_client()
return LidarrRepository(settings, http_client, cache)
request_history_store = get_request_history_store()
return LidarrRepository(settings, http_client, cache, request_history_store=request_history_store)
@singleton

View file

@ -95,12 +95,43 @@ def get_album_service() -> "AlbumService":
def get_request_queue() -> "RequestQueue":
from infrastructure.queue.request_queue import RequestQueue
from infrastructure.queue.queue_store import QueueStore
from infrastructure.persistence.request_history import RequestHistoryRecord
from core.config import get_settings
settings = get_settings()
lidarr_repo = get_lidarr_repository()
disk_cache = get_disk_cache()
cover_repo = get_coverart_repository()
memory_cache = get_cache()
library_db = get_library_db()
async def on_queue_import(record: RequestHistoryRecord) -> None:
    """Invalidate caches when the queue worker detects an already-imported album."""
    # Delete every cache entry that could still claim the album is missing.
    invalidations = [
        memory_cache.delete(lidarr_raw_albums_key()),
        memory_cache.clear_prefix(f"{LIDARR_PREFIX}library:"),
        memory_cache.delete(lidarr_requested_mbids_key()),
        memory_cache.delete(f"{ALBUM_INFO_PREFIX}{record.musicbrainz_id}"),
        memory_cache.delete(f"{LIDARR_ALBUM_DETAILS_PREFIX}{record.musicbrainz_id}"),
    ]
    if record.artist_mbid:
        invalidations.append(
            memory_cache.delete(f"{ARTIST_INFO_PREFIX}{record.artist_mbid}")
        )
    # Best-effort: a single failed delete must not abort the rest.
    await asyncio.gather(*invalidations, return_exceptions=True)
    try:
        # Mirror the album into the local library DB, marked monitored,
        # using whatever metadata the history record carries (empty-string
        # fallbacks for missing fields).
        await library_db.upsert_album({
            "mbid": record.musicbrainz_id,
            "artist_mbid": record.artist_mbid or "",
            "artist_name": record.artist_name or "",
            "title": record.album_title or "",
            "year": record.year,
            "cover_url": record.cover_url or "",
            "monitored": True,
        })
    except Exception as ex:  # noqa: BLE001
        logger.warning("Queue import: failed to upsert album %s: %s", record.musicbrainz_id[:8], ex)
    logger.info("Queue import: invalidated caches for album=%s", record.musicbrainz_id[:8])
async def processor(album_mbid: str) -> dict:
result = await lidarr_repo.add_album(album_mbid)
@ -109,6 +140,10 @@ def get_request_queue() -> "RequestQueue":
if payload and isinstance(payload, dict):
is_monitored = payload.get("monitored", False)
# Belt-and-suspenders: prefer structured signal over payload inspection
if not is_monitored:
is_monitored = bool(result.get("monitored"))
if is_monitored:
logger.info(f"Album {album_mbid[:8]}... successfully monitored - promoting cache entries to persistent")
@ -129,10 +164,44 @@ def get_request_queue() -> "RequestQueue":
else:
logger.warning(f"Album {album_mbid[:8]}... added but not monitored - skipping cache promotion")
try:
record = await request_history.async_get_record(album_mbid)
if record and record.monitor_artist and record.artist_mbid:
monitor_new = "all" if record.auto_download_artist else "none"
for attempt in range(2):
try:
await lidarr_repo.update_artist_monitoring(
record.artist_mbid, monitored=True, monitor_new_items=monitor_new,
)
await memory_cache.delete(f"{ARTIST_INFO_PREFIX}{record.artist_mbid}")
logger.info("Applied deferred artist monitoring for %s", record.artist_mbid[:8])
break
except Exception: # noqa: BLE001
if attempt == 0:
await asyncio.sleep(2)
else:
raise
except Exception as e: # noqa: BLE001
logger.warning("Failed to apply deferred artist monitoring for %s: %s", album_mbid[:8], e)
return result
store = QueueStore(db_path=settings.queue_db_path)
return RequestQueue(processor, store=store)
request_history = get_request_history_store()
concurrency = 2
try:
from services.preferences_service import PreferencesService
prefs = PreferencesService(settings)
advanced = prefs.get_advanced_settings()
concurrency = advanced.request_concurrency
except Exception: # noqa: BLE001
pass
return RequestQueue(
processor, store=store, request_history=request_history,
concurrency=concurrency, on_import_callback=on_queue_import,
)
@singleton
@ -202,11 +271,18 @@ def get_requests_page_service() -> "RequestsPageService":
(record.artist_mbid or "?")[:8],
)
request_queue = get_request_queue()
library_service = get_library_service()
async def merged_library_mbids() -> set[str]:
return set(await library_service.get_library_mbids())
return RequestsPageService(
lidarr_repo=lidarr_repo,
request_history=request_history,
library_mbids_fn=lidarr_repo.get_library_mbids,
library_mbids_fn=merged_library_mbids,
on_import_callback=on_import,
request_queue=request_queue,
)

View file

@ -21,6 +21,8 @@ class RequestHistoryRecord(msgspec.Struct):
cover_url: str | None = None
completed_at: str | None = None
lidarr_album_id: int | None = None
monitor_artist: bool = False
auto_download_artist: bool = False
class RequestHistoryStore:
@ -56,13 +58,22 @@ class RequestHistoryStore:
requested_at TEXT NOT NULL,
completed_at TEXT,
status TEXT NOT NULL,
lidarr_album_id INTEGER
lidarr_album_id INTEGER,
monitor_artist INTEGER NOT NULL DEFAULT 0,
auto_download_artist INTEGER NOT NULL DEFAULT 0
)
"""
)
conn.execute(
"CREATE INDEX IF NOT EXISTS idx_request_history_status_requested_at ON request_history(status, requested_at DESC)"
)
# Migrate existing tables missing the monitoring columns
for col in ("monitor_artist", "auto_download_artist"):
try:
conn.execute(f"ALTER TABLE request_history ADD COLUMN {col} INTEGER NOT NULL DEFAULT 0")
except sqlite3.OperationalError as e:
if "duplicate column" not in str(e).lower():
logger.warning("Unexpected error adding column %s: %s", col, e)
conn.commit()
finally:
conn.close()
@ -105,6 +116,8 @@ class RequestHistoryStore:
completed_at=row["completed_at"],
status=row["status"],
lidarr_album_id=row["lidarr_album_id"],
monitor_artist=bool(row["monitor_artist"]) if row["monitor_artist"] is not None else False,
auto_download_artist=bool(row["auto_download_artist"]) if row["auto_download_artist"] is not None else False,
)
async def async_record_request(
@ -116,6 +129,8 @@ class RequestHistoryStore:
cover_url: str | None = None,
artist_mbid: str | None = None,
lidarr_album_id: int | None = None,
monitor_artist: bool = False,
auto_download_artist: bool = False,
) -> None:
requested_at = datetime.now(timezone.utc).isoformat()
normalized_mbid = musicbrainz_id.lower()
@ -125,8 +140,9 @@ class RequestHistoryStore:
"""
INSERT INTO request_history (
musicbrainz_id_lower, musicbrainz_id, artist_name, album_title,
artist_mbid, year, cover_url, requested_at, completed_at, status, lidarr_album_id
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, NULL, 'pending', ?)
artist_mbid, year, cover_url, requested_at, completed_at, status, lidarr_album_id,
monitor_artist, auto_download_artist
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, NULL, 'pending', ?, ?, ?)
ON CONFLICT(musicbrainz_id_lower) DO UPDATE SET
musicbrainz_id = excluded.musicbrainz_id,
artist_name = excluded.artist_name,
@ -137,7 +153,9 @@ class RequestHistoryStore:
requested_at = excluded.requested_at,
completed_at = NULL,
status = 'pending',
lidarr_album_id = COALESCE(excluded.lidarr_album_id, request_history.lidarr_album_id)
lidarr_album_id = COALESCE(excluded.lidarr_album_id, request_history.lidarr_album_id),
monitor_artist = excluded.monitor_artist,
auto_download_artist = excluded.auto_download_artist
""",
(
normalized_mbid,
@ -149,6 +167,8 @@ class RequestHistoryStore:
cover_url,
requested_at,
lidarr_album_id,
int(monitor_artist),
int(auto_download_artist),
),
)
@ -166,6 +186,30 @@ class RequestHistoryStore:
return await self._read(operation)
async def async_update_monitoring_flags(
    self, musicbrainz_id: str, *, monitor_artist: bool, auto_download_artist: bool,
) -> None:
    """Persist the artist-monitoring flags on an existing history row."""
    # SQLite has no bool type; store the flags as 0/1 integers.
    params = (int(monitor_artist), int(auto_download_artist), musicbrainz_id.lower())

    def apply(conn: sqlite3.Connection) -> None:
        conn.execute(
            "UPDATE request_history SET monitor_artist = ?, auto_download_artist = ? WHERE musicbrainz_id_lower = ?",
            params,
        )

    await self._write(apply)
async def async_get_active_mbids(self) -> set[str]:
    """Return the set of MBIDs with active (pending/downloading) requests."""

    def fetch(conn: sqlite3.Connection) -> set[str]:
        cursor = conn.execute(
            "SELECT musicbrainz_id_lower FROM request_history WHERE status IN (?, ?)",
            self._ACTIVE_STATUSES,
        )
        active: set[str] = set()
        for row in cursor.fetchall():
            active.add(row["musicbrainz_id_lower"])
        return active

    return await self._read(fetch)
async def async_get_active_requests(self) -> list[RequestHistoryRecord]:
def operation(conn: sqlite3.Connection) -> list[RequestHistoryRecord]:
rows = conn.execute(
@ -272,6 +316,18 @@ class RequestHistoryStore:
await self._write(operation)
async def async_update_artist_mbid(self, musicbrainz_id: str, artist_mbid: str) -> None:
    """Backfill the artist MBID without resetting other fields."""
    key = musicbrainz_id.lower()

    def backfill(conn: sqlite3.Connection) -> None:
        # Only fill in the MBID when it is currently missing — never overwrite.
        conn.execute(
            "UPDATE request_history SET artist_mbid = ? WHERE musicbrainz_id_lower = ? AND (artist_mbid IS NULL OR artist_mbid = '')",
            (artist_mbid, key),
        )

    await self._write(backfill)
async def async_delete_record(self, musicbrainz_id: str) -> bool:
normalized_mbid = musicbrainz_id.lower()

View file

@ -88,8 +88,20 @@ class QueueStore:
finally:
conn.close()
def has_active_mbid(self, album_mbid: str) -> bool:
    """Return True when a pending or processing job exists for this album MBID."""
    query = (
        "SELECT 1 FROM pending_jobs "
        "WHERE album_mbid = ? AND status IN ('pending', 'processing') LIMIT 1"
    )
    conn = self._connect()
    try:
        cursor = conn.execute(query, (album_mbid,))
        return cursor.fetchone() is not None
    finally:
        conn.close()
def has_pending_mbid(self, album_mbid: str) -> bool:
"""Check if a pending job already exists for this album MBID."""
"""Check if a pending job exists for this album MBID (used by dead-letter retry)."""
conn = self._connect()
try:
row = conn.execute(
@ -100,6 +112,20 @@ class QueueStore:
finally:
conn.close()
def remove_by_mbid(self, album_mbid: str) -> bool:
    """Remove a pending job by album MBID. Returns True if a row was removed."""
    with self._write_lock:
        conn = self._connect()
        try:
            # Only 'pending' rows are eligible; processing jobs are left alone.
            deleted = conn.execute(
                "DELETE FROM pending_jobs WHERE album_mbid = ? AND status = 'pending'",
                (album_mbid,),
            ).rowcount
            conn.commit()
            return deleted > 0
        finally:
            conn.close()
def get_pending(self) -> list[sqlite3.Row]:
conn = self._connect()
try:

View file

@ -1,11 +1,14 @@
import asyncio
import logging
import time
import uuid
from datetime import datetime, timezone
from typing import Any, Callable, Optional, TYPE_CHECKING
from abc import ABC, abstractmethod
if TYPE_CHECKING:
from infrastructure.queue.queue_store import QueueStore
from infrastructure.persistence.request_history import RequestHistoryStore
logger = logging.getLogger(__name__)
@ -29,7 +32,7 @@ class QueueInterface(ABC):
class QueuedRequest:
__slots__ = ('album_mbid', 'future', 'job_id', 'retry_count', 'recovered')
__slots__ = ('album_mbid', 'future', 'job_id', 'retry_count', 'recovered', 'enqueued_at')
def __init__(
self,
@ -43,6 +46,7 @@ class QueuedRequest:
self.job_id = job_id or str(uuid.uuid4())
self.retry_count = 0
self.recovered = recovered
self.enqueued_at = time.monotonic()
class RequestQueue(QueueInterface):
@ -52,42 +56,85 @@ class RequestQueue(QueueInterface):
maxsize: int = 200,
store: "QueueStore | None" = None,
max_retries: int = 3,
request_history: "RequestHistoryStore | None" = None,
concurrency: int = 2,
on_import_callback: Callable | None = None,
):
self._queue: asyncio.Queue = asyncio.Queue(maxsize=maxsize)
self._processor = processor
self._processor_task: Optional[asyncio.Task] = None
self._processing = False
self._worker_tasks: list[asyncio.Task] = []
self._active_workers = 0
self._maxsize = maxsize
self._store = store
self._max_retries = max_retries
self._request_history = request_history
self._concurrency = max(1, min(concurrency, 5))
self._cancelled_mbids: set[str] = set()
self._enqueue_lock = asyncio.Lock()
self._recovered = False
self._on_import_callback = on_import_callback
async def add(self, album_mbid: str) -> dict:
"""Blocking enqueue — waits for the result."""
await self.start()
request = QueuedRequest(album_mbid)
await self._queue.put(request)
if self._store:
self._store.enqueue(request.job_id, album_mbid)
await self._queue.put(request)
result = await request.future
return result
async def enqueue(self, album_mbid: str) -> bool:
    """Fire-and-forget enqueue. Returns True if enqueued, False if duplicate."""
    async with self._enqueue_lock:
        store = self._store
        # Reject duplicates while an identical job is still active in the store.
        if store and store.has_active_mbid(album_mbid):
            logger.info("Duplicate request rejected for %s — already active", album_mbid[:8])
            return False
        # A fresh request supersedes any earlier cancellation of the same MBID.
        self._cancelled_mbids.discard(album_mbid.lower())
        await self.start()
        job = QueuedRequest(album_mbid)
        # Persist before putting on the in-memory queue so restarts can recover it.
        if store:
            store.enqueue(job.job_id, album_mbid)
        await self._queue.put(job)
        logger.info("Enqueued request for album %s (job %s)", album_mbid[:8], job.job_id[:8])
        return True
async def cancel(self, album_mbid: str) -> bool:
    """Remove a pending job from the queue. Returns True if removed."""
    removed = self._store.remove_by_mbid(album_mbid) if self._store else False
    # Mark for skip - items already in the asyncio.Queue can't be removed,
    # so workers check this set before processing.
    self._cancelled_mbids.add(album_mbid.lower())
    if removed:
        logger.info("Cancelled pending queue job for %s", album_mbid[:8])
    return removed
async def start(self) -> None:
if self._processor_task is None or self._processor_task.done():
self._processor_task = asyncio.create_task(self._process_queue())
logger.info("Queue processor started")
self._recover_pending()
alive = [t for t in self._worker_tasks if not t.done()]
if len(alive) < self._concurrency:
for _ in range(self._concurrency - len(alive)):
task = asyncio.create_task(self._process_queue())
self._worker_tasks.append(task)
logger.info("Queue processor started (%d workers)", self._concurrency)
if not self._recovered:
self._recovered = True
self._recover_pending()
async def stop(self) -> None:
if self._processor_task and not self._processor_task.done():
alive = [t for t in self._worker_tasks if not t.done()]
if alive:
await self.drain()
self._processor_task.cancel()
try:
await self._processor_task
except asyncio.CancelledError:
pass
self._processor_task = None
for t in alive:
t.cancel()
await asyncio.gather(*alive, return_exceptions=True)
self._worker_tasks.clear()
logger.info("Queue processor stopped")
async def drain(self, timeout: float = 30.0) -> None:
@ -102,7 +149,9 @@ class RequestQueue(QueueInterface):
status = {
"queue_size": self._queue.qsize(),
"max_size": self._maxsize,
"processing": self._processing,
"processing": self._active_workers > 0,
"active_workers": self._active_workers,
"max_workers": self._concurrency,
}
if self._store:
status["dead_letter_count"] = self._store.get_dead_letter_count()
@ -124,6 +173,12 @@ class RequestQueue(QueueInterface):
try:
self._queue.put_nowait(request)
recovered += 1
if self._request_history:
task = asyncio.ensure_future(self._backfill_history(row["album_mbid"]))
task.add_done_callback(
lambda t: t.exception() and logger.error("Backfill failed: %s", t.exception())
if not t.cancelled() and t.exception() else None
)
except asyncio.QueueFull:
logger.warning("Queue full during recovery, %d items deferred to next restart",
len(pending) - recovered)
@ -133,6 +188,88 @@ class RequestQueue(QueueInterface):
self._retry_dead_letters()
async def _backfill_history(self, album_mbid: str) -> None:
    """Create a minimal history record for recovered jobs that lack one."""
    if not self._request_history:
        return
    try:
        existing = await self._request_history.async_get_record(album_mbid)
        if not existing:
            # Placeholder metadata — the real artist/album names are unknown
            # for jobs recovered from the persistent queue.
            await self._request_history.async_record_request(
                musicbrainz_id=album_mbid,
                artist_name="Unknown",
                album_title="Unknown",
            )
            logger.info("Backfilled history record for recovered job %s", album_mbid[:8])
    except Exception as e:  # noqa: BLE001
        # Best-effort: a missing history row must not block job recovery.
        logger.warning("Failed to backfill history for %s: %s", album_mbid[:8], e)
async def _update_history_on_result(self, album_mbid: str, result: dict) -> None:
    """Sync the history record with a successful processor result.

    Expects ``result`` to carry a ``payload`` dict (presumably the Lidarr
    album object — keys ``id``, ``artist``, ``statistics`` are read; TODO
    confirm against the processor). Persists metadata first, then flips the
    status to "imported" (files present) or "downloading", firing the
    import callback on the former.
    """
    if not self._request_history:
        return
    try:
        from services.request_utils import extract_cover_url
        # Don't overwrite a user-initiated cancellation
        existing = await self._request_history.async_get_record(album_mbid)
        if existing and existing.status == "cancelled":
            logger.info("Skipping history update for %s — already cancelled", album_mbid[:8])
            return
        payload = result.get("payload", {})
        if not payload or not isinstance(payload, dict):
            # No usable payload: all we know is that the request was accepted.
            await self._request_history.async_update_status(album_mbid, "downloading")
            return
        lidarr_album_id = payload.get("id")
        cover_url = extract_cover_url(payload)
        artist_mbid = None
        artist_data = payload.get("artist", {})
        if artist_data:
            artist_mbid = artist_data.get("foreignArtistId")
        statistics = payload.get("statistics", {})
        # Any track file on disk means the album is already imported.
        has_files = statistics.get("trackFileCount", 0) > 0
        # Persist metadata fields BEFORE status update / callback so the
        # record is complete when the import callback reads it.
        if lidarr_album_id:
            await self._request_history.async_update_lidarr_album_id(album_mbid, lidarr_album_id)
        if cover_url:
            await self._request_history.async_update_cover_url(album_mbid, cover_url)
        if artist_mbid:
            await self._request_history.async_update_artist_mbid(album_mbid, artist_mbid)
        if has_files:
            now_iso = datetime.now(timezone.utc).isoformat()
            await self._request_history.async_update_status(
                album_mbid, "imported", completed_at=now_iso
            )
            # Invalidate caches so the album immediately appears as "In Library"
            if self._on_import_callback:
                try:
                    # Re-read so the callback sees the record with all fields set.
                    enriched = await self._request_history.async_get_record(album_mbid)
                    if enriched:
                        await self._on_import_callback(enriched)
                except Exception as cb_err:  # noqa: BLE001
                    logger.warning("Import callback failed for %s: %s", album_mbid[:8], cb_err)
        else:
            await self._request_history.async_update_status(album_mbid, "downloading")
    except Exception as e:  # noqa: BLE001
        # History is advisory — never let bookkeeping failures fail the job.
        logger.error("Failed to update history after processing %s: %s", album_mbid[:8], e)
async def _update_history_on_failure(self, album_mbid: str, error: Exception) -> None:
    """Mark the album's history record as failed with a completion timestamp."""
    history = self._request_history
    if not history:
        return
    try:
        await history.async_update_status(
            album_mbid, "failed", completed_at=datetime.now(timezone.utc).isoformat()
        )
    except Exception as e:  # noqa: BLE001
        logger.error("Failed to update history on failure for %s: %s", album_mbid[:8], e)
def _retry_dead_letters(self) -> None:
if not self._store:
return
@ -142,10 +279,6 @@ class RequestQueue(QueueInterface):
if self._store.has_pending_mbid(row["album_mbid"]):
self._store.remove_dead_letter(row["id"])
continue
self._store.remove_dead_letter(row["id"])
inserted = self._store.enqueue(row["id"], row["album_mbid"])
if not inserted:
continue
request = QueuedRequest(
album_mbid=row["album_mbid"],
job_id=row["id"],
@ -154,10 +287,13 @@ class RequestQueue(QueueInterface):
request.retry_count = row["retry_count"]
try:
self._queue.put_nowait(request)
enqueued += 1
except asyncio.QueueFull:
logger.warning("Queue full during dead-letter retry, remaining deferred")
break
# Only remove dead letter + persist to pending AFTER successful in-memory enqueue
self._store.remove_dead_letter(row["id"])
self._store.enqueue(row["id"], row["album_mbid"])
enqueued += 1
if enqueued:
logger.info("Re-enqueued %d dead-letter jobs for retry", enqueued)
@ -165,11 +301,29 @@ class RequestQueue(QueueInterface):
while True:
try:
request: QueuedRequest = await self._queue.get()
self._processing = True
# Skip items cancelled while sitting in the asyncio.Queue
if request.album_mbid.lower() in self._cancelled_mbids:
self._cancelled_mbids.discard(request.album_mbid.lower())
logger.info("Skipping cancelled request %s", request.album_mbid[:8])
if not request.future.done():
request.future.cancel()
self._queue.task_done()
continue
# Prevent unbounded growth from orphaned cancel entries
if len(self._cancelled_mbids) > 200:
self._cancelled_mbids.clear()
self._active_workers += 1
if self._store:
self._store.mark_processing(request.job_id)
queue_wait_ms = int((time.monotonic() - request.enqueued_at) * 1000)
logger.info(
"Processing request %s (queue_wait=%dms)", request.album_mbid[:8], queue_wait_ms
)
try:
if request.recovered:
logger.info("Processing recovered job %s for album %s", request.job_id[:8], request.album_mbid[:8])
@ -178,6 +332,7 @@ class RequestQueue(QueueInterface):
request.future.set_result(result)
if self._store:
self._store.dequeue(request.job_id)
await self._update_history_on_result(request.album_mbid, result)
except Exception as e: # noqa: BLE001
logger.error("Error processing request for %s (attempt %d/%d): %s",
request.album_mbid[:8], request.retry_count + 1, self._max_retries, e)
@ -192,13 +347,13 @@ class RequestQueue(QueueInterface):
retry_count=request.retry_count + 1,
max_retries=self._max_retries,
)
await self._update_history_on_failure(request.album_mbid, e)
finally:
self._active_workers -= 1
self._queue.task_done()
self._processing = False
except asyncio.CancelledError:
logger.info("Queue processor cancelled")
logger.info("Queue worker cancelled")
break
except Exception as e: # noqa: BLE001
logger.error("Queue processor error: %s", e)
self._processing = False
logger.error("Queue worker error: %s", e)

View file

@ -51,6 +51,9 @@ class ArtistInfo(AppStruct):
aliases: list[str] = []
external_links: list[ExternalLink] = []
in_library: bool = False
in_lidarr: bool = False
monitored: bool = False
auto_download: bool = False
albums: list[ReleaseItem] = []
singles: list[ReleaseItem] = []
eps: list[ReleaseItem] = []

View file

@ -1,4 +1,5 @@
import asyncio
import collections
import logging
import time
from typing import Any, Optional
@ -17,6 +18,31 @@ logger = logging.getLogger(__name__)
_album_details_deduplicator = RequestDeduplicator()
_MAX_ARTIST_LOCKS = 64
_artist_locks: collections.OrderedDict[str, asyncio.Lock] = collections.OrderedDict()
def _get_artist_lock(artist_mbid: str) -> asyncio.Lock:
"""Get or create a per-artist lock. Evicts only unlocked entries when over limit."""
if artist_mbid in _artist_locks:
_artist_locks.move_to_end(artist_mbid)
return _artist_locks[artist_mbid]
lock = asyncio.Lock()
_artist_locks[artist_mbid] = lock
while len(_artist_locks) > _MAX_ARTIST_LOCKS:
# Find the oldest unlocked entry to evict
evicted = False
for key in list(_artist_locks.keys()):
if key == artist_mbid:
continue
if not _artist_locks[key].locked():
del _artist_locks[key]
evicted = True
break
if not evicted:
break
return lock
def _safe_int(value: Any, fallback: int = 0) -> int:
"""Coerce a value to int, returning fallback for non-numeric inputs."""
@ -254,6 +280,19 @@ class LidarrAlbumRepository(LidarrHistoryRepository):
logger.warning(f"Error getting album by foreign ID {album_mbid}: {e}")
return None
# Whitelist of album fields this repository is allowed to mutate via PUT.
_ALBUM_MUTABLE_FIELDS = frozenset({"monitored"})

async def _update_album(self, album_id: int, updates: dict[str, Any]) -> dict[str, Any]:
    """Apply a partial update to a Lidarr album via PUT /album/{id}.

    The endpoint is synchronous (200 OK) and returns the updated object.
    Callers must hold the per-artist lock to avoid lost-update races.
    Only keys listed in _ALBUM_MUTABLE_FIELDS are applied; unknown keys
    are silently dropped.
    """
    permitted = self._ALBUM_MUTABLE_FIELDS
    filtered = {field: value for field, value in updates.items() if field in permitted}
    # Read-modify-write: fetch the full object, overlay only the permitted fields.
    current = await self._get(f"/api/v1/album/{album_id}")
    current.update(filtered)
    return await self._put(f"/api/v1/album/{album_id}", current)
async def delete_album(self, album_id: int, delete_files: bool = False) -> bool:
try:
params = {"deleteFiles": str(delete_files).lower(), "addImportListExclusion": "false"}
@ -266,6 +305,7 @@ class LidarrAlbumRepository(LidarrHistoryRepository):
raise
async def add_album(self, musicbrainz_id: str, artist_repo) -> dict:
t0 = time.monotonic()
if not musicbrainz_id or not isinstance(musicbrainz_id, str):
raise ExternalServiceError("Invalid MBID provided")
@ -289,104 +329,211 @@ class LidarrAlbumRepository(LidarrHistoryRepository):
if not artist_mbid:
raise ExternalServiceError("Album lookup did not include artist MBID")
artist = await artist_repo._ensure_artist_exists(artist_mbid, artist_name)
# Serialize per-artist to prevent duplicate artist creation from concurrent requests
lock = _get_artist_lock(artist_mbid)
async with lock:
return await self._add_album_locked(
musicbrainz_id, artist_repo, t0,
candidate, album_title, album_type, secondary_types,
artist_mbid, artist_name,
)
async def _add_album_locked(
self,
musicbrainz_id: str,
artist_repo,
t0: float,
candidate: dict,
album_title: str,
album_type: str,
secondary_types: list,
artist_mbid: str,
artist_name: str | None,
) -> dict:
# Capture which albums are already monitored so we can revert any Lidarr auto-monitors after the add
pre_add_monitored_ids: set[int] = set()
try:
existing_items = await self._get("/api/v1/artist", params={"mbId": artist_mbid})
if existing_items:
existing_artist_id = existing_items[0].get("id")
if existing_artist_id:
albums_before = await self._get(
"/api/v1/album", params={"artistId": existing_artist_id}
)
if isinstance(albums_before, list):
pre_add_monitored_ids = {
a["id"] for a in albums_before if a.get("monitored")
}
except ExternalServiceError:
pass
t_artist = time.monotonic()
artist, artist_created = await artist_repo._ensure_artist_exists(artist_mbid, artist_name)
artist_id = artist["id"]
artist_ensure_ms = int((time.monotonic() - t_artist) * 1000)
album_obj = await self._get_album_by_foreign_id(musicbrainz_id)
action = "exists"
if not album_obj:
async def album_is_indexed():
a = await self._get_album_by_foreign_id(musicbrainz_id)
return a and a.get("id")
if album_obj:
album_id = album_obj["id"]
has_files = (album_obj.get("statistics") or {}).get("trackFileCount", 0) > 0
is_monitored = album_obj.get("monitored", False)
await self._wait_for_artist_commands_to_complete(artist_id, timeout=600.0)
album_obj = await self._wait_for(album_is_indexed, timeout=60.0, poll=5.0)
if not album_obj:
profile_id = artist.get("qualityProfileId")
if profile_id is None:
try:
qps = await self._get("/api/v1/qualityprofile")
if not qps:
raise ExternalServiceError("No quality profiles in Lidarr")
profile_id = qps[0]["id"]
except Exception: # noqa: BLE001
profile_id = self._settings.quality_profile_id
payload = {
"title": album_title,
"artistId": artist_id,
"artist": artist,
"foreignAlbumId": musicbrainz_id,
"monitored": True,
"anyReleaseOk": True,
"profileId": profile_id,
"images": [],
"addOptions": {"addType": "automatic", "searchForNewAlbum": True},
if has_files and is_monitored:
total_ms = int((time.monotonic() - t0) * 1000)
logger.info(
"add_album timing: album=%s artist_ensure=%dms total=%dms (already downloaded)",
musicbrainz_id[:8], artist_ensure_ms, total_ms,
)
await self._invalidate_album_list_caches()
return {
"message": f"Album already downloaded: {album_title}",
"payload": album_obj,
}
if not is_monitored:
album_obj = await self._update_album(album_id, {"monitored": True})
try:
await self._post_command({"name": "AlbumSearch", "albumIds": [album_id]})
except ExternalServiceError as exc:
logger.warning("Failed to queue AlbumSearch for %s: %s", musicbrainz_id, exc)
await self._unmonitor_auto_monitored_albums(
artist_id, musicbrainz_id, album_id, pre_add_monitored_ids
)
await self._invalidate_album_list_caches()
await self._cache.clear_prefix(f"{LIDARR_PREFIX}artists:mbids")
total_ms = int((time.monotonic() - t0) * 1000)
logger.info(
"add_album timing: album=%s artist_ensure=%dms total=%dms (existing album, monitor+search)",
musicbrainz_id[:8], artist_ensure_ms, total_ms,
)
return {
"message": f"Album monitored & search triggered: {album_title}",
"monitored": True,
"payload": album_obj,
}
# Album doesn't exist yet — wait for indexing after artist add/refresh
if artist_created:
await self._wait_for_artist_commands_to_complete(artist_id, timeout=120.0)
async def album_is_indexed():
a = await self._get_album_by_foreign_id(musicbrainz_id)
return a and a.get("id")
# Only wait for auto-indexing if we just created/refreshed the artist;
# for existing artists nothing triggered new indexing, so skip the long wait.
if artist_created:
album_obj = await self._wait_for(album_is_indexed, timeout=60.0, poll=5.0)
else:
album_obj = await album_is_indexed()
if not album_obj:
# Album not auto-indexed; POST to add it directly
profile_id = artist.get("qualityProfileId")
if profile_id is None:
try:
album_obj = await self._post("/api/v1/album", payload)
action = "added"
album_obj = await self._wait_for(album_is_indexed, timeout=120.0, poll=2.0)
except Exception as e:
err_str = str(e)
if "POST failed" in err_str or "405" in err_str:
logger.debug("Raw Lidarr rejection for %s: %s", album_title, err_str)
raise ExternalServiceError(
f"Cannot add this {album_type}. "
f"Lidarr rejected adding '{album_title}'. This is likely because your Lidarr "
f"Metadata Profile is configured to exclude {album_type}s{' (' + ', '.join(secondary_types) + ')' if secondary_types else ''}. "
f"To fix this: Go to Lidarr -> Settings -> Profiles -> Metadata Profiles, "
f"and enable '{album_type}' in your active profile."
)
else:
logger.debug("Unexpected error adding '%s': %s", album_title, err_str)
raise
qps = await self._get("/api/v1/qualityprofile")
if not qps:
raise ExternalServiceError("No quality profiles in Lidarr")
profile_id = qps[0]["id"]
except Exception: # noqa: BLE001
profile_id = self._settings.quality_profile_id
payload = {
"title": album_title,
"artistId": artist_id,
"artist": {
"id": artist["id"],
"artistName": artist.get("artistName"),
"foreignArtistId": artist.get("foreignArtistId"),
"qualityProfileId": artist.get("qualityProfileId"),
"metadataProfileId": artist.get("metadataProfileId"),
"rootFolderPath": artist.get("rootFolderPath"),
},
"foreignAlbumId": musicbrainz_id,
"monitored": True,
"anyReleaseOk": True,
"profileId": profile_id,
"images": [],
"addOptions": {"addType": "automatic", "searchForNewAlbum": True},
}
try:
album_obj = await self._post("/api/v1/album", payload)
album_obj = await self._wait_for(album_is_indexed, timeout=120.0, poll=2.0)
except ExternalServiceError as e:
err_str = str(e).lower()
if "already exists" in err_str:
logger.info("Album %s already exists per Lidarr, fetching", musicbrainz_id)
album_obj = await self._get_album_by_foreign_id(musicbrainz_id)
if album_obj:
if not album_obj.get("monitored"):
album_obj = await self._update_album(album_obj["id"], {"monitored": True})
try:
await self._post_command(
{"name": "AlbumSearch", "albumIds": [album_obj["id"]]}
)
except ExternalServiceError:
pass
elif "post failed" in err_str or "405" in err_str or "metadata" in err_str:
raise ExternalServiceError(
f"Lidarr rejected '{album_title}' ({album_type}"
f"{'' + ', '.join(secondary_types) if secondary_types else ''}). "
f"Your Metadata Profile probably excludes {album_type}s. "
f"Go to Lidarr > Settings > Profiles > Metadata Profiles and enable '{album_type}'."
)
else:
logger.error("Unexpected error adding '%s': %s", album_title, e)
raise
if not album_obj or "id" not in album_obj:
raise ExternalServiceError(
f"Cannot add this {album_type}. "
f"'{album_title}' could not be found in Lidarr after the artist refresh. This usually means "
f"your Lidarr Metadata Profile is configured to exclude {album_type}s. "
f"To fix this: Go to Lidarr -> Settings -> Profiles -> Metadata Profiles, "
f"enable '{album_type}', then refresh the artist in Lidarr."
f"'{album_title}' wasn't found in Lidarr after refreshing the artist. "
f"Your Metadata Profile may exclude {album_type}s. "
f"Go to Lidarr > Settings > Profiles > Metadata Profiles, enable '{album_type}', then refresh the artist."
)
album_id = album_obj["id"]
await self._wait_for_artist_commands_to_complete(artist_id, timeout=600.0)
await self._monitor_artist_and_album(artist_id, album_id, musicbrainz_id, album_title)
# Only monitor the specific album; only set artist monitored if newly created
await self._monitor_artist_and_album(
artist_id, album_id, musicbrainz_id, album_title,
set_artist_monitored=artist_created,
)
try:
await self._post_command({"name": "AlbumSearch", "albumIds": [album_id]})
except ExternalServiceError as exc:
logger.warning("Failed to queue Lidarr AlbumSearch for %s: %s", musicbrainz_id, exc)
logger.warning("Failed to queue AlbumSearch for %s: %s", musicbrainz_id, exc)
# Unmonitor albums that Lidarr auto-monitored during the add
await self._unmonitor_auto_monitored_albums(
artist_id, musicbrainz_id, album_id, pre_add_monitored_ids
)
final_album = await self._get_album_by_foreign_id(musicbrainz_id)
if final_album and not final_album.get("monitored"):
try:
await self._put("/api/v1/album/monitor", {
"albumIds": [album_id],
"monitored": True
})
await asyncio.sleep(2.0)
final_album = await self._get_album_by_foreign_id(musicbrainz_id)
except ExternalServiceError as exc:
logger.warning("Failed to update Lidarr album monitor state for %s: %s", musicbrainz_id, exc)
await self._invalidate_album_list_caches()
await self._cache.clear_prefix(f"{LIDARR_PREFIX}artists:mbids")
msg = "Album added & monitored" if action == "added" else "Album exists; monitored ensured"
total_ms = int((time.monotonic() - t0) * 1000)
logger.info(
"add_album timing: album=%s artist_ensure=%dms total=%dms (new album, created=%s)",
musicbrainz_id[:8], artist_ensure_ms, total_ms, artist_created,
)
return {
"message": f"{msg}: {album_title}",
"payload": final_album or album_obj
"message": f"Album added & monitored: {album_title}",
"monitored": True,
"payload": final_album or album_obj,
}
async def _wait_for_artist_commands_to_complete(self, artist_id: int, timeout: float = 600.0) -> None:
async def _wait_for_artist_commands_to_complete(self, artist_id: int, timeout: float = 120.0) -> None:
deadline = time.monotonic() + timeout
while time.monotonic() < deadline:
@ -418,7 +565,7 @@ class LidarrAlbumRepository(LidarrHistoryRepository):
await asyncio.sleep(5.0)
await asyncio.sleep(5.0)
await asyncio.sleep(1.0)
async def _monitor_artist_and_album(
self,
@ -426,35 +573,63 @@ class LidarrAlbumRepository(LidarrHistoryRepository):
album_id: int,
album_mbid: str,
album_title: str,
max_attempts: int = 3
max_attempts: int = 2,
set_artist_monitored: bool = False,
) -> None:
for attempt in range(max_attempts):
try:
await self._put(
"/api/v1/artist/editor",
{"artistIds": [artist_id], "monitored": True, "monitorNewItems": "none"},
)
if set_artist_monitored and attempt == 0:
await self._put(
"/api/v1/artist/editor",
{"artistIds": [artist_id], "monitored": True, "monitorNewItems": "none"},
)
await asyncio.sleep(5.0 + (attempt * 3.0))
await self._put("/api/v1/album/monitor", {"albumIds": [album_id], "monitored": True})
async def both_monitored():
album = await self._get_album_by_foreign_id(album_mbid)
artist_data = await self._get(f"/api/v1/artist/{artist_id}")
return (album and album.get("monitored")) and (artist_data and artist_data.get("monitored"))
timeout = 20.0 + (attempt * 10.0)
if await self._wait_for(both_monitored, timeout=timeout, poll=1.0):
updated = await self._update_album(album_id, {"monitored": True})
if updated and updated.get("monitored"):
return
if attempt < max_attempts - 1:
logger.warning(f"Monitoring verification failed, attempt {attempt + 1}/{max_attempts}")
await asyncio.sleep(5.0)
logger.warning("Album monitoring verification failed, attempt %d/%d", attempt + 1, max_attempts)
await asyncio.sleep(2.0 + (attempt * 2.0))
except Exception as e: # noqa: BLE001
if attempt == max_attempts - 1:
raise ExternalServiceError(
f"Failed to set monitoring status after {max_attempts} attempts: {str(e)}"
)
await asyncio.sleep(5.0)
await asyncio.sleep(3.0)
async def _unmonitor_auto_monitored_albums(
    self,
    artist_id: int,
    requested_mbid: str,
    requested_album_id: int,
    pre_add_monitored_ids: set[int],
) -> None:
    """Unmonitor albums that Lidarr auto-monitored during artist add (Aurral pattern).

    Best-effort cleanup: compares the artist's current album list against the
    pre-add monitored snapshot and unmonitors anything newly monitored, except
    the album the user actually requested. Lidarr failures are logged, not
    raised, so this never fails the surrounding add flow.

    Args:
        artist_id: Lidarr artist ID whose albums are inspected.
        requested_mbid: MBID of the requested album (used for log context only).
        requested_album_id: Lidarr ID of the requested album; always kept monitored.
        pre_add_monitored_ids: Album IDs that were already monitored before the add.
    """
    try:
        current_albums = await self._get(
            "/api/v1/album", params={"artistId": artist_id}
        )
        if not isinstance(current_albums, list):
            return
        to_unmonitor: list[int] = []
        for album in current_albums:
            # .get() guards malformed entries missing "id": a KeyError here
            # would escape the ExternalServiceError handler below and break
            # the add flow this cleanup is meant to protect.
            album_lidarr_id = album.get("id")
            if album_lidarr_id is None or not album.get("monitored"):
                continue
            if album_lidarr_id == requested_album_id:
                continue
            if album_lidarr_id in pre_add_monitored_ids:
                continue
            to_unmonitor.append(album_lidarr_id)
        if to_unmonitor:
            await self._put(
                "/api/v1/album/monitor",
                {"albumIds": to_unmonitor, "monitored": False},
            )
            logger.info(
                "Unmonitored %d auto-monitored albums for artist %d (kept requested %s)",
                len(to_unmonitor), artist_id, requested_mbid[:8],
            )
    except ExternalServiceError as exc:
        logger.warning("Failed to unmonitor auto-monitored albums: %s", exc)

View file

@ -113,6 +113,7 @@ class LidarrArtistRepository(LidarrBase):
"fanart_url": image_urls["fanart"],
"banner_url": image_urls["banner"],
"monitored": artist.get("monitored", False),
"monitor_new_items": artist.get("monitorNewItems", "none"),
"statistics": artist.get("statistics", {}),
"ratings": artist.get("ratings", {}),
}
@ -216,12 +217,47 @@ class LidarrArtistRepository(LidarrBase):
logger.error(f"Failed to delete artist {artist_id}: {e}")
raise
async def _ensure_artist_exists(self, artist_mbid: str, artist_name_hint: Optional[str] = None) -> dict[str, Any]:
async def update_artist_monitoring(
    self, artist_mbid: str, *, monitored: bool, monitor_new_items: str = "none",
) -> dict[str, Any]:
    """Set the monitored / monitorNewItems flags for a Lidarr artist by MBID.

    Looks the artist up via GET /artist?mbId=..., applies both flags through
    the bulk editor endpoint, then invalidates the cached artist details.

    Raises:
        ValueError: if monitor_new_items is not "none" or "all".
        ExternalServiceError: if the artist is not in Lidarr or has no ID.
    """
    if monitor_new_items not in ("none", "all"):
        raise ValueError(f"Invalid monitor_new_items value: {monitor_new_items}")

    matches = await self._get("/api/v1/artist", params={"mbId": artist_mbid})
    if not matches or not isinstance(matches, list) or len(matches) == 0:
        raise ExternalServiceError(f"Artist {artist_mbid[:8]} not found in Lidarr")

    lidarr_artist_id = matches[0].get("id")
    if not lidarr_artist_id:
        raise ExternalServiceError(f"Artist {artist_mbid[:8]} has no Lidarr ID")

    # The editor endpoint applies both flags in a single PUT.
    editor_payload = {
        "artistIds": [lidarr_artist_id],
        "monitored": monitored,
        "monitorNewItems": monitor_new_items,
    }
    await self._put("/api/v1/artist/editor", editor_payload)

    # Drop cached details so the next read reflects the new flags.
    await self._cache.delete(f"{LIDARR_ARTIST_DETAILS_PREFIX}{artist_mbid}")
    logger.info(
        "Updated artist %s monitoring: monitored=%s, monitorNewItems=%s",
        artist_mbid[:8], monitored, monitor_new_items,
    )
    return {"monitored": monitored, "auto_download": monitor_new_items == "all"}
async def _ensure_artist_exists(
self, artist_mbid: str, artist_name_hint: Optional[str] = None
) -> tuple[dict[str, Any], bool]:
"""Return (artist_dict, created). created=True means we just added the artist."""
try:
items = await self._get("/api/v1/artist", params={"mbId": artist_mbid})
if items:
logger.info(f"Artist already exists: {items[0].get('artistName')}")
return items[0]
logger.info("Artist already exists: %s", items[0].get("artistName"))
return items[0], False
except ExternalServiceError as exc:
logger.debug("Failed to query existing Lidarr artist %s: %s", artist_mbid, exc)
@ -264,23 +300,20 @@ class LidarrArtistRepository(LidarrBase):
try:
created = await self._post("/api/v1/artist", payload)
artist_id = created["id"]
logger.info(f"Created artist {artist_name} (ID: {artist_id}), triggering refresh commands")
logger.info("Created artist %s (ID: %s), triggering refresh", artist_name, artist_id)
logger.info(f"Refreshing artist {artist_name} library (this may take several minutes)...")
await self._await_command(
{"name": "RefreshArtist", "artistId": artist_id},
timeout=600.0
timeout=180.0,
)
logger.info(f"Rescanning artist {artist_name} library...")
await self._await_command(
{"name": "RescanArtist", "artistId": artist_id},
timeout=300.0
)
await asyncio.sleep(5.0)
logger.info(f"Artist {artist_name} library refresh complete")
return created
except Exception as e: # noqa: BLE001
raise ExternalServiceError(f"Failed to add artist: {e}")
logger.info("Artist %s refresh complete", artist_name)
return created, True
except ExternalServiceError as exc:
err_str = str(exc).lower()
if "already exists" in err_str or "409" in err_str:
logger.info("Artist %s was added concurrently, retrying GET", artist_mbid)
items = await self._get("/api/v1/artist", params={"mbId": artist_mbid})
if items:
return items[0], False
raise ExternalServiceError(f"Failed to add artist: {exc}")

View file

@ -1,6 +1,6 @@
import logging
from datetime import datetime
from typing import Any
from typing import Any, TYPE_CHECKING
from models.library import LibraryAlbum, LibraryGroupedAlbum, LibraryGroupedArtist
from infrastructure.cover_urls import prefer_release_group_cover_url
from infrastructure.cache.cache_keys import (
@ -13,10 +13,15 @@ from infrastructure.cache.cache_keys import (
)
from .base import LidarrBase
if TYPE_CHECKING:
from infrastructure.persistence.request_history import RequestHistoryStore
logger = logging.getLogger(__name__)
class LidarrLibraryRepository(LidarrBase):
_request_history_store: "RequestHistoryStore | None" = None
async def get_library(self, include_unmonitored: bool = False) -> list[LibraryAlbum]:
cache_key = lidarr_library_albums_key(include_unmonitored)
cached_result = await self._cache.get(cache_key)
@ -226,6 +231,17 @@ class LidarrLibraryRepository(LidarrBase):
return ids
async def get_requested_mbids(self) -> set[str]:
    """Return MBIDs of albums with active requests in MusicSeerr.

    Reads from the injected RequestHistoryStore when one is configured.
    Any store failure is logged and degrades to an empty set rather than
    raising, so callers never fail on request-history unavailability.
    """
    store = self._request_history_store
    if store is None:
        return set()
    try:
        return await store.async_get_active_mbids()
    except Exception as e:  # noqa: BLE001
        logger.warning("RequestHistoryStore unavailable, falling back to empty set: %s", e)
        return set()
async def get_monitored_no_files_mbids(self) -> set[str]:
"""Return monitored Lidarr albums that have no downloaded track files."""
cache_key = lidarr_requested_mbids_key()
cached_result = await self._cache.get(cache_key)

View file

@ -1,5 +1,5 @@
import httpx
from typing import Any, Optional
from typing import Any, Optional, TYPE_CHECKING
from core.config import Settings
from models.library import LibraryAlbum
from models.request import QueueItem
@ -11,6 +11,9 @@ from .album import LidarrAlbumRepository
from .config import LidarrConfigRepository
from .queue import LidarrQueueRepository
if TYPE_CHECKING:
from infrastructure.persistence.request_history import RequestHistoryStore
class LidarrRepository(
LidarrLibraryRepository,
@ -23,9 +26,11 @@ class LidarrRepository(
self,
settings: Settings,
http_client: httpx.AsyncClient,
cache: CacheInterface
cache: CacheInterface,
request_history_store: "RequestHistoryStore | None" = None,
):
super().__init__(settings, http_client, cache)
self._request_history_store = request_history_store
async def add_album(self, musicbrainz_id: str) -> dict:
    # Delegate to the album mixin explicitly, passing this combined
    # repository as the artist_repo argument so the add flow can resolve
    # and ensure the artist via the artist-repository methods it also mixes in.
    return await LidarrAlbumRepository.add_album(self, musicbrainz_id, self)

View file

@ -87,3 +87,8 @@ class LidarrRepositoryProtocol(Protocol):
async def get_recently_imported(self, limit: int = 20) -> list[LibraryAlbum]:
...
async def update_artist_monitoring(
self, artist_mbid: str, *, monitored: bool, monitor_new_items: str = "none",
) -> dict[str, Any]:
...

View file

@ -8,7 +8,7 @@ from repositories.protocols import LidarrRepositoryProtocol, MusicBrainzReposito
from services.preferences_service import PreferencesService
from services.album_utils import parse_year, find_primary_release, get_ranked_releases, extract_artist_info, extract_tracks, extract_label, build_album_basic_info, lidarr_to_basic_info, mb_to_basic_info
from infrastructure.persistence import LibraryDB
from infrastructure.cache.cache_keys import ALBUM_INFO_PREFIX
from infrastructure.cache.cache_keys import ALBUM_INFO_PREFIX, LIDARR_ALBUM_DETAILS_PREFIX
from infrastructure.cache.memory_cache import CacheInterface
from infrastructure.cache.disk_cache import DiskMetadataCache
from infrastructure.cover_urls import prefer_release_group_cover_url
@ -191,6 +191,18 @@ class AlbumService:
except Exception: # noqa: BLE001
logger.debug(f"Background album cache warm failed for {release_group_id[:8]}")
async def refresh_album(self, release_group_id: str) -> AlbumInfo:
    """Force-refresh one album: purge every cache layer, then rebuild its info.

    Clears the in-memory caches, the disk cache, and the local revalidation /
    in-flight bookkeeping before delegating to get_album_info for a fresh build.
    """
    release_group_id = validate_mbid(release_group_id, "album")
    # Purge both memory-cache namespaces that can hold this album.
    for prefix in (ALBUM_INFO_PREFIX, LIDARR_ALBUM_DETAILS_PREFIX):
        await self._cache.delete(f"{prefix}{release_group_id}")
    await self._disk_cache.delete_album(release_group_id)
    # Reset local bookkeeping so the rebuild is not skipped or coalesced
    # with a stale in-flight fetch.
    self._revalidation_timestamps.pop(release_group_id, None)
    self._album_in_flight.pop(release_group_id, None)
    logger.info("Cleared all caches for album %s", release_group_id[:8])
    return await self.get_album_info(release_group_id)
async def get_album_info(self, release_group_id: str, monitored_mbids: set[str] = None) -> AlbumInfo:
try:
release_group_id = validate_mbid(release_group_id, "album")
@ -295,7 +307,7 @@ class AlbumService:
in_library = self._check_lidarr_in_library(lidarr_album)
if lidarr_album and lidarr_album.get("monitored", False):
logger.info(f"[BASIC] Using Lidarr for album {release_group_id[:8]}")
basic = AlbumBasicInfo(**lidarr_to_basic_info(lidarr_album, release_group_id, in_library))
basic = AlbumBasicInfo(**lidarr_to_basic_info(lidarr_album, release_group_id, in_library, is_requested=is_requested))
if not basic.album_thumb_url:
basic.album_thumb_url = await self._get_audiodb_album_thumb(
release_group_id, basic.artist_name, basic.title,

View file

@ -111,7 +111,7 @@ def build_album_basic_info(
}
def lidarr_to_basic_info(lidarr_album: dict, release_group_id: str, in_library: bool) -> dict:
def lidarr_to_basic_info(lidarr_album: dict, release_group_id: str, in_library: bool, is_requested: bool = False) -> dict:
year = None
if release_date := lidarr_album.get("release_date"):
try:
@ -128,7 +128,7 @@ def lidarr_to_basic_info(lidarr_album: dict, release_group_id: str, in_library:
"type": lidarr_album.get("album_type"),
"disambiguation": lidarr_album.get("disambiguation"),
"in_library": in_library,
"requested": not in_library,
"requested": is_requested and not in_library,
"cover_url": lidarr_album.get("cover_url"),
}

View file

@ -21,7 +21,7 @@ from infrastructure.cache.cache_keys import ARTIST_INFO_PREFIX
from infrastructure.cache.memory_cache import CacheInterface
from infrastructure.cache.disk_cache import DiskMetadataCache
from infrastructure.validators import validate_mbid
from core.exceptions import ResourceNotFoundError
from core.exceptions import ExternalServiceError, ResourceNotFoundError
from services.audiodb_image_service import AudioDBImageService
from repositories.audiodb_models import AudioDBArtistImages
@ -228,13 +228,41 @@ class ArtistService:
else:
logger.info(f"Using MusicBrainz as primary source for artist {artist_id[:8]}")
artist_info = await self._build_artist_from_musicbrainz(artist_id, library_artist_mbids, library_album_mbids)
if lidarr_artist is not None:
artist_info.in_lidarr = True
artist_info.monitored = lidarr_artist.get("monitored", False)
artist_info.auto_download = lidarr_artist.get("monitor_new_items", "none") == "all"
artist_info = await self._apply_audiodb_artist_images(
artist_info, artist_id, artist_info.name,
allow_fetch=False, is_monitored=artist_info.in_library,
)
await self._save_artist_to_cache(artist_id, artist_info)
return artist_info
async def set_artist_monitoring(
    self, artist_mbid: str, *, monitored: bool, auto_download: bool = False,
) -> dict[str, Any]:
    """Toggle Lidarr monitoring for an artist and invalidate its cached info.

    auto_download maps onto Lidarr's monitorNewItems: "all" only when the
    artist is monitored AND auto_download is requested, otherwise "none".

    Raises:
        ExternalServiceError: if Lidarr is not configured.
    """
    if not self._lidarr_repo.is_configured():
        raise ExternalServiceError("Lidarr is not configured")
    if monitored and auto_download:
        new_items_mode = "all"
    else:
        new_items_mode = "none"
    outcome = await self._lidarr_repo.update_artist_monitoring(
        artist_mbid, monitored=monitored, monitor_new_items=new_items_mode,
    )
    # Invalidate the cached artist info so the new flags are visible immediately.
    await self._cache.delete(f"{ARTIST_INFO_PREFIX}{artist_mbid}")
    return outcome
async def get_artist_monitoring_status(self, artist_mbid: str) -> dict[str, Any]:
    """Report whether an artist exists in Lidarr and its monitoring flags.

    Returns a dict with keys in_lidarr / monitored / auto_download. All three
    are False when Lidarr is unconfigured or the artist is unknown to it;
    auto_download is True when monitorNewItems is "all".
    """
    if self._lidarr_repo.is_configured():
        details = await self._lidarr_repo.get_artist_details(artist_mbid)
        if details is not None:
            return {
                "in_lidarr": True,
                "monitored": details.get("monitored", False),
                "auto_download": details.get("monitor_new_items", "none") == "all",
            }
    return {"in_lidarr": False, "monitored": False, "auto_download": False}
async def _build_artist_from_lidarr(
self,
artist_id: str,
@ -268,6 +296,8 @@ class ArtistService:
task_names.append("cache_mbids")
parallel_tasks.append(self._lidarr_repo.get_artist_albums(artist_id))
task_names.append("lidarr_albums")
parallel_tasks.append(self._lidarr_repo.get_requested_mbids())
task_names.append("requested_mbids")
if need_musicbrainz:
parallel_tasks.append(self._mb_repo.get_artist_by_id(artist_id))
task_names.append("mb_artist")
@ -282,9 +312,12 @@ class ArtistService:
cache_mbids = cache_result if not isinstance(cache_result, Exception) and cache_result else {}
library_album_mbids = library_album_mbids | cache_mbids
req_result = result_map.get("requested_mbids")
requested_mbids = req_result if not isinstance(req_result, Exception) and req_result else set()
albums_result = result_map.get("lidarr_albums")
lidarr_albums = albums_result if not isinstance(albums_result, Exception) and albums_result else []
albums, singles, eps = self._categorize_lidarr_albums(lidarr_albums, library_album_mbids)
albums, singles, eps = self._categorize_lidarr_albums(lidarr_albums, library_album_mbids, requested_mbids=requested_mbids)
aliases = []
life_span = None
@ -347,12 +380,13 @@ class ArtistService:
def _categorize_lidarr_albums(
self,
lidarr_albums: list[dict[str, Any]],
library_album_mbids: set[str]
library_album_mbids: set[str],
requested_mbids: set[str] | None = None,
) -> tuple[list[ReleaseItem], list[ReleaseItem], list[ReleaseItem]]:
prefs = self._preferences_service.get_preferences()
included_primary_types = set(t.lower() for t in prefs.primary_types)
included_secondary_types = set(t.lower() for t in prefs.secondary_types)
return categorize_lidarr_albums(lidarr_albums, included_primary_types, included_secondary_types, library_album_mbids)
return categorize_lidarr_albums(lidarr_albums, included_primary_types, included_secondary_types, library_album_mbids, requested_mbids=requested_mbids)
async def _build_artist_from_musicbrainz(
self,
@ -507,7 +541,7 @@ class ArtistService:
if in_library and offset == 0:
logger.debug(f"Using Lidarr for artist releases {artist_id[:8]}")
lidarr_albums = await self._lidarr_repo.get_artist_albums(artist_id)
albums, singles, eps = self._categorize_lidarr_albums(lidarr_albums, album_mbids)
albums, singles, eps = self._categorize_lidarr_albums(lidarr_albums, album_mbids, requested_mbids=requested_mbids)
total_count = len(albums) + len(singles) + len(eps)

View file

@ -159,11 +159,13 @@ def categorize_lidarr_albums(
included_primary_types: set[str],
included_secondary_types: set[str],
library_cache_mbids: set[str] | None = None,
requested_mbids: set[str] | None = None,
) -> tuple[list[ReleaseItem], list[ReleaseItem], list[ReleaseItem]]:
albums: list[ReleaseItem] = []
singles: list[ReleaseItem] = []
eps: list[ReleaseItem] = []
_cache_mbids = library_cache_mbids or set()
_requested_mbids = requested_mbids or set()
for album in lidarr_albums:
album_type = (album.get("album_type") or "").lower()
secondary_types = set(map(str.lower, album.get("secondary_types", []) or []))
@ -178,9 +180,8 @@ def categorize_lidarr_albums(
mbid = album.get("mbid", "")
mbid_lower = mbid.lower() if mbid else ""
track_file_count = album.get("track_file_count", 0)
monitored = album.get("monitored", False)
in_library = track_file_count > 0 or (mbid_lower in _cache_mbids)
requested = monitored and not in_library
requested = mbid_lower in _requested_mbids and not in_library
album_data = ReleaseItem(
id=mbid,
title=album.get("title"),

View file

@ -136,8 +136,25 @@ class LibraryService:
if not self._lidarr_repo.is_configured():
return []
try:
mbids_set = await self._lidarr_repo.get_library_mbids(include_release_ids=False)
return list(mbids_set)
lidarr_mbids_coro = self._lidarr_repo.get_library_mbids(include_release_ids=False)
local_mbids_coro = self._library_db.get_all_album_mbids()
results = await asyncio.gather(
lidarr_mbids_coro, local_mbids_coro, return_exceptions=True,
)
lidarr_mbids = results[0] if not isinstance(results[0], BaseException) else set()
local_mbids = results[1] if not isinstance(results[1], BaseException) else []
if isinstance(results[0], BaseException):
logger.warning("Lidarr library mbids fetch failed, degrading: %s", results[0])
if isinstance(results[1], BaseException):
logger.warning("Local library_db mbids fetch failed, degrading: %s", results[1])
if isinstance(lidarr_mbids, BaseException) and isinstance(local_mbids, BaseException):
raise ExternalServiceError("Both library mbid sources failed")
# Union: Lidarr API + local library_db (catches recently-imported
# albums that Lidarr's cached response may not yet reflect).
merged = (lidarr_mbids if isinstance(lidarr_mbids, set) else set()) | {m.lower() for m in local_mbids}
return list(merged)
except ExternalServiceError:
raise
except Exception as e: # noqa: BLE001
logger.error(f"Failed to fetch library mbids: {e}")
raise ExternalServiceError(f"Failed to fetch library mbids: {e}")

View file

@ -27,7 +27,7 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
_CONCURRENCY_LIMIT = 5
_NEGATIVE_CACHE_TTL = 14400 # 4 hours aligned with periodic warmup interval
_NEGATIVE_CACHE_TTL = 14400 # 4 hours - aligned with periodic warmup interval
def _cache_get_mbid(cache: dict[str, str | tuple[None, float]], key: str) -> str | None:
@ -83,6 +83,16 @@ class NavidromeLibraryService:
"""Public accessor for MBID → Navidrome album ID reverse index."""
return self._mbid_to_navidrome_id.get(mbid)
def invalidate_album_cache(self, album_mbid: str) -> None:
    """Drop every cached lookup for one album MBID so the next match re-resolves it."""
    # Reverse index: MBID -> Navidrome album id.
    self._mbid_to_navidrome_id.pop(album_mbid, None)
    # Positive name-match cache: purge any key that resolved to this MBID.
    matching = [key for key, cached in self._album_mbid_cache.items() if cached == album_mbid]
    for key in matching:
        self._album_mbid_cache.pop(key)
    if matching:
        self._dirty = True
        logger.debug("navidrome.cache action=invalidate mbid=%s cleared_keys=%d", album_mbid[:8], len(matching))
async def _resolve_album_mbid(self, name: str, artist: str) -> str | None:
"""Resolve a release-group MBID for an album via Lidarr library matching."""
if not name or not artist:
@ -482,7 +492,7 @@ class NavidromeLibraryService:
logger.warning("Failed to load Navidrome MBID cache from disk", exc_info=True)
if not self._lidarr_album_index:
logger.warning("Lidarr library data unavailable Lidarr enrichment will be skipped")
logger.warning("Lidarr library data unavailable - Lidarr enrichment will be skipped")
# Phase 2: Fetch current Navidrome library (paginated) for reconciliation + enrichment
try:
@ -502,7 +512,7 @@ class NavidromeLibraryService:
logger.warning("Failed to fetch Navidrome albums for MBID enrichment")
return
# Phase 3: Reconcile remove stale entries no longer in Navidrome
# Phase 3: Reconcile - remove stale entries no longer in Navidrome
current_album_keys: set[str] = set()
current_artist_names: set[str] = set()
for album in all_albums:

View file

@ -2,9 +2,8 @@ import logging
from repositories.protocols import LidarrRepositoryProtocol
from infrastructure.queue.request_queue import RequestQueue
from infrastructure.persistence.request_history import RequestHistoryStore
from api.v1.schemas.request import QueueStatusResponse, RequestResponse
from api.v1.schemas.request import QueueStatusResponse, RequestAcceptedResponse
from core.exceptions import ExternalServiceError
from services.request_utils import extract_cover_url
logger = logging.getLogger(__name__)
@ -20,54 +19,81 @@ class RequestService:
self._request_queue = request_queue
self._request_history = request_history
async def request_album(self, musicbrainz_id: str, artist: str | None = None, album: str | None = None, year: int | None = None) -> RequestResponse:
async def request_album(
self,
musicbrainz_id: str,
artist: str | None = None,
album: str | None = None,
year: int | None = None,
artist_mbid: str | None = None,
monitor_artist: bool = False,
auto_download_artist: bool = False,
) -> RequestAcceptedResponse:
if not self._lidarr_repo.is_configured():
raise ExternalServiceError("Lidarr is not configured — set a Lidarr API key in Settings to request albums.")
raise ExternalServiceError("Lidarr isn't configured. Add an API key in Settings before requesting albums.")
try:
result = await self._request_queue.add(musicbrainz_id)
payload = result.get("payload", {})
lidarr_album_id = None
cover_url = None
artist_mbid = None
resolved_artist = artist or "Unknown"
resolved_album = album or "Unknown"
if payload and isinstance(payload, dict):
lidarr_album_id = payload.get("id")
resolved_album = payload.get("title") or resolved_album
cover_url = extract_cover_url(payload)
artist_data = payload.get("artist", {})
if artist_data:
resolved_artist = artist_data.get("artistName") or resolved_artist
artist_mbid = artist_data.get("foreignArtistId")
try:
await self._request_history.async_record_request(
# Don't overwrite an active record (pending/downloading) — just re-check the queue.
existing = await self._request_history.async_get_record(musicbrainz_id)
if existing and existing.status in ("pending", "downloading"):
# Merge monitoring flags if the user updated their choice on re-request
if monitor_artist and not existing.monitor_artist:
await self._request_history.async_update_monitoring_flags(
musicbrainz_id, monitor_artist=True, auto_download_artist=auto_download_artist,
)
enqueued = await self._request_queue.enqueue(musicbrainz_id)
return RequestAcceptedResponse(
success=True,
message="Request already in progress",
musicbrainz_id=musicbrainz_id,
artist_name=resolved_artist,
album_title=resolved_album,
year=year,
cover_url=cover_url,
artist_mbid=artist_mbid,
lidarr_album_id=lidarr_album_id,
status=existing.status,
)
except Exception as e: # noqa: BLE001
logger.error("Failed to persist request history for %s: %s", musicbrainz_id, e)
return RequestResponse(
success=True,
message=result["message"],
lidarr_response=payload,
await self._request_history.async_record_request(
musicbrainz_id=musicbrainz_id,
artist_name=artist or "Unknown",
album_title=album or "Unknown",
year=year,
artist_mbid=artist_mbid,
monitor_artist=monitor_artist,
auto_download_artist=auto_download_artist,
)
except Exception as e: # noqa: BLE001
logger.error("Failed to request album %s: %s", musicbrainz_id, e)
raise ExternalServiceError(f"Failed to request album: {e}")
logger.error("Failed to record request history for %s: %s", musicbrainz_id, e)
raise ExternalServiceError(f"Failed to record request: {e}")
try:
enqueued = await self._request_queue.enqueue(musicbrainz_id)
if not enqueued:
return RequestAcceptedResponse(
success=True,
message="Request already in queue",
musicbrainz_id=musicbrainz_id,
status="pending",
)
except Exception as e: # noqa: BLE001
logger.error("Failed to enqueue album %s: %s", musicbrainz_id, e)
try:
from datetime import datetime, timezone
await self._request_history.async_update_status(
musicbrainz_id, "failed",
completed_at=datetime.now(timezone.utc).isoformat(),
)
except Exception: # noqa: BLE001
pass
raise ExternalServiceError(f"Failed to enqueue request: {e}")
return RequestAcceptedResponse(
success=True,
message="Request accepted",
musicbrainz_id=musicbrainz_id,
status="pending",
)
def get_queue_status(self) -> QueueStatusResponse:
status = self._request_queue.get_status()
return QueueStatusResponse(
queue_size=status["queue_size"],
processing=status["processing"]
processing=status["processing"],
active_workers=status.get("active_workers", 0),
max_workers=status.get("max_workers", 1),
)

View file

@ -3,7 +3,7 @@ import math
import time as _time
from collections.abc import Callable, Coroutine
from datetime import datetime, timezone
from typing import Any, Optional
from typing import Any, Optional, TYPE_CHECKING
from api.v1.schemas.requests_page import (
ActiveRequestItem,
@ -19,6 +19,9 @@ from infrastructure.persistence.request_history import RequestHistoryRecord, Req
from repositories.protocols import LidarrRepositoryProtocol
from services.request_utils import extract_cover_url, parse_eta, resolve_display_status
if TYPE_CHECKING:
from infrastructure.queue.request_queue import RequestQueue
logger = logging.getLogger(__name__)
_CANCELLABLE_STATUSES = {"pending", "downloading"}
@ -36,11 +39,13 @@ class RequestsPageService:
request_history: RequestHistoryStore,
library_mbids_fn: Callable[..., Coroutine[Any, Any, set[str]]],
on_import_callback: Callable[[RequestHistoryRecord], Coroutine[Any, Any, None]] | None = None,
request_queue: Optional["RequestQueue"] = None,
):
self._lidarr_repo = lidarr_repo
self._request_history = request_history
self._library_mbids_fn = library_mbids_fn
self._on_import_callback = on_import_callback
self._request_queue = request_queue
self._queue_cache: list[dict] | None = None
self._queue_cache_time: float = 0
self._library_mbids_cache: set[str] | None = None
@ -129,6 +134,11 @@ class RequestsPageService:
message=f"Cannot cancel request with status '{record.status}'",
)
# Cancel from local queue first
queue_cancelled = False
if self._request_queue:
queue_cancelled = await self._request_queue.cancel(musicbrainz_id)
try:
queue_items = await self._get_cached_queue()
except Exception as e: # noqa: BLE001
@ -151,7 +161,8 @@ class RequestsPageService:
success=False, message="Couldn't remove the item from the download queue"
)
self._invalidate_queue_cache()
else:
elif not queue_cancelled:
# Not in the local queue or Lidarr's download queue
library_mbids = await self._fetch_library_mbids()
if musicbrainz_id.lower() in library_mbids:
return CancelRequestResponse(
@ -184,6 +195,7 @@ class RequestsPageService:
message=f"Cannot retry request with status '{record.status}'",
)
# If we have a Lidarr album ID, try a targeted search first
if record.lidarr_album_id:
result = await self._lidarr_repo.trigger_album_search(
[record.lidarr_album_id]
@ -195,9 +207,27 @@ class RequestsPageService:
message=f"Retrying search for {record.album_title}",
)
# Search failed or no Lidarr album ID — fall through to add_album
# which handles the "album already exists" case gracefully via its
# action="exists" path.
# Route through queue for dedup, per-artist locking, and history callbacks
if self._request_queue:
try:
await self._request_history.async_update_status(musicbrainz_id, "pending")
enqueued = await self._request_queue.enqueue(musicbrainz_id)
if enqueued:
return RetryRequestResponse(
success=True,
message=f"Re-requested {record.album_title}",
)
return RetryRequestResponse(
success=True,
message=f"Request already in queue for {record.album_title}",
)
except Exception as e: # noqa: BLE001
logger.error("Retry via queue failed for %s: %s", musicbrainz_id, e)
return RetryRequestResponse(
success=False, message=f"Retry failed: {e}"
)
# Fallback: direct add_album (only if no queue available)
try:
add_result = await self._lidarr_repo.add_album(musicbrainz_id)
payload = add_result.get("payload", {})

View file

@ -25,11 +25,14 @@ async def test_jobs_survive_restart(store: QueueStore):
store.enqueue("job-1", "mbid-abc")
store.mark_processing("job-1")
q1._processor_task.cancel()
try:
await q1._processor_task
except asyncio.CancelledError:
pass
for task in q1._worker_tasks:
task.cancel()
for task in q1._worker_tasks:
try:
await task
except asyncio.CancelledError:
pass
q1._worker_tasks.clear()
fast_processed = []

View file

@ -0,0 +1,210 @@
"""Tests for MUS-14 queue changes: dedup, cancel, concurrency, atomic enqueue."""
import asyncio
from pathlib import Path
from unittest.mock import MagicMock
import pytest
from infrastructure.queue.queue_store import QueueStore
from infrastructure.queue.request_queue import RequestQueue
@pytest.fixture
def store(tmp_path: Path) -> QueueStore:
    """Provide a QueueStore backed by a throwaway SQLite file per test."""
    db_file = tmp_path / "test_queue.db"
    return QueueStore(db_path=db_file)
@pytest.mark.asyncio
async def test_enqueue_dedup_rejects_duplicate(store: QueueStore):
    """A second enqueue() of the same MBID is refused while the first is active."""
    handled: list[str] = []

    async def parked_processor(mbid: str) -> dict:
        # Sleep long enough that the first job is still in flight for the dup check.
        await asyncio.sleep(10)
        handled.append(mbid)
        return {"status": "ok"}

    queue = RequestQueue(processor=parked_processor, store=store)
    outcome_first = await queue.enqueue("mbid-aaa")
    outcome_second = await queue.enqueue("mbid-aaa")
    assert outcome_first is True
    assert outcome_second is False
    await queue.stop()
@pytest.mark.asyncio
async def test_enqueue_dedup_is_atomic(store: QueueStore):
    """Racing enqueue() calls for one MBID must admit exactly one winner."""
    handled: list[str] = []

    async def parked_processor(mbid: str) -> dict:
        # Keep the job in flight so all three racers see an active duplicate.
        await asyncio.sleep(10)
        handled.append(mbid)
        return {"status": "ok"}

    queue = RequestQueue(processor=parked_processor, store=store)
    outcomes = await asyncio.gather(*(queue.enqueue("mbid-race") for _ in range(3)))
    assert sum(outcomes) == 1
    await queue.stop()
@pytest.mark.asyncio
async def test_cancel_skips_queued_item(store: QueueStore):
    """An item cancelled while still queued is never handed to the processor."""
    handled: list[str] = []
    release = asyncio.Event()

    async def held_processor(mbid: str) -> dict:
        # Block until the test opens the gate so ordering is deterministic.
        await release.wait()
        handled.append(mbid)
        return {"status": "ok"}

    queue = RequestQueue(processor=held_processor, store=store, concurrency=1)
    await queue.enqueue("mbid-first")
    await queue.enqueue("mbid-second")
    await queue.cancel("mbid-second")
    release.set()
    await asyncio.sleep(0.5)
    assert "mbid-first" in handled
    assert "mbid-second" not in handled
    await queue.stop()
@pytest.mark.asyncio
async def test_concurrent_workers_process_in_parallel(store: QueueStore):
    """With concurrency=3, at least two items must be in flight at once."""
    in_flight = 0
    peak = 0
    guard = asyncio.Lock()

    async def counting_processor(mbid: str) -> dict:
        nonlocal in_flight, peak
        async with guard:
            in_flight += 1
            peak = max(peak, in_flight)
        await asyncio.sleep(0.2)
        async with guard:
            in_flight -= 1
        return {"status": "ok"}

    queue = RequestQueue(processor=counting_processor, store=store, concurrency=3)
    for mbid in ("mbid-a", "mbid-b", "mbid-c"):
        await queue.enqueue(mbid)
    await asyncio.sleep(0.5)
    assert peak >= 2
    await queue.stop()
@pytest.mark.asyncio
async def test_store_persisted_before_memory_queue():
    """enqueue() must write to the durable store BEFORE the in-memory asyncio.Queue.

    Persist-first ordering means a job accepted by enqueue() survives a crash
    between the two steps. We record the relative order of the store write and
    the queue put, then assert the store write happened first.
    """
    from unittest import mock

    call_order: list[str] = []

    class TrackedStore:
        """Minimal QueueStore stand-in that records when its enqueue() is hit."""

        def has_active_mbid(self, mbid: str) -> bool:
            return False

        def enqueue(self, job_id: str, mbid: str) -> bool:
            call_order.append("store_enqueue")
            return True

        def get_dead_letter_count(self) -> int:
            return 0

        def get_all(self) -> list:
            return []

        def reset_processing(self) -> None:
            pass

        def get_pending(self) -> list:
            return []

        def get_retryable_dead_letters(self) -> list:
            return []

        def mark_processing(self, job_id: str) -> None:
            pass

        def dequeue(self, job_id: str) -> None:
            pass

    original_put = asyncio.Queue.put

    async def tracked_put(self, item):
        call_order.append("queue_put")
        return await original_put(self, item)

    # patch.object restores asyncio.Queue.put even if an assertion fails,
    # replacing the hand-rolled save/assign/finally dance and keeping the
    # global patch from leaking into other tests.
    with mock.patch.object(asyncio.Queue, "put", tracked_put):
        async def noop_processor(mbid: str) -> dict:
            return {"status": "ok"}

        q = RequestQueue(processor=noop_processor, store=TrackedStore())
        await q.enqueue("mbid-order-test")
        assert call_order.index("store_enqueue") < call_order.index("queue_put")
        await q.stop()
@pytest.mark.asyncio
async def test_cancel_then_re_request_processes_new_request(store: QueueStore):
    """Re-enqueueing after a cancel must not be skipped by the stale cancel mark."""
    handled: list[str] = []
    release = asyncio.Event()

    async def held_processor(mbid: str) -> dict:
        await release.wait()
        handled.append(mbid)
        return {"status": "ok"}

    queue = RequestQueue(processor=held_processor, store=store, concurrency=1)
    # Enqueue, cancel, then enqueue the very same MBID again.
    assert await queue.enqueue("mbid-bounce") is True
    await queue.cancel("mbid-bounce")
    assert await queue.enqueue("mbid-bounce") is True
    release.set()
    await asyncio.sleep(0.5)
    # The second request must actually run rather than being dropped.
    assert "mbid-bounce" in handled
    await queue.stop()
@pytest.mark.asyncio
async def test_cancelled_mbids_bounded(store: QueueStore):
    """The cancelled-MBID tombstone set must be pruned, not grow forever."""
    async def instant_processor(mbid: str) -> dict:
        return {"status": "ok"}

    queue = RequestQueue(processor=instant_processor, store=store, concurrency=1)
    # Record 250 cancels for MBIDs that were never enqueued (orphan cancels).
    for idx in range(250):
        await queue.cancel(f"orphan-{idx}")
    assert len(queue._cancelled_mbids) == 250
    # Processing any item crosses the cleanup threshold and prunes the set.
    await queue.enqueue("mbid-trigger")
    await asyncio.sleep(0.5)
    assert len(queue._cancelled_mbids) <= 200
    await queue.stop()

View file

@ -0,0 +1,101 @@
"""Tests for MUS-14 album.py changes: per-artist lock, safe LRU eviction."""
import asyncio
import pytest
from repositories.lidarr.album import _get_artist_lock, _artist_locks, _MAX_ARTIST_LOCKS
@pytest.fixture(autouse=True)
def _clear_locks():
    """Reset the module-level lock dict between tests.

    autouse=True guarantees every test in this module starts and finishes with
    an empty _artist_locks registry, so LRU/eviction tests cannot leak state
    into each other.
    """
    _artist_locks.clear()
    yield
    _artist_locks.clear()
def test_get_artist_lock_returns_same_lock_for_same_mbid():
    """Repeated lookups of one artist MBID yield the identical lock object."""
    assert _get_artist_lock("artist-aaa") is _get_artist_lock("artist-aaa")


def test_get_artist_lock_returns_different_locks_for_different_mbids():
    """Distinct artist MBIDs are guarded by distinct locks."""
    assert _get_artist_lock("artist-aaa") is not _get_artist_lock("artist-bbb")


def test_lru_eviction_respects_max():
    """The lock registry never grows past its configured LRU capacity."""
    for idx in range(_MAX_ARTIST_LOCKS + 10):
        _get_artist_lock(f"artist-{idx}")
    assert len(_artist_locks) <= _MAX_ARTIST_LOCKS
@pytest.mark.asyncio
async def test_lru_eviction_skips_held_locks():
    """LRU eviction must never discard a lock somebody is currently holding."""
    held = _get_artist_lock("artist-held")
    await held.acquire()
    try:
        # Flood the registry past capacity to force evictions.
        for idx in range(_MAX_ARTIST_LOCKS + 5):
            _get_artist_lock(f"artist-fill-{idx}")
        assert "artist-held" in _artist_locks
        assert _artist_locks["artist-held"] is held
    finally:
        held.release()
@pytest.mark.asyncio
async def test_per_artist_lock_serializes_concurrent_calls():
    """Two concurrent adds for the same artist must run one after the other."""
    events: list[str] = []

    async def simulated_add(artist_mbid: str, label: str):
        async with _get_artist_lock(artist_mbid):
            events.append(f"{label}-start")
            await asyncio.sleep(0.1)
            events.append(f"{label}-end")

    await asyncio.gather(
        simulated_add("same-artist", "A"),
        simulated_add("same-artist", "B"),
    )
    # Whichever task ran first must have finished before the other began;
    # equivalently: the earliest "end" precedes the latest "start".
    earliest_end = min(events.index("A-end"), events.index("B-end"))
    latest_start = max(events.index("A-start"), events.index("B-start"))
    serialized = earliest_end < latest_start
    assert serialized, f"Expected serialized execution, got: {events}"
@pytest.mark.asyncio
async def test_different_artists_run_concurrently():
    """Adds for distinct artists must be able to overlap in time."""
    in_flight = 0
    peak = 0
    counter_guard = asyncio.Lock()

    async def simulated_add(artist_mbid: str):
        nonlocal in_flight, peak
        async with _get_artist_lock(artist_mbid):
            async with counter_guard:
                in_flight += 1
                peak = max(peak, in_flight)
            await asyncio.sleep(0.1)
            async with counter_guard:
                in_flight -= 1

    await asyncio.gather(
        simulated_add("artist-X"),
        simulated_add("artist-Y"),
        simulated_add("artist-Z"),
    )
    assert peak >= 2, f"Expected concurrent execution, max active was {peak}"

View file

@ -166,14 +166,32 @@ class TestSharedRawAlbumCache:
},
]
library_mbids, requested_mbids = await asyncio.gather(
library_mbids, monitored_no_files = await asyncio.gather(
repo.get_library_mbids(include_release_ids=False),
repo.get_requested_mbids(),
repo.get_monitored_no_files_mbids(),
)
assert mock_get.await_count == 1
assert library_mbids == {"aaaa"}
assert requested_mbids == {"bbbb"}
assert monitored_no_files == {"bbbb"}
@pytest.mark.asyncio
async def test_get_requested_mbids_uses_history_store(self, repo):
"""get_requested_mbids delegates to RequestHistoryStore."""
mock_store = AsyncMock()
mock_store.async_get_active_mbids = AsyncMock(return_value={"cccc", "dddd"})
repo._request_history_store = mock_store
result = await repo.get_requested_mbids()
assert result == {"cccc", "dddd"}
mock_store.async_get_active_mbids.assert_awaited_once()
@pytest.mark.asyncio
async def test_get_requested_mbids_returns_empty_without_store(self, repo):
"""get_requested_mbids returns empty set when no history store."""
repo._request_history_store = None
result = await repo.get_requested_mbids()
assert result == set()
@pytest.mark.asyncio
async def test_explicit_album_cache_invalidation_forces_refetch(self, repo):

View file

@ -0,0 +1,90 @@
import os
import tempfile
import pytest
from unittest.mock import AsyncMock, MagicMock, patch
# Ensure config uses a writable temp dir for tests
_test_dir = tempfile.mkdtemp()
os.environ.setdefault("ROOT_APP_DIR", _test_dir)
VALID_MBID = "77434d0b-1c5f-48c3-8694-050cb378ebd2"
UNKNOWN_MBID = "00000000-0000-0000-0000-000000000000"
def _make_basic_info():
return MagicMock(
release_group_id=VALID_MBID,
title="Test Album",
artist_name="Test Artist",
in_library=True,
)
@pytest.fixture
def mock_album_service():
    """Album service double: async refresh + basic-info lookup returning canned data."""
    service = MagicMock()
    service.refresh_album = AsyncMock()
    service.get_album_basic_info = AsyncMock(return_value=_make_basic_info())
    return service


@pytest.fixture
def mock_navidrome_service():
    """Navidrome library service double exposing a synchronous cache invalidator."""
    service = MagicMock()
    service.invalidate_album_cache = MagicMock()
    return service
@pytest.fixture
def client(mock_album_service, mock_navidrome_service):
    """FastAPI TestClient wired with the album routes and both service doubles."""
    from fastapi import FastAPI
    from fastapi.testclient import TestClient
    from api.v1.routes.albums import router
    from core.dependencies import get_album_service, get_navidrome_library_service

    app = FastAPI()
    app.include_router(router)
    overrides = {
        get_album_service: lambda: mock_album_service,
        get_navidrome_library_service: lambda: mock_navidrome_service,
    }
    app.dependency_overrides.update(overrides)
    return TestClient(app)
def test_refresh_calls_invalidate_and_refresh(client, mock_album_service, mock_navidrome_service):
    """A refresh POST invalidates the Navidrome cache, refreshes, then re-reads info."""
    resp = client.post(f"/albums/{VALID_MBID}/refresh")
    assert resp.status_code == 200
    mock_navidrome_service.invalidate_album_cache.assert_called_once_with(VALID_MBID)
    mock_album_service.refresh_album.assert_called_once_with(VALID_MBID)
    mock_album_service.get_album_basic_info.assert_called_once_with(VALID_MBID)


def test_refresh_returns_basic_info(client):
    """A refresh for a known MBID answers 200."""
    assert client.post(f"/albums/{VALID_MBID}/refresh").status_code == 200


def test_refresh_rejects_unknown_mbid(client, mock_album_service, mock_navidrome_service):
    """A non-UUID album id is rejected up front with 400 and no service calls."""
    resp = client.post("/albums/unknown_test/refresh")
    assert resp.status_code == 400
    assert "Invalid or unknown" in resp.json()["detail"]
    mock_album_service.refresh_album.assert_not_called()
    mock_navidrome_service.invalidate_album_cache.assert_not_called()


def test_refresh_rejects_empty_album_id(client, mock_album_service):
    """A whitespace-only album id is rejected with 400 before any refresh."""
    assert client.post("/albums/%20/refresh").status_code == 400
    mock_album_service.refresh_album.assert_not_called()


def test_refresh_propagates_service_value_error(client, mock_album_service, mock_navidrome_service):
    """A ValueError raised during cache invalidation surfaces as a 400."""
    mock_navidrome_service.invalidate_album_cache = MagicMock(side_effect=ValueError("bad"))
    assert client.post(f"/albums/{VALID_MBID}/refresh").status_code == 400

View file

@ -0,0 +1,65 @@
import pytest
import time
from unittest.mock import MagicMock, patch
from services.navidrome_library_service import NavidromeLibraryService
@pytest.fixture
def service():
    """Bare NavidromeLibraryService with empty caches, skipping __init__ entirely."""
    instance = NavidromeLibraryService.__new__(NavidromeLibraryService)
    instance._mbid_to_navidrome_id = {}
    instance._album_mbid_cache = {}
    instance._dirty = False
    return instance
ALBUM_MBID = "77434d0b-1c5f-48c3-8694-050cb378ebd2"
NAVIDROME_ID = "nav-12345"
def test_invalidate_clears_mbid_to_navidrome_id(service):
service._mbid_to_navidrome_id[ALBUM_MBID] = NAVIDROME_ID
service.invalidate_album_cache(ALBUM_MBID)
assert ALBUM_MBID not in service._mbid_to_navidrome_id
def test_invalidate_clears_positive_album_mbid_cache_entries(service):
cache_key = "test album:test artist"
service._album_mbid_cache[cache_key] = ALBUM_MBID
service.invalidate_album_cache(ALBUM_MBID)
assert cache_key not in service._album_mbid_cache
assert service._dirty is True
def test_invalidate_clears_multiple_matching_entries(service):
service._album_mbid_cache["key1:artist1"] = ALBUM_MBID
service._album_mbid_cache["key2:artist2"] = ALBUM_MBID
service._album_mbid_cache["other:other"] = "different-mbid"
service.invalidate_album_cache(ALBUM_MBID)
assert "key1:artist1" not in service._album_mbid_cache
assert "key2:artist2" not in service._album_mbid_cache
assert "other:other" in service._album_mbid_cache
def test_invalidate_noop_when_not_cached(service):
service.invalidate_album_cache(ALBUM_MBID)
assert service._dirty is False
def test_invalidate_leaves_unrelated_entries(service):
other_mbid = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
service._mbid_to_navidrome_id[other_mbid] = "nav-other"
service._album_mbid_cache["other:artist"] = other_mbid
service.invalidate_album_cache(ALBUM_MBID)
assert service._mbid_to_navidrome_id[other_mbid] == "nav-other"
assert service._album_mbid_cache["other:artist"] == other_mbid

View file

@ -11,9 +11,10 @@ def _make_service(queue_add_result: dict | None = None) -> tuple[RequestService,
request_queue = MagicMock()
request_history = MagicMock()
request_queue.add = AsyncMock(return_value=queue_add_result or {"message": "queued", "payload": {}})
request_queue.enqueue = AsyncMock(return_value=True)
request_queue.get_status = MagicMock(return_value={"queue_size": 0, "processing": False})
request_history.async_record_request = AsyncMock()
request_history.async_get_record = AsyncMock(return_value=None)
service = RequestService(lidarr_repo, request_queue, request_history)
return service, request_queue, request_history
@ -21,37 +22,26 @@ def _make_service(queue_add_result: dict | None = None) -> tuple[RequestService,
@pytest.mark.asyncio
async def test_request_album_records_history_and_returns_response():
service, request_queue, request_history = _make_service(
{
"message": "Album queued",
"payload": {
"id": 42,
"title": "Album X",
"foreignAlbumId": "rg-123",
"artist": {"artistName": "Artist X", "foreignArtistId": "artist-123"},
},
}
)
service, request_queue, request_history = _make_service()
response = await service.request_album("rg-123", artist="Fallback Artist", album="Fallback Album", year=2024)
assert response.success is True
assert response.message == "Album queued"
assert isinstance(response.lidarr_response, dict)
assert response.lidarr_response["id"] == 42
assert response.message == "Request accepted"
assert response.musicbrainz_id == "rg-123"
assert response.status == "pending"
request_queue.add.assert_awaited_once_with("rg-123")
request_queue.enqueue.assert_awaited_once_with("rg-123")
request_history.async_record_request.assert_awaited_once()
kwargs = request_history.async_record_request.await_args.kwargs
assert kwargs["artist_name"] == "Artist X"
assert kwargs["album_title"] == "Album X"
assert kwargs["artist_mbid"] == "artist-123"
assert kwargs["artist_name"] == "Fallback Artist"
assert kwargs["album_title"] == "Fallback Album"
@pytest.mark.asyncio
async def test_request_album_wraps_errors():
service, request_queue, _ = _make_service()
request_queue.add.side_effect = RuntimeError("boom")
request_queue.enqueue = AsyncMock(side_effect=RuntimeError("boom"))
with pytest.raises(ExternalServiceError):
await service.request_album("rg-123")

View file

@ -0,0 +1,295 @@
"""Tests for MUS-15B: Artist monitoring API and integration."""
import pytest
from unittest.mock import AsyncMock, MagicMock, patch
from services.artist_service import ArtistService
from core.exceptions import ExternalServiceError
@pytest.fixture
def mock_lidarr_repo():
    """Async Lidarr repo double: configured, artist unmonitored, empty library."""
    artist_details = {
        "id": 42,
        "monitored": False,
        "monitorNewItems": "none",
        "monitor_new_items": "none",
        "name": "Test Artist",
        "overview": "A test artist",
        "genres": ["rock"],
        "links": [],
        "poster_url": None,
        "fanart_url": None,
        "banner_url": None,
    }
    repo = AsyncMock()
    repo.is_configured = MagicMock(return_value=True)
    repo.get_artist_details.return_value = artist_details
    repo.update_artist_monitoring.return_value = {"monitored": False, "auto_download": False}
    repo.get_library_mbids.return_value = set()
    repo.get_artist_albums.return_value = []
    repo.get_requested_mbids.return_value = set()
    return repo
@pytest.fixture
def mock_mb_repo():
    """Plain AsyncMock standing in for the MusicBrainz repository."""
    return AsyncMock()


@pytest.fixture
def mock_wikidata_repo():
    """Plain AsyncMock standing in for the Wikidata repository."""
    return AsyncMock()


@pytest.fixture
def mock_preferences():
    """Preferences double returning Album-only primary types."""
    prefs = MagicMock()
    prefs.get_preferences.return_value = MagicMock(
        primary_types=["Album"],
        secondary_types=[],
    )
    return prefs


@pytest.fixture
def mock_cache():
    """Memory-cache double: lookups miss, deletes and prefix-clears succeed trivially."""
    cache = AsyncMock()
    cache.get.return_value = None
    cache.delete.return_value = None
    cache.clear_prefix.return_value = 0
    return cache


@pytest.fixture
def mock_disk_cache():
    """Plain AsyncMock standing in for the disk cache."""
    return AsyncMock()
@pytest.fixture
def artist_service(
    mock_lidarr_repo, mock_mb_repo, mock_wikidata_repo,
    mock_preferences, mock_cache, mock_disk_cache,
):
    """ArtistService assembled entirely from the test doubles above."""
    dependencies = dict(
        mb_repo=mock_mb_repo,
        lidarr_repo=mock_lidarr_repo,
        wikidata_repo=mock_wikidata_repo,
        preferences_service=mock_preferences,
        memory_cache=mock_cache,
        disk_cache=mock_disk_cache,
    )
    return ArtistService(**dependencies)
class TestSetArtistMonitoring:
    """Behavior of ArtistService.set_artist_monitoring against the Lidarr repo."""

    @pytest.mark.asyncio
    async def test_set_monitoring_on(self, artist_service, mock_lidarr_repo, mock_cache):
        """Enabling monitoring without auto-download maps to monitor_new_items='none'."""
        outcome = await artist_service.set_artist_monitoring(
            "abc-123", monitored=True, auto_download=False,
        )
        mock_lidarr_repo.update_artist_monitoring.assert_awaited_once_with(
            "abc-123", monitored=True, monitor_new_items="none",
        )
        mock_cache.delete.assert_awaited()
        assert outcome == {"monitored": False, "auto_download": False}

    @pytest.mark.asyncio
    async def test_set_monitoring_with_auto_download(self, artist_service, mock_lidarr_repo):
        """Auto-download on top of monitoring maps to monitor_new_items='all'."""
        await artist_service.set_artist_monitoring(
            "abc-123", monitored=True, auto_download=True,
        )
        mock_lidarr_repo.update_artist_monitoring.assert_awaited_once_with(
            "abc-123", monitored=True, monitor_new_items="all",
        )

    @pytest.mark.asyncio
    async def test_auto_download_false_when_unmonitored(self, artist_service, mock_lidarr_repo):
        """auto_download is ignored when the artist is being unmonitored."""
        await artist_service.set_artist_monitoring(
            "abc-123", monitored=False, auto_download=True,
        )
        mock_lidarr_repo.update_artist_monitoring.assert_awaited_once_with(
            "abc-123", monitored=False, monitor_new_items="none",
        )

    @pytest.mark.asyncio
    async def test_raises_when_lidarr_not_configured(self, artist_service, mock_lidarr_repo):
        """Without a configured Lidarr, the call fails with ExternalServiceError."""
        mock_lidarr_repo.is_configured = MagicMock(return_value=False)
        with pytest.raises(ExternalServiceError, match="not configured"):
            await artist_service.set_artist_monitoring("abc-123", monitored=True)

    @pytest.mark.asyncio
    async def test_invalidates_artist_cache(self, artist_service, mock_cache):
        """A monitoring change evicts the cached artist_info entry."""
        await artist_service.set_artist_monitoring("abc-123", monitored=True)
        deletions = [str(call) for call in mock_cache.delete.await_args_list]
        assert any("artist_info:abc-123" in call for call in deletions)
class TestGetArtistMonitoringStatus:
    """Behavior of ArtistService.get_artist_monitoring_status."""

    @pytest.mark.asyncio
    async def test_returns_status_from_lidarr(self, artist_service, mock_lidarr_repo):
        """A monitored artist with monitor_new_items='all' reports auto_download=True."""
        mock_lidarr_repo.get_artist_details.return_value = {
            "id": 42, "monitored": True, "monitor_new_items": "all",
        }
        status = await artist_service.get_artist_monitoring_status("abc-123")
        assert status == {"in_lidarr": True, "monitored": True, "auto_download": True}

    @pytest.mark.asyncio
    async def test_returns_defaults_when_not_in_lidarr(self, artist_service, mock_lidarr_repo):
        """An artist unknown to Lidarr yields the all-False default status."""
        mock_lidarr_repo.get_artist_details.return_value = None
        status = await artist_service.get_artist_monitoring_status("abc-123")
        assert status == {"in_lidarr": False, "monitored": False, "auto_download": False}

    @pytest.mark.asyncio
    async def test_returns_defaults_when_lidarr_not_configured(self, artist_service, mock_lidarr_repo):
        """With Lidarr unconfigured, the status is the all-False default."""
        mock_lidarr_repo.is_configured = MagicMock(return_value=False)
        status = await artist_service.get_artist_monitoring_status("abc-123")
        assert status == {"in_lidarr": False, "monitored": False, "auto_download": False}
class TestArtistInfoMonitoringFields:
    """Monitoring fields on the payload built by ArtistService._do_get_artist_info."""

    @pytest.mark.asyncio
    async def test_monitoring_fields_set_from_lidarr(self, artist_service, mock_lidarr_repo, mock_cache):
        """Lidarr details with monitoring enabled flow into the ArtistInfo fields."""
        mock_lidarr_repo.get_artist_details.return_value = {
            "id": 42,
            "monitored": True,
            "monitor_new_items": "all",
            "name": "Test Artist",
            "overview": "A test artist",
            "genres": ["rock"],
            "links": [],
            "poster_url": None,
            "fanart_url": None,
            "banner_url": None,
        }
        info = await artist_service._do_get_artist_info("abc-123", None, None)
        assert info.monitored is True
        assert info.auto_download is True
        assert info.in_lidarr is True

    @pytest.mark.asyncio
    async def test_monitoring_fields_default_when_no_lidarr(self, artist_service, mock_lidarr_repo, mock_cache):
        """Without Lidarr, monitoring fields fall back to False on a MusicBrainz build."""
        mock_lidarr_repo.is_configured = MagicMock(return_value=False)
        mock_lidarr_repo.get_artist_details.return_value = None
        with patch.object(artist_service, '_build_artist_from_musicbrainz') as mock_build:
            from models.artist import ArtistInfo
            mock_build.return_value = ArtistInfo(
                name="Test", musicbrainz_id="abc-123",
                tags=[], aliases=[], external_links=[],
            )
            info = await artist_service._do_get_artist_info("abc-123", None, None)
        assert info.monitored is False
        assert info.auto_download is False
        assert info.in_lidarr is False
class TestUpdateAlbumHelper:
    """Tests for Aurral-aligned _update_album helper (PUT /album/{id} instead of PUT /album/monitor)."""

    @pytest.mark.asyncio
    async def test_update_album_returns_synchronous_result(self):
        """_update_album GETs full album, merges updates, PUTs back, returns updated object."""
        repo = AsyncMock()
        from repositories.lidarr.album import LidarrAlbumRepository
        repo._ALBUM_MUTABLE_FIELDS = LidarrAlbumRepository._ALBUM_MUTABLE_FIELDS
        original_album = {"id": 10, "title": "Test", "monitored": False, "statistics": {}}
        updated_album = {"id": 10, "title": "Test", "monitored": True, "statistics": {}}
        repo._get = AsyncMock(return_value=original_album)
        repo._put = AsyncMock(return_value=updated_album)

        result = await LidarrAlbumRepository._update_album(repo, 10, {"monitored": True})

        # Exactly one read-modify-write round trip against the album resource.
        repo._get.assert_awaited_once_with("/api/v1/album/10")
        repo._put.assert_awaited_once()
        put_args = repo._put.await_args
        assert put_args[0][0] == "/api/v1/album/10"
        assert put_args[0][1]["monitored"] is True
        # The helper returns Lidarr's PUT response synchronously (no refetch).
        assert result["monitored"] is True

    @pytest.mark.asyncio
    async def test_update_album_preserves_other_fields(self):
        """_update_album only merges specified fields, preserving the rest."""
        repo = AsyncMock()
        from repositories.lidarr.album import LidarrAlbumRepository
        repo._ALBUM_MUTABLE_FIELDS = LidarrAlbumRepository._ALBUM_MUTABLE_FIELDS
        original = {"id": 10, "title": "Original", "monitored": False, "anyReleaseOk": True}
        repo._get = AsyncMock(return_value=original.copy())
        repo._put = AsyncMock(return_value={**original, "monitored": True})

        result = await LidarrAlbumRepository._update_album(repo, 10, {"monitored": True})

        put_payload = repo._put.await_args[0][1]
        # Untouched fields from the GET response survive the merge.
        assert put_payload["title"] == "Original"
        assert put_payload["anyReleaseOk"] is True
        assert put_payload["monitored"] is True
        assert result is not None

    @pytest.mark.asyncio
    async def test_update_album_rejects_disallowed_fields(self):
        """_update_album silently drops fields not in the allowlist."""
        repo = AsyncMock()
        from repositories.lidarr.album import LidarrAlbumRepository
        repo._ALBUM_MUTABLE_FIELDS = LidarrAlbumRepository._ALBUM_MUTABLE_FIELDS
        original = {"id": 10, "title": "Original", "monitored": False, "rootFolderPath": "/music"}
        repo._get = AsyncMock(return_value=original.copy())
        repo._put = AsyncMock(return_value={**original, "monitored": True})

        result = await LidarrAlbumRepository._update_album(
            repo, 10, {"monitored": True, "rootFolderPath": "/evil", "qualityProfileId": 999}
        )

        put_payload = repo._put.await_args[0][1]
        assert put_payload["monitored"] is True
        # A disallowed field present on the original must not be overwritten...
        assert put_payload["rootFolderPath"] == "/music"
        # ...and a disallowed field absent from the original must not be
        # introduced at all. (Tightened from the previous weaker check
        # `not in ... or != 999`, which was trivially satisfiable; nothing
        # else can add qualityProfileId here, so exact membership is the
        # correct assertion for the allowlist contract.)
        assert "qualityProfileId" not in put_payload
        assert result is not None
class TestProcessorMonitoringSignal:
    """Tests for the queue processor's belt-and-suspenders monitoring check (structured boolean).

    These exercise pure dict logic with no awaits, so they are plain
    synchronous tests — the previous ``@pytest.mark.asyncio`` coroutine
    wrappers added event-loop overhead without testing anything async.
    """

    def _check_monitored(self, result: dict) -> bool:
        """Replicate the processor's monitoring check logic.

        Prefers the ``monitored`` flag inside ``payload`` (the album object),
        falling back to the top-level structured boolean when the payload is
        stale, missing, or not a dict.
        """
        payload = result.get("payload", {})
        is_monitored = payload.get("monitored", False) if isinstance(payload, dict) else False
        if not is_monitored:
            is_monitored = bool(result.get("monitored"))
        return is_monitored

    def test_processor_trusts_structured_flag_when_payload_stale(self):
        """Processor should treat album as monitored via structured boolean, even if payload is stale."""
        result = {
            "message": "Album monitored & search triggered: Test Album",
            "monitored": True,
            "payload": {"monitored": False, "id": 10},
        }
        assert self._check_monitored(result) is True

    def test_processor_trusts_added_and_monitored_flag(self):
        """Processor should detect structured monitored=True for add+monitor path."""
        result = {
            "message": "Album added & monitored: New Album",
            "monitored": True,
            "payload": {"monitored": False, "id": 20},
        }
        assert self._check_monitored(result) is True

    def test_processor_does_not_false_positive_without_flag(self):
        """Processor should NOT flag as monitored when structured boolean is absent."""
        result = {
            "message": "Album already downloaded: Some Album",
            "payload": {"monitored": False, "id": 30},
        }
        assert self._check_monitored(result) is False

    def test_processor_uses_payload_when_already_monitored(self):
        """When payload correctly has monitored=True, structured flag is not needed."""
        result = {
            "message": "Album already downloaded: Some Album",
            "payload": {"monitored": True, "id": 40},
        }
        assert self._check_monitored(result) is True

View file

@ -0,0 +1,209 @@
"""Tests for MUS-15: album status race condition fixes."""
from unittest.mock import AsyncMock, MagicMock, patch
import asyncio
import pytest
# ---------- Fix 1: library_service.get_library_mbids merges library_db ----------
@pytest.mark.asyncio
async def test_get_library_mbids_merges_library_db():
    """Library mbids should union Lidarr API results with library_db MBIDs."""
    from services.library_service import LibraryService

    api_repo = MagicMock()
    api_repo.is_configured.return_value = True
    api_repo.get_library_mbids = AsyncMock(return_value={"aaa", "bbb"})

    db = MagicMock()
    db.get_all_album_mbids = AsyncMock(return_value={"CCC", "DDD"})

    service = LibraryService(
        lidarr_repo=api_repo,
        library_db=db,
        cover_repo=MagicMock(),
        preferences_service=MagicMock(),
    )

    mbids = await service.get_library_mbids()

    lowered = {mbid.lower() for mbid in mbids}
    assert lowered == {"aaa", "bbb", "ccc", "ddd"}, "Should contain both Lidarr and library_db MBIDs"
    assert len(mbids) == 4
@pytest.mark.asyncio
async def test_get_library_mbids_handles_library_db_overlap():
    """Overlapping MBIDs should be deduplicated."""
    from services.library_service import LibraryService

    api_repo = MagicMock()
    api_repo.is_configured.return_value = True
    api_repo.get_library_mbids = AsyncMock(return_value={"aaa", "bbb"})

    # library_db overlaps with Lidarr both exactly ("bbb") and by case ("AAA").
    db = MagicMock()
    db.get_all_album_mbids = AsyncMock(return_value={"AAA", "bbb"})

    service = LibraryService(
        lidarr_repo=api_repo,
        library_db=db,
        cover_repo=MagicMock(),
        preferences_service=MagicMock(),
    )

    mbids = await service.get_library_mbids()

    assert len(mbids) == 2, "Overlapping MBIDs should be deduplicated (case-insensitive)"
@pytest.mark.asyncio
async def test_get_library_mbids_recently_imported_visible():
    """Album upserted to library_db (by on_import) appears even when Lidarr cache is stale."""
    from services.library_service import LibraryService

    # Lidarr API returns old cached data — doesn't include the newly imported album
    api_repo = MagicMock()
    api_repo.is_configured.return_value = True
    api_repo.get_library_mbids = AsyncMock(return_value={"old-album"})

    # library_db has the newly imported album (upserted by on_import callback)
    db = MagicMock()
    db.get_all_album_mbids = AsyncMock(return_value={"old-album", "newly-imported"})

    service = LibraryService(
        lidarr_repo=api_repo,
        library_db=db,
        cover_repo=MagicMock(),
        preferences_service=MagicMock(),
    )

    mbids = set(await service.get_library_mbids())

    assert "newly-imported" in mbids, "Recently imported album must appear in library mbids"
@pytest.mark.asyncio
async def test_get_library_mbids_degrades_when_library_db_fails():
    """If library_db fails, endpoint should degrade to Lidarr-only MBIDs."""
    from services.library_service import LibraryService

    api_repo = MagicMock()
    api_repo.is_configured.return_value = True
    api_repo.get_library_mbids = AsyncMock(return_value={"aaa", "bbb"})

    # The DB layer blows up; the service must swallow it and keep going.
    db = MagicMock()
    db.get_all_album_mbids = AsyncMock(side_effect=RuntimeError("DB locked"))

    service = LibraryService(
        lidarr_repo=api_repo,
        library_db=db,
        cover_repo=MagicMock(),
        preferences_service=MagicMock(),
    )

    mbids = set(await service.get_library_mbids())

    assert mbids == {"aaa", "bbb"}, "Should fall back to Lidarr-only when library_db fails"
# ---------- Fix 2: queue worker fires import callback ----------
@pytest.mark.asyncio
async def test_queue_worker_fires_import_callback_when_has_files():
    """When the queue worker detects has_files, the import callback should fire."""
    from infrastructure.queue.request_queue import RequestQueue
    from infrastructure.persistence.request_history import RequestHistoryRecord

    callback_called = asyncio.Event()
    callback_record = {}

    async def on_import(record):
        # Capture what the callback received so assertions can inspect it.
        callback_record["mbid"] = record.musicbrainz_id
        callback_record["artist_mbid"] = record.artist_mbid
        callback_called.set()

    history = MagicMock()
    # First call: cancel check (returns original pending record)
    # Second call: enriched record for callback (after field updates)
    original_record = RequestHistoryRecord(
        musicbrainz_id="test-mbid",
        artist_name="Test Artist",
        album_title="Test Album",
        requested_at="2026-01-01T00:00:00Z",
        status="pending",
    )
    enriched_record = RequestHistoryRecord(
        musicbrainz_id="test-mbid",
        artist_name="Test Artist",
        album_title="Test Album",
        requested_at="2026-01-01T00:00:00Z",
        status="imported",
        artist_mbid="artist-mbid",
    )
    # Order matters: side_effect yields the pending record first, then the
    # enriched one — mirroring the two async_get_record calls in the worker.
    history.async_get_record = AsyncMock(side_effect=[original_record, enriched_record])
    history.async_update_status = AsyncMock()
    history.async_update_lidarr_album_id = AsyncMock()
    history.async_update_cover_url = AsyncMock()
    history.async_update_artist_mbid = AsyncMock()

    q = RequestQueue(
        processor=AsyncMock(),
        request_history=history,
        on_import_callback=on_import,
    )
    # trackFileCount > 0 signals the album already has imported files.
    result = {
        "payload": {
            "id": 42,
            "statistics": {"trackFileCount": 5},
            "artist": {"foreignArtistId": "artist-mbid"},
        }
    }
    await q._update_history_on_result("test-mbid", result)

    assert callback_called.is_set(), "Import callback should have been called"
    assert callback_record["mbid"] == "test-mbid"
    assert callback_record["artist_mbid"] == "artist-mbid", "Callback should receive enriched record"
@pytest.mark.asyncio
async def test_queue_worker_no_callback_when_downloading():
    """When the queue worker detects no files, import callback should NOT fire."""
    from infrastructure.queue.request_queue import RequestQueue
    from infrastructure.persistence.request_history import RequestHistoryRecord

    callback_called = asyncio.Event()

    async def on_import(record):
        callback_called.set()

    history = MagicMock()
    original_record = RequestHistoryRecord(
        musicbrainz_id="test-mbid",
        artist_name="Test Artist",
        album_title="Test Album",
        requested_at="2026-01-01T00:00:00Z",
        status="pending",
    )
    history.async_get_record = AsyncMock(return_value=original_record)
    history.async_update_status = AsyncMock()
    history.async_update_lidarr_album_id = AsyncMock()
    history.async_update_cover_url = AsyncMock()
    history.async_update_artist_mbid = AsyncMock()

    q = RequestQueue(
        processor=AsyncMock(),
        request_history=history,
        on_import_callback=on_import,
    )
    # trackFileCount == 0: the album is still downloading, not yet imported.
    result = {
        "payload": {
            "id": 42,
            "statistics": {"trackFileCount": 0},
            "artist": {"foreignArtistId": "artist-mbid"},
        }
    }
    await q._update_history_on_result("test-mbid", result)

    assert not callback_called.is_set(), "Import callback should NOT fire when trackFileCount=0"

View file

@ -7,6 +7,7 @@
import { appendAudioDBSizeSuffix } from '$lib/utils/imageSuffix';
import { imageSettingsStore } from '$lib/stores/imageSettings';
import ArtistLinks from './ArtistLinks.svelte';
import ArtistMonitoringToggle from './ArtistMonitoringToggle.svelte';
import BackButton from './BackButton.svelte';
import HeroBackdrop from './HeroBackdrop.svelte';
import { getApiUrl } from '$lib/utils/api';
@ -165,6 +166,20 @@
{#if validLinks.length > 0}
<ArtistLinks links={validLinks} />
{/if}
{#if artist.in_lidarr}
<div class="mt-3">
<ArtistMonitoringToggle
artistMbid={artist.musicbrainz_id}
monitored={artist.monitored ?? false}
autoDownload={artist.auto_download ?? false}
on:change={(e) => {
artist.monitored = e.detail.monitored;
artist.auto_download = e.detail.autoDownload;
}}
/>
</div>
{/if}
</div>
</div>
</div>

View file

@ -0,0 +1,73 @@
<script lang="ts">
  import { Eye, Download } from 'lucide-svelte';
  import { api } from '$lib/api/client';
  import { createEventDispatcher } from 'svelte';

  // MusicBrainz id of the artist whose Lidarr monitoring state is toggled.
  export let artistMbid: string;
  // Current "monitor artist" state shown by the first toggle.
  export let monitored: boolean = false;
  // Current "download new releases" state; forced off when monitoring is off.
  export let autoDownload: boolean = false;
  // Externally disable both toggles (e.g. while a parent view is busy).
  export let disabled: boolean = false;

  // Emitted after a successful server update, carrying the new state.
  const dispatch = createEventDispatcher<{
    change: { monitored: boolean; autoDownload: boolean };
  }>();

  // Guards against overlapping PUT requests and disables inputs while saving.
  let saving = false;

  // PUT the desired state to the backend. Local state is only updated on
  // success, so a failed request leaves `monitored`/`autoDownload` unchanged.
  // NOTE(review): on failure the native checkbox may remain visually toggled
  // because `checked` is a one-way binding — confirm desired UX on error.
  async function updateMonitoring(newMonitored: boolean, newAutoDownload: boolean) {
    if (saving) return;
    saving = true;
    try {
      await api.global.put(`/api/v1/artists/${artistMbid}/monitoring`, {
        monitored: newMonitored,
        auto_download: newAutoDownload
      });
      monitored = newMonitored;
      autoDownload = newAutoDownload;
      dispatch('change', { monitored, autoDownload });
    } catch {
      // revert is implicit: we only update state on success
    } finally {
      saving = false;
    }
  }

  // Turning monitoring off also forces auto-download off.
  async function handleMonitorToggle() {
    const newMonitored = !monitored;
    const newAutoDownload = newMonitored ? autoDownload : false;
    await updateMonitoring(newMonitored, newAutoDownload);
  }

  async function handleAutoDownloadToggle() {
    await updateMonitoring(monitored, !autoDownload);
  }
</script>

<!-- "Monitor" toggle plus a "Download new releases" toggle that is dimmed
     and disabled until monitoring is enabled. -->
<div class="flex items-center gap-4 flex-wrap">
  <label class="label cursor-pointer gap-2" aria-label="Monitor this artist">
    <Eye class="h-4 w-4 text-base-content/70" />
    <span class="text-sm text-base-content/70">Monitor</span>
    <input
      type="checkbox"
      checked={monitored}
      on:change={handleMonitorToggle}
      disabled={disabled || saving}
      class="toggle toggle-sm toggle-accent"
    />
  </label>
  <label
    class="label cursor-pointer gap-2 transition-opacity"
    class:opacity-40={!monitored}
    aria-label="Download new releases"
  >
    <Download class="h-4 w-4 text-base-content/70" />
    <span class="text-sm text-base-content/70">Download new releases</span>
    <input
      type="checkbox"
      checked={autoDownload}
      on:change={handleAutoDownloadToggle}
      disabled={disabled || saving || !monitored}
      class="toggle toggle-sm toggle-accent"
    />
  </label>
</div>

View file

@ -147,6 +147,9 @@ export type ArtistInfo = {
aliases: string[];
external_links: ExternalLink[];
in_library: boolean;
in_lidarr?: boolean;
monitored?: boolean;
auto_download?: boolean;
albums: ReleaseGroup[];
singles: ReleaseGroup[];
eps: ReleaseGroup[];

View file

@ -12,6 +12,9 @@ export type AlbumRequestContext = {
artist?: string;
album?: string;
year?: number | null;
artistMbid?: string;
monitorArtist?: boolean;
autoDownloadArtist?: boolean;
};
export async function requestAlbum(
@ -23,7 +26,10 @@ export async function requestAlbum(
musicbrainz_id: musicbrainzId,
artist: context?.artist ?? undefined,
album: context?.album ?? undefined,
year: context?.year ?? undefined
year: context?.year ?? undefined,
artist_mbid: context?.artistMbid ?? undefined,
monitor_artist: context?.monitorArtist ?? false,
auto_download_artist: context?.autoDownloadArtist ?? false
});
libraryStore.addRequested(musicbrainzId);

View file

@ -60,9 +60,13 @@
inLibrary={state.inLibrary}
isRequested={state.isRequested}
requesting={state.requesting}
refreshing={state.refreshing}
pollingForSources={state.pollingForSources}
lidarrConfigured={$integrationStore.lidarr}
artistMonitored={state.artistMonitored}
onrequest={state.handleRequest}
ondelete={state.handleDeleteClick}
onrefresh={state.refreshAll}
onartistclick={state.goToArtist}
/>

View file

@ -5,7 +5,7 @@
import AlbumImage from '$lib/components/AlbumImage.svelte';
import HeroBackdrop from '$lib/components/HeroBackdrop.svelte';
import { formatTotalDuration } from '$lib/utils/formatting';
import { Check, Trash2, Clock, Plus } from 'lucide-svelte';
import { Check, Trash2, Clock, Plus, RefreshCw } from 'lucide-svelte';
interface Props {
album: AlbumBasicInfo;
@ -14,9 +14,14 @@
inLibrary: boolean;
isRequested: boolean;
requesting: boolean;
refreshing: boolean;
pollingForSources: boolean;
lidarrConfigured: boolean;
onrequest: () => void;
artistMonitored?: boolean;
onrequest: (opts?: { monitorArtist?: boolean; autoDownloadArtist?: boolean }) => void;
ondelete: () => void;
onrefresh: () => void;
onartistclick: () => void;
}
@ -27,12 +32,26 @@
inLibrary,
isRequested,
requesting,
refreshing,
pollingForSources,
lidarrConfigured,
artistMonitored = false,
onrequest,
ondelete,
onrefresh,
onartistclick
}: Props = $props();
let monitorArtist = $state(false);
let autoDownloadArtist = $state(false);
// Reset checkboxes when navigating between albums
$effect(() => {
void album.musicbrainz_id;
monitorArtist = false;
autoDownloadArtist = false;
});
let backdropUrl = $derived(
album.cover_url ||
album.album_thumb_url ||
@ -53,7 +72,17 @@
/>
<div class="relative z-10 flex flex-col lg:flex-row gap-6 lg:gap-8 p-4 sm:p-6 lg:p-8">
<div class="w-full lg:w-64 xl:w-80 shrink-0">
{#if (inLibrary || isRequested) && lidarrConfigured}
<button
class="absolute top-3 right-3 btn btn-sm btn-ghost btn-circle z-20"
onclick={onrefresh}
disabled={refreshing}
title="Refresh album status"
>
<RefreshCw class="h-5 w-5 {refreshing ? 'animate-spin' : ''}" />
</button>
{/if}
<div class="w-full lg:w-64 xl:w-80 flex-shrink-0">
<AlbumImage
mbid={album.musicbrainz_id}
customUrl={album.cover_url}
@ -135,6 +164,12 @@
<Check class="h-4 w-4" />
In Library
</div>
{#if pollingForSources}
<div class="badge badge-lg badge-ghost gap-2 animate-pulse">
<span class="loading loading-spinner loading-xs"></span>
Checking for sources…
</div>
{/if}
<button class="btn btn-sm btn-error btn-outline gap-1" onclick={ondelete}>
<Trash2 class="h-4 w-4" />
Remove
@ -149,20 +184,42 @@
Remove
</button>
{:else}
<button
class="btn btn-lg gap-2"
style="background-color: {colors.accent}; color: {colors.secondary}; border: none;"
onclick={onrequest}
disabled={requesting}
>
{#if requesting}
<span class="loading loading-spinner loading-sm"></span>
Requesting...
{:else}
<Plus class="h-5 w-5" />
Add to Library
<div class="flex flex-col gap-3">
<button
class="btn btn-lg gap-2"
style="background-color: {colors.accent}; color: {colors.secondary}; border: none;"
onclick={() => onrequest({ monitorArtist, autoDownloadArtist })}
disabled={requesting}
>
{#if requesting}
<span class="loading loading-spinner loading-sm"></span>
Requesting...
{:else}
<Plus class="h-5 w-5" />
Add to Library
{/if}
</button>
{#if !artistMonitored}
<label class="label cursor-pointer gap-2 justify-start">
<input
type="checkbox"
bind:checked={monitorArtist}
class="checkbox checkbox-sm checkbox-accent"
/>
<span class="text-sm text-base-content/70">Monitor this artist</span>
</label>
{#if monitorArtist}
<label class="label cursor-pointer gap-2 justify-start pl-6">
<input
type="checkbox"
bind:checked={autoDownloadArtist}
class="checkbox checkbox-sm checkbox-accent"
/>
<span class="text-sm text-base-content/70">Download new releases</span>
</label>
{/if}
{/if}
</button>
</div>
{/if}
</div>
{/if}

View file

@ -20,6 +20,7 @@ export interface EventHandlerDeps {
setRemovedArtistName: (v: string) => void;
setToast: (msg: string, type: 'success' | 'error' | 'info' | 'warning') => void;
setShowToast: (v: boolean) => void;
onRequestSuccess?: () => void;
}
export function createEventHandlers(deps: EventHandlerDeps) {
@ -44,7 +45,10 @@ export function createEventHandlers(deps: EventHandlerDeps) {
deps.setQuota(q);
}
async function handleRequest(): Promise<void> {
async function handleRequest(opts?: {
monitorArtist?: boolean;
autoDownloadArtist?: boolean;
}): Promise<void> {
const album = deps.getAlbum();
if (!album || deps.getRequesting()) return;
deps.setRequesting(true);
@ -52,7 +56,10 @@ export function createEventHandlers(deps: EventHandlerDeps) {
const result = await requestAlbum(album.musicbrainz_id, {
artist: album.artist_name ?? undefined,
album: album.title,
year: album.year ?? undefined
year: album.year ?? undefined,
artistMbid: album.artist_id,
monitorArtist: opts?.monitorArtist,
autoDownloadArtist: opts?.autoDownloadArtist
});
const current = deps.getAlbum();
if (result.success && current) {
@ -61,6 +68,7 @@ export function createEventHandlers(deps: EventHandlerDeps) {
deps.albumBasicCacheSet(current, deps.getAlbumId());
deps.setToast('Added to Library', 'success');
deps.setShowToast(true);
deps.onRequestSuccess?.();
}
} finally {
deps.setRequesting(false);

View file

@ -106,3 +106,12 @@ export async function fetchLastFm(
signal
});
}
/**
 * Trigger a server-side refresh of an album and return the fresh basic info.
 * Any failure (network, abort, non-2xx) resolves to null rather than throwing.
 */
export async function refreshAlbum(
  albumId: string,
  signal?: AbortSignal
): Promise<AlbumBasicInfo | null> {
  try {
    return await api.post<AlbumBasicInfo>(`/api/v1/albums/${albumId}/refresh`, undefined, {
      signal
    });
  } catch {
    return null;
  }
}

View file

@ -21,6 +21,7 @@ import { libraryStore } from '$lib/stores/library';
import { integrationStore } from '$lib/stores/integration';
import { isAbortError } from '$lib/utils/errorHandling';
import { extractServiceStatus } from '$lib/utils/serviceStatus';
import { api } from '$lib/api/client';
import {
albumBasicCache,
albumDiscoveryCache,
@ -45,7 +46,8 @@ import {
fetchJellyfinMatch,
fetchLocalMatch,
fetchNavidromeMatch,
fetchLastFm
fetchLastFm,
refreshAlbum
} from './albumFetchers';
import { buildRenderedTrackSections, buildSortedTrackMap } from './albumTrackResolvers';
import type { RenderedTrackSection } from './albumTrackResolvers';
@ -95,6 +97,11 @@ export function createAlbumPageState(albumIdGetter: () => string) {
let renderedTrackSections = $state<RenderedTrackSection[]>([]);
let playlistModalRef = $state<{ open: (tracks: QueueItem[]) => void } | null>(null);
let abortController: AbortController | null = null;
let refreshing = $state(false);
let pollingForSources = $state(false);
let pollTimer: ReturnType<typeof setInterval> | null = null;
let artistInLidarr = $state(false);
let artistMonitored = $state(false);
// eslint-disable-next-line svelte/prefer-svelte-reactivity -- derived Map is recreated each time, reactive by nature
const trackLinkMap = $derived(new Map(trackLinks.map((tl) => [getDiscTrackKey(tl), tl])));
@ -116,6 +123,7 @@ export function createAlbumPageState(albumIdGetter: () => string) {
abortController.abort();
abortController = null;
}
stopPolling();
album = null;
tracksInfo = null;
renderedTrackSections = [];
@ -137,6 +145,7 @@ export function createAlbumPageState(albumIdGetter: () => string) {
loadingNavidrome = false;
lastfmEnrichment = null;
loadingLastfm = true;
refreshing = false;
}
function hydrateFromCache(albumId: string) {
@ -316,6 +325,23 @@ export function createAlbumPageState(albumIdGetter: () => string) {
}
}
// Fetch the artist's Lidarr monitoring status and mirror it into local page
// state (artistInLidarr / artistMonitored). No-ops when Lidarr is not
// configured; failures are logged at debug level and otherwise ignored.
async function fetchArtistMonitoringState(artistId: string, signal: AbortSignal) {
  try {
    const integrations = get(integrationStore);
    if (!integrations.lidarr) return;
    const info = await api.global.get<{
      in_lidarr?: boolean;
      monitored?: boolean;
      auto_download?: boolean;
    }>(`/api/v1/artists/${artistId}/monitoring`, { signal });
    // Drop responses that arrive after navigating away from this album.
    if (signal.aborted) return;
    artistInLidarr = info.in_lidarr ?? false;
    artistMonitored = info.monitored ?? false;
  } catch (e) {
    console.debug('Artist monitoring fetch failed:', e);
  }
}
async function loadAlbum(albumId: string) {
const { refreshBasic, refreshTracks, refreshDiscovery, refreshLastfm, refreshSourceMatch } =
hydrateFromCache(albumId);
@ -323,6 +349,9 @@ export function createAlbumPageState(albumIdGetter: () => string) {
abortController = new AbortController();
const signal = abortController.signal;
artistInLidarr = false;
artistMonitored = false;
// Fire source matches that only need albumId immediately (before basic loads)
if (refreshSourceMatch) {
void (async () => {
@ -364,11 +393,12 @@ export function createAlbumPageState(albumIdGetter: () => string) {
void doFetchBasic(albumId, signal);
}
if (signal.aborted || !album) return;
if (album.artist_id) void fetchArtistMonitoringState(album.artist_id, signal);
if (refreshTracks && !refreshBasic) void doFetchTracks(albumId, signal);
if (refreshDiscovery) void doFetchDiscovery(albumId, signal);
if (!refreshBasic) void doFetchYouTube(albumId, signal);
if (refreshLastfm) void doFetchLastFm(albumId, signal);
// Navidrome match needs album title/artist fire after basic loads
// Navidrome match needs album title/artist - fire after basic loads
if (refreshSourceMatch) {
void (async () => {
try {
@ -397,6 +427,71 @@ export function createAlbumPageState(albumIdGetter: () => string) {
}
}
// Cancel any active source-availability polling and clear its UI indicator.
function stopPolling() {
  if (pollTimer !== null) {
    clearInterval(pollTimer);
    pollTimer = null;
  }
  pollingForSources = false;
}
// True when at least one playback source (Jellyfin, local, Navidrome) matched.
function hasAnySourceFound(): boolean {
  const matches = [jellyfinMatch, localMatch, navidromeMatch];
  return matches.some((m) => Boolean(m?.found));
}
// Hard refresh: drop client caches, hit the server-side refresh endpoint for
// fresh basic info, then run the normal load pipeline for everything else.
async function forceLoadAlbum(albumId: string): Promise<void> {
  // Invalidate cached data so loadAlbum re-fetches instead of hydrating.
  albumBasicCache.remove(albumId);
  albumTracksCache.remove(albumId);
  albumSourceMatchCache.remove(albumId);

  // Abort any in-flight requests before starting the forced reload.
  if (abortController) abortController.abort();
  abortController = new AbortController();
  const signal = abortController.signal;

  try {
    // refreshAlbum resolves to null on failure rather than throwing.
    const freshBasic = await refreshAlbum(albumId, signal);
    if (freshBasic) {
      album = freshBasic;
      extractServiceStatus(album);
      albumBasicCache.set(album, albumId);
    }
  } catch {
    /* refresh endpoint failure is non-fatal, loadAlbum will re-fetch */
  }

  // If a newer navigation aborted us meanwhile, skip the full reload.
  if (signal.aborted) return;
  await loadAlbum(albumId);
}
// Manual refresh entry point for the UI button; ignores clicks while a
// refresh is already running or when no album is active.
async function refreshAll(): Promise<void> {
  const currentId = albumIdGetter();
  if (!currentId || refreshing) {
    return;
  }
  refreshing = true;
  try {
    await forceLoadAlbum(currentId);
  } finally {
    refreshing = false;
  }
}
// Poll for newly-available playback sources after an album lands in the
// library: force-reload on an interval for up to the max duration, stopping
// early once any source match is found.
function startSourcePolling(): void {
  stopPolling();
  const albumId = albumIdGetter();
  if (!albumId) return;
  pollingForSources = true;
  const startTime = Date.now();
  const POLL_INTERVAL = 30_000; // 30s between refresh attempts
  const MAX_POLL_DURATION = 5 * 60_000; // give up after 5 minutes
  pollTimer = setInterval(() => {
    // The found/timeout check runs before each tick's reload, so the first
    // poll fires POLL_INTERVAL after polling starts.
    if (Date.now() - startTime >= MAX_POLL_DURATION || hasAnySourceFound()) {
      stopPolling();
      return;
    }
    void forceLoadAlbum(albumId);
  }, POLL_INTERVAL);
}
$effect(() => {
const albumId = albumIdGetter();
if (!browser || !albumId) return;
@ -405,6 +500,7 @@ export function createAlbumPageState(albumIdGetter: () => string) {
void loadAlbum(albumId);
});
return () => {
stopPolling();
if (abortController) {
abortController.abort();
abortController = null;
@ -412,6 +508,12 @@ export function createAlbumPageState(albumIdGetter: () => string) {
};
});
$effect(() => {
if (inLibrary && !hasAnySourceFound()) {
untrack(() => startSourcePolling());
}
});
const eventHandlers = createEventHandlers({
getAlbum: () => album,
setAlbum: (a) => (album = a),
@ -430,7 +532,12 @@ export function createAlbumPageState(albumIdGetter: () => string) {
toastMessage = msg;
toastType = type;
},
setShowToast: (v) => (showToast = v)
setShowToast: (v) => (showToast = v),
onRequestSuccess: () => {
albumSourceMatchCache.remove(albumIdGetter());
const aid = album?.artist_id;
if (aid && abortController) void fetchArtistMonitoringState(aid, abortController.signal);
}
});
function retryTracks(): void {
@ -630,6 +737,18 @@ export function createAlbumPageState(albumIdGetter: () => string) {
get isRequested() {
return isRequested;
},
get artistInLidarr() {
return artistInLidarr;
},
get artistMonitored() {
return artistMonitored;
},
get refreshing() {
return refreshing;
},
get pollingForSources() {
return pollingForSources;
},
get playlistModalRef() {
return playlistModalRef;
},
@ -641,6 +760,7 @@ export function createAlbumPageState(albumIdGetter: () => string) {
navidromeCallbacks,
...eventHandlers,
retryTracks,
refreshAll,
playSourceTrack,
getTrackContextMenuItems
};