Initial public release
45
.dockerignore
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
# Source control & CI
|
||||
.git
|
||||
.github
|
||||
.gitignore
|
||||
.dockerignore
|
||||
|
||||
# Documentation & metadata
|
||||
*.md
|
||||
LICENSE
|
||||
pyproject.toml
|
||||
todos.md
|
||||
AGENTS.md
|
||||
Docs/
|
||||
Images/
|
||||
|
||||
# Caches & runtime data
|
||||
/cache/
|
||||
config/
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.pyo
|
||||
.mypy_cache/
|
||||
.ruff_cache/
|
||||
backend/.ruff*
|
||||
|
||||
# Dev-only files
|
||||
backend/requirements-dev.txt
|
||||
|
||||
# Frontend build artifacts & dev files
|
||||
frontend/node_modules/
|
||||
frontend/build/
|
||||
frontend/.svelte-kit/
|
||||
frontend/.vite/
|
||||
|
||||
# Docker (prevent recursive context)
|
||||
docker-compose*.yml
|
||||
Dockerfile
|
||||
manage.sh
|
||||
|
||||
# IDE & OS
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
83
.github/ISSUE_TEMPLATE/bug.yml
vendored
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
name: "\U0001F41B Bug Report"
|
||||
description: Report a problem with MusicSeerr
|
||||
labels: ["bug"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Before opening a bug report, please search [existing issues](https://github.com/habirabbu/musicseerr/issues) to check if it's already been reported.
|
||||
|
||||
For general support questions, use [Discord](https://discord.gg/f98bFfsPuB) instead.
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: MusicSeerr Version
|
||||
description: Found in Settings or the bottom of the sidebar.
|
||||
placeholder: v1.0.0
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: What happened?
|
||||
description: A clear description of the bug.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: repro-steps
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
description: How can we reproduce this?
|
||||
placeholder: |
|
||||
1. Go to ...
|
||||
2. Click on ...
|
||||
3. See error ...
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected behaviour
|
||||
description: What did you expect to happen instead?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots
|
||||
description: If applicable, add screenshots to help explain the problem.
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Logs
|
||||
description: Relevant log output from `docker compose logs musicseerr`. This will be formatted as code automatically.
|
||||
render: shell
|
||||
- type: dropdown
|
||||
id: deployment
|
||||
attributes:
|
||||
label: Deployment method
|
||||
options:
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: browser
|
||||
attributes:
|
||||
label: Browser
|
||||
description: e.g. Chrome 120, Firefox 121, Safari 17
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: additional-context
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: Any other details that might help (integrations in use, host OS, Docker version, etc).
|
||||
- type: checkboxes
|
||||
id: search-existing
|
||||
attributes:
|
||||
label: Duplicate check
|
||||
options:
|
||||
- label: I have searched existing issues and this hasn't been reported before.
|
||||
required: true
|
||||
8
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: Discord
|
||||
url: https://discord.gg/f98bFfsPuB
|
||||
about: For support questions and general chat.
|
||||
- name: Documentation
|
||||
url: https://musicseerr.com/
|
||||
about: Check the docs before opening an issue.
|
||||
36
.github/ISSUE_TEMPLATE/feature.yml
vendored
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
name: "\u2728 Feature Request"
|
||||
description: Suggest an idea for MusicSeerr
|
||||
labels: ["enhancement"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Before opening a feature request, please search [existing issues](https://github.com/habirabbu/musicseerr/issues) to check if it's already been suggested.
|
||||
|
||||
For general discussion, use [Discord](https://discord.gg/f98bFfsPuB).
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: What would you like?
|
||||
description: A clear description of what you want MusicSeerr to do. If this is related to a problem, describe the problem too.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: desired-behavior
|
||||
attributes:
|
||||
label: How should it work?
|
||||
description: Describe the behaviour you'd expect from this feature.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: additional-context
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: Screenshots, mockups, links to similar features in other apps, or anything else that helps explain what you're after.
|
||||
- type: checkboxes
|
||||
id: search-existing
|
||||
attributes:
|
||||
label: Duplicate check
|
||||
options:
|
||||
- label: I have searched existing issues and this hasn't been requested before.
|
||||
required: true
|
||||
63
.github/workflows/release.yml
vendored
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
name: Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to ghcr.io
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract version from tag
|
||||
id: version
|
||||
run: echo "tag=${GITHUB_REF#refs/tags/}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Get build date
|
||||
id: date
|
||||
run: echo "timestamp=$(git log -1 --pretty=%cI)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Build and push (versioned)
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ghcr.io/habirabbu/musicseerr:${{ steps.version.outputs.tag }}
|
||||
build-args: |
|
||||
COMMIT_TAG=${{ steps.version.outputs.tag }}
|
||||
BUILD_DATE=${{ steps.date.outputs.timestamp }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
provenance: false
|
||||
|
||||
- name: Tag latest (stable releases only)
|
||||
if: ${{ !contains(steps.version.outputs.tag, '-') }}
|
||||
run: |
|
||||
docker buildx imagetools create \
|
||||
-t ghcr.io/habirabbu/musicseerr:latest \
|
||||
ghcr.io/habirabbu/musicseerr:${{ steps.version.outputs.tag }}
|
||||
55
.gitignore
vendored
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
.github/instructions/
|
||||
.Trash-1000
|
||||
|
||||
# Runtime files
|
||||
/cache/
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
*.log
|
||||
*.pid
|
||||
*.sock
|
||||
*.tmp
|
||||
.env
|
||||
.env.*
|
||||
|
||||
# Virtual environments and tool caches
|
||||
.venv/
|
||||
venv/
|
||||
env/
|
||||
.pytest_cache
|
||||
.mypy_cache/
|
||||
.ruff_cache/
|
||||
backend/.venv/
|
||||
backend/.ruff*
|
||||
backend/.virtualenv.pyz
|
||||
|
||||
# Frontend build output
|
||||
frontend/node_modules/
|
||||
frontend/.svelte-kit/
|
||||
frontend/build/
|
||||
frontend/dist/
|
||||
frontend/.vite/
|
||||
.vite/
|
||||
|
||||
# Local config
|
||||
docker-compose.yml
|
||||
config/config.json
|
||||
!config/config.example.json
|
||||
|
||||
# Editor and OS files
|
||||
.DS_Store
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
Thumbs.db
|
||||
ehthumbs.db
|
||||
|
||||
# Private working files
|
||||
/manage.sh
|
||||
/scripts/
|
||||
/todos.md
|
||||
AGENTS.md
|
||||
AGENTS.md.bak
|
||||
Docs/
|
||||
78
CONTRIBUTING.md
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
# Contributing to MusicSeerr
|
||||
|
||||
Thanks for your interest. Bug reports, feature requests, and pull requests are all welcome.
|
||||
|
||||
## Reporting Bugs
|
||||
|
||||
Use the [bug report template](https://github.com/habirabbu/musicseerr/issues/new?template=bug.yml). Include your MusicSeerr version, steps to reproduce, and relevant logs from `docker compose logs musicseerr`. The more detail you give, the faster things get fixed.
|
||||
|
||||
## Requesting Features
|
||||
|
||||
Use the [feature request template](https://github.com/habirabbu/musicseerr/issues/new?template=feature.yml). Check existing issues first to avoid duplicates.
|
||||
|
||||
## Development Setup
|
||||
|
||||
The backend is Python 3.13 with FastAPI. The frontend is SvelteKit with Svelte 5, Tailwind CSS, and daisyUI.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.13+
|
||||
- Node.js 22+
|
||||
- Docker (for building the full image)
|
||||
|
||||
### Running Locally
|
||||
|
||||
Backend:
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
pip install -r requirements-dev.txt
|
||||
uvicorn main:app --reload --port 8688
|
||||
```
|
||||
|
||||
Frontend:
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
|
||||
### Running Tests
|
||||
|
||||
```bash
|
||||
make backend-test # backend suite
|
||||
make frontend-test # frontend suite
|
||||
make test # both
|
||||
```
|
||||
|
||||
Frontend browser tests use Playwright. Install the browser first:
|
||||
|
||||
```bash
|
||||
make frontend-browser-install
|
||||
```
|
||||
|
||||
## Pull Requests
|
||||
|
||||
1. Fork the repo and create a branch from `main`.
|
||||
2. Give your branch a descriptive name: `fix-scrobble-timing`, `feature-playlist-export`, etc.
|
||||
3. If you're fixing a bug, mention the issue number in the PR description.
|
||||
4. Make sure tests pass before submitting.
|
||||
5. Keep changes focused. One PR per fix or feature.
|
||||
|
||||
## Code Style
|
||||
|
||||
- Backend: strong typing, async/await, no blocking I/O in async contexts.
|
||||
- Frontend: strict TypeScript, no `any`. Named exports. Async/await only.
|
||||
- Use existing design tokens (`primary`, `secondary`, etc.) for colours, not hardcoded values.
|
||||
- Run `npm run lint` and `npm run check` in the frontend before submitting.
|
||||
|
||||
## AI-Assisted Contributions
|
||||
|
||||
If you used AI tools (Copilot, ChatGPT, Claude, etc.) to write code in your PR, please mention it. This isn't a problem and won't get your PR rejected, but it helps reviewers calibrate how much scrutiny to apply. A quick note like "Claude helped with the caching logic" is enough.
|
||||
|
||||
You're still responsible for understanding and testing the code you submit.
|
||||
|
||||
## Questions?
|
||||
|
||||
Open a thread in [Discord](https://discord.gg/f98bFfsPuB) or start a [GitHub Discussion](https://github.com/habirabbu/musicseerr/discussions).
|
||||
68
Dockerfile
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
##
|
||||
# Stage 1 — Build frontend
|
||||
##
|
||||
FROM node:22.16-alpine AS frontend-build
|
||||
|
||||
WORKDIR /app/frontend
|
||||
|
||||
COPY frontend/package*.json ./
|
||||
RUN npm ci --ignore-scripts
|
||||
|
||||
COPY frontend/ .
|
||||
RUN npm run build
|
||||
|
||||
##
|
||||
# Stage 2 — Install Python dependencies
|
||||
##
|
||||
FROM python:3.13.5-slim AS python-deps
|
||||
|
||||
COPY backend/requirements.txt /tmp/requirements.txt
|
||||
RUN pip install --no-cache-dir --prefix=/install -r /tmp/requirements.txt
|
||||
|
||||
##
|
||||
# Stage 3 — Final runtime image
|
||||
##
|
||||
FROM python:3.13.5-slim
|
||||
|
||||
ARG COMMIT_TAG
|
||||
ARG BUILD_DATE
|
||||
|
||||
LABEL org.opencontainers.image.title="MusicSeerr" \
|
||||
org.opencontainers.image.description="Music request and discovery app for Lidarr" \
|
||||
org.opencontainers.image.url="https://github.com/habirabbu/musicseerr" \
|
||||
org.opencontainers.image.source="https://github.com/habirabbu/musicseerr" \
|
||||
org.opencontainers.image.version="${COMMIT_TAG}" \
|
||||
org.opencontainers.image.created="${BUILD_DATE}" \
|
||||
org.opencontainers.image.licenses="AGPL-3.0"
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PORT=8688 \
|
||||
COMMIT_TAG=${COMMIT_TAG}
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends curl tini gosu \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=python-deps /install /usr/local
|
||||
|
||||
RUN groupadd -r -g 911 musicseerr \
|
||||
&& useradd -r -u 911 -g musicseerr -d /app -s /sbin/nologin musicseerr
|
||||
|
||||
COPY backend/ .
|
||||
COPY --from=frontend-build /app/frontend/build ./static
|
||||
COPY entrypoint.sh /entrypoint.sh
|
||||
|
||||
RUN mkdir -p /app/cache /app/config \
|
||||
&& chown -R musicseerr:musicseerr /app \
|
||||
&& chmod +x /entrypoint.sh
|
||||
|
||||
EXPOSE ${PORT}
|
||||
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=15s --retries=3 \
|
||||
CMD curl -f http://localhost:${PORT}/health || exit 1
|
||||
|
||||
ENTRYPOINT ["tini", "--", "/entrypoint.sh"]
|
||||
CMD ["sh", "-c", "exec uvicorn main:app --host 0.0.0.0 --port ${PORT} --loop uvloop --http httptools --workers 1"]
|
||||
BIN
Images/AlbumPage.png
Normal file
|
After Width: | Height: | Size: 628 KiB |
BIN
Images/ArtistPage.png
Normal file
|
After Width: | Height: | Size: 850 KiB |
BIN
Images/DiscoverPage.png
Normal file
|
After Width: | Height: | Size: 1.7 MiB |
BIN
Images/DiscoverQueue.png
Normal file
|
After Width: | Height: | Size: 1.5 MiB |
BIN
Images/HomePage.png
Normal file
|
After Width: | Height: | Size: 2.2 MiB |
BIN
Images/LibraryAlbumViewer.png
Normal file
|
After Width: | Height: | Size: 1.6 MiB |
BIN
Images/LibraryArtistViewer.png
Normal file
|
After Width: | Height: | Size: 1.3 MiB |
BIN
Images/LibraryPage.png
Normal file
|
After Width: | Height: | Size: 1.7 MiB |
BIN
Images/LocalFilesPage.png
Normal file
|
After Width: | Height: | Size: 1.8 MiB |
BIN
Images/Logo-OG.png
Normal file
|
After Width: | Height: | Size: 65 KiB |
BIN
Images/NavidromePage.png
Normal file
|
After Width: | Height: | Size: 1.1 MiB |
BIN
Images/PlaylistPage.png
Normal file
|
After Width: | Height: | Size: 532 KiB |
BIN
Images/ProfilePage.png
Normal file
|
After Width: | Height: | Size: 479 KiB |
BIN
Images/SearchPage.png
Normal file
|
After Width: | Height: | Size: 1.3 MiB |
BIN
Images/YoutubePage.png
Normal file
|
After Width: | Height: | Size: 270 KiB |
BIN
Images/logo_icon.png
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
Images/logo_wide.png
Normal file
|
After Width: | Height: | Size: 126 KiB |
670
LICENSE
Normal file
|
|
@ -0,0 +1,670 @@
|
|||
Copyright (c) 2025 Harvey Bragg
|
||||
|
||||
This software is licensed under the GNU Affero General Public License v3.0 (AGPLv3).
|
||||
You may obtain a copy of the License at https://www.gnu.org/licenses/agpl-3.0.txt
|
||||
|
||||
Commercial licensing options are available from the copyright holder.
|
||||
|
||||
───────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
Version 3, 19 November 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing under
|
||||
this license.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
MusicSeerr - self-hosted music request and discovery platform
|
||||
Copyright (C) 2025 Harvey Bragg
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Affero General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If your software can interact with users remotely through a computer
|
||||
network, you should also make sure that it provides a way for users to
|
||||
get its source. For example, if your program is a web application, its
|
||||
interface could display a "Source" link that leads users to an archive
|
||||
of the code. There are many ways you could offer source, and different
|
||||
solutions will be better for different programs; see section 13 for the
|
||||
specific requirements.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
139
Makefile
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
SHELL := /bin/bash
|
||||
|
||||
.DEFAULT_GOAL := help
|
||||
|
||||
ROOT_DIR := $(abspath $(dir $(lastword $(MAKEFILE_LIST))))
|
||||
BACKEND_DIR := $(ROOT_DIR)/backend
|
||||
FRONTEND_DIR := $(ROOT_DIR)/frontend
|
||||
BACKEND_VENV_DIR := $(BACKEND_DIR)/.venv
|
||||
BACKEND_VENV_PYTHON := $(BACKEND_VENV_DIR)/bin/python
|
||||
BACKEND_VENV_STAMP := $(BACKEND_VENV_DIR)/.deps-stamp
|
||||
BACKEND_VIRTUALENV_ZIPAPP := $(BACKEND_DIR)/.virtualenv.pyz
|
||||
PYTHON ?= python3
|
||||
NPM ?= npm
|
||||
|
||||
.PHONY: help backend-venv backend-lint backend-test backend-test-audiodb backend-test-audiodb-prewarm backend-test-audiodb-settings backend-test-coverart-audiodb backend-test-audiodb-phase8 backend-test-audiodb-phase9 backend-test-exception-handling backend-test-playlist backend-test-multidisc backend-test-performance backend-test-security backend-test-config-validation backend-test-home backend-test-home-genre backend-test-infra-hardening backend-test-library-pagination backend-test-search-top-result test-audiodb-all frontend-install frontend-build frontend-check frontend-lint frontend-test frontend-test-queuehelpers frontend-test-album-page frontend-test-playlist-detail frontend-test-audiodb-images frontend-browser-install project-map rebuild test check lint ci
|
||||
|
||||
help: ## Show available targets
|
||||
@grep -E '^[a-zA-Z0-9_.-]+:.*## ' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*## "}; {printf "%-26s %s\n", $$1, $$2}'
|
||||
|
||||
$(BACKEND_VENV_DIR):
|
||||
cd "$(BACKEND_DIR)" && test -f .virtualenv.pyz || curl -fsSLo .virtualenv.pyz https://bootstrap.pypa.io/virtualenv.pyz
|
||||
cd "$(BACKEND_DIR)" && $(PYTHON) .virtualenv.pyz .venv
|
||||
|
||||
$(BACKEND_VENV_STAMP): $(BACKEND_DIR)/requirements.txt $(BACKEND_DIR)/requirements-dev.txt | $(BACKEND_VENV_DIR)
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pip install --upgrade pip setuptools wheel
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pip install -r requirements-dev.txt pytest pytest-asyncio
|
||||
touch "$(BACKEND_VENV_STAMP)"
|
||||
|
||||
backend-venv: $(BACKEND_VENV_STAMP) ## Create or refresh the backend virtualenv
|
||||
|
||||
backend-lint: $(BACKEND_VENV_STAMP) ## Run backend Ruff checks
|
||||
cd "$(ROOT_DIR)" && $(BACKEND_VENV_DIR)/bin/ruff check backend
|
||||
|
||||
backend-test: $(BACKEND_VENV_STAMP) ## Run all backend tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest
|
||||
|
||||
backend-test-audiodb: $(BACKEND_VENV_STAMP) ## Run focused AudioDB backend tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/repositories/test_audiodb_repository.py tests/infrastructure/test_disk_metadata_cache.py tests/services/test_audiodb_image_service.py tests/services/test_artist_audiodb_population.py tests/services/test_album_audiodb_population.py tests/services/test_audiodb_detail_flows.py tests/services/test_search_audiodb_overlay.py
|
||||
|
||||
backend-test-audiodb-prewarm: $(BACKEND_VENV_STAMP) ## Run AudioDB prewarm tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/services/test_audiodb_prewarm.py tests/services/test_audiodb_sweep.py tests/services/test_audiodb_browse_queue.py tests/services/test_audiodb_fallback_gating.py tests/services/test_preferences_generic_settings.py
|
||||
|
||||
backend-test-coverart-audiodb: $(BACKEND_VENV_STAMP) ## Run AudioDB coverart provider tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/repositories/test_coverart_album_fetcher.py tests/repositories/test_coverart_audiodb_provider.py tests/repositories/test_coverart_repository_memory_cache.py tests/services/test_audiodb_byte_caching_integration.py
|
||||
|
||||
backend-test-audiodb-settings: $(BACKEND_VENV_STAMP) ## Run AudioDB settings tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/test_audiodb_settings.py tests/test_advanced_settings_roundtrip.py tests/routes/test_settings_audiodb_key.py
|
||||
|
||||
backend-test-audiodb-phase8: $(BACKEND_VENV_STAMP) ## Run AudioDB cross-cutting tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/repositories/test_audiodb_models.py tests/test_audiodb_schema_contracts.py tests/services/test_audiodb_byte_caching_integration.py tests/services/test_audiodb_url_only_integration.py tests/services/test_audiodb_fallback_integration.py tests/services/test_audiodb_negative_cache_expiry.py tests/test_audiodb_killswitch.py tests/test_advanced_settings_roundtrip.py
|
||||
|
||||
backend-test-audiodb-phase9: $(BACKEND_VENV_STAMP) ## Run AudioDB observability tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/test_phase9_observability.py
|
||||
|
||||
backend-test-exception-handling: $(BACKEND_VENV_STAMP) ## Run exception-handling regressions
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/routes/test_scrobble_routes.py tests/routes/test_scrobble_settings_routes.py tests/test_error_leakage.py tests/test_background_task_logging.py
|
||||
|
||||
backend-test-playlist: $(BACKEND_VENV_STAMP) ## Run playlist tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/services/test_playlist_service.py tests/services/test_playlist_source_resolution.py tests/repositories/test_playlist_repository.py tests/routes/test_playlist_routes.py
|
||||
|
||||
backend-test-multidisc: $(BACKEND_VENV_STAMP) ## Run multi-disc album tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/services/test_album_utils.py tests/services/test_album_service.py tests/infrastructure/test_cache_layer_followups.py
|
||||
|
||||
backend-test-performance: $(BACKEND_VENV_STAMP) ## Run performance regression tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/services/test_album_singleflight.py tests/services/test_artist_singleflight.py tests/services/test_genre_batch_parallel.py tests/services/test_cache_stats_nonblocking.py tests/services/test_settings_cache_invalidation.py tests/services/test_discover_enrich_singleflight.py
|
||||
|
||||
backend-test-security: $(BACKEND_VENV_STAMP) ## Run security regression tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/test_rate_limiter_middleware.py tests/test_url_validation.py tests/test_error_leakage.py
|
||||
|
||||
backend-test-config-validation: $(BACKEND_VENV_STAMP) ## Run config validation tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/test_config_validation.py
|
||||
|
||||
backend-test-home: $(BACKEND_VENV_STAMP) ## Run home page backend tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/services/test_home_service.py tests/routes/test_home_routes.py
|
||||
|
||||
backend-test-home-genre: $(BACKEND_VENV_STAMP) ## Run home genre decoupling tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/services/test_home_genre_decoupling.py
|
||||
|
||||
backend-test-infra-hardening: $(BACKEND_VENV_STAMP) ## Run infrastructure hardening tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/infrastructure/test_circuit_breaker_sync.py tests/infrastructure/test_disk_cache_periodic.py tests/infrastructure/test_retry_non_breaking.py
|
||||
|
||||
backend-test-discovery-precache: $(BACKEND_VENV_STAMP) ## Run artist discovery precache tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/services/test_discovery_precache_progress.py tests/infrastructure/test_retry_non_breaking.py -v
|
||||
|
||||
backend-test-library-pagination: $(BACKEND_VENV_STAMP) ## Run library pagination tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/infrastructure/test_library_pagination.py -v
|
||||
|
||||
backend-test-search-top-result: $(BACKEND_VENV_STAMP) ## Run search top result detection tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/services/test_search_top_result.py -v
|
||||
|
||||
backend-test-cache-cleanup: $(BACKEND_VENV_STAMP) ## Run cache cleanup tests
|
||||
cd "$(BACKEND_DIR)" && .venv/bin/python -m pytest tests/test_cache_cleanup.py -v
|
||||
|
||||
test-audiodb-all: backend-test-audiodb backend-test-audiodb-prewarm backend-test-audiodb-settings backend-test-coverart-audiodb backend-test-audiodb-phase8 backend-test-audiodb-phase9 frontend-test-audiodb-images ## Run every AudioDB test target
|
||||
|
||||
frontend-install: ## Install frontend npm dependencies
|
||||
cd "$(FRONTEND_DIR)" && $(NPM) install
|
||||
|
||||
frontend-build: ## Run frontend production build
|
||||
cd "$(FRONTEND_DIR)" && $(NPM) run build
|
||||
|
||||
frontend-check: ## Run frontend type checks
|
||||
cd "$(FRONTEND_DIR)" && $(NPM) run check
|
||||
|
||||
frontend-lint: ## Run frontend linting
|
||||
cd "$(FRONTEND_DIR)" && $(NPM) run lint
|
||||
|
||||
frontend-test: ## Run the frontend vitest suite
|
||||
cd "$(FRONTEND_DIR)" && $(NPM) run test
|
||||
|
||||
frontend-test-queuehelpers: ## Run queue helper regressions
|
||||
cd "$(FRONTEND_DIR)" && npx vitest run --project server src/lib/player/queueHelpers.spec.ts
|
||||
|
||||
frontend-test-album-page: ## Run the album page browser test
|
||||
cd "$(FRONTEND_DIR)" && npx vitest run --project client src/routes/album/[id]/page.svelte.spec.ts
|
||||
|
||||
frontend-test-playlist-detail: ## Run playlist page browser tests
|
||||
cd "$(FRONTEND_DIR)" && npx vitest run --project client src/routes/playlists/[id]/page.svelte.spec.ts
|
||||
|
||||
frontend-browser-install: ## Install Playwright Chromium for browser tests
|
||||
cd "$(FRONTEND_DIR)" && npx playwright install chromium
|
||||
|
||||
frontend-test-audiodb-images: ## Run AudioDB image tests
|
||||
cd "$(FRONTEND_DIR)" && npx vitest run --project server src/lib/utils/imageSuffix.spec.ts
|
||||
cd "$(FRONTEND_DIR)" && npx vitest run --project client src/lib/components/BaseImage.svelte.spec.ts
|
||||
|
||||
project-map: ## Refresh the project map block
|
||||
cd "$(ROOT_DIR)" && $(PYTHON) scripts/gen-project-map.py
|
||||
|
||||
rebuild: ## Rebuild the application
|
||||
cd "$(ROOT_DIR)" && ./manage.sh --rebuild
|
||||
|
||||
test: backend-test frontend-test ## Run backend and frontend tests
|
||||
|
||||
check: backend-test frontend-check ## Run backend tests and frontend type checks
|
||||
|
||||
lint: backend-lint frontend-lint ## Run linting targets
|
||||
|
||||
ci: backend-test backend-lint frontend-check frontend-lint frontend-test ## Run the local CI checks
|
||||
317
README.md
Normal file
|
|
@ -0,0 +1,317 @@
|
|||
<div align="center">
|
||||
|
||||
<img src="Images/logo_wide.png" alt="MusicSeerr" width="400" />
|
||||
|
||||
[](LICENSE)
|
||||
[](https://github.com/habirabbu/musicseerr/pkgs/container/musicseerr)
|
||||
[](https://discord.gg/f98bFfsPuB)
|
||||
[](https://musicseerr.com/)
|
||||
|
||||
[](https://ko-fi.com/M4M41URGJO)
|
||||
|
||||
</div>
|
||||
|
||||
---
|
||||
|
||||
MusicSeerr is a self-hosted music request and discovery app built around [Lidarr](https://lidarr.audio/). Search the full MusicBrainz catalogue, request albums, stream music from Jellyfin, Navidrome, or your local library, discover new albums based on your listening history, and scrobble everything to ListenBrainz and Last.fm. The whole thing runs as a single Docker container with a web UI for all configuration.
|
||||
|
||||
---
|
||||
|
||||
## Screenshots
|
||||
|
||||
<img src="Images/HomePage.png" alt="Home page with trending artists, popular albums, and personalized recommendations" width="100%" />
|
||||
<img src="Images/ArtistPage.png" alt="Artist detail page with biography, discography, and similar artists" width="100%" />
|
||||
<img src="Images/AlbumPage.png" alt="Album detail page with tracklist, playback controls, and request button" width="100%" />
|
||||
<img src="Images/DiscoverPage.png" alt="Discover page with personalized album recommendations" width="100%" />
|
||||
|
||||
<details>
|
||||
<summary>More screenshots</summary>
|
||||
|
||||
<img src="Images/SearchPage.png" alt="Search results for artists and albums" width="100%" />
|
||||
<img src="Images/LibraryPage.png" alt="Library overview with statistics and recent additions" width="100%" />
|
||||
<img src="Images/PlaylistPage.png" alt="Playlist with tracklist and playback controls" width="100%" />
|
||||
<img src="Images/DiscoverQueue.png" alt="Discover queue with album recommendations to request or skip" width="100%" />
|
||||
<img src="Images/LocalFilesPage.png" alt="Local files library with format and storage stats" width="100%" />
|
||||
<img src="Images/NavidromePage.png" alt="Navidrome library view" width="100%" />
|
||||
<img src="Images/YoutubePage.png" alt="YouTube linked albums for streaming" width="100%" />
|
||||
<img src="Images/ProfilePage.png" alt="User profile with connected services and library stats" width="100%" />
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
## Quick Start
|
||||
|
||||
You need Docker and a running [Lidarr](https://lidarr.audio/) instance with an API key.
|
||||
|
||||
### 1. Create a docker-compose.yml
|
||||
|
||||
```yaml
|
||||
services:
|
||||
musicseerr:
|
||||
image: ghcr.io/habirabbu/musicseerr:latest
|
||||
container_name: musicseerr
|
||||
environment:
|
||||
- PUID=1000 # Run `id` on your host to find your user/group ID
|
||||
- PGID=1000
|
||||
- PORT=8688
|
||||
- TZ=Etc/UTC # Your timezone, e.g. Europe/London, America/New_York
|
||||
ports:
|
||||
- "8688:8688"
|
||||
volumes:
|
||||
- ./config:/app/config # Persistent app configuration
|
||||
- ./cache:/app/cache # Cover art and metadata cache
|
||||
# Optional: mount your music library for local file playback.
|
||||
# The left side should match the root folder Lidarr uses.
|
||||
# The right side (/music) must match "Music Directory Path" in Settings > Local Files.
|
||||
# - /path/to/music:/music:ro
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8688/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
start_period: 15s
|
||||
retries: 3
|
||||
```
|
||||
|
||||
### 2. Start it
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
### 3. Configure
|
||||
|
||||
Open [http://localhost:8688](http://localhost:8688) and head to Settings. Add your Lidarr URL and API key, then connect whichever streaming and discovery services you use.
|
||||
|
||||
---
|
||||
|
||||
## Recommended Stack
|
||||
|
||||
MusicSeerr is designed to work with Lidarr. If you're putting together a music stack from scratch, this combination covers most needs:
|
||||
|
||||
| Service | Role |
|
||||
|-|-|
|
||||
| [Lidarr](https://lidarr.audio/) (nightly recommended) | Library management, download orchestration |
|
||||
| [slskd](https://github.com/slskd/slskd) | Soulseek download client |
|
||||
| [Tubifarry](https://github.com/Tubifarry/Tubifarry) | YouTube-based download client for Lidarr |
|
||||
|
||||
Lidarr is the only requirement. slskd and Tubifarry are optional but between them they cover most music sourcing needs. For playback, connect Jellyfin, Navidrome, or mount your music folder directly into the container.
|
||||
|
||||
---
|
||||
|
||||
## Features
|
||||
|
||||
### Search and Request
|
||||
|
||||
Search the full MusicBrainz catalogue for any artist or album. Request an album and Lidarr handles the download. A persistent queue tracks all requests, and you can browse pending and fulfilled requests on a dedicated page with retry and cancel support.
|
||||
|
||||
### Built-in Player
|
||||
|
||||
MusicSeerr has a full audio player that supports multiple playback sources per track:
|
||||
|
||||
- Jellyfin, with configurable codec (AAC, MP3, FLAC, Opus, and others) and bitrate. Playback events are reported back to Jellyfin automatically.
|
||||
- Navidrome, streaming via the Subsonic API.
|
||||
- Local files, served directly from a mounted music directory.
|
||||
- YouTube, for previewing albums you haven't downloaded yet. Links can be auto-generated or set manually.
|
||||
|
||||
The player supports queue management, shuffle, seek, volume control, and a 10-band equalizer with presets.
|
||||
|
||||
### Discovery
|
||||
|
||||
The home page shows trending artists, popular albums, recently added items, genre quick-links, weekly exploration playlists from ListenBrainz, and "Because You Listened To" carousels personalized to your history.
|
||||
|
||||
The discover page goes further with a recommendation queue drawn from similar artists, library gaps, fresh releases, global charts, and your listening patterns across ListenBrainz and Last.fm. Each album can be expanded to show the full tracklist and artwork before you decide to request or skip it.
|
||||
|
||||
You can also browse by genre, view trending and popular charts over different time ranges, and see your own top albums.
|
||||
|
||||
### Library
|
||||
|
||||
Browse your Lidarr-managed library by artist or album with search, filtering, sorting, and pagination. View recently added albums and library statistics. Remove albums directly from the UI.
|
||||
|
||||
Jellyfin, Navidrome, and local file sources each get their own library view with play, shuffle, and queue actions.
|
||||
|
||||
### Scrobbling
|
||||
|
||||
Every track you play can be scrobbled to ListenBrainz and Last.fm simultaneously. Both are toggled independently in settings. A "now playing" update goes out when a track starts, and a scrobble is submitted when it finishes.
|
||||
|
||||
### Playlists
|
||||
|
||||
Create playlists from any mix of Jellyfin, Navidrome, local, and YouTube tracks. Reorder by dragging, set custom cover art, and play everything through the same player.
|
||||
|
||||
### Profile
|
||||
|
||||
Set a display name and avatar, view connected services, and check your library statistics from a profile page.
|
||||
|
||||
---
|
||||
|
||||
## Integrations
|
||||
|
||||
| Service | What it does |
|
||||
|-|-|
|
||||
| [Lidarr](https://lidarr.audio/) | Download management and library syncing |
|
||||
| [MusicBrainz](https://musicbrainz.org/) | Artist and album metadata, release search |
|
||||
| [Cover Art Archive](https://coverartarchive.org/) | Album artwork |
|
||||
| [TheAudioDB](https://www.theaudiodb.com/) | Artist and album images (fanart, banners, logos, CD art) |
|
||||
| [Wikidata](https://www.wikidata.org/) | Artist descriptions and external links |
|
||||
| [Jellyfin](https://jellyfin.org/) | Audio streaming and library browsing |
|
||||
| [Navidrome](https://www.navidrome.org/) | Audio streaming via Subsonic API |
|
||||
| [ListenBrainz](https://listenbrainz.org/) | Listening history, discovery, scrobbling, weekly playlists |
|
||||
| [Last.fm](https://www.last.fm/) | Scrobbling and listen tracking |
|
||||
| YouTube | Album playback when no local copy exists |
|
||||
| Local files | Direct playback from a mounted music directory |
|
||||
|
||||
All integrations are configured through the web UI. No config files or environment variables needed beyond the basics listed below.
|
||||
|
||||
---
|
||||
|
||||
## Configuration
|
||||
|
||||
MusicSeerr stores its config in `config/config.json` inside the mapped config volume. Everything is managed through the UI.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Default | Description |
|
||||
|-|-|-|
|
||||
| `PUID` | `1000` | User ID for file ownership inside the container |
|
||||
| `PGID` | `1000` | Group ID for file ownership inside the container |
|
||||
| `PORT` | `8688` | Port the application listens on |
|
||||
| `TZ` | `Etc/UTC` | Container timezone |
|
||||
|
||||
Run `id` on your host to find your PUID and PGID values.
|
||||
|
||||
### In-App Settings
|
||||
|
||||
| Setting | Location |
|
||||
|-|-|
|
||||
| Lidarr URL, API key, profiles, root folder, sync frequency | Settings > Lidarr |
|
||||
| Jellyfin URL and API key | Settings > Jellyfin |
|
||||
| Navidrome URL and credentials | Settings > Navidrome |
|
||||
| Local files directory path | Settings > Local Files |
|
||||
| ListenBrainz username and token | Settings > ListenBrainz |
|
||||
| Last.fm API key, secret, and OAuth session | Settings > Last.fm |
|
||||
| YouTube API key | Settings > YouTube |
|
||||
| Scrobbling toggles per service | Settings > Scrobbling |
|
||||
| Home page layout preferences | Settings > Preferences |
|
||||
| AudioDB settings and cache TTLs | Settings > Advanced |
|
||||
|
||||
### Setting Up Last.fm
|
||||
|
||||
1. Register an app at [last.fm/api/account/create](https://www.last.fm/api/account/create) to get an API key and shared secret.
|
||||
2. Enter them in Settings > Last.fm.
|
||||
3. Click Authorise and follow the redirect. You'll be returned to MusicSeerr automatically.
|
||||
|
||||
### Setting Up ListenBrainz
|
||||
|
||||
1. Copy your user token from [listenbrainz.org/profile](https://listenbrainz.org/profile/).
|
||||
2. Enter your username and token in Settings > ListenBrainz.
|
||||
|
||||
### TheAudioDB
|
||||
|
||||
AudioDB provides richer artist and album artwork from a fast CDN. It's enabled by default with the free public API key, which is rate-limited to 30 requests per minute. Premium keys from [theaudiodb.com](https://www.theaudiodb.com/) unlock higher limits.
|
||||
|
||||
Under Settings > Advanced, you can toggle AudioDB on or off, switch between direct CDN loading and proxied loading (for privacy), enable name-based search fallback for niche artists, and adjust cache TTLs.
|
||||
|
||||
---
|
||||
|
||||
## Playback Sources
|
||||
|
||||
### Jellyfin
|
||||
|
||||
Audio is transcoded on the Jellyfin server and streamed to the browser. Supported codecs include AAC, MP3, Opus, FLAC, Vorbis, ALAC, WAV, and WMA. Bitrate is configurable between 32 kbps and 320 kbps. Playback start, progress, and stop events are reported back to Jellyfin.
|
||||
|
||||
### Local Files
|
||||
|
||||
Mount your music directory into the container and MusicSeerr serves files directly. The mount path inside the container must match the Music Directory Path set in Settings > Local Files.
|
||||
|
||||
```yaml
|
||||
volumes:
|
||||
- /path/to/your/music:/music:ro
|
||||
```
|
||||
|
||||
### Navidrome
|
||||
|
||||
Connect your Navidrome instance under Settings > Navidrome.
|
||||
|
||||
### YouTube
|
||||
|
||||
Albums can be linked to a YouTube URL and played inline. This is useful for listening to albums before you've downloaded them. Links can be auto-generated with a YouTube API key or added manually.
|
||||
|
||||
A note on reliability: YouTube playback depends on the embedded player, which can be finicky. It works best in a browser where you're signed into YouTube, and VPNs tend to cause issues. Treat it as a convenience for previewing albums rather than a primary playback source.
|
||||
|
||||
---
|
||||
|
||||
## Volumes and Persistence
|
||||
|
||||
| Container path | Purpose |
|
||||
|-|-|
|
||||
| `/app/config` | Application config (`config.json`) |
|
||||
| `/app/cache` | Cover art cache, metadata cache, SQLite databases |
|
||||
| `/music` (optional) | Music library root for local file playback |
|
||||
|
||||
Map both `/app/config` and `/app/cache` to persistent host directories so they survive container restarts.
|
||||
|
||||
---
|
||||
|
||||
## API
|
||||
|
||||
Interactive API docs (Swagger UI) are available at `/api/v1/docs` on your MusicSeerr instance.
|
||||
|
||||
A health check endpoint is at `/health`.
|
||||
|
||||
---
|
||||
|
||||
## Development
|
||||
|
||||
The backend is Python 3.13 with FastAPI. The frontend is SvelteKit with Svelte 5, Tailwind CSS, and DaisyUI.
|
||||
|
||||
### Backend
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
pip install -r requirements-dev.txt
|
||||
uvicorn main:app --reload --port 8688
|
||||
```
|
||||
|
||||
### Frontend
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
|
||||
### Tests
|
||||
|
||||
A root Makefile wraps the test commands:
|
||||
|
||||
```bash
|
||||
make backend-test # full backend suite
|
||||
make frontend-test # full frontend suite
|
||||
make test # both
|
||||
make ci # tests + linting + type checks
|
||||
```
|
||||
|
||||
Frontend browser tests use Playwright. Install the browser with:
|
||||
|
||||
```bash
|
||||
make frontend-browser-install
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Support
|
||||
|
||||
Documentation is at [musicseerr.com](https://musicseerr.com/).
|
||||
|
||||
For questions, help, or just to chat, join the [Discord](https://discord.gg/f98bFfsPuB). Bug reports and feature requests go on [GitHub Issues](https://github.com/habirabbu/musicseerr/issues).
|
||||
|
||||
If you find MusicSeerr useful, consider supporting development:
|
||||
|
||||
[](https://ko-fi.com/M4M41URGJO)
|
||||
|
||||
---
|
||||
|
||||
## License
|
||||
|
||||
[GNU Affero General Public License v3.0](LICENSE)
|
||||
0
backend/api/__init__.py
Normal file
0
backend/api/v1/__init__.py
Normal file
0
backend/api/v1/routes/__init__.py
Normal file
149
backend/api/v1/routes/albums.py
Normal file
|
|
@ -0,0 +1,149 @@
|
|||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, Query, Request, status
|
||||
from core.exceptions import ClientDisconnectedError
|
||||
from api.v1.schemas.album import AlbumInfo, AlbumBasicInfo, AlbumTracksInfo, LastFmAlbumEnrichment
|
||||
from api.v1.schemas.discovery import SimilarAlbumsResponse, MoreByArtistResponse
|
||||
from core.dependencies import get_album_service, get_album_discovery_service, get_album_enrichment_service
|
||||
from services.album_service import AlbumService
|
||||
from services.album_discovery_service import AlbumDiscoveryService
|
||||
from services.album_enrichment_service import AlbumEnrichmentService
|
||||
from infrastructure.validators import is_unknown_mbid
|
||||
from infrastructure.degradation import try_get_degradation_context
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
|
||||
import msgspec.structs
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/albums", tags=["album"])
|
||||
|
||||
|
||||
@router.get("/{album_id}", response_model=AlbumInfo)
async def get_album(
    album_id: str,
    album_service: AlbumService = Depends(get_album_service)
):
    """Return full album info, annotated with service degradation status if any.

    Raises HTTP 400 for placeholder/unknown MBIDs or invalid album requests.
    """
    if is_unknown_mbid(album_id):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid or unknown album ID: {album_id}"
        )

    try:
        result = await album_service.get_album_info(album_id)
        ctx = try_get_degradation_context()
        if ctx is not None and ctx.has_degradation():
            # msgspec structs are immutable; replace() returns a copy with the
            # degradation summary attached.
            result = msgspec.structs.replace(result, service_status=ctx.degraded_summary())
        return result
    except ValueError as e:
        # Chain the cause so the original error stays visible in tracebacks.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid album request"
        ) from e
|
||||
|
||||
|
||||
@router.get("/{album_id}/basic", response_model=AlbumBasicInfo)
async def get_album_basic(
    album_id: str,
    request: Request,
    background_tasks: BackgroundTasks,
    album_service: AlbumService = Depends(get_album_service)
):
    """Get minimal album info for fast initial load - no tracks.

    Also schedules a background warm-up of the full album cache so the
    follow-up /tracks request is fast.
    """
    # Bail out early if the client already gave up on this request.
    if await request.is_disconnected():
        raise ClientDisconnectedError("Client disconnected")

    if is_unknown_mbid(album_id):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid or unknown album ID: {album_id}"
        )

    try:
        result = await album_service.get_album_basic_info(album_id)
        background_tasks.add_task(album_service.warm_full_album_cache, album_id)
        return result
    except ValueError as e:
        # Chain the cause so the original error stays visible in tracebacks.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid album request"
        ) from e
|
||||
|
||||
|
||||
@router.get("/{album_id}/tracks", response_model=AlbumTracksInfo)
async def get_album_tracks(
    album_id: str,
    request: Request,
    album_service: AlbumService = Depends(get_album_service)
):
    """Get track list and extended details - loaded asynchronously."""
    # Bail out early if the client already gave up on this request.
    if await request.is_disconnected():
        raise ClientDisconnectedError("Client disconnected")

    if is_unknown_mbid(album_id):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid or unknown album ID: {album_id}"
        )

    try:
        return await album_service.get_album_tracks_info(album_id)
    except ValueError as e:
        # Chain the cause so the original error stays visible in tracebacks.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid album request"
        ) from e
|
||||
|
||||
|
||||
@router.get("/{album_id}/similar", response_model=SimilarAlbumsResponse)
async def get_similar_albums(
    album_id: str,
    artist_id: str = Query(..., description="Artist MBID for similarity lookup"),
    count: int = Query(default=10, ge=1, le=30),
    discovery_service: AlbumDiscoveryService = Depends(get_album_discovery_service)
):
    """Get albums from similar artists."""
    # Reject placeholder/unknown MBIDs before hitting the discovery service.
    for mbid in (album_id, artist_id):
        if is_unknown_mbid(mbid):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid or unknown album/artist ID"
            )
    return await discovery_service.get_similar_albums(album_id, artist_id, count)
|
||||
|
||||
|
||||
@router.get("/{album_id}/more-by-artist", response_model=MoreByArtistResponse)
async def get_more_by_artist(
    album_id: str,
    artist_id: str = Query(..., description="Artist MBID"),
    count: int = Query(default=10, ge=1, le=30),
    discovery_service: AlbumDiscoveryService = Depends(get_album_discovery_service)
):
    """Get other albums by the same artist."""
    if not is_unknown_mbid(artist_id):
        return await discovery_service.get_more_by_artist(artist_id, album_id, count)
    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail="Invalid or unknown artist ID"
    )
|
||||
|
||||
|
||||
@router.get("/{album_id}/lastfm", response_model=LastFmAlbumEnrichment)
async def get_album_lastfm_enrichment(
    album_id: str,
    artist_name: str = Query(..., description="Artist name for Last.fm lookup"),
    album_name: str = Query(..., description="Album name for Last.fm lookup"),
    enrichment_service: AlbumEnrichmentService = Depends(get_album_enrichment_service),
):
    """Fetch Last.fm enrichment for an album; empty payload when nothing is found."""
    if is_unknown_mbid(album_id):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid or unknown album ID: {album_id}"
        )
    enrichment = await enrichment_service.get_lastfm_enrichment(
        artist_name=artist_name, album_name=album_name, album_mbid=album_id
    )
    # A missing result is reported as an empty enrichment, not an error.
    return enrichment if enrichment is not None else LastFmAlbumEnrichment()
|
||||
152
backend/api/v1/routes/artists.py
Normal file
|
|
@ -0,0 +1,152 @@
|
|||
import logging
|
||||
from typing import Literal, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Request, status
|
||||
from core.exceptions import ClientDisconnectedError
|
||||
from api.v1.schemas.artist import ArtistInfo, ArtistExtendedInfo, ArtistReleases, LastFmArtistEnrichment
|
||||
from api.v1.schemas.discovery import SimilarArtistsResponse, TopSongsResponse, TopAlbumsResponse
|
||||
from core.dependencies import get_artist_service, get_artist_discovery_service, get_artist_enrichment_service
|
||||
from services.artist_service import ArtistService
|
||||
from services.artist_discovery_service import ArtistDiscoveryService
|
||||
from services.artist_enrichment_service import ArtistEnrichmentService
|
||||
from infrastructure.validators import is_unknown_mbid
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from infrastructure.degradation import try_get_degradation_context
|
||||
|
||||
import msgspec.structs
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/artists", tags=["artist"])
|
||||
|
||||
|
||||
@router.get("/{artist_id}", response_model=ArtistInfo)
async def get_artist(
    artist_id: str,
    request: Request,
    artist_service: ArtistService = Depends(get_artist_service)
):
    """Return full artist info, annotated with service degradation status if any.

    Raises HTTP 400 for placeholder/unknown MBIDs or invalid artist requests.
    """
    # Bail out early if the client already gave up on this request.
    if await request.is_disconnected():
        raise ClientDisconnectedError("Client disconnected")

    if is_unknown_mbid(artist_id):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid or unknown artist ID: {artist_id}"
        )

    try:
        result = await artist_service.get_artist_info(artist_id)
        ctx = try_get_degradation_context()
        # Explicit None check for consistency with the albums route.
        if ctx is not None and ctx.has_degradation():
            result = msgspec.structs.replace(result, service_status=ctx.degraded_summary())
        return result
    except ValueError as e:
        # Chain the cause so the original error stays visible in tracebacks.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid artist request"
        ) from e
|
||||
|
||||
|
||||
@router.get("/{artist_id}/extended", response_model=ArtistExtendedInfo)
async def get_artist_extended(
    artist_id: str,
    artist_service: ArtistService = Depends(get_artist_service)
):
    """Return extended artist info; HTTP 400 for unknown MBIDs or bad requests."""
    if is_unknown_mbid(artist_id):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid or unknown artist ID: {artist_id}"
        )

    try:
        return await artist_service.get_artist_extended_info(artist_id)
    except ValueError as e:
        # Chain the cause so the original error stays visible in tracebacks.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid artist request"
        ) from e
|
||||
|
||||
|
||||
@router.get("/{artist_id}/releases", response_model=ArtistReleases)
async def get_artist_releases(
    artist_id: str,
    offset: int = Query(default=0, ge=0),
    limit: int = Query(default=50, ge=1, le=200),
    artist_service: ArtistService = Depends(get_artist_service)
):
    """Return a paginated slice of the artist's releases."""
    if is_unknown_mbid(artist_id):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid or unknown artist ID: {artist_id}"
        )

    try:
        return await artist_service.get_artist_releases(artist_id, offset, limit)
    except ValueError as e:
        # Chain the cause so the original error stays visible in tracebacks.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid artist request"
        ) from e
|
||||
|
||||
|
||||
@router.get("/{artist_id}/similar", response_model=SimilarArtistsResponse)
async def get_similar_artists(
    artist_id: str,
    count: int = Query(default=15, ge=1, le=50),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None, description="Data source: listenbrainz or lastfm"),
    discovery_service: ArtistDiscoveryService = Depends(get_artist_discovery_service)
):
    """Return artists similar to the given artist from the chosen data source."""
    if not is_unknown_mbid(artist_id):
        return await discovery_service.get_similar_artists(artist_id, count, source=source)
    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail=f"Invalid or unknown artist ID: {artist_id}"
    )
|
||||
|
||||
|
||||
@router.get("/{artist_id}/top-songs", response_model=TopSongsResponse)
async def get_top_songs(
    artist_id: str,
    count: int = Query(default=10, ge=1, le=50),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None, description="Data source: listenbrainz or lastfm"),
    discovery_service: ArtistDiscoveryService = Depends(get_artist_discovery_service)
):
    """Return the artist's top songs from the chosen data source."""
    if not is_unknown_mbid(artist_id):
        return await discovery_service.get_top_songs(artist_id, count, source=source)
    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail=f"Invalid or unknown artist ID: {artist_id}"
    )
|
||||
|
||||
|
||||
@router.get("/{artist_id}/top-albums", response_model=TopAlbumsResponse)
async def get_top_albums(
    artist_id: str,
    count: int = Query(default=10, ge=1, le=50),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None, description="Data source: listenbrainz or lastfm"),
    discovery_service: ArtistDiscoveryService = Depends(get_artist_discovery_service)
):
    """Return the artist's top albums from the chosen data source."""
    if not is_unknown_mbid(artist_id):
        return await discovery_service.get_top_albums(artist_id, count, source=source)
    raise HTTPException(
        status_code=status.HTTP_400_BAD_REQUEST,
        detail=f"Invalid or unknown artist ID: {artist_id}"
    )
|
||||
|
||||
|
||||
@router.get("/{artist_id}/lastfm", response_model=LastFmArtistEnrichment)
async def get_artist_lastfm_enrichment(
    artist_id: str,
    artist_name: str = Query(..., description="Artist name for Last.fm lookup"),
    enrichment_service: ArtistEnrichmentService = Depends(get_artist_enrichment_service),
):
    """Fetch Last.fm enrichment for an artist; empty payload when nothing is found."""
    if is_unknown_mbid(artist_id):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Invalid or unknown artist ID: {artist_id}"
        )
    enrichment = await enrichment_service.get_lastfm_enrichment(artist_id, artist_name)
    # A missing result is reported as an empty enrichment, not an error.
    return enrichment if enrichment is not None else LastFmArtistEnrichment()
|
||||
78
backend/api/v1/routes/cache.py
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
import logging
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from api.v1.schemas.cache import CacheStats, CacheClearResponse
|
||||
from core.dependencies import get_cache_service
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from services.cache_service import CacheService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/cache", tags=["cache"])
|
||||
|
||||
|
||||
@router.get("/stats", response_model=CacheStats)
async def get_cache_stats(
    cache_service: CacheService = Depends(get_cache_service),
):
    """Report current cache statistics."""
    stats = await cache_service.get_stats()
    return stats
|
||||
|
||||
|
||||
@router.post("/clear/memory", response_model=CacheClearResponse)
async def clear_memory_cache(
    cache_service: CacheService = Depends(get_cache_service),
):
    """Flush the in-memory cache; 500 if the service reports failure."""
    outcome = await cache_service.clear_memory_cache()
    if outcome.success:
        return outcome
    raise HTTPException(status_code=500, detail=outcome.message)
|
||||
|
||||
|
||||
@router.post("/clear/disk", response_model=CacheClearResponse)
async def clear_disk_cache(
    cache_service: CacheService = Depends(get_cache_service),
):
    """Flush the on-disk cache; 500 if the service reports failure."""
    outcome = await cache_service.clear_disk_cache()
    if outcome.success:
        return outcome
    raise HTTPException(status_code=500, detail=outcome.message)
|
||||
|
||||
|
||||
@router.post("/clear/all", response_model=CacheClearResponse)
async def clear_all_cache(
    cache_service: CacheService = Depends(get_cache_service),
):
    """Flush every cache tier; 500 if the service reports failure."""
    outcome = await cache_service.clear_all_cache()
    if outcome.success:
        return outcome
    raise HTTPException(status_code=500, detail=outcome.message)
|
||||
|
||||
|
||||
@router.post("/clear/covers", response_model=CacheClearResponse)
async def clear_covers_cache(
    cache_service: CacheService = Depends(get_cache_service),
):
    """Flush the cover-art cache; 500 if the service reports failure."""
    outcome = await cache_service.clear_covers_cache()
    if outcome.success:
        return outcome
    raise HTTPException(status_code=500, detail=outcome.message)
|
||||
|
||||
|
||||
@router.post("/clear/library", response_model=CacheClearResponse)
async def clear_library_cache(
    cache_service: CacheService = Depends(get_cache_service),
):
    """Flush the library cache; 500 if the service reports failure."""
    outcome = await cache_service.clear_library_cache()
    if outcome.success:
        return outcome
    raise HTTPException(status_code=500, detail=outcome.message)
|
||||
|
||||
|
||||
@router.post("/clear/audiodb", response_model=CacheClearResponse)
async def clear_audiodb_cache(
    cache_service: CacheService = Depends(get_cache_service),
):
    """Flush the AudioDB cache; 500 if the service reports failure."""
    outcome = await cache_service.clear_audiodb()
    if outcome.success:
        return outcome
    raise HTTPException(status_code=500, detail=outcome.message)
|
||||
83
backend/api/v1/routes/cache_status.py
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
import asyncio
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import StreamingResponse
|
||||
import msgspec
|
||||
|
||||
from api.v1.schemas.cache_status import CacheSyncStatus
|
||||
from core.dependencies import get_cache_status_service
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from services.cache_status_service import CacheStatusService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/cache/sync", tags=["cache"])
|
||||
|
||||
|
||||
@router.get("/status", response_model=CacheSyncStatus)
async def get_sync_status(
    status_service: CacheStatusService = Depends(get_cache_status_service),
):
    """Return a snapshot of the current cache-sync progress."""
    snapshot = status_service.get_progress()

    # Mirror the progress attributes 1:1 onto the response schema.
    field_names = (
        "is_syncing",
        "phase",
        "total_items",
        "processed_items",
        "progress_percent",
        "current_item",
        "started_at",
        "error_message",
        "total_artists",
        "processed_artists",
        "total_albums",
        "processed_albums",
    )
    return CacheSyncStatus(**{name: getattr(snapshot, name) for name in field_names})
|
||||
|
||||
|
||||
@router.get("/stream")
async def stream_sync_status(
    status_service: CacheStatusService = Depends(get_cache_status_service),
):
    """Stream cache-sync progress to the client as Server-Sent Events.

    Emits the current progress snapshot immediately, then relays updates
    pushed by the status service. A comment-only keepalive line is sent
    after 30s of inactivity so idle connections are not dropped by proxies.
    """
    queue = status_service.subscribe_sse()

    async def event_generator():
        try:
            progress = status_service.get_progress()
            initial_data = {
                'is_syncing': progress.is_syncing,
                'phase': progress.phase,
                'total_items': progress.total_items,
                'processed_items': progress.processed_items,
                'progress_percent': progress.progress_percent,
                'current_item': progress.current_item,
                'started_at': progress.started_at,
                'error_message': progress.error_message,
                'total_artists': progress.total_artists,
                'processed_artists': progress.processed_artists,
                'total_albums': progress.total_albums,
                'processed_albums': progress.processed_albums
            }
            yield f"data: {msgspec.json.encode(initial_data).decode('utf-8')}\n\n"

            while True:
                try:
                    data = await asyncio.wait_for(queue.get(), timeout=30.0)
                    yield f"data: {data}\n\n"
                except asyncio.TimeoutError:
                    # SSE comment line: ignored by clients, keeps the pipe open.
                    yield ": keepalive\n\n"
        except asyncio.CancelledError:
            # Re-raise instead of swallowing: suppressing CancelledError would
            # defeat task cancellation on client disconnect. The finally block
            # still runs to drop the subscription.
            raise
        finally:
            status_service.unsubscribe_sse(queue)

    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no"
        }
    )
|
||||
281
backend/api/v1/routes/covers.py
Normal file
|
|
@ -0,0 +1,281 @@
|
|||
import logging
|
||||
import hashlib
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, HTTPException, Path, Query, Depends, Request
|
||||
from fastapi.responses import Response
|
||||
from core.dependencies import get_coverart_repository
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from repositories.coverart_repository import CoverArtRepository
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/covers", tags=["covers"])
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
_ALLOWED_SIZES = {"250", "500", "1200"}
|
||||
_SIZE_ALIAS_NONE = {"", "original", "full", "max", "largest"}
|
||||
|
||||
|
||||
def _quote_etag(content_hash: str) -> str:
|
||||
return f'"{content_hash}"'
|
||||
|
||||
|
||||
def _etag_matches(if_none_match: Optional[str], etag_header: str) -> bool:
|
||||
if not if_none_match:
|
||||
return False
|
||||
|
||||
candidates = [token.strip() for token in if_none_match.split(",")]
|
||||
if "*" in candidates:
|
||||
return True
|
||||
|
||||
if etag_header in candidates:
|
||||
return True
|
||||
|
||||
weak_etag = f"W/{etag_header}"
|
||||
return weak_etag in candidates
|
||||
|
||||
|
||||
def _normalize_size(size: Optional[str]) -> Optional[str]:
    """Map a raw ``size`` query value to a canonical size string or None.

    Returns "500" when no size was supplied, None for the "original image"
    aliases, and the normalized token when it is one of the allowed sizes.
    Raises HTTP 400 for anything else.
    """
    if size is None:
        return "500"
    wanted = size.strip().lower()
    if wanted in _SIZE_ALIAS_NONE:
        return None
    if wanted in _ALLOWED_SIZES:
        return wanted
    raise HTTPException(
        status_code=400,
        detail=f"Unsupported size '{size}'. Choose one of 250, 500, 1200 or original.",
    )
|
||||
|
||||
|
||||
@router.get("/release-group/{release_group_id}")
async def cover_from_release_group(
    request: Request,
    release_group_id: str = Path(..., min_length=1, description="MusicBrainz release group ID"),
    size: Optional[str] = Query(
        "500",
        description="Preferred size: 250, 500, 1200, or 'original' for full size",
    ),
    coverart_repo: CoverArtRepository = Depends(get_coverart_repository)
):
    """Serve release-group cover art with ETag revalidation and SVG fallback."""
    desired_size = _normalize_size(size)

    # Conditional-request fast path: answer 304 without fetching image bytes.
    etag_hash = await coverart_repo.get_release_group_cover_etag(release_group_id, desired_size)
    etag_header = _quote_etag(etag_hash) if etag_hash else None
    if etag_header and _etag_matches(request.headers.get("if-none-match"), etag_header):
        return Response(
            status_code=304,
            headers={
                "Cache-Control": "public, max-age=31536000, immutable",
                "ETag": etag_header,
            },
        )

    result = await coverart_repo.get_release_group_cover(release_group_id, desired_size, is_disconnected=request.is_disconnected)

    if result:
        image_data, content_type, source = result
        if etag_header is None:
            # No stored hash: derive the ETag from the bytes being served.
            etag_header = _quote_etag(hashlib.sha1(image_data).hexdigest())
        return Response(
            content=image_data,
            media_type=content_type,
            headers={
                "Cache-Control": "public, max-age=31536000, immutable",
                "X-Cover-Source": source,
                "ETag": etag_header,
            }
        )

    # No art found anywhere: inline vinyl-record placeholder, cached one day.
    placeholder_svg = '''<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 200 200">
        <rect fill="#374151" width="200" height="200"/>
        <circle cx="100" cy="100" r="70" fill="#1f2937" stroke="#4B5563" stroke-width="2"/>
        <circle cx="100" cy="100" r="50" fill="none" stroke="#4B5563" stroke-width="1"/>
        <circle cx="100" cy="100" r="30" fill="none" stroke="#4B5563" stroke-width="1"/>
        <circle cx="100" cy="100" r="12" fill="#4B5563"/>
        <circle cx="100" cy="100" r="4" fill="#374151"/>
    </svg>'''
    return Response(
        content=placeholder_svg.encode(),
        media_type="image/svg+xml",
        headers={
            "Cache-Control": "public, max-age=86400",
            "X-Cover-Source": "placeholder",
        }
    )
|
||||
|
||||
|
||||
@router.get("/release/{release_id}")
async def cover_from_release(
    request: Request,
    release_id: str = Path(..., min_length=1, description="MusicBrainz release ID"),
    size: Optional[str] = Query(
        "500",
        description="Preferred size: 250, 500, 1200, or 'original' for full size",
    ),
    coverart_repo: CoverArtRepository = Depends(get_coverart_repository)
):
    """Serve release cover art with ETag revalidation and SVG fallback."""
    desired_size = _normalize_size(size)

    # Conditional-request fast path: answer 304 without fetching image bytes.
    etag_hash = await coverart_repo.get_release_cover_etag(release_id, desired_size)
    etag_header = _quote_etag(etag_hash) if etag_hash else None
    if etag_header and _etag_matches(request.headers.get("if-none-match"), etag_header):
        return Response(
            status_code=304,
            headers={
                "Cache-Control": "public, max-age=31536000, immutable",
                "ETag": etag_header,
            },
        )

    result = await coverart_repo.get_release_cover(release_id, desired_size, is_disconnected=request.is_disconnected)

    if result:
        image_data, content_type, source = result
        if etag_header is None:
            # No stored hash: derive the ETag from the bytes being served.
            etag_header = _quote_etag(hashlib.sha1(image_data).hexdigest())
        return Response(
            content=image_data,
            media_type=content_type,
            headers={
                "Cache-Control": "public, max-age=31536000, immutable",
                "X-Cover-Source": source,
                "ETag": etag_header,
            }
        )

    # No art found anywhere: inline vinyl-record placeholder, cached one day.
    placeholder_svg = '''<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 200 200">
        <rect fill="#374151" width="200" height="200"/>
        <circle cx="100" cy="100" r="70" fill="#1f2937" stroke="#4B5563" stroke-width="2"/>
        <circle cx="100" cy="100" r="50" fill="none" stroke="#4B5563" stroke-width="1"/>
        <circle cx="100" cy="100" r="30" fill="none" stroke="#4B5563" stroke-width="1"/>
        <circle cx="100" cy="100" r="12" fill="#4B5563"/>
        <circle cx="100" cy="100" r="4" fill="#374151"/>
    </svg>'''
    return Response(
        content=placeholder_svg.encode(),
        media_type="image/svg+xml",
        headers={
            "Cache-Control": "public, max-age=86400",
            "X-Cover-Source": "placeholder",
        }
    )
|
||||
|
||||
|
||||
@router.get("/artist/{artist_id}")
async def get_artist_cover(
    request: Request,
    artist_id: str,
    size: Optional[int] = Query(None, description="Preferred size in pixels for width"),
    coverart_repo: CoverArtRepository = Depends(get_coverart_repository)
):
    """Serve an artist image with ETag revalidation and an SVG silhouette fallback."""
    etag_hash = await coverart_repo.get_artist_image_etag(artist_id, size)
    etag_header = _quote_etag(etag_hash) if etag_hash else None
    if etag_header and _etag_matches(request.headers.get("if-none-match"), etag_header):
        # Client already holds the current bytes - skip the image fetch entirely.
        return Response(
            status_code=304,
            headers={
                "Cache-Control": "public, max-age=31536000, immutable",
                "ETag": etag_header,
            },
        )

    result = await coverart_repo.get_artist_image(artist_id, size, is_disconnected=request.is_disconnected)

    if result:
        image_data, content_type, source = result
        if etag_header is None:
            etag_header = _quote_etag(hashlib.sha1(image_data).hexdigest())
        return Response(
            content=image_data,
            media_type=content_type,
            headers={
                "Cache-Control": "public, max-age=31536000, immutable",
                "X-Cover-Source": source,
                "ETag": etag_header,
            }
        )

    # Generic person-silhouette placeholder, cached for one day only.
    placeholder_svg = '''<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 200 200">
        <rect fill="#374151" width="200" height="200"/>
        <circle cx="100" cy="80" r="30" fill="#6B7280"/>
        <path d="M60 120 Q100 140 140 120 L140 160 Q100 180 60 160 Z" fill="#6B7280"/>
    </svg>'''
    return Response(
        content=placeholder_svg.encode(),
        media_type="image/svg+xml",
        headers={
            "Cache-Control": "public, max-age=86400",
            "X-Cover-Source": "placeholder",
        }
    )
|
||||
|
||||
|
||||
@router.get("/debug/artist/{artist_id}")
async def debug_artist_cover(
    artist_id: str,
    coverart_repo: CoverArtRepository = Depends(get_coverart_repository)
):
    """
    Debug endpoint that returns diagnostic info about an artist image fetch.
    Shows cache state, Lidarr availability, MusicBrainz relations, and Wikidata URL.

    Returns the `debug_info` dict (filled in by the repository) plus a
    human-readable `recommendation` chosen from the first matching condition.
    """
    # Local import keeps the validator out of the module's hot import path.
    from infrastructure.validators import validate_mbid

    # Skeleton of every diagnostic field; the repository fills these in.
    debug_info = {
        "artist_id": artist_id,
        "is_valid_mbid": False,
        "validated_mbid": None,
        "disk_cache": {
            "exists_250": False,
            "exists_500": False,
            "negative_250": False,
            "negative_500": False,
            "meta_250": None,
            "meta_500": None,
        },
        "lidarr": {
            "configured": False,
            "has_image_url": False,
            "image_url": None,
        },
        "musicbrainz": {
            "artist_found": False,
            "has_wikidata_relation": False,
            "wikidata_url": None,
        },
        "memory_cache": {
            "wikidata_url_cached": False,
            "cached_value": None,
        },
        "circuit_breakers": {},
        "recommendation": None,
    }

    # Bail out early (with the skeleton) when the MBID is malformed.
    try:
        validated_id = validate_mbid(artist_id, "artist")
        debug_info["is_valid_mbid"] = True
        debug_info["validated_mbid"] = validated_id
    except ValueError as e:
        debug_info["recommendation"] = f"Invalid MBID format: {e}. No image can be fetched."
        return debug_info

    debug_info = await coverart_repo.debug_artist_image(validated_id, debug_info)

    # Recommendation priority: negative cache > positive cache > open circuit
    # breakers > Lidarr source > Wikidata source > no source at all.
    if debug_info["disk_cache"]["negative_250"] or debug_info["disk_cache"]["negative_500"]:
        debug_info["recommendation"] = "Artist has a negative cache entry. Wait for expiry or purge negative cache."
    elif debug_info["disk_cache"]["exists_250"] or debug_info["disk_cache"]["exists_500"]:
        debug_info["recommendation"] = "Image is cached on disk - should load successfully."
    elif any(
        breaker.get("state") == "open"
        for breaker in debug_info.get("circuit_breakers", {}).values()
        if isinstance(breaker, dict)
    ):
        debug_info["recommendation"] = "One or more cover fetch circuit breakers are OPEN. Retry after cooldown or reset breakers."
    elif debug_info["lidarr"]["has_image_url"]:
        debug_info["recommendation"] = "Lidarr has an image URL - fetch should succeed from Lidarr."
    elif debug_info["musicbrainz"]["has_wikidata_relation"]:
        debug_info["recommendation"] = "Wikidata URL found - fetch should succeed from Wikidata/Wikimedia."
    else:
        debug_info["recommendation"] = "No image source found. This artist will show a placeholder."

    return debug_info
|
||||
213
backend/api/v1/routes/discover.py
Normal file
|
|
@ -0,0 +1,213 @@
|
|||
import logging
|
||||
from typing import Literal
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, Response
|
||||
from api.v1.schemas.discover import (
|
||||
DiscoverResponse,
|
||||
DiscoverQueueResponse,
|
||||
DiscoverQueueEnrichment,
|
||||
DiscoverIgnoredRelease,
|
||||
DiscoverQueueIgnoreRequest,
|
||||
DiscoverQueueValidateRequest,
|
||||
DiscoverQueueValidateResponse,
|
||||
DiscoverQueueStatusResponse,
|
||||
QueueGenerateRequest,
|
||||
QueueGenerateResponse,
|
||||
YouTubeSearchResponse,
|
||||
YouTubeQuotaResponse,
|
||||
TrackCacheCheckRequest,
|
||||
TrackCacheCheckResponse,
|
||||
TrackCacheCheckResponseItem,
|
||||
)
|
||||
from api.v1.schemas.common import StatusMessageResponse
|
||||
from core.dependencies import get_discover_service, get_discover_queue_manager, get_youtube_repo
|
||||
from infrastructure.degradation import try_get_degradation_context
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
|
||||
import msgspec.structs
|
||||
from repositories.youtube import YouTubeRepository
|
||||
from services.discover_service import DiscoverService
|
||||
from services.discover_queue_manager import DiscoverQueueManager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/discover", tags=["discover"])
|
||||
|
||||
|
||||
@router.get("", response_model=DiscoverResponse)
async def get_discover_data(
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None, description="Data source: listenbrainz or lastfm"),
    discover_service: DiscoverService = Depends(get_discover_service),
):
    """Discover-page payload, annotated with degraded-service status when present."""
    result = await discover_service.get_discover_data(source=source)
    ctx = try_get_degradation_context()
    if ctx is None or not ctx.has_degradation():
        return result
    # msgspec structs are replaced, not mutated, to attach the status summary.
    return msgspec.structs.replace(result, service_status=ctx.degraded_summary())
|
||||
|
||||
|
||||
@router.post("/refresh", response_model=StatusMessageResponse)
async def refresh_discover_data(
    discover_service: DiscoverService = Depends(get_discover_service),
):
    """Kick off a discover-data refresh and acknowledge immediately."""
    await discover_service.refresh_discover_data()
    return StatusMessageResponse(status="ok", message="Discover refresh triggered")
|
||||
|
||||
|
||||
@router.get("/queue", response_model=DiscoverQueueResponse)
async def get_discover_queue(
    count: int | None = Query(default=None, description="Number of items (default from settings, max 20)"),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None, description="Data source: listenbrainz or lastfm"),
    discover_service: DiscoverService = Depends(get_discover_service),
    queue_manager: DiscoverQueueManager = Depends(get_discover_queue_manager),
):
    """Return the discover queue, preferring a pre-built one when available."""
    resolved = source or discover_service.resolve_source(None)
    prebuilt = await queue_manager.consume_queue(resolved)
    if prebuilt:
        logger.info("Serving pre-built discover queue (source=%s, items=%d)", resolved, len(prebuilt.items))
        return prebuilt
    # No pre-built queue: build one on demand, capping the requested count at 20.
    capped = None if count is None else min(count, 20)
    return await queue_manager.build_hydrated_queue(resolved, capped)
|
||||
|
||||
|
||||
@router.get("/queue/status", response_model=DiscoverQueueStatusResponse)
async def get_queue_status(
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None, description="Data source"),
    discover_service: DiscoverService = Depends(get_discover_service),
    queue_manager: DiscoverQueueManager = Depends(get_discover_queue_manager),
):
    """Report the build status of the discover queue for the resolved source."""
    return queue_manager.get_status(source or discover_service.resolve_source(None))
|
||||
|
||||
|
||||
@router.post("/queue/generate", response_model=QueueGenerateResponse)
async def generate_queue(
    body: QueueGenerateRequest = MsgSpecBody(QueueGenerateRequest),
    discover_service: DiscoverService = Depends(get_discover_service),
    queue_manager: DiscoverQueueManager = Depends(get_discover_queue_manager),
):
    """Start (or force-restart) a background discover-queue build."""
    resolved_source = body.source or discover_service.resolve_source(None)
    return await queue_manager.start_build(resolved_source, force=body.force)
|
||||
|
||||
|
||||
@router.get("/queue/enrich/{release_group_mbid}", response_model=DiscoverQueueEnrichment)
async def enrich_queue_item(
    release_group_mbid: str,
    discover_service: DiscoverService = Depends(get_discover_service),
):
    """Fetch enrichment metadata for a single queued release group."""
    enrichment = await discover_service.enrich_queue_item(release_group_mbid)
    return enrichment
|
||||
|
||||
|
||||
@router.post("/queue/ignore", status_code=204)
async def ignore_queue_item(
    body: DiscoverQueueIgnoreRequest = MsgSpecBody(DiscoverQueueIgnoreRequest),
    discover_service: DiscoverService = Depends(get_discover_service),
):
    """Mark a release as ignored so it no longer appears in the discover queue."""
    await discover_service.ignore_release(
        body.release_group_mbid,
        body.artist_mbid,
        body.release_name,
        body.artist_name,
    )
|
||||
|
||||
|
||||
@router.get("/queue/ignored", response_model=list[DiscoverIgnoredRelease])
async def get_ignored_items(
    discover_service: DiscoverService = Depends(get_discover_service),
):
    """List every release the user has ignored from the discover queue."""
    ignored = await discover_service.get_ignored_releases()
    return ignored
|
||||
|
||||
|
||||
@router.post("/queue/validate", response_model=DiscoverQueueValidateResponse)
async def validate_queue(
    body: DiscoverQueueValidateRequest = MsgSpecBody(DiscoverQueueValidateRequest),
    discover_service: DiscoverService = Depends(get_discover_service),
):
    """Report which of the submitted release-group MBIDs are already in the library."""
    matches = await discover_service.validate_queue_mbids(body.release_group_mbids)
    return DiscoverQueueValidateResponse(in_library=matches)
|
||||
|
||||
|
||||
@router.get("/queue/youtube-search", response_model=YouTubeSearchResponse)
async def youtube_search(
    artist: str = Query(..., description="Artist name"),
    album: str = Query(..., description="Album name"),
    yt_repo: YouTubeRepository = Depends(get_youtube_repo),
):
    """Find a YouTube video for an album, reporting failures in-band.

    Errors come back as response codes ('not_configured', 'quota_exceeded',
    'not_found') rather than HTTP errors so the frontend can degrade softly.
    """
    if not yt_repo or not yt_repo.is_configured:
        return YouTubeSearchResponse(error="not_configured")

    # Probe the cache once up front (the original queried it twice).
    was_cached = yt_repo.is_cached(artist, album)
    # Cached lookups are still served when the daily API quota is exhausted.
    if yt_repo.quota_remaining <= 0 and not was_cached:
        return YouTubeSearchResponse(error="quota_exceeded")

    video_id = await yt_repo.search_video(artist, album)
    if video_id:
        return YouTubeSearchResponse(
            video_id=video_id,
            embed_url=f"https://www.youtube.com/embed/{video_id}",
            cached=was_cached,
        )
    return YouTubeSearchResponse(error="not_found")
|
||||
|
||||
|
||||
@router.get("/queue/youtube-track-search", response_model=YouTubeSearchResponse)
async def youtube_track_search(
    artist: str = Query(..., description="Artist name"),
    track: str = Query(..., description="Track name"),
    yt_repo: YouTubeRepository = Depends(get_youtube_repo),
):
    """Find a YouTube video for a single track, reporting failures in-band.

    Mirrors `youtube_search` but queries at track granularity.
    """
    if not yt_repo or not yt_repo.is_configured:
        return YouTubeSearchResponse(error="not_configured")

    # Probe the cache once up front (the original queried it twice).
    was_cached = yt_repo.is_cached(artist, track)
    # Cached lookups are still served when the daily API quota is exhausted.
    if yt_repo.quota_remaining <= 0 and not was_cached:
        return YouTubeSearchResponse(error="quota_exceeded")

    video_id = await yt_repo.search_track(artist, track)
    if video_id:
        return YouTubeSearchResponse(
            video_id=video_id,
            embed_url=f"https://www.youtube.com/embed/{video_id}",
            cached=was_cached,
        )
    return YouTubeSearchResponse(error="not_found")
|
||||
|
||||
|
||||
@router.get("/queue/youtube-quota", response_model=YouTubeQuotaResponse)
async def youtube_quota(
    yt_repo: YouTubeRepository = Depends(get_youtube_repo),
):
    """Report remaining YouTube API quota; 404 when YouTube isn't configured."""
    if yt_repo and yt_repo.is_configured:
        return yt_repo.get_quota_status()
    raise HTTPException(status_code=404, detail="YouTube not configured")
|
||||
|
||||
|
||||
# Hard limits so a single request cannot force unbounded cache probing.
CACHE_CHECK_MAX_ITEMS = 100
CACHE_CHECK_MAX_STR_LEN = 200


@router.post("/queue/youtube-cache-check", response_model=TrackCacheCheckResponse)
async def youtube_cache_check(
    body: TrackCacheCheckRequest = MsgSpecBody(TrackCacheCheckRequest),
    yt_repo: YouTubeRepository = Depends(get_youtube_repo),
):
    """Report which (artist, track) pairs already have a cached YouTube result."""
    if not yt_repo or not yt_repo.is_configured:
        return TrackCacheCheckResponse()

    # Truncate fields, then de-duplicate case-insensitively, preserving order.
    seen: set[str] = set()
    deduped: list[tuple[str, str]] = []
    for entry in body.items[:CACHE_CHECK_MAX_ITEMS]:
        artist = entry.artist[:CACHE_CHECK_MAX_STR_LEN]
        track = entry.track[:CACHE_CHECK_MAX_STR_LEN]
        key = f"{artist.lower()}|{track.lower()}"
        if key in seen:
            continue
        seen.add(key)
        deduped.append((artist, track))

    cache_results = yt_repo.are_cached(deduped)
    response_items = [
        TrackCacheCheckResponseItem(
            artist=artist,
            track=track,
            cached=cache_results.get(f"{artist.lower()}|{track.lower()}", False),
        )
        for artist, track in deduped
    ]
    return TrackCacheCheckResponse(items=response_items)
|
||||
144
backend/api/v1/routes/home.py
Normal file
|
|
@ -0,0 +1,144 @@
|
|||
import logging
|
||||
from typing import Literal
|
||||
from fastapi import APIRouter, Depends, Query, HTTPException
|
||||
from api.v1.schemas.home import (
|
||||
HomeResponse,
|
||||
HomeIntegrationStatus,
|
||||
GenreDetailResponse,
|
||||
GenreArtistResponse,
|
||||
GenreArtistsBatchResponse,
|
||||
TrendingArtistsResponse,
|
||||
TrendingArtistsRangeResponse,
|
||||
PopularAlbumsResponse,
|
||||
PopularAlbumsRangeResponse,
|
||||
)
|
||||
from core.dependencies import get_home_service, get_home_charts_service
|
||||
from infrastructure.degradation import try_get_degradation_context
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
|
||||
import msgspec.structs
|
||||
from services.home_service import HomeService
|
||||
from services.home_charts_service import HomeChartsService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/home", tags=["home"])
|
||||
|
||||
|
||||
@router.get("", response_model=HomeResponse)
async def get_home_data(
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None, description="Data source: listenbrainz or lastfm"),
    home_service: HomeService = Depends(get_home_service),
):
    """Home-page payload, annotated with degraded-service status when present."""
    result = await home_service.get_home_data(source=source)
    ctx = try_get_degradation_context()
    if ctx is None or not ctx.has_degradation():
        return result
    # msgspec structs are replaced, not mutated, to attach the status summary.
    return msgspec.structs.replace(result, service_status=ctx.degraded_summary())
|
||||
|
||||
|
||||
@router.get("/integration-status", response_model=HomeIntegrationStatus)
async def get_integration_status(
    home_service: HomeService = Depends(get_home_service)
):
    """Report which integrations are configured for the home page."""
    status = home_service.get_integration_status()
    return status
|
||||
|
||||
|
||||
@router.get("/genre/{genre_name}", response_model=GenreDetailResponse)
async def get_genre_detail(
    genre_name: str,
    limit: int = Query(default=50, ge=1, le=200),
    artist_offset: int = Query(default=0, ge=0),
    album_offset: int = Query(default=0, ge=0),
    charts_service: HomeChartsService = Depends(get_home_charts_service)
):
    """Artists and albums for one genre, with independent pagination offsets."""
    detail = await charts_service.get_genre_artists(
        genre=genre_name,
        limit=limit,
        artist_offset=artist_offset,
        album_offset=album_offset,
    )
    return detail
|
||||
|
||||
|
||||
@router.get("/trending/artists", response_model=TrendingArtistsResponse)
async def get_trending_artists(
    limit: int = Query(default=10, ge=1, le=25),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None),
    charts_service: HomeChartsService = Depends(get_home_charts_service)
):
    """Top trending artists for the home page."""
    trending = await charts_service.get_trending_artists(limit=limit, source=source)
    return trending
|
||||
|
||||
|
||||
@router.get("/trending/artists/{range_key}", response_model=TrendingArtistsRangeResponse)
async def get_trending_artists_by_range(
    range_key: str,
    limit: int = Query(default=25, ge=1, le=100),
    offset: int = Query(default=0, ge=0),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None),
    charts_service: HomeChartsService = Depends(get_home_charts_service)
):
    """Paginated trending artists for a specific time range."""
    page = await charts_service.get_trending_artists_by_range(
        range_key=range_key, limit=limit, offset=offset, source=source
    )
    return page
|
||||
|
||||
|
||||
@router.get("/popular/albums", response_model=PopularAlbumsResponse)
async def get_popular_albums(
    limit: int = Query(default=10, ge=1, le=25),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None),
    charts_service: HomeChartsService = Depends(get_home_charts_service)
):
    """Most popular albums for the home page."""
    popular = await charts_service.get_popular_albums(limit=limit, source=source)
    return popular
|
||||
|
||||
|
||||
@router.get("/popular/albums/{range_key}", response_model=PopularAlbumsRangeResponse)
async def get_popular_albums_by_range(
    range_key: str,
    limit: int = Query(default=25, ge=1, le=100),
    offset: int = Query(default=0, ge=0),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None),
    charts_service: HomeChartsService = Depends(get_home_charts_service)
):
    """Paginated popular albums for a specific time range."""
    page = await charts_service.get_popular_albums_by_range(
        range_key=range_key, limit=limit, offset=offset, source=source
    )
    return page
|
||||
|
||||
|
||||
@router.get("/your-top/albums", response_model=PopularAlbumsResponse)
async def get_your_top_albums(
    limit: int = Query(default=10, ge=1, le=25),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None),
    charts_service: HomeChartsService = Depends(get_home_charts_service)
):
    """The current user's top albums for the home page."""
    top = await charts_service.get_your_top_albums(limit=limit, source=source)
    return top
|
||||
|
||||
|
||||
@router.get("/your-top/albums/{range_key}", response_model=PopularAlbumsRangeResponse)
async def get_your_top_albums_by_range(
    range_key: str,
    limit: int = Query(default=25, ge=1, le=100),
    offset: int = Query(default=0, ge=0),
    source: Literal["listenbrainz", "lastfm"] | None = Query(default=None),
    charts_service: HomeChartsService = Depends(get_home_charts_service)
):
    """Paginated top albums for the current user over a specific time range."""
    page = await charts_service.get_your_top_albums_by_range(
        range_key=range_key, limit=limit, offset=offset, source=source
    )
    return page
|
||||
|
||||
|
||||
@router.get("/genre-artist/{genre_name}", response_model=GenreArtistResponse)
async def get_genre_artist(
    genre_name: str,
    home_service: HomeService = Depends(get_home_service)
):
    """Resolve a representative artist MBID for a genre."""
    mbid = await home_service.get_genre_artist(genre_name)
    return GenreArtistResponse(artist_mbid=mbid)
|
||||
|
||||
|
||||
@router.post("/genre-artists", response_model=GenreArtistsBatchResponse)
async def get_genre_artists_batch(
    genres: list[str],
    home_service: HomeService = Depends(get_home_service)
):
    """Resolve representative artist MBIDs for a batch of genres in one call."""
    genre_map = await home_service.get_genre_artists_batch(genres)
    return GenreArtistsBatchResponse(genre_artists=genre_map)
|
||||
134
backend/api/v1/routes/jellyfin_library.py
Normal file
|
|
@ -0,0 +1,134 @@
|
|||
import logging
|
||||
from typing import Literal
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
|
||||
from api.v1.schemas.jellyfin import (
|
||||
JellyfinAlbumDetail,
|
||||
JellyfinAlbumMatch,
|
||||
JellyfinAlbumSummary,
|
||||
JellyfinArtistSummary,
|
||||
JellyfinLibraryStats,
|
||||
JellyfinPaginatedResponse,
|
||||
JellyfinSearchResponse,
|
||||
JellyfinTrackInfo,
|
||||
)
|
||||
from core.dependencies import get_jellyfin_library_service
|
||||
from core.exceptions import ExternalServiceError
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from services.jellyfin_library_service import JellyfinLibraryService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/jellyfin", tags=["jellyfin-library"])
|
||||
|
||||
|
||||
@router.get("/albums", response_model=JellyfinPaginatedResponse)
async def get_jellyfin_albums(
    limit: int = Query(default=50, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    sort_by: Literal["SortName", "DateCreated", "PlayCount", "ProductionYear"] = Query(default="SortName"),
    sort_order: Literal["Ascending", "Descending"] = Query(default="Ascending"),
    genre: str | None = Query(default=None),
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> JellyfinPaginatedResponse:
    """Paginated album listing from the Jellyfin library, optionally filtered by genre.

    Raises HTTP 502 when the Jellyfin backend cannot be reached.
    """
    try:
        items, total = await service.get_albums(
            limit=limit, offset=offset, sort_by=sort_by, sort_order=sort_order, genre=genre
        )
        return JellyfinPaginatedResponse(
            items=items, total=total, offset=offset, limit=limit
        )
    except ExternalServiceError as e:
        logger.error("Jellyfin service error getting albums: %s", e)
        # Chain the cause so the upstream failure is preserved in tracebacks (B904).
        raise HTTPException(status_code=502, detail="Failed to communicate with Jellyfin") from e
|
||||
|
||||
|
||||
@router.get("/albums/{album_id}", response_model=JellyfinAlbumDetail)
async def get_jellyfin_album_detail(
    album_id: str,
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> JellyfinAlbumDetail:
    """Full detail for one Jellyfin album; 404 when the id is unknown."""
    detail = await service.get_album_detail(album_id)
    if detail:
        return detail
    raise HTTPException(status_code=404, detail="Album not found")
|
||||
|
||||
|
||||
@router.get(
    "/albums/{album_id}/tracks", response_model=list[JellyfinTrackInfo]
)
async def get_jellyfin_album_tracks(
    album_id: str,
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> list[JellyfinTrackInfo]:
    """Track listing for one Jellyfin album.

    Raises HTTP 502 when the Jellyfin backend cannot be reached.
    """
    try:
        return await service.get_album_tracks(album_id)
    except ExternalServiceError as e:
        logger.error("Jellyfin service error getting album tracks %s: %s", album_id, e)
        # Chain the cause so the upstream failure is preserved in tracebacks (B904).
        raise HTTPException(status_code=502, detail="Failed to communicate with Jellyfin") from e
|
||||
|
||||
|
||||
@router.get(
    "/albums/match/{musicbrainz_id}", response_model=JellyfinAlbumMatch
)
async def match_jellyfin_album(
    musicbrainz_id: str,
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> JellyfinAlbumMatch:
    """Match a MusicBrainz release id against the Jellyfin library.

    Raises HTTP 502 when the Jellyfin backend cannot be reached.
    """
    try:
        return await service.match_album_by_mbid(musicbrainz_id)
    except ExternalServiceError as e:
        logger.error("Failed to match Jellyfin album %s: %s", musicbrainz_id, e)
        # Chain the cause so the upstream failure is preserved in tracebacks (B904).
        raise HTTPException(status_code=502, detail="Failed to match Jellyfin album") from e
|
||||
|
||||
|
||||
@router.get("/artists", response_model=list[JellyfinArtistSummary])
async def get_jellyfin_artists(
    limit: int = Query(default=50, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> list[JellyfinArtistSummary]:
    """Paginated artist listing from the Jellyfin library."""
    artists = await service.get_artists(limit=limit, offset=offset)
    return artists
|
||||
|
||||
|
||||
@router.get("/search", response_model=JellyfinSearchResponse)
async def search_jellyfin(
    q: str = Query(..., min_length=1),
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> JellyfinSearchResponse:
    """Free-text search across the Jellyfin library."""
    results = await service.search(q)
    return results
|
||||
|
||||
|
||||
@router.get("/recent", response_model=list[JellyfinAlbumSummary])
async def get_jellyfin_recent(
    limit: int = Query(default=20, ge=1, le=50),
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> list[JellyfinAlbumSummary]:
    """Recently played albums from Jellyfin."""
    recent = await service.get_recently_played(limit=limit)
    return recent
|
||||
|
||||
|
||||
@router.get("/favorites", response_model=list[JellyfinAlbumSummary])
async def get_jellyfin_favorites(
    limit: int = Query(default=20, ge=1, le=50),
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> list[JellyfinAlbumSummary]:
    """Return the user's favorite Jellyfin albums."""
    favorites = await service.get_favorites(limit=limit)
    return favorites
|
||||
|
||||
|
||||
@router.get("/genres", response_model=list[str])
async def get_jellyfin_genres(
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> list[str]:
    """List genre names known to Jellyfin; 502 when Jellyfin is unreachable."""
    try:
        genres = await service.get_genres()
    except ExternalServiceError as e:
        logger.error("Jellyfin service error getting genres: %s", e)
        raise HTTPException(status_code=502, detail="Failed to communicate with Jellyfin")
    return genres
|
||||
|
||||
|
||||
@router.get("/stats", response_model=JellyfinLibraryStats)
async def get_jellyfin_stats(
    service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
) -> JellyfinLibraryStats:
    """Return aggregate statistics for the Jellyfin library."""
    stats = await service.get_stats()
    return stats
|
||||
132
backend/api/v1/routes/lastfm.py
Normal file
|
|
@ -0,0 +1,132 @@
|
|||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from api.v1.schemas.settings import (
|
||||
LastFmAuthTokenResponse,
|
||||
LastFmAuthSessionRequest,
|
||||
LastFmAuthSessionResponse,
|
||||
LastFmConnectionSettings,
|
||||
LASTFM_SECRET_MASK,
|
||||
)
|
||||
from core.dependencies import (
|
||||
get_lastfm_auth_service,
|
||||
get_lastfm_repository,
|
||||
get_preferences_service,
|
||||
clear_lastfm_dependent_caches,
|
||||
)
|
||||
from core.exceptions import ConfigurationError, ExternalServiceError, TokenNotAuthorizedError
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
from services.lastfm_auth_service import LastFmAuthService
|
||||
from services.preferences_service import PreferencesService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/lastfm", tags=["lastfm"])
|
||||
|
||||
|
||||
@router.post("/auth/token", response_model=LastFmAuthTokenResponse)
async def request_auth_token(
    auth_service: LastFmAuthService = Depends(get_lastfm_auth_service),
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Start the Last.fm desktop-auth flow.

    Reads the stored API key/shared secret, asks Last.fm for a one-time
    token, and returns that token together with the URL the user must
    visit to approve it. Responds 400 when credentials are missing or
    invalid, 502 when Last.fm cannot be reached.
    """
    try:
        settings = preferences_service.get_lastfm_connection()
        # Fail fast with a user-actionable 400 before hitting Last.fm at all.
        if not settings.api_key or not settings.shared_secret:
            raise HTTPException(
                status_code=400,
                detail="Add a Last.fm API key and shared secret first",
            )

        token, auth_url = await auth_service.request_token(settings.api_key)
        logger.info(
            "Last.fm auth token requested",
            extra={"step": "token_requested", "status": "success"},
        )
        return LastFmAuthTokenResponse(token=token, auth_url=auth_url)
    except HTTPException:
        # Re-raise our own 400 unchanged; the handlers below are only for
        # service-layer failures.
        raise
    except ConfigurationError as e:
        logger.warning(
            "Last.fm auth token request failed (config): %s",
            e,
            extra={"step": "token_requested", "status": "config_error"},
        )
        raise HTTPException(status_code=400, detail="Last.fm settings are incomplete or invalid")
    except ExternalServiceError as e:
        logger.warning(
            "Last.fm auth token request failed (external): %s",
            e,
            extra={"step": "token_requested", "status": "external_error"},
        )
        raise HTTPException(status_code=502, detail="Couldn't reach Last.fm for a sign-in token")
|
||||
|
||||
|
||||
@router.post("/auth/session", response_model=LastFmAuthSessionResponse)
async def exchange_auth_session(
    request: LastFmAuthSessionRequest = MsgSpecBody(LastFmAuthSessionRequest),
    auth_service: LastFmAuthService = Depends(get_lastfm_auth_service),
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Complete the Last.fm auth flow: trade an approved token for a session key.

    On success the session key and username are persisted into the stored
    connection settings, and every cached Last.fm-dependent dependency is
    invalidated so subsequent requests pick up the new session.

    Error mapping: unapproved token -> 502, other Last.fm failures -> 502,
    configuration problems -> 422.
    """
    try:
        username, session_key, _ = await auth_service.exchange_session(request.token)

        # Persist the new session alongside the existing credentials; only
        # session_key and username change here.
        settings = preferences_service.get_lastfm_connection()
        updated = LastFmConnectionSettings(
            api_key=settings.api_key,
            shared_secret=settings.shared_secret,
            session_key=session_key,
            username=username,
            enabled=settings.enabled,
        )
        preferences_service.save_lastfm_connection(updated)
        # Invalidate cached singletons that captured the old (session-less)
        # settings; order: repository, auth service, then downstream caches.
        get_lastfm_repository.cache_clear()
        get_lastfm_auth_service.cache_clear()
        clear_lastfm_dependent_caches()
        logger.info(
            "Last.fm session exchanged for user %s",
            username,
            extra={"step": "session_exchanged", "status": "success"},
        )

        return LastFmAuthSessionResponse(
            username=username,
            success=True,
            message=f"Connected as {username}",
        )
    except TokenNotAuthorizedError:
        message = "Last.fm access hasn't been approved yet. Authorize it in the Last.fm tab, then try again."
        error_code = "token_not_authorized"
        logger.warning(
            "Last.fm session exchange failed: token not authorized",
            extra={
                "step": "session_exchanged",
                "status": "token_not_authorized",
                "error_code": error_code,
            },
        )
        raise HTTPException(status_code=502, detail=message)
    except ExternalServiceError as e:
        message = "Couldn't finish the Last.fm sign-in. Please try again."
        error_code = "external_error"
        logger.warning(
            "Last.fm session exchange failed: %s",
            e,
            extra={
                "step": "session_exchanged",
                "status": "external_error",
                "error_code": error_code,
            },
        )
        raise HTTPException(status_code=502, detail=message)
    except ConfigurationError as e:
        # NOTE(review): this handler sits after ExternalServiceError — if
        # ConfigurationError subclasses ExternalServiceError it can never be
        # reached. Confirm the exception hierarchy in core.exceptions.
        logger.warning(
            "Last.fm session exchange rejected: %s",
            e,
            extra={
                "step": "session_exchanged",
                "status": "configuration_error",
                "error_code": "configuration_error",
            },
        )
        raise HTTPException(status_code=422, detail="Last.fm configuration error. Check your settings and try again.")
|
||||
160
backend/api/v1/routes/library.py
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
import asyncio
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from api.v1.schemas.library import (
|
||||
LibraryResponse,
|
||||
LibraryArtistsResponse,
|
||||
LibraryAlbumsResponse,
|
||||
PaginatedLibraryAlbumsResponse,
|
||||
PaginatedLibraryArtistsResponse,
|
||||
RecentlyAddedResponse,
|
||||
LibraryStatsResponse,
|
||||
AlbumRemoveResponse,
|
||||
AlbumRemovePreviewResponse,
|
||||
SyncLibraryResponse,
|
||||
LibraryMbidsResponse,
|
||||
LibraryGroupedResponse,
|
||||
TrackResolveRequest,
|
||||
TrackResolveResponse,
|
||||
)
|
||||
from core.dependencies import get_library_service
|
||||
from core.exceptions import ExternalServiceError
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute, MsgSpecBody
|
||||
from services.library_service import LibraryService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/library", tags=["library"])
|
||||
|
||||
|
||||
@router.get("/", response_model=LibraryResponse)
async def get_library(
    library_service: LibraryService = Depends(get_library_service)
):
    """Return the entire music library in a single response."""
    full_library = await library_service.get_library()
    return LibraryResponse(library=full_library)
|
||||
|
||||
|
||||
@router.get("/artists", response_model=PaginatedLibraryArtistsResponse)
async def get_library_artists(
    limit: int = 50,
    offset: int = 0,
    sort_by: str = "name",
    sort_order: str = "asc",
    q: str | None = None,
    library_service: LibraryService = Depends(get_library_service)
):
    """List library artists with pagination, sorting, and optional search."""
    # Sanitize paging and silently fall back to defaults for unknown sort inputs.
    page_size = max(1, min(limit, 100))
    start = max(0, offset)
    if sort_by not in ("name", "album_count", "date_added"):
        sort_by = "name"
    if sort_order not in ("asc", "desc"):
        sort_order = "asc"
    artists, total = await library_service.get_artists_paginated(
        limit=page_size,
        offset=start,
        sort_by=sort_by,
        sort_order=sort_order,
        search=q,
    )
    return PaginatedLibraryArtistsResponse(
        artists=artists, total=total, offset=start, limit=page_size
    )
|
||||
|
||||
|
||||
@router.get("/albums", response_model=PaginatedLibraryAlbumsResponse)
async def get_library_albums(
    limit: int = 50,
    offset: int = 0,
    sort_by: str = "date_added",
    sort_order: str = "desc",
    q: str | None = None,
    library_service: LibraryService = Depends(get_library_service)
):
    """List library albums with pagination, sorting, and optional search."""
    # Sanitize paging and silently fall back to defaults for unknown sort inputs.
    page_size = max(1, min(limit, 100))
    start = max(0, offset)
    if sort_by not in ("date_added", "artist", "title", "year"):
        sort_by = "date_added"
    if sort_order not in ("asc", "desc"):
        sort_order = "desc"
    albums, total = await library_service.get_albums_paginated(
        limit=page_size,
        offset=start,
        sort_by=sort_by,
        sort_order=sort_order,
        search=q,
    )
    return PaginatedLibraryAlbumsResponse(
        albums=albums, total=total, offset=start, limit=page_size
    )
|
||||
|
||||
|
||||
@router.get("/recently-added", response_model=RecentlyAddedResponse)
async def get_recently_added(
    limit: int = 20,
    library_service: LibraryService = Depends(get_library_service)
):
    """Return the most recently added albums.

    Fix: `limit` was previously forwarded unclamped, unlike the sibling
    /artists and /albums endpoints which bound it to 1-100. Clamp it here
    for consistency and to guard against abusive or non-positive values.
    """
    limit = max(1, min(limit, 100))
    albums = await library_service.get_recently_added(limit=limit)
    # Artists are not tracked for recency; the empty list keeps the schema stable.
    return RecentlyAddedResponse(albums=albums, artists=[])
|
||||
|
||||
|
||||
@router.post("/sync", response_model=SyncLibraryResponse)
async def sync_library(
    library_service: LibraryService = Depends(get_library_service)
):
    """Trigger a manual library sync; 429 during cooldown, 503 on upstream failure."""
    try:
        result = await library_service.sync_library(is_manual=True)
    except ExternalServiceError as e:
        logger.error(f"Couldn't sync the library: {e}")
        # The service signals cooldown via the error text, not a dedicated type.
        if "cooldown" in str(e).lower():
            raise HTTPException(status_code=429, detail="Sync is on cooldown, please wait")
        raise HTTPException(status_code=503, detail="External service unavailable")
    return result
|
||||
|
||||
|
||||
@router.get("/stats", response_model=LibraryStatsResponse)
async def get_library_stats(
    library_service: LibraryService = Depends(get_library_service)
):
    """Return aggregate statistics for the music library."""
    stats = await library_service.get_stats()
    return stats
|
||||
|
||||
|
||||
@router.get("/mbids", response_model=LibraryMbidsResponse)
async def get_library_mbids(
    library_service: LibraryService = Depends(get_library_service)
):
    """Return owned and requested MusicBrainz ids, fetched concurrently."""
    owned, requested = await asyncio.gather(
        library_service.get_library_mbids(),
        library_service.get_requested_mbids(),
    )
    return LibraryMbidsResponse(mbids=owned, requested_mbids=requested)
|
||||
|
||||
|
||||
@router.get("/grouped", response_model=LibraryGroupedResponse)
async def get_library_grouped(
    library_service: LibraryService = Depends(get_library_service)
):
    """Return the library grouped by artist/album structure."""
    grouped_library = await library_service.get_library_grouped()
    return LibraryGroupedResponse(library=grouped_library)
|
||||
|
||||
|
||||
@router.get("/album/{album_mbid}/removal-preview", response_model=AlbumRemovePreviewResponse)
async def get_album_removal_preview(
    album_mbid: str,
    library_service: LibraryService = Depends(get_library_service)
):
    """Show what removing an album would delete, without deleting anything."""
    try:
        preview = await library_service.get_album_removal_preview(album_mbid)
    except ExternalServiceError as e:
        logger.error(f"Failed to get album removal preview: {e}")
        raise HTTPException(status_code=500, detail="Failed to load removal preview")
    return preview
|
||||
|
||||
|
||||
@router.delete("/album/{album_mbid}", response_model=AlbumRemoveResponse)
async def remove_album(
    album_mbid: str,
    delete_files: bool = False,
    library_service: LibraryService = Depends(get_library_service)
):
    """Remove an album from the library, optionally deleting its files on disk."""
    try:
        outcome = await library_service.remove_album(album_mbid, delete_files=delete_files)
    except ExternalServiceError as e:
        logger.error(f"Couldn't remove album {album_mbid}: {e}")
        raise HTTPException(status_code=500, detail="Couldn't remove this album")
    return outcome
|
||||
|
||||
|
||||
@router.post("/resolve-tracks", response_model=TrackResolveResponse)
async def resolve_tracks(
    body: TrackResolveRequest = MsgSpecBody(TrackResolveRequest),
    library_service: LibraryService = Depends(get_library_service),
):
    """Resolve a batch of track references against the library in one call."""
    resolved = await library_service.resolve_tracks_batch(body.items)
    return resolved
|
||||
101
backend/api/v1/routes/local_library.py
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
import logging
|
||||
from typing import Literal
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
|
||||
from api.v1.schemas.local_files import (
|
||||
LocalAlbumMatch,
|
||||
LocalAlbumSummary,
|
||||
LocalPaginatedResponse,
|
||||
LocalStorageStats,
|
||||
LocalTrackInfo,
|
||||
)
|
||||
from core.dependencies import get_local_files_service
|
||||
from core.exceptions import ExternalServiceError
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from services.local_files_service import LocalFilesService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/local", tags=["local-files"])
|
||||
|
||||
|
||||
@router.get("/albums", response_model=LocalPaginatedResponse)
async def get_local_albums(
    limit: int = Query(default=50, ge=1, le=200),
    offset: int = Query(default=0, ge=0),
    sort_by: Literal["name", "date_added", "year"] = "name",
    sort_order: Literal["asc", "desc"] = Query(default="asc"),
    q: str | None = Query(default=None, min_length=1),
    service: LocalFilesService = Depends(get_local_files_service),
) -> LocalPaginatedResponse:
    """Page through albums discovered in the local music files."""
    try:
        page = await service.get_albums(
            limit=limit,
            offset=offset,
            sort_by=sort_by,
            sort_order=sort_order,
            search_query=q,
        )
    except ExternalServiceError as e:
        logger.error("Failed to get local albums: %s", e)
        raise HTTPException(status_code=502, detail="Failed to get local albums")
    return page
|
||||
|
||||
|
||||
@router.get("/albums/match/{musicbrainz_id}", response_model=LocalAlbumMatch)
async def match_local_album(
    musicbrainz_id: str,
    service: LocalFilesService = Depends(get_local_files_service),
) -> LocalAlbumMatch:
    """Find the local album matching a MusicBrainz release id."""
    try:
        match = await service.match_album_by_mbid(musicbrainz_id)
    except ExternalServiceError as e:
        logger.error("Failed to match local album %s: %s", musicbrainz_id, e)
        raise HTTPException(status_code=502, detail="Failed to match local album")
    return match
|
||||
|
||||
|
||||
@router.get("/albums/{album_id}/tracks", response_model=list[LocalTrackInfo])
async def get_local_album_tracks(
    album_id: int,
    service: LocalFilesService = Depends(get_local_files_service),
) -> list[LocalTrackInfo]:
    """Return the track listing for one locally stored album."""
    try:
        tracks = await service.get_album_tracks_by_id(album_id)
    except ExternalServiceError as e:
        logger.error("Failed to get local album tracks %d: %s", album_id, e)
        raise HTTPException(status_code=502, detail="Failed to get local album tracks")
    return tracks
|
||||
|
||||
|
||||
@router.get("/search", response_model=list[LocalAlbumSummary])
async def search_local(
    q: str = Query(min_length=1),
    service: LocalFilesService = Depends(get_local_files_service),
) -> list[LocalAlbumSummary]:
    """Search local albums by free text."""
    try:
        hits = await service.search(q)
    except ExternalServiceError as e:
        logger.error("Failed to search local files: %s", e)
        raise HTTPException(status_code=502, detail="Failed to search local files")
    return hits
|
||||
|
||||
|
||||
@router.get("/recent", response_model=list[LocalAlbumSummary])
async def get_local_recent(
    limit: int = Query(default=20, ge=1, le=50),
    service: LocalFilesService = Depends(get_local_files_service),
) -> list[LocalAlbumSummary]:
    """Return the most recently added local albums."""
    try:
        recent = await service.get_recently_added(limit=limit)
    except ExternalServiceError as e:
        logger.error("Failed to get recent local albums: %s", e)
        raise HTTPException(status_code=502, detail="Failed to get recent local albums")
    return recent
|
||||
|
||||
|
||||
@router.get("/stats", response_model=LocalStorageStats)
async def get_local_stats(
    service: LocalFilesService = Depends(get_local_files_service),
) -> LocalStorageStats:
    """Return storage statistics for the local music files."""
    stats = await service.get_storage_stats()
    return stats
|
||||
157
backend/api/v1/routes/navidrome_library.py
Normal file
|
|
@ -0,0 +1,157 @@
|
|||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from fastapi.responses import Response
|
||||
|
||||
from api.v1.schemas.navidrome import (
|
||||
NavidromeAlbumDetail,
|
||||
NavidromeAlbumMatch,
|
||||
NavidromeAlbumPage,
|
||||
NavidromeAlbumSummary,
|
||||
NavidromeArtistSummary,
|
||||
NavidromeLibraryStats,
|
||||
NavidromeSearchResponse,
|
||||
)
|
||||
from core.dependencies import get_navidrome_library_service, get_navidrome_repository
|
||||
from core.exceptions import ExternalServiceError
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from repositories.navidrome_repository import NavidromeRepository
|
||||
from services.navidrome_library_service import NavidromeLibraryService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/navidrome", tags=["navidrome-library"])
|
||||
|
||||
|
||||
# Maps the UI sort keys to Subsonic album-list types.
# NOTE(review): "year" maps to alphabeticalByName rather than a year-based
# listing — presumably a deliberate fallback; confirm against the Navidrome
# Subsonic API's supported list types.
_SORT_MAP: dict[str, str] = {
    "name": "alphabeticalByName",
    "date_added": "newest",
    "year": "alphabeticalByName",
}
|
||||
|
||||
|
||||
@router.get("/albums", response_model=NavidromeAlbumPage)
async def get_navidrome_albums(
    limit: int = Query(default=48, ge=1, le=500, alias="limit"),
    offset: int = Query(default=0, ge=0),
    sort_by: str = Query(default="name"),
    genre: str = Query(default=""),
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> NavidromeAlbumPage:
    """Page through Navidrome albums, optionally filtered by genre."""
    try:
        # A genre filter forces the Subsonic byGenre listing; otherwise map the
        # UI sort key through _SORT_MAP.
        subsonic_type = "byGenre" if genre else _SORT_MAP.get(sort_by, "alphabeticalByName")
        items = await service.get_albums(
            type=subsonic_type, size=limit, offset=offset, genre=genre if genre else None
        )
        stats = await service.get_stats()
        # A full page implies more results may follow, so report the library-wide
        # total; a short page means this is the last page.
        if len(items) >= limit:
            total = stats.total_albums
        else:
            total = offset + len(items)
        return NavidromeAlbumPage(items=items, total=total)
    except ExternalServiceError as e:
        logger.error("Navidrome service error getting albums: %s", e)
        raise HTTPException(status_code=502, detail="Failed to communicate with Navidrome")
|
||||
|
||||
|
||||
@router.get("/albums/{album_id}", response_model=NavidromeAlbumDetail)
async def get_navidrome_album_detail(
    album_id: str,
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> NavidromeAlbumDetail:
    """Return full details for one Navidrome album, or 404 if unknown."""
    detail = await service.get_album_detail(album_id)
    if detail is None or not detail:
        raise HTTPException(status_code=404, detail="Album not found")
    return detail
|
||||
|
||||
|
||||
@router.get("/artists", response_model=list[NavidromeArtistSummary])
async def get_navidrome_artists(
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> list[NavidromeArtistSummary]:
    """List all artists known to Navidrome."""
    artists = await service.get_artists()
    return artists
|
||||
|
||||
|
||||
@router.get("/artists/{artist_id}")
async def get_navidrome_artist_detail(
    artist_id: str,
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> dict:
    """Return full details for one Navidrome artist, or 404 if unknown."""
    detail = await service.get_artist_detail(artist_id)
    if not detail:
        raise HTTPException(status_code=404, detail="Artist not found")
    return detail
|
||||
|
||||
|
||||
@router.get("/search", response_model=NavidromeSearchResponse)
async def search_navidrome(
    q: str = Query(..., min_length=1),
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> NavidromeSearchResponse:
    """Run a free-text search against the Navidrome library."""
    results = await service.search(q)
    return results
|
||||
|
||||
|
||||
@router.get("/recent", response_model=list[NavidromeAlbumSummary])
async def get_navidrome_recent(
    limit: int = Query(default=20, ge=1, le=50),
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> list[NavidromeAlbumSummary]:
    """Return recently played/added Navidrome albums."""
    recent = await service.get_recent(limit=limit)
    return recent
|
||||
|
||||
|
||||
@router.get("/favorites", response_model=list[NavidromeAlbumSummary])
async def get_navidrome_favorites(
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> list[NavidromeAlbumSummary]:
    """Return the user's starred Navidrome albums (albums slice of the favorites result)."""
    favorites = await service.get_favorites()
    return favorites.albums
|
||||
|
||||
|
||||
@router.get("/genres", response_model=list[str])
async def get_navidrome_genres(
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> list[str]:
    """List genre names known to Navidrome; 502 when Navidrome is unreachable."""
    try:
        genres = await service.get_genres()
    except ExternalServiceError as e:
        logger.error("Navidrome service error getting genres: %s", e)
        raise HTTPException(status_code=502, detail="Failed to communicate with Navidrome")
    return genres
|
||||
|
||||
|
||||
@router.get("/stats", response_model=NavidromeLibraryStats)
async def get_navidrome_stats(
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> NavidromeLibraryStats:
    """Return aggregate statistics for the Navidrome library."""
    stats = await service.get_stats()
    return stats
|
||||
|
||||
|
||||
@router.get("/cover/{cover_art_id}")
async def get_navidrome_cover(
    cover_art_id: str,
    size: int = Query(default=500, ge=32, le=1200),
    repo: NavidromeRepository = Depends(get_navidrome_repository),
) -> Response:
    """Proxy Navidrome cover art, marking the response immutable for long-lived client caching."""
    try:
        image_bytes, content_type = await repo.get_cover_art(cover_art_id, size)
    except ExternalServiceError as e:
        logger.warning("Navidrome cover art failed for %s: %s", cover_art_id, e)
        raise HTTPException(status_code=502, detail="Failed to fetch cover art")
    return Response(
        content=image_bytes,
        media_type=content_type,
        headers={"Cache-Control": "public, max-age=31536000, immutable"},
    )
|
||||
|
||||
|
||||
@router.get("/album-match/{album_id}", response_model=NavidromeAlbumMatch)
async def match_navidrome_album(
    album_id: str,
    name: str = Query(default=""),
    artist: str = Query(default=""),
    service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> NavidromeAlbumMatch:
    """Match a Navidrome album by id, using name/artist as matching hints."""
    try:
        match = await service.get_album_match(
            album_id=album_id,
            album_name=name,
            artist_name=artist,
        )
    except ExternalServiceError as e:
        logger.error("Failed to match Navidrome album %s: %s", album_id, e)
        raise HTTPException(status_code=502, detail="Failed to match Navidrome album")
    return match
|
||||
360
backend/api/v1/routes/playlists.py
Normal file
|
|
@ -0,0 +1,360 @@
|
|||
import logging
|
||||
from fastapi import APIRouter, File, UploadFile
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
from api.v1.schemas.common import StatusMessageResponse
|
||||
from api.v1.schemas.playlists import (
|
||||
AddTracksRequest,
|
||||
AddTracksResponse,
|
||||
CheckTrackMembershipRequest,
|
||||
CheckTrackMembershipResponse,
|
||||
CoverUploadResponse,
|
||||
CreatePlaylistRequest,
|
||||
PlaylistDetailResponse,
|
||||
PlaylistListResponse,
|
||||
PlaylistSummaryResponse,
|
||||
PlaylistTrackResponse,
|
||||
RemoveTracksRequest,
|
||||
ReorderTrackRequest,
|
||||
ReorderTrackResponse,
|
||||
ResolveSourcesResponse,
|
||||
UpdatePlaylistRequest,
|
||||
UpdateTrackRequest,
|
||||
)
|
||||
from core.dependencies import JellyfinLibraryServiceDep, LocalFilesServiceDep, NavidromeLibraryServiceDep, PlaylistServiceDep
|
||||
from core.exceptions import PlaylistNotFoundError
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(
|
||||
route_class=MsgSpecRoute,
|
||||
prefix="/playlists",
|
||||
tags=["playlists"],
|
||||
)
|
||||
|
||||
|
||||
def _normalize_cover_url(url: str | None) -> str | None:
|
||||
if url and url.startswith("/api/covers/"):
|
||||
return "/api/v1/covers/" + url[len("/api/covers/"):]
|
||||
return url
|
||||
|
||||
|
||||
def _normalize_source_type(source_type: str) -> str:
|
||||
return source_type
|
||||
|
||||
|
||||
def _normalize_available_sources(sources: list[str] | None) -> list[str] | None:
|
||||
if sources is None:
|
||||
return None
|
||||
return sources
|
||||
|
||||
|
||||
def _custom_cover_url(playlist_id: str, cover_image_path: str | None) -> str | None:
|
||||
if cover_image_path:
|
||||
return f"/api/v1/playlists/{playlist_id}/cover"
|
||||
return None
|
||||
|
||||
|
||||
def _track_to_response(t) -> PlaylistTrackResponse:
    """Map a playlist-track domain object field-for-field onto the API response model.

    Cover URL, source type, and available sources are routed through the
    normalization helpers so legacy values are rewritten consistently
    everywhere tracks are serialized.
    """
    return PlaylistTrackResponse(
        id=t.id,
        position=t.position,
        track_name=t.track_name,
        artist_name=t.artist_name,
        album_name=t.album_name,
        album_id=t.album_id,
        artist_id=t.artist_id,
        track_source_id=t.track_source_id,
        cover_url=_normalize_cover_url(t.cover_url),
        source_type=_normalize_source_type(t.source_type),
        available_sources=_normalize_available_sources(t.available_sources),
        format=t.format,
        track_number=t.track_number,
        disc_number=t.disc_number,
        duration=t.duration,
        created_at=t.created_at,
    )
|
||||
|
||||
|
||||
@router.get("", response_model=PlaylistListResponse)
async def list_playlists(
    service: PlaylistServiceDep,
) -> PlaylistListResponse:
    """Return summaries for every playlist."""
    summaries = await service.get_all_playlists()
    items = []
    for s in summaries:
        covers = [_normalize_cover_url(u) for u in s.cover_urls] if s.cover_urls else []
        items.append(
            PlaylistSummaryResponse(
                id=s.id,
                name=s.name,
                track_count=s.track_count,
                total_duration=s.total_duration,
                cover_urls=covers,
                custom_cover_url=_custom_cover_url(s.id, s.cover_image_path),
                created_at=s.created_at,
                updated_at=s.updated_at,
            )
        )
    return PlaylistListResponse(playlists=items)
|
||||
|
||||
|
||||
@router.post("/check-tracks", response_model=CheckTrackMembershipResponse)
async def check_track_membership(
    service: PlaylistServiceDep,
    body: CheckTrackMembershipRequest = MsgSpecBody(CheckTrackMembershipRequest),
) -> CheckTrackMembershipResponse:
    """Report which of the given (track, artist, album) tuples already belong to playlists."""
    lookup_keys = [(t.track_name, t.artist_name, t.album_name) for t in body.tracks]
    membership = await service.check_track_membership(lookup_keys)
    return CheckTrackMembershipResponse(membership=membership)
|
||||
|
||||
|
||||
@router.post("", response_model=PlaylistDetailResponse, status_code=201)
async def create_playlist(
    service: PlaylistServiceDep,
    body: CreatePlaylistRequest = MsgSpecBody(CreatePlaylistRequest),
) -> PlaylistDetailResponse:
    """Create an empty playlist and return its detail view."""
    created = await service.create_playlist(body.name)
    return PlaylistDetailResponse(
        id=created.id,
        name=created.name,
        custom_cover_url=_custom_cover_url(created.id, created.cover_image_path),
        tracks=[],
        track_count=0,
        total_duration=None,
        created_at=created.created_at,
        updated_at=created.updated_at,
    )
|
||||
|
||||
|
||||
@router.get("/{playlist_id}", response_model=PlaylistDetailResponse)
async def get_playlist(
    playlist_id: str,
    service: PlaylistServiceDep,
) -> PlaylistDetailResponse:
    """Return a playlist with its full track listing and derived display metadata."""
    playlist, tracks = await service.get_playlist_with_tracks(playlist_id)
    # First four distinct track covers, in order, form the collage.
    distinct_covers = dict.fromkeys(
        _normalize_cover_url(t.cover_url) for t in tracks if t.cover_url
    )
    collage = list(distinct_covers)[:4]
    duration_total = sum(t.duration for t in tracks if t.duration)
    return PlaylistDetailResponse(
        id=playlist.id,
        name=playlist.name,
        cover_urls=collage,
        custom_cover_url=_custom_cover_url(playlist.id, playlist.cover_image_path),
        tracks=[_track_to_response(t) for t in tracks],
        track_count=len(tracks),
        total_duration=duration_total or None,
        created_at=playlist.created_at,
        updated_at=playlist.updated_at,
    )
|
||||
|
||||
|
||||
@router.put("/{playlist_id}", response_model=PlaylistDetailResponse)
async def update_playlist(
    playlist_id: str,
    service: PlaylistServiceDep,
    body: UpdatePlaylistRequest = MsgSpecBody(UpdatePlaylistRequest),
) -> PlaylistDetailResponse:
    """Rename a playlist and return its refreshed detail view."""
    playlist, tracks = await service.update_playlist_with_detail(playlist_id, name=body.name)
    # First four distinct track covers, in order, form the collage.
    distinct_covers = dict.fromkeys(
        _normalize_cover_url(t.cover_url) for t in tracks if t.cover_url
    )
    collage = list(distinct_covers)[:4]
    duration_total = sum(t.duration for t in tracks if t.duration)
    return PlaylistDetailResponse(
        id=playlist.id,
        name=playlist.name,
        cover_urls=collage,
        custom_cover_url=_custom_cover_url(playlist.id, playlist.cover_image_path),
        tracks=[_track_to_response(t) for t in tracks],
        track_count=len(tracks),
        total_duration=duration_total or None,
        created_at=playlist.created_at,
        updated_at=playlist.updated_at,
    )
|
||||
|
||||
|
||||
@router.delete("/{playlist_id}", response_model=StatusMessageResponse)
async def delete_playlist(
    playlist_id: str,
    service: PlaylistServiceDep,
) -> StatusMessageResponse:
    """Delete a playlist and confirm with a status message."""
    await service.delete_playlist(playlist_id)
    return StatusMessageResponse(status="ok", message="Playlist deleted")
|
||||
|
||||
|
||||
@router.post(
    "/{playlist_id}/tracks",
    response_model=AddTracksResponse,
    status_code=201,
)
async def add_tracks(
    playlist_id: str,
    service: PlaylistServiceDep,
    body: AddTracksRequest = MsgSpecBody(AddTracksRequest),
) -> AddTracksResponse:
    """Append (or insert at body.position) a batch of tracks to a playlist.

    Each request track is flattened into the plain-dict shape the playlist
    service expects; the dict keys below are the service-layer contract,
    so they must stay in sync with PlaylistService.add_tracks.
    """
    track_dicts = [
        {
            "track_name": t.track_name,
            "artist_name": t.artist_name,
            "album_name": t.album_name,
            "album_id": t.album_id,
            "artist_id": t.artist_id,
            "track_source_id": t.track_source_id,
            "cover_url": t.cover_url,
            "source_type": t.source_type,
            "available_sources": t.available_sources,
            "format": t.format,
            "track_number": t.track_number,
            "disc_number": t.disc_number,
            # Durations may arrive as floats from clients; store whole seconds.
            "duration": int(t.duration) if t.duration is not None else None,
        }
        for t in body.tracks
    ]
    created = await service.add_tracks(playlist_id, track_dicts, body.position)
    return AddTracksResponse(tracks=[_track_to_response(t) for t in created])
|
||||
|
||||
|
||||
@router.post(
    "/{playlist_id}/tracks/remove",
    response_model=StatusMessageResponse,
)
async def remove_tracks(
    playlist_id: str,
    service: PlaylistServiceDep,
    body: RemoveTracksRequest = MsgSpecBody(RemoveTracksRequest),
) -> StatusMessageResponse:
    """Remove a batch of tracks from a playlist by their ids."""
    removed_count = await service.remove_tracks(playlist_id, body.track_ids)
    return StatusMessageResponse(status="ok", message=f"{removed_count} track(s) removed")
|
||||
|
||||
|
||||
@router.delete(
    "/{playlist_id}/tracks/{track_id}",
    response_model=StatusMessageResponse,
)
async def remove_track(
    playlist_id: str,
    track_id: str,
    service: PlaylistServiceDep,
) -> StatusMessageResponse:
    """Remove a single track from a playlist."""
    await service.remove_track(playlist_id, track_id)
    return StatusMessageResponse(status="ok", message="Track removed")
|
||||
|
||||
|
||||
# Reorder must be registered before the {track_id} PATCH to avoid
# "reorder" being captured as a track_id path parameter.
@router.patch(
    "/{playlist_id}/tracks/reorder",
    response_model=ReorderTrackResponse,
)
async def reorder_track(
    playlist_id: str,
    service: PlaylistServiceDep,
    body: ReorderTrackRequest = MsgSpecBody(ReorderTrackRequest),
) -> ReorderTrackResponse:
    """Move a track to a new position within the playlist.

    The response carries the position the service actually assigned,
    which may differ from the requested one (e.g. when clamped).
    """
    position = await service.reorder_track(
        playlist_id, body.track_id, body.new_position
    )
    return ReorderTrackResponse(
        status="ok",
        message="Track reordered",
        actual_position=position,
    )
|
||||
|
||||
|
||||
@router.patch(
    "/{playlist_id}/tracks/{track_id}",
    response_model=PlaylistTrackResponse,
)
async def update_track(
    playlist_id: str,
    track_id: str,
    service: PlaylistServiceDep,
    jf_service: JellyfinLibraryServiceDep,
    local_service: LocalFilesServiceDep,
    nd_service: NavidromeLibraryServiceDep,
    body: UpdateTrackRequest = MsgSpecBody(UpdateTrackRequest),
) -> PlaylistTrackResponse:
    """Update the playback source of one playlist track."""
    updated = await service.update_track_source(
        playlist_id,
        track_id,
        source_type=body.source_type,
        available_sources=body.available_sources,
        jf_service=jf_service,
        local_service=local_service,
        nd_service=nd_service,
    )
    return _track_to_response(updated)
|
||||
|
||||
|
||||
@router.post(
    "/{playlist_id}/resolve-sources",
    response_model=ResolveSourcesResponse,
)
async def resolve_sources(
    playlist_id: str,
    service: PlaylistServiceDep,
    jf_service: JellyfinLibraryServiceDep,
    local_service: LocalFilesServiceDep,
    nd_service: NavidromeLibraryServiceDep,
) -> ResolveSourcesResponse:
    """Recompute which library backends can serve each playlist track."""
    resolved = await service.resolve_track_sources(
        playlist_id,
        jf_service=jf_service,
        local_service=local_service,
        nd_service=nd_service,
    )
    return ResolveSourcesResponse(sources=resolved)
|
||||
|
||||
|
||||
@router.post("/{playlist_id}/cover", response_model=CoverUploadResponse)
async def upload_cover(
    playlist_id: str,
    service: PlaylistServiceDep,
    cover_image: UploadFile = File(...),
) -> CoverUploadResponse:
    """Upload a playlist cover image (max 2 MB), streamed in chunks.

    The upload is read incrementally and rejected as soon as the size cap
    is exceeded, so an oversized body is never fully buffered.

    Raises:
        InvalidPlaylistDataError: if the image exceeds the 2 MB limit.
    """
    # Hoisted to the function body: the original executed this import
    # inside the read loop, obscuring the error path.
    from core.exceptions import InvalidPlaylistDataError

    max_size = 2 * 1024 * 1024  # 2 MB
    chunk_size = 8192
    buffer = bytearray()
    while True:
        chunk = await cover_image.read(chunk_size)
        if not chunk:
            break
        buffer.extend(chunk)
        if len(buffer) > max_size:
            raise InvalidPlaylistDataError("Image too large. Maximum size is 2 MB")
    cover_url = await service.upload_cover(
        playlist_id, bytes(buffer), cover_image.content_type or "",
    )
    return CoverUploadResponse(cover_url=cover_url)
|
||||
|
||||
|
||||
@router.get("/{playlist_id}/cover")
async def get_cover(
    playlist_id: str,
    service: PlaylistServiceDep,
):
    """Serve the stored cover image for a playlist.

    The media type is inferred from the stored file's extension; unknown
    extensions fall back to an opaque binary type.
    """
    path = await service.get_cover_path(playlist_id)
    if path is None:
        raise PlaylistNotFoundError("No cover found")

    suffix_types = {
        ".jpg": "image/jpeg",
        ".jpeg": "image/jpeg",
        ".png": "image/png",
        ".webp": "image/webp",
    }
    media_type = suffix_types.get(path.suffix.lower(), "application/octet-stream")

    return FileResponse(
        path,
        media_type=media_type,
        headers={"Cache-Control": "public, max-age=3600"},
    )
|
||||
|
||||
|
||||
@router.delete(
    "/{playlist_id}/cover",
    response_model=StatusMessageResponse,
)
async def remove_cover(
    playlist_id: str,
    service: PlaylistServiceDep,
) -> StatusMessageResponse:
    """Delete the playlist's custom cover image."""
    await service.remove_cover(playlist_id)
    return StatusMessageResponse(status="ok", message="Cover removed")
|
||||
208
backend/api/v1/routes/profile.py
Normal file
|
|
@ -0,0 +1,208 @@
|
|||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File
|
||||
from fastapi.responses import FileResponse
|
||||
|
||||
from api.v1.schemas.profile import (
|
||||
ProfileResponse,
|
||||
ProfileSettings,
|
||||
ProfileUpdateRequest,
|
||||
ServiceConnection,
|
||||
LibraryStats,
|
||||
)
|
||||
from core.dependencies import (
|
||||
get_preferences_service,
|
||||
get_jellyfin_library_service,
|
||||
get_local_files_service,
|
||||
get_navidrome_library_service,
|
||||
get_settings_service,
|
||||
)
|
||||
from core.config import Settings, get_settings
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
from services.preferences_service import PreferencesService
|
||||
from services.jellyfin_library_service import JellyfinLibraryService
|
||||
from services.local_files_service import LocalFilesService
|
||||
from services.navidrome_library_service import NavidromeLibraryService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
AVATAR_DIR_NAME = "profile"
|
||||
ALLOWED_IMAGE_TYPES = {"image/jpeg", "image/png", "image/webp", "image/gif"}
|
||||
MAX_AVATAR_SIZE = 5 * 1024 * 1024 # 5 MB
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/profile", tags=["profile"])
|
||||
|
||||
|
||||
@router.get("", response_model=ProfileResponse)
async def get_profile(
    preferences: PreferencesService = Depends(get_preferences_service),
    jellyfin_service: JellyfinLibraryService = Depends(get_jellyfin_library_service),
    local_service: LocalFilesService = Depends(get_local_files_service),
    navidrome_service: NavidromeLibraryService = Depends(get_navidrome_library_service),
) -> ProfileResponse:
    """Return the user profile page payload.

    Combines display settings, the configured service connections, and
    per-source library statistics. Stats for the enabled sources are
    fetched concurrently; a failing source is logged and omitted rather
    than failing the whole request. (Removed the dead ``library_stats_list``
    initialisation the original had — it was always reassigned.)
    """
    profile = preferences.get_profile_settings()

    jellyfin_conn = preferences.get_jellyfin_connection()
    lb_conn = preferences.get_listenbrainz_connection()
    lastfm_conn = preferences.get_lastfm_connection()
    navidrome_conn = preferences.get_navidrome_connection()
    local_conn = preferences.get_local_files_connection()

    services: list[ServiceConnection] = [
        ServiceConnection(
            name="Jellyfin",
            enabled=jellyfin_conn.enabled,
            username=jellyfin_conn.user_id,
            url=jellyfin_conn.jellyfin_url,
        ),
        ServiceConnection(
            name="ListenBrainz",
            enabled=lb_conn.enabled,
            username=lb_conn.username,
            url="https://listenbrainz.org",
        ),
        ServiceConnection(
            name="Last.fm",
            enabled=lastfm_conn.enabled,
            username=lastfm_conn.username,
            url="https://www.last.fm",
        ),
        ServiceConnection(
            name="Navidrome",
            enabled=navidrome_conn.enabled,
            username=navidrome_conn.username,
            url=navidrome_conn.navidrome_url,
        ),
    ]

    async def _fetch_jellyfin_stats() -> LibraryStats | None:
        # Disabled sources contribute no stats row.
        if not jellyfin_conn.enabled:
            return None
        try:
            s = await jellyfin_service.get_stats()
            return LibraryStats(
                source="Jellyfin",
                total_tracks=s.total_tracks,
                total_albums=s.total_albums,
                total_artists=s.total_artists,
            )
        except Exception as e:
            logger.warning("Failed to fetch Jellyfin stats for profile: %s", e)
            return None

    async def _fetch_local_stats() -> LibraryStats | None:
        if not local_conn.enabled:
            return None
        try:
            s = await local_service.get_storage_stats()
            return LibraryStats(
                source="Local Files",
                total_tracks=s.total_tracks,
                total_albums=s.total_albums,
                total_artists=s.total_artists,
                total_size_bytes=s.total_size_bytes,
                total_size_human=s.total_size_human,
            )
        except Exception as e:
            logger.warning("Failed to fetch Local Files stats for profile: %s", e)
            return None

    async def _fetch_navidrome_stats() -> LibraryStats | None:
        if not navidrome_conn.enabled:
            return None
        try:
            s = await navidrome_service.get_stats()
            return LibraryStats(
                source="Navidrome",
                total_tracks=s.total_tracks,
                total_albums=s.total_albums,
                total_artists=s.total_artists,
            )
        except Exception as e:
            logger.warning("Failed to fetch Navidrome stats for profile: %s", e)
            return None

    results = await asyncio.gather(
        _fetch_jellyfin_stats(), _fetch_local_stats(), _fetch_navidrome_stats()
    )
    library_stats_list = [r for r in results if r is not None]

    return ProfileResponse(
        display_name=profile.display_name,
        avatar_url=profile.avatar_url,
        services=services,
        library_stats=library_stats_list,
    )
|
||||
|
||||
|
||||
@router.put("", response_model=ProfileSettings)
async def update_profile(
    body: ProfileUpdateRequest = MsgSpecBody(ProfileUpdateRequest),
    preferences: PreferencesService = Depends(get_preferences_service),
) -> ProfileSettings:
    """Merge the non-null request fields into the stored profile settings."""
    current = preferences.get_profile_settings()

    # A None field means "leave unchanged": fall back to the stored value.
    display_name = current.display_name if body.display_name is None else body.display_name
    avatar_url = current.avatar_url if body.avatar_url is None else body.avatar_url

    updated = ProfileSettings(display_name=display_name, avatar_url=avatar_url)
    preferences.save_profile_settings(updated)
    return updated
|
||||
|
||||
|
||||
def _get_avatar_dir() -> Path:
    """Return the avatar cache directory, creating it if it does not exist."""
    avatar_dir = get_settings().cache_dir / AVATAR_DIR_NAME
    avatar_dir.mkdir(parents=True, exist_ok=True)
    return avatar_dir
|
||||
|
||||
|
||||
@router.post("/avatar")
async def upload_avatar(
    file: UploadFile = File(...),
    preferences: PreferencesService = Depends(get_preferences_service),
):
    """Upload a new profile avatar.

    Validates the content type, streams the upload with a 5 MB cap — the
    original read the entire body into memory *before* checking the size,
    so an arbitrarily large upload was fully buffered — then replaces any
    previous avatar file and persists the avatar URL in the profile
    settings.

    Raises:
        HTTPException: 400 for an unsupported content type or an image
            exceeding the 5 MB limit.
    """
    if file.content_type not in ALLOWED_IMAGE_TYPES:
        raise HTTPException(status_code=400, detail="Invalid image type. Allowed: JPEG, PNG, WebP, GIF")

    # Chunked read with early abort, mirroring the playlist cover upload.
    buffer = bytearray()
    while True:
        chunk = await file.read(8192)
        if not chunk:
            break
        buffer.extend(chunk)
        if len(buffer) > MAX_AVATAR_SIZE:
            raise HTTPException(status_code=400, detail="Image too large. Maximum size is 5 MB")
    data = bytes(buffer)

    ext = {
        "image/jpeg": ".jpg",
        "image/png": ".png",
        "image/webp": ".webp",
        "image/gif": ".gif",
    }.get(file.content_type, ".jpg")

    avatar_dir = _get_avatar_dir()

    # Remove old avatar files so only one avatar.<ext> ever exists.
    for old_file in avatar_dir.glob("avatar.*"):
        try:
            old_file.unlink()
        except OSError:
            pass

    file_path = avatar_dir / f"avatar{ext}"
    file_path.write_bytes(data)

    avatar_url = "/api/v1/profile/avatar"
    current = preferences.get_profile_settings()
    preferences.save_profile_settings(
        ProfileSettings(
            display_name=current.display_name,
            avatar_url=avatar_url,
        )
    )

    return {"avatar_url": avatar_url}
|
||||
|
||||
|
||||
@router.get("/avatar")
async def get_avatar():
    """Serve the stored avatar image, whichever supported extension exists."""
    avatar_dir = _get_avatar_dir()
    media_types = {
        ".jpg": "image/jpeg",
        ".png": "image/png",
        ".webp": "image/webp",
        ".gif": "image/gif",
    }
    # Probe extensions in a fixed order; the upload path guarantees at
    # most one avatar file exists at a time.
    for ext in (".jpg", ".png", ".webp", ".gif"):
        file_path = avatar_dir / f"avatar{ext}"
        if not file_path.exists():
            continue
        return FileResponse(
            file_path,
            media_type=media_types[ext],
            headers={"Cache-Control": "public, max-age=3600"},
        )
    raise HTTPException(status_code=404, detail="No avatar found")
|
||||
17
backend/api/v1/routes/queue.py
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import logging
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from api.v1.schemas.request import QueueItem
|
||||
from core.dependencies import get_lidarr_repository
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from repositories.lidarr import LidarrRepository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/queue", tags=["queue"])
|
||||
|
||||
|
||||
@router.get("", response_model=list[QueueItem])
async def get_queue(
    lidarr_repo: LidarrRepository = Depends(get_lidarr_repository)
):
    """Return the current Lidarr download queue."""
    queue = await lidarr_repo.get_queue()
    return queue
|
||||
30
backend/api/v1/routes/requests.py
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
import logging
|
||||
from fastapi import APIRouter, Depends
|
||||
from api.v1.schemas.request import AlbumRequest, RequestResponse, QueueStatusResponse
|
||||
from core.dependencies import get_request_service
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
from services.request_service import RequestService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/requests", tags=["requests"])
|
||||
|
||||
|
||||
@router.post("/new", response_model=RequestResponse)
async def request_album(
    album_request: AlbumRequest = MsgSpecBody(AlbumRequest),
    request_service: RequestService = Depends(get_request_service)
):
    """Submit a new album request identified by its MusicBrainz ID."""
    result = await request_service.request_album(
        album_request.musicbrainz_id,
        artist=album_request.artist,
        album=album_request.album,
        year=album_request.year,
    )
    return result
|
||||
|
||||
|
||||
@router.get("/new/queue-status", response_model=QueueStatusResponse)
async def get_queue_status(
    request_service: RequestService = Depends(get_request_service)
):
    """Return the current state of the request processing queue."""
    status = request_service.get_queue_status()
    return status
|
||||
82
backend/api/v1/routes/requests_page.py
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from typing import Optional
|
||||
|
||||
from api.v1.schemas.requests_page import (
|
||||
ActiveCountResponse,
|
||||
ActiveRequestsResponse,
|
||||
CancelRequestResponse,
|
||||
ClearHistoryResponse,
|
||||
RequestHistoryResponse,
|
||||
RetryRequestResponse,
|
||||
)
|
||||
from core.dependencies import get_requests_page_service
|
||||
from infrastructure.validators import validate_mbid
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from services.requests_page_service import RequestsPageService
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/requests", tags=["requests-page"])
|
||||
|
||||
|
||||
@router.get("/active", response_model=ActiveRequestsResponse)
async def get_active_requests(
    service: RequestsPageService = Depends(get_requests_page_service),
):
    """List all currently active (in-progress) album requests."""
    active = await service.get_active_requests()
    return active
|
||||
|
||||
|
||||
@router.get("/active/count", response_model=ActiveCountResponse)
async def get_active_request_count(
    service: RequestsPageService = Depends(get_requests_page_service),
):
    """Return only the number of active requests (cheap badge endpoint)."""
    return ActiveCountResponse(count=await service.get_active_count())
|
||||
|
||||
|
||||
@router.get("/history", response_model=RequestHistoryResponse)
async def get_request_history(
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    status: Optional[str] = Query(None),
    sort: Optional[str] = Query(None, pattern="^(newest|oldest|status)$"),
    service: RequestsPageService = Depends(get_requests_page_service),
):
    """Return a paginated, optionally filtered and sorted request history."""
    history = await service.get_request_history(
        page=page,
        page_size=page_size,
        status_filter=status,
        sort=sort,
    )
    return history
|
||||
|
||||
|
||||
@router.delete("/active/{musicbrainz_id}", response_model=CancelRequestResponse)
async def cancel_request(
    musicbrainz_id: str,
    service: RequestsPageService = Depends(get_requests_page_service),
):
    """Cancel an in-flight request identified by its album MBID.

    Raises:
        HTTPException: 400 when the MBID is malformed.
    """
    try:
        musicbrainz_id = validate_mbid(musicbrainz_id, "album")
    except ValueError as e:
        # Chain the cause so the underlying validation error isn't lost.
        raise HTTPException(status_code=400, detail="Invalid MBID format") from e
    return await service.cancel_request(musicbrainz_id)
|
||||
|
||||
|
||||
@router.post("/retry/{musicbrainz_id}", response_model=RetryRequestResponse)
async def retry_request(
    musicbrainz_id: str,
    service: RequestsPageService = Depends(get_requests_page_service),
):
    """Retry a previously failed request identified by its album MBID.

    Raises:
        HTTPException: 400 when the MBID is malformed.
    """
    try:
        musicbrainz_id = validate_mbid(musicbrainz_id, "album")
    except ValueError as e:
        # Chain the cause so the underlying validation error isn't lost.
        raise HTTPException(status_code=400, detail="Invalid MBID format") from e
    return await service.retry_request(musicbrainz_id)
|
||||
|
||||
|
||||
@router.delete("/history/{musicbrainz_id}", response_model=ClearHistoryResponse)
async def clear_history_item(
    musicbrainz_id: str,
    service: RequestsPageService = Depends(get_requests_page_service),
):
    """Delete a single entry from the request history by album MBID.

    Returns:
        ClearHistoryResponse with ``success`` indicating whether an entry
        was actually deleted.

    Raises:
        HTTPException: 400 when the MBID is malformed.
    """
    try:
        musicbrainz_id = validate_mbid(musicbrainz_id, "album")
    except ValueError as e:
        # Chain the cause so the underlying validation error isn't lost.
        raise HTTPException(status_code=400, detail="Invalid MBID format") from e
    deleted = await service.clear_history_item(musicbrainz_id)
    return ClearHistoryResponse(success=deleted)
|
||||
47
backend/api/v1/routes/scrobble.py
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from api.v1.schemas.scrobble import (
|
||||
NowPlayingRequest,
|
||||
ScrobbleRequest,
|
||||
ScrobbleResponse,
|
||||
)
|
||||
from core.dependencies import get_scrobble_service
|
||||
from core.exceptions import ConfigurationError, ExternalServiceError
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
from services.scrobble_service import ScrobbleService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/scrobble", tags=["scrobble"])
|
||||
|
||||
|
||||
@router.post("/now-playing", response_model=ScrobbleResponse)
async def report_now_playing(
    request: NowPlayingRequest = MsgSpecBody(NowPlayingRequest),
    scrobble_service: ScrobbleService = Depends(get_scrobble_service),
) -> ScrobbleResponse:
    """Report a "now playing" update to the configured scrobble backend.

    Raises:
        HTTPException: 400 when scrobbling isn't configured, 502 when the
            upstream scrobble service fails.
    """
    try:
        return await scrobble_service.report_now_playing(request)
    except ConfigurationError as e:
        logger.warning("Scrobble now-playing config error: %s", e)
        # ``from e`` preserves the original cause in tracebacks/logs.
        raise HTTPException(status_code=400, detail="Scrobble not configured") from e
    except ExternalServiceError as e:
        logger.warning("Scrobble now-playing service error: %s", e)
        raise HTTPException(status_code=502, detail="Scrobble service unavailable") from e
|
||||
|
||||
|
||||
@router.post("/submit", response_model=ScrobbleResponse)
async def submit_scrobble(
    request: ScrobbleRequest = MsgSpecBody(ScrobbleRequest),
    scrobble_service: ScrobbleService = Depends(get_scrobble_service),
) -> ScrobbleResponse:
    """Submit a completed-play scrobble to the configured backend.

    Raises:
        HTTPException: 400 when scrobbling isn't configured, 502 when the
            upstream scrobble service fails.
    """
    try:
        return await scrobble_service.submit_scrobble(request)
    except ConfigurationError as e:
        logger.warning("Scrobble submit config error: %s", e)
        # ``from e`` preserves the original cause in tracebacks/logs.
        raise HTTPException(status_code=400, detail="Scrobble not configured") from e
    except ExternalServiceError as e:
        logger.warning("Scrobble submit service error: %s", e)
        raise HTTPException(status_code=502, detail="Scrobble service unavailable") from e
|
||||
127
backend/api/v1/routes/search.py
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
import logging
|
||||
import time
|
||||
from fastapi import APIRouter, Query, Path, BackgroundTasks, Depends, Request
|
||||
from core.exceptions import ClientDisconnectedError
|
||||
from api.v1.schemas.search import (
|
||||
SearchResponse,
|
||||
SearchBucketResponse,
|
||||
EnrichmentResponse,
|
||||
EnrichmentBatchRequest,
|
||||
SuggestResponse,
|
||||
)
|
||||
from core.dependencies import get_search_service, get_coverart_repository, get_search_enrichment_service
|
||||
from infrastructure.degradation import try_get_degradation_context
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
|
||||
import msgspec.structs
|
||||
from services.search_service import SearchService
|
||||
from services.search_enrichment_service import SearchEnrichmentService
|
||||
from repositories.coverart_repository import CoverArtRepository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/search", tags=["search"])
|
||||
|
||||
|
||||
@router.get("", response_model=SearchResponse)
async def search(
    request: Request,
    background_tasks: BackgroundTasks,
    q: str = Query(..., min_length=1, description="Search term"),
    limit_per_bucket: int | None = Query(
        None, ge=1, le=100,
        description="Max items per bucket (deprecated, use limit_artists/limit_albums)"
    ),
    limit_artists: int = Query(10, ge=0, le=100, description="Max artists to return"),
    limit_albums: int = Query(10, ge=0, le=100, description="Max albums to return"),
    buckets: str | None = Query(
        None, description="Comma-separated subset: artists,albums"
    ),
    search_service: SearchService = Depends(get_search_service),
    coverart_repo: CoverArtRepository = Depends(get_coverart_repository)
):
    """Combined artist/album search with optional bucket filtering.

    Cover-art prefetch for the returned albums is scheduled as a
    background task. A degraded-service summary is attached to the
    response when any backing service is reported degraded.
    """
    if await request.is_disconnected():
        raise ClientDisconnectedError("Client disconnected")

    if buckets:
        buckets_list = [part.strip().lower() for part in buckets.split(",")]
    else:
        buckets_list = None

    # The deprecated legacy limit overrides both split limits when given.
    if limit_per_bucket:
        final_limit_artists = final_limit_albums = limit_per_bucket
    else:
        final_limit_artists, final_limit_albums = limit_artists, limit_albums

    result = await search_service.search(
        query=q,
        limit_artists=final_limit_artists,
        limit_albums=final_limit_albums,
        buckets=buckets_list
    )

    ctx = try_get_degradation_context()
    if ctx is not None and ctx.has_degradation():
        result = msgspec.structs.replace(result, service_status=ctx.degraded_summary())

    album_ids = search_service.schedule_cover_prefetch(result.albums)
    if album_ids:
        background_tasks.add_task(
            coverart_repo.batch_prefetch_covers,
            album_ids,
            "250"
        )

    return result
|
||||
|
||||
|
||||
@router.get("/suggest", response_model=SuggestResponse)
async def suggest(
    q: str = Query(..., min_length=2, description="Search query"),
    limit: int = Query(5, ge=1, le=10, description="Max results"),
    search_service: SearchService = Depends(get_search_service),
) -> SuggestResponse:
    """Lightweight typeahead suggestions for the search box."""
    stripped = q.strip()
    # min_length validates the raw query; whitespace padding can still
    # leave fewer than two meaningful characters, so re-check after strip.
    if len(stripped) < 2:
        return SuggestResponse()
    start = time.monotonic()
    result = await search_service.suggest(query=stripped, limit=limit)
    elapsed_ms = (time.monotonic() - start) * 1000
    logger.debug(
        "Suggest query_len=%d results=%d time_ms=%.1f",
        len(stripped),
        len(result.results),
        elapsed_ms,
    )
    return result
|
||||
|
||||
|
||||
@router.get("/{bucket}", response_model=SearchBucketResponse)
async def search_bucket(
    bucket: str = Path(..., pattern="^(artists|albums)$"),
    q: str = Query(..., min_length=1, description="Search term"),
    limit: int = Query(50, ge=1, le=100, description="Page size"),
    offset: int = Query(0, ge=0, description="Pagination offset"),
    search_service: SearchService = Depends(get_search_service)
):
    """Paginated search within a single bucket (artists or albums)."""
    results, top_result = await search_service.search_bucket(
        bucket=bucket,
        query=q,
        limit=limit,
        offset=offset
    )
    return SearchBucketResponse(
        bucket=bucket,
        limit=limit,
        offset=offset,
        results=results,
        top_result=top_result,
    )
|
||||
|
||||
|
||||
@router.get("/enrich/batch", response_model=EnrichmentResponse)
async def enrich_search_results(
    artist_mbids: str = Query("", description="Comma-separated artist MBIDs"),
    album_mbids: str = Query("", description="Comma-separated album MBIDs"),
    enrichment_service: SearchEnrichmentService = Depends(get_search_enrichment_service)
):
    """Enrich search results for the given comma-separated MBID lists."""

    def _split(raw: str) -> list[str]:
        # Drop empty fragments produced by stray commas or whitespace.
        return [piece.strip() for piece in raw.split(",") if piece.strip()]

    return await enrichment_service.enrich(
        artist_mbids=_split(artist_mbids),
        album_mbids=_split(album_mbids),
    )
|
||||
|
||||
|
||||
@router.post("/enrich/batch", response_model=EnrichmentResponse)
async def enrich_search_results_post(
    body: EnrichmentBatchRequest = MsgSpecBody(EnrichmentBatchRequest),
    enrichment_service: SearchEnrichmentService = Depends(get_search_enrichment_service),
):
    """POST variant of batch enrichment for request bodies too large for a URL."""
    response = await enrichment_service.enrich_batch(body)
    return response
|
||||
|
||||
484
backend/api/v1/routes/settings.py
Normal file
|
|
@ -0,0 +1,484 @@
|
|||
import logging
|
||||
import msgspec
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from api.v1.schemas.settings import (
|
||||
UserPreferences,
|
||||
LidarrSettings,
|
||||
LidarrConnectionSettings,
|
||||
JellyfinConnectionSettings,
|
||||
JellyfinVerifyResponse,
|
||||
JellyfinUserInfo,
|
||||
NavidromeConnectionSettings,
|
||||
ListenBrainzConnectionSettings,
|
||||
YouTubeConnectionSettings,
|
||||
HomeSettings,
|
||||
LidarrVerifyResponse,
|
||||
LocalFilesConnectionSettings,
|
||||
LocalFilesVerifyResponse,
|
||||
LidarrMetadataProfilePreferences,
|
||||
LidarrMetadataProfileSummary,
|
||||
LastFmConnectionSettings,
|
||||
LastFmConnectionSettingsResponse,
|
||||
LastFmVerifyResponse,
|
||||
ScrobbleSettings,
|
||||
PrimaryMusicSourceSettings,
|
||||
)
|
||||
from api.v1.schemas.common import VerifyConnectionResponse
|
||||
from api.v1.schemas.advanced_settings import AdvancedSettingsFrontend, FrontendCacheTTLs, _is_masked_api_key
|
||||
from core.dependencies import (
|
||||
get_preferences_service,
|
||||
get_settings_service,
|
||||
get_local_files_service,
|
||||
)
|
||||
from core.exceptions import ConfigurationError, ExternalServiceError
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
from services.local_files_service import LocalFilesService
|
||||
from services.preferences_service import PreferencesService
|
||||
from services.settings_service import SettingsService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/settings", tags=["settings"])
|
||||
|
||||
|
||||
@router.get("/preferences", response_model=UserPreferences)
async def get_preferences(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored user preferences."""
    prefs = preferences_service.get_preferences()
    return prefs
|
||||
|
||||
|
||||
@router.put("/preferences", response_model=UserPreferences)
async def update_preferences(
    preferences: UserPreferences = MsgSpecBody(UserPreferences),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist user preferences and invalidate preference-derived caches.

    Raises:
        HTTPException: 400 when the preferences can't be saved.
    """
    try:
        preferences_service.save_preferences(preferences)
        total_cleared = await settings_service.clear_caches_for_preference_change()
        # Lazy %-args: no string formatting unless the record is emitted.
        logger.info("Updated user preferences. Cleared %s cache entries.", total_cleared)
        return preferences
    except ConfigurationError as e:
        logger.warning("Configuration error updating preferences: %s", e)
        # ``from e`` keeps the original cause attached to the HTTP error.
        raise HTTPException(status_code=400, detail="Couldn't save these settings") from e
|
||||
|
||||
|
||||
@router.get("/lidarr", response_model=LidarrSettings)
async def get_lidarr_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored Lidarr behavior settings (sync frequency etc.)."""
    settings = preferences_service.get_lidarr_settings()
    return settings
|
||||
|
||||
|
||||
@router.put("/lidarr", response_model=LidarrSettings)
async def update_lidarr_settings(
    lidarr_settings: LidarrSettings = MsgSpecBody(LidarrSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Persist Lidarr behavior settings.

    Raises:
        HTTPException: 400 when the settings are incomplete or invalid.
    """
    try:
        preferences_service.save_lidarr_settings(lidarr_settings)
        # Lazy %-args: no string formatting unless the record is emitted.
        logger.info("Updated Lidarr settings: sync_frequency=%s", lidarr_settings.sync_frequency)
        return lidarr_settings
    except ConfigurationError as e:
        logger.warning("Configuration error updating Lidarr settings: %s", e)
        # ``from e`` keeps the original cause attached to the HTTP error.
        raise HTTPException(status_code=400, detail="Lidarr settings are incomplete or invalid") from e
|
||||
|
||||
|
||||
@router.get("/cache-ttls", response_model=FrontendCacheTTLs)
async def get_frontend_cache_ttls(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Expose the frontend cache TTLs derived from the advanced settings."""
    adv = preferences_service.get_advanced_settings()
    return FrontendCacheTTLs(
        home=adv.frontend_ttl_home,
        discover=adv.frontend_ttl_discover,
        library=adv.frontend_ttl_library,
        recently_added=adv.frontend_ttl_recently_added,
        discover_queue=adv.frontend_ttl_discover_queue,
        search=adv.frontend_ttl_search,
        local_files_sidebar=adv.frontend_ttl_local_files_sidebar,
        jellyfin_sidebar=adv.frontend_ttl_jellyfin_sidebar,
        playlist_sources=adv.frontend_ttl_playlist_sources,
        discover_queue_polling_interval=adv.discover_queue_polling_interval,
        discover_queue_auto_generate=adv.discover_queue_auto_generate,
    )
|
||||
|
||||
|
||||
@router.get("/advanced", response_model=AdvancedSettingsFrontend)
async def get_advanced_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the advanced settings mapped to the frontend representation."""
    return AdvancedSettingsFrontend.from_backend(
        preferences_service.get_advanced_settings()
    )
|
||||
|
||||
|
||||
@router.put("/advanced", response_model=AdvancedSettingsFrontend)
async def update_advanced_settings(
    settings: AdvancedSettingsFrontend = MsgSpecBody(AdvancedSettingsFrontend),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist advanced settings, preserving a masked API key.

    The frontend echoes back a masked placeholder for the AudioDB API key;
    when the mask is detected the previously stored key is kept instead of
    being overwritten. Cover-art caches are refreshed after saving.

    Raises:
        HTTPException: 400 on configuration or validation failure.
    """
    try:
        backend_settings = settings.to_backend()
        if _is_masked_api_key(backend_settings.audiodb_api_key):
            current = preferences_service.get_advanced_settings()
            backend_settings = msgspec.structs.replace(
                backend_settings, audiodb_api_key=current.audiodb_api_key
            )
        preferences_service.save_advanced_settings(backend_settings)
        await settings_service.on_coverart_settings_changed()
        logger.info("Updated advanced settings")
        # Re-read so the response reflects exactly what was persisted.
        saved = preferences_service.get_advanced_settings()
        return AdvancedSettingsFrontend.from_backend(saved)
    except ConfigurationError as e:
        logger.warning("Configuration error updating advanced settings: %s", e)
        # ``from e`` keeps the original cause attached to the HTTP error.
        raise HTTPException(status_code=400, detail="Couldn't save these settings") from e
    except ValueError as e:
        logger.warning("Validation error updating advanced settings: %s", e)
        raise HTTPException(status_code=400, detail="That settings value isn't valid") from e
|
||||
|
||||
|
||||
@router.get("/lidarr/connection", response_model=LidarrConnectionSettings)
async def get_lidarr_connection(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored Lidarr connection settings."""
    connection = preferences_service.get_lidarr_connection()
    return connection
|
||||
|
||||
|
||||
@router.put("/lidarr/connection", response_model=LidarrConnectionSettings)
|
||||
async def update_lidarr_connection(
|
||||
settings: LidarrConnectionSettings = MsgSpecBody(LidarrConnectionSettings),
|
||||
preferences_service: PreferencesService = Depends(get_preferences_service),
|
||||
settings_service: SettingsService = Depends(get_settings_service),
|
||||
):
|
||||
try:
|
||||
from repositories.lidarr.base import reset_lidarr_circuit_breaker
|
||||
|
||||
preferences_service.save_lidarr_connection(settings)
|
||||
reset_lidarr_circuit_breaker()
|
||||
await settings_service.on_lidarr_settings_changed()
|
||||
logger.info("Updated Lidarr connection settings")
|
||||
return settings
|
||||
except ConfigurationError as e:
|
||||
logger.warning(f"Configuration error updating Lidarr connection: {e}")
|
||||
raise HTTPException(status_code=400, detail="Lidarr connection settings are incomplete or invalid")
|
||||
|
||||
|
||||
@router.post("/lidarr/verify", response_model=LidarrVerifyResponse)
|
||||
async def verify_lidarr_connection(
|
||||
settings: LidarrConnectionSettings = MsgSpecBody(LidarrConnectionSettings),
|
||||
settings_service: SettingsService = Depends(get_settings_service),
|
||||
):
|
||||
return await settings_service.verify_lidarr(settings)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/lidarr/metadata-profiles",
|
||||
response_model=list[LidarrMetadataProfileSummary],
|
||||
)
|
||||
async def list_lidarr_metadata_profiles(
|
||||
settings_service: SettingsService = Depends(get_settings_service),
|
||||
):
|
||||
try:
|
||||
return await settings_service.list_lidarr_metadata_profiles()
|
||||
except ExternalServiceError as e:
|
||||
logger.warning(f"Lidarr metadata profiles list failed: {e}")
|
||||
raise HTTPException(status_code=502, detail="Couldn't load Lidarr metadata profiles")
|
||||
|
||||
|
||||
@router.get(
|
||||
"/lidarr/metadata-profile/preferences",
|
||||
response_model=LidarrMetadataProfilePreferences,
|
||||
)
|
||||
async def get_lidarr_metadata_profile_preferences(
|
||||
profile_id: int | None = None,
|
||||
settings_service: SettingsService = Depends(get_settings_service),
|
||||
):
|
||||
try:
|
||||
return await settings_service.get_lidarr_metadata_profile_preferences(
|
||||
profile_id=profile_id
|
||||
)
|
||||
except ExternalServiceError as e:
|
||||
logger.warning(f"Lidarr metadata profile fetch failed: {e}")
|
||||
raise HTTPException(status_code=502, detail="Couldn't load the Lidarr metadata profile")
|
||||
|
||||
|
||||
@router.put(
|
||||
"/lidarr/metadata-profile/preferences",
|
||||
response_model=LidarrMetadataProfilePreferences,
|
||||
)
|
||||
async def update_lidarr_metadata_profile_preferences(
|
||||
preferences: UserPreferences = MsgSpecBody(UserPreferences),
|
||||
profile_id: int | None = None,
|
||||
settings_service: SettingsService = Depends(get_settings_service),
|
||||
):
|
||||
try:
|
||||
return await settings_service.update_lidarr_metadata_profile(
|
||||
preferences, profile_id=profile_id
|
||||
)
|
||||
except ExternalServiceError as e:
|
||||
logger.warning(f"Lidarr metadata profile update failed: {e}")
|
||||
raise HTTPException(status_code=502, detail="Couldn't update the Lidarr metadata profile")
|
||||
|
||||
|
||||
@router.get("/jellyfin", response_model=JellyfinConnectionSettings)
async def get_jellyfin_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored Jellyfin connection settings."""
    return preferences_service.get_jellyfin_connection()


@router.put("/jellyfin", response_model=JellyfinConnectionSettings)
async def update_jellyfin_settings(
    settings: JellyfinConnectionSettings = MsgSpecBody(JellyfinConnectionSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist Jellyfin connection settings and notify dependent services.

    Raises:
        HTTPException: 400 when the settings fail validation.
    """
    try:
        preferences_service.save_jellyfin_connection(settings)
        await settings_service.on_jellyfin_settings_changed()
        logger.info("Updated Jellyfin connection settings")
        return settings
    except ConfigurationError as e:
        # Chain the cause so the root error survives; lazy %-formatting.
        logger.warning("Configuration error updating Jellyfin settings: %s", e)
        raise HTTPException(
            status_code=400, detail="Jellyfin settings are incomplete or invalid"
        ) from e


@router.post("/jellyfin/verify", response_model=JellyfinVerifyResponse)
async def verify_jellyfin_connection(
    settings: JellyfinConnectionSettings = MsgSpecBody(JellyfinConnectionSettings),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Test a Jellyfin connection; on success, include its user list."""
    result = await settings_service.verify_jellyfin(settings)
    users = (
        [JellyfinUserInfo(id=user.id, name=user.name) for user in (result.users or [])]
        if result.success
        else []
    )
    return JellyfinVerifyResponse(success=result.success, message=result.message, users=users)
|
||||
|
||||
|
||||
@router.get("/navidrome", response_model=NavidromeConnectionSettings)
async def get_navidrome_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored Navidrome connection settings."""
    return preferences_service.get_navidrome_connection()


@router.put("/navidrome", response_model=NavidromeConnectionSettings)
async def update_navidrome_settings(
    settings: NavidromeConnectionSettings = MsgSpecBody(NavidromeConnectionSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist Navidrome connection settings and notify dependent services.

    Raises:
        HTTPException: 400 when the settings fail validation.
    """
    try:
        preferences_service.save_navidrome_connection(settings)
        await settings_service.on_navidrome_settings_changed(enabled=settings.enabled)
        logger.info("Updated Navidrome connection settings")
        # Re-read so the response reflects exactly what was persisted.
        return preferences_service.get_navidrome_connection()
    except ConfigurationError as e:
        logger.warning("Configuration error updating Navidrome settings: %s", e)
        raise HTTPException(
            status_code=400, detail="Navidrome settings are incomplete or invalid"
        ) from e


@router.post("/navidrome/verify", response_model=VerifyConnectionResponse)
async def verify_navidrome_connection(
    settings: NavidromeConnectionSettings = MsgSpecBody(NavidromeConnectionSettings),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Test connectivity to Navidrome using the submitted settings."""
    result = await settings_service.verify_navidrome(settings)
    return VerifyConnectionResponse(valid=result.valid, message=result.message)
|
||||
|
||||
|
||||
@router.get("/listenbrainz", response_model=ListenBrainzConnectionSettings)
async def get_listenbrainz_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored ListenBrainz connection settings."""
    return preferences_service.get_listenbrainz_connection()


@router.put("/listenbrainz", response_model=ListenBrainzConnectionSettings)
async def update_listenbrainz_settings(
    settings: ListenBrainzConnectionSettings = MsgSpecBody(ListenBrainzConnectionSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist ListenBrainz connection settings and notify dependent services.

    Raises:
        HTTPException: 400 when the settings fail validation.
    """
    try:
        preferences_service.save_listenbrainz_connection(settings)
        await settings_service.on_listenbrainz_settings_changed()
        logger.info("Updated ListenBrainz connection settings")
        return settings
    except ConfigurationError as e:
        # Chain the cause so the root error survives; lazy %-formatting.
        logger.warning("Configuration error updating ListenBrainz settings: %s", e)
        raise HTTPException(
            status_code=400, detail="ListenBrainz settings are incomplete or invalid"
        ) from e


@router.post("/listenbrainz/verify", response_model=VerifyConnectionResponse)
async def verify_listenbrainz_connection(
    settings: ListenBrainzConnectionSettings = MsgSpecBody(ListenBrainzConnectionSettings),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Test connectivity to ListenBrainz using the submitted settings."""
    result = await settings_service.verify_listenbrainz(settings)
    return VerifyConnectionResponse(valid=result.valid, message=result.message)
|
||||
|
||||
|
||||
@router.get("/youtube", response_model=YouTubeConnectionSettings)
async def get_youtube_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored YouTube connection settings."""
    return preferences_service.get_youtube_connection()


@router.put("/youtube", response_model=YouTubeConnectionSettings)
async def update_youtube_settings(
    settings: YouTubeConnectionSettings = MsgSpecBody(YouTubeConnectionSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist YouTube connection settings and notify dependent services.

    Raises:
        HTTPException: 400 when the settings fail validation.
    """
    try:
        preferences_service.save_youtube_connection(settings)
        await settings_service.on_youtube_settings_changed()
        logger.info("Updated YouTube connection settings")
        return settings
    except ConfigurationError as e:
        # Chain the cause so the root error survives; lazy %-formatting.
        logger.warning("Configuration error updating YouTube settings: %s", e)
        raise HTTPException(
            status_code=400, detail="YouTube settings are incomplete or invalid"
        ) from e


@router.post("/youtube/verify", response_model=VerifyConnectionResponse)
async def verify_youtube_connection(
    settings: YouTubeConnectionSettings = MsgSpecBody(YouTubeConnectionSettings),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Test connectivity to YouTube using the submitted settings."""
    result = await settings_service.verify_youtube(settings)
    return VerifyConnectionResponse(valid=result.valid, message=result.message)
|
||||
|
||||
|
||||
@router.get("/home", response_model=HomeSettings)
async def get_home_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored home-page settings."""
    return preferences_service.get_home_settings()


@router.put("/home", response_model=HomeSettings)
async def update_home_settings(
    settings: HomeSettings = MsgSpecBody(HomeSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist home-page settings and invalidate the cached home data.

    Raises:
        HTTPException: 400 when the settings fail validation.
    """
    try:
        preferences_service.save_home_settings(settings)
        # Invalidate the cache so the change is visible on the next fetch.
        await settings_service.clear_home_cache()
        logger.info("Updated home settings")
        return settings
    except ConfigurationError as e:
        # Chain the cause so the root error survives; lazy %-formatting.
        logger.warning("Configuration error updating home settings: %s", e)
        raise HTTPException(
            status_code=400, detail="Home settings are incomplete or invalid"
        ) from e
|
||||
|
||||
|
||||
@router.get("/local-files", response_model=LocalFilesConnectionSettings)
async def get_local_files_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored local-files settings."""
    return preferences_service.get_local_files_connection()


@router.put("/local-files", response_model=LocalFilesConnectionSettings)
async def update_local_files_settings(
    settings: LocalFilesConnectionSettings = MsgSpecBody(LocalFilesConnectionSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist local-files settings and notify dependent services.

    Raises:
        HTTPException: 400 when the settings fail validation.
    """
    try:
        preferences_service.save_local_files_connection(settings)
        await settings_service.on_local_files_settings_changed()
        logger.info("Updated local files settings")
        return settings
    except ConfigurationError as e:
        # Chain the cause so the root error survives in tracebacks.
        logger.warning("Configuration error updating local files settings: %s", e)
        raise HTTPException(
            status_code=400, detail="Local files settings are incomplete or invalid"
        ) from e


@router.post("/local-files/verify", response_model=LocalFilesVerifyResponse)
async def verify_local_files_connection(
    settings: LocalFilesConnectionSettings = MsgSpecBody(LocalFilesConnectionSettings),
    local_service: LocalFilesService = Depends(get_local_files_service),
) -> LocalFilesVerifyResponse:
    """Check that the configured music path is usable."""
    return await local_service.verify_path(settings.music_path)
|
||||
|
||||
|
||||
@router.get("/lastfm", response_model=LastFmConnectionSettingsResponse)
async def get_lastfm_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored Last.fm settings in response form.

    The response struct is built via ``from_settings`` — presumably so
    sensitive fields can be masked before leaving the server; confirm.
    """
    settings = preferences_service.get_lastfm_connection()
    return LastFmConnectionSettingsResponse.from_settings(settings)


@router.put("/lastfm", response_model=LastFmConnectionSettingsResponse)
async def update_lastfm_settings(
    settings: LastFmConnectionSettings = MsgSpecBody(LastFmConnectionSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist Last.fm connection settings and notify dependent services.

    Raises:
        HTTPException: 400 when the settings fail validation.
    """
    try:
        preferences_service.save_lastfm_connection(settings)
        await settings_service.on_lastfm_settings_changed()
        logger.info("Updated Last.fm connection settings")
        # Re-read so the response reflects exactly what was persisted.
        saved = preferences_service.get_lastfm_connection()
        return LastFmConnectionSettingsResponse.from_settings(saved)
    except ConfigurationError as e:
        # Chain the cause so the root error survives in tracebacks.
        logger.warning("Configuration error updating Last.fm settings: %s", e)
        raise HTTPException(
            status_code=400, detail="Last.fm settings are incomplete or invalid"
        ) from e


@router.post("/lastfm/verify", response_model=LastFmVerifyResponse)
async def verify_lastfm_connection(
    settings: LastFmConnectionSettings = MsgSpecBody(LastFmConnectionSettings),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Test connectivity to Last.fm using the submitted settings."""
    result = await settings_service.verify_lastfm(settings)
    return LastFmVerifyResponse(valid=result.valid, message=result.message)
|
||||
|
||||
|
||||
@router.get("/scrobble", response_model=ScrobbleSettings)
async def get_scrobble_settings(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the stored scrobble settings."""
    return preferences_service.get_scrobble_settings()


@router.put("/scrobble", response_model=ScrobbleSettings)
async def update_scrobble_settings(
    settings: ScrobbleSettings = MsgSpecBody(ScrobbleSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Persist scrobble settings.

    Raises:
        HTTPException: 400 when the settings fail validation.
    """
    try:
        preferences_service.save_scrobble_settings(settings)
        logger.info("Updated scrobble settings")
        # Re-read so the response reflects exactly what was persisted.
        return preferences_service.get_scrobble_settings()
    except ConfigurationError as e:
        # Chain the cause so the root error survives in tracebacks.
        logger.warning("Configuration error updating scrobble settings: %s", e)
        raise HTTPException(
            status_code=400, detail="Scrobbling settings are incomplete or invalid"
        ) from e
|
||||
|
||||
|
||||
@router.get("/primary-source", response_model=PrimaryMusicSourceSettings)
async def get_primary_music_source(
    preferences_service: PreferencesService = Depends(get_preferences_service),
):
    """Return the configured primary music source."""
    return preferences_service.get_primary_music_source()


@router.put("/primary-source", response_model=PrimaryMusicSourceSettings)
async def update_primary_music_source(
    settings: PrimaryMusicSourceSettings = MsgSpecBody(PrimaryMusicSourceSettings),
    preferences_service: PreferencesService = Depends(get_preferences_service),
    settings_service: SettingsService = Depends(get_settings_service),
):
    """Persist the primary music source and drop caches keyed on it.

    Raises:
        HTTPException: 400 when the source is invalid.
    """
    try:
        preferences_service.save_primary_music_source(settings)
        # Both caches depend on which source is primary — invalidate them.
        await settings_service.clear_home_cache()
        await settings_service.clear_source_resolution_cache()
        logger.info("Updated primary music source to %s", settings.source)
        # Re-read so the response reflects exactly what was persisted.
        return preferences_service.get_primary_music_source()
    except ConfigurationError as e:
        # Chain the cause so the root error survives in tracebacks.
        logger.warning("Configuration error updating primary music source: %s", e)
        raise HTTPException(status_code=400, detail="Invalid primary music source") from e
|
||||
17
backend/api/v1/routes/status.py
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import logging
|
||||
from fastapi import APIRouter, Depends
|
||||
from api.v1.schemas.common import StatusReport
|
||||
from core.dependencies import get_status_service
|
||||
from infrastructure.msgspec_fastapi import MsgSpecRoute
|
||||
from services.status_service import StatusService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/status", tags=["status"])


@router.get("", response_model=StatusReport)
async def get_status(
    status_service: StatusService = Depends(get_status_service),
) -> StatusReport:
    """Return the application's aggregated status report."""
    report = await status_service.get_status()
    return report
|
||||
255
backend/api/v1/routes/stream.py
Normal file
|
|
@ -0,0 +1,255 @@
|
|||
import logging
|
||||
|
||||
from fastapi import APIRouter, Body, Depends, HTTPException, Request
|
||||
from fastapi.responses import RedirectResponse, Response, StreamingResponse
|
||||
|
||||
from api.v1.schemas.stream import (
|
||||
JellyfinPlaybackUrlResponse,
|
||||
PlaybackSessionResponse,
|
||||
ProgressReportRequest,
|
||||
StartPlaybackRequest,
|
||||
StopReportRequest,
|
||||
)
|
||||
from core.dependencies import (
|
||||
get_jellyfin_repository,
|
||||
get_jellyfin_playback_service,
|
||||
get_local_files_service,
|
||||
get_navidrome_playback_service,
|
||||
)
|
||||
from core.exceptions import ExternalServiceError, PlaybackNotAllowedError, ResourceNotFoundError
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
from repositories.jellyfin_repository import JellyfinRepository
|
||||
from services.jellyfin_playback_service import JellyfinPlaybackService
|
||||
from services.local_files_service import LocalFilesService
|
||||
from services.navidrome_playback_service import NavidromePlaybackService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/stream", tags=["streaming"])


@router.get("/jellyfin/{item_id}")
async def stream_jellyfin_audio(
    item_id: str,
    jellyfin_repo: JellyfinRepository = Depends(get_jellyfin_repository),
) -> JellyfinPlaybackUrlResponse:
    """Resolve playback metadata (URL, seekability, session id) for a Jellyfin item.

    Raises:
        HTTPException: 404 unknown item, 403 playback forbidden,
            502 upstream Jellyfin failure.
    """
    try:
        playback = await jellyfin_repo.get_playback_url(item_id)
        logger.info(
            "Resolved Jellyfin playback metadata",
            extra={
                "item_id": item_id,
                "play_method": playback.play_method,
                "seekable": playback.seekable,
            },
        )
        return JellyfinPlaybackUrlResponse(
            url=playback.url,
            seekable=playback.seekable,
            playSessionId=playback.play_session_id,
        )
    except ResourceNotFoundError:
        raise HTTPException(status_code=404, detail="Audio item not found")
    except PlaybackNotAllowedError as e:
        logger.warning("Playback not allowed for %s: %s", item_id, e)
        raise HTTPException(status_code=403, detail="Playback not allowed") from e
    except ExternalServiceError as e:
        # Previously this error was swallowed silently; log it so upstream
        # failures are diagnosable, and chain the cause.
        logger.error("Jellyfin stream error for %s: %s", item_id, e)
        raise HTTPException(status_code=502, detail="Failed to stream from Jellyfin") from e


@router.head("/jellyfin/{item_id}")
async def head_jellyfin_audio(
    item_id: str,
    jellyfin_repo: JellyfinRepository = Depends(get_jellyfin_repository),
) -> Response:
    """Redirect a HEAD prefetch to the resolved Jellyfin playback URL.

    Raises:
        HTTPException: 404 unknown item, 403 playback forbidden,
            502 upstream Jellyfin failure.
    """
    try:
        playback = await jellyfin_repo.get_playback_url(item_id)
        logger.info(
            "Resolved Jellyfin playback prefetch redirect",
            extra={
                "item_id": item_id,
                "play_method": playback.play_method,
                "seekable": playback.seekable,
            },
        )
        return RedirectResponse(
            url=playback.url,
            status_code=302,
            # Keep the backend URL out of Referer headers on the redirect hop.
            headers={"Referrer-Policy": "no-referrer"},
        )
    except ResourceNotFoundError:
        raise HTTPException(status_code=404, detail="Audio item not found")
    except PlaybackNotAllowedError as e:
        logger.warning("Playback not allowed for %s: %s", item_id, e)
        raise HTTPException(status_code=403, detail="Playback not allowed") from e
    except ExternalServiceError as e:
        logger.error("Jellyfin head stream error for %s: %s", item_id, e)
        raise HTTPException(status_code=502, detail="Failed to resolve Jellyfin stream") from e


@router.post("/jellyfin/{item_id}/start", response_model=PlaybackSessionResponse)
async def start_jellyfin_playback(
    item_id: str,
    body: StartPlaybackRequest | None = Body(default=None),
    playback_service: JellyfinPlaybackService = Depends(get_jellyfin_playback_service),
) -> PlaybackSessionResponse:
    """Report playback start to Jellyfin and return the session id.

    The request body is optional; a client-supplied session id is reused
    when present.

    Raises:
        HTTPException: 404 unknown item, 403 playback forbidden,
            502 upstream Jellyfin failure.
    """
    try:
        play_session_id = await playback_service.start_playback(
            item_id,
            play_session_id=body.play_session_id if body else None,
        )
        return PlaybackSessionResponse(play_session_id=play_session_id, item_id=item_id)
    except ResourceNotFoundError:
        raise HTTPException(status_code=404, detail="Item not found")
    except PlaybackNotAllowedError as e:
        logger.warning("Playback not allowed for %s: %s", item_id, e)
        raise HTTPException(status_code=403, detail="Playback not allowed") from e
    except ExternalServiceError as e:
        logger.error("Failed to start playback for %s: %s", item_id, e)
        raise HTTPException(status_code=502, detail="Failed to start Jellyfin playback") from e


@router.post("/jellyfin/{item_id}/progress", status_code=204)
async def report_jellyfin_progress(
    item_id: str,
    body: ProgressReportRequest = MsgSpecBody(ProgressReportRequest),
    playback_service: JellyfinPlaybackService = Depends(get_jellyfin_playback_service),
) -> Response:
    """Forward a playback progress report to Jellyfin (204 on success).

    Raises:
        HTTPException: 502 when the report cannot be delivered.
    """
    try:
        await playback_service.report_progress(
            item_id=item_id,
            play_session_id=body.play_session_id,
            position_seconds=body.position_seconds,
            is_paused=body.is_paused,
        )
        return Response(status_code=204)
    except ExternalServiceError as e:
        logger.warning("Progress report failed for %s: %s", item_id, e)
        raise HTTPException(status_code=502, detail="Failed to report progress") from e


@router.post("/jellyfin/{item_id}/stop", status_code=204)
async def stop_jellyfin_playback(
    item_id: str,
    body: StopReportRequest = MsgSpecBody(StopReportRequest),
    playback_service: JellyfinPlaybackService = Depends(get_jellyfin_playback_service),
) -> Response:
    """Report playback stop to Jellyfin (204 on success).

    Raises:
        HTTPException: 502 when the report cannot be delivered.
    """
    try:
        await playback_service.stop_playback(
            item_id=item_id,
            play_session_id=body.play_session_id,
            position_seconds=body.position_seconds,
        )
        return Response(status_code=204)
    except ExternalServiceError as e:
        logger.warning("Stop report failed for %s: %s", item_id, e)
        raise HTTPException(status_code=502, detail="Failed to report playback stop") from e
|
||||
|
||||
|
||||
@router.head("/local/{track_id}")
async def head_local_file(
    track_id: int,
    local_service: LocalFilesService = Depends(get_local_files_service),
) -> Response:
    """Return the headers a GET on this local track would produce, without a body.

    Raises:
        HTTPException: 404 unknown/missing file, 403 path escapes the music
            directory, 502 service failure, 500 OS-level read failure.
    """
    try:
        headers = await local_service.head_track(track_id)
        return Response(
            status_code=200,
            headers=headers,
            media_type=headers.get("Content-Type", "application/octet-stream"),
        )
    except ResourceNotFoundError:
        raise HTTPException(status_code=404, detail="Track file not found")
    except FileNotFoundError:
        # Known to the catalog but the file is gone from disk.
        raise HTTPException(status_code=404, detail="Track file not found on disk")
    except PermissionError:
        raise HTTPException(status_code=403, detail="Access denied — path outside music directory")
    except ExternalServiceError as e:
        logger.error("Local head error for track %s: %s", track_id, e)
        raise HTTPException(status_code=502, detail="Failed to check local file") from e
    except OSError as e:
        logger.error("OS error checking local track %s: %s", track_id, e)
        raise HTTPException(status_code=500, detail="Failed to read local file") from e


@router.get("/local/{track_id}")
async def stream_local_file(
    track_id: int,
    request: Request,
    local_service: LocalFilesService = Depends(get_local_files_service),
) -> StreamingResponse:
    """Stream a local track, honoring the client's Range header.

    Raises:
        HTTPException: 404 unknown/missing file, 403 path escapes the music
            directory, 416 bad range, 502 service failure, 500 OS failure.
    """
    try:
        range_header = request.headers.get("Range")
        chunks, headers, status_code = await local_service.stream_track(
            track_file_id=track_id,
            range_header=range_header,
        )
        return StreamingResponse(
            content=chunks,
            status_code=status_code,
            headers=headers,
            media_type=headers.get("Content-Type", "application/octet-stream"),
        )
    except ResourceNotFoundError:
        raise HTTPException(status_code=404, detail="Track file not found")
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail="Track file not found on disk")
    except PermissionError:
        raise HTTPException(status_code=403, detail="Access denied — path outside music directory")
    except ExternalServiceError as e:
        # The service signals an unsatisfiable range via its message text.
        detail = str(e)
        if "Range not satisfiable" in detail:
            raise HTTPException(status_code=416, detail="Range not satisfiable") from e
        logger.error("Local stream error for track %s: %s", track_id, e)
        raise HTTPException(status_code=502, detail="Failed to stream local file") from e
    except OSError as e:
        logger.error("OS error streaming local track %s: %s", track_id, e)
        raise HTTPException(status_code=500, detail="Failed to read local file") from e
|
||||
|
||||
|
||||
@router.head("/navidrome/{item_id}")
async def head_navidrome_audio(
    item_id: str,
    playback_service: NavidromePlaybackService = Depends(get_navidrome_playback_service),
) -> Response:
    """Proxy a HEAD request for a Navidrome audio item.

    Raises:
        HTTPException: 400 invalid request, 502 upstream failure.
    """
    try:
        return await playback_service.proxy_head(item_id)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid stream request")
    except ExternalServiceError as e:
        # Previously the upstream error was dropped entirely; log and chain it.
        logger.warning("Navidrome head stream failed for %s: %s", item_id, e)
        raise HTTPException(status_code=502, detail="Failed to stream from Navidrome") from e


@router.get("/navidrome/{item_id}")
async def stream_navidrome_audio(
    item_id: str,
    request: Request,
    playback_service: NavidromePlaybackService = Depends(get_navidrome_playback_service),
) -> StreamingResponse:
    """Proxy an audio stream from Navidrome, forwarding the Range header.

    Raises:
        HTTPException: 400 invalid request, 416 bad range, 502 upstream failure.
    """
    try:
        return await playback_service.proxy_stream(item_id, request.headers.get("Range"))
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid stream request")
    except ExternalServiceError as e:
        # The service signals an unsatisfiable range via its message text.
        detail = str(e)
        if "416" in detail or "Range not satisfiable" in detail:
            raise HTTPException(status_code=416, detail="Range not satisfiable") from e
        raise HTTPException(status_code=502, detail="Failed to stream from Navidrome") from e


@router.post("/navidrome/{item_id}/scrobble")
async def scrobble_navidrome(
    item_id: str,
    playback_service: NavidromePlaybackService = Depends(get_navidrome_playback_service),
) -> dict[str, str]:
    """Submit a scrobble for the item; best-effort, reports ok/error."""
    ok = await playback_service.scrobble(item_id)
    return {"status": "ok" if ok else "error"}


@router.post("/navidrome/{item_id}/now-playing")
async def navidrome_now_playing(
    item_id: str,
    playback_service: NavidromePlaybackService = Depends(get_navidrome_playback_service),
) -> dict[str, str]:
    """Report the item as now playing; best-effort, reports ok/error."""
    ok = await playback_service.report_now_playing(item_id)
    return {"status": "ok" if ok else "error"}
|
||||
175
backend/api/v1/routes/youtube.py
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
import logging
|
||||
|
||||
from fastapi import APIRouter, Response
|
||||
|
||||
from api.v1.schemas.discover import YouTubeQuotaResponse
|
||||
from api.v1.schemas.youtube import (
|
||||
YouTubeLink,
|
||||
YouTubeLinkGenerateRequest,
|
||||
YouTubeLinkResponse,
|
||||
YouTubeLinkUpdateRequest,
|
||||
YouTubeManualLinkRequest,
|
||||
YouTubeTrackLink,
|
||||
YouTubeTrackLinkBatchGenerateRequest,
|
||||
YouTubeTrackLinkBatchResponse,
|
||||
YouTubeTrackLinkGenerateRequest,
|
||||
YouTubeTrackLinkResponse,
|
||||
)
|
||||
from core.dependencies import YouTubeServiceDep
|
||||
from infrastructure.msgspec_fastapi import MsgSpecBody, MsgSpecRoute
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(route_class=MsgSpecRoute, prefix="/youtube", tags=["YouTube"])


@router.post("/generate", response_model=YouTubeLinkResponse)
async def generate_link(
    youtube_service: YouTubeServiceDep,
    request: YouTubeLinkGenerateRequest = MsgSpecBody(YouTubeLinkGenerateRequest),
) -> YouTubeLinkResponse:
    """Generate a YouTube link for an album and report current quota status."""
    generated = await youtube_service.generate_link(
        artist_name=request.artist_name,
        album_name=request.album_name,
        album_id=request.album_id,
        cover_url=request.cover_url,
    )
    return YouTubeLinkResponse(link=generated, quota=youtube_service.get_quota_status())


@router.get("/link/{album_id}", response_model=YouTubeLink | None)
async def get_link(
    album_id: str,
    youtube_service: YouTubeServiceDep,
) -> YouTubeLink | Response:
    """Return the stored link for *album_id*, or 204 No Content when absent."""
    if (stored := await youtube_service.get_link(album_id)) is not None:
        return stored
    return Response(status_code=204)


@router.get("/links", response_model=list[YouTubeLink])
async def get_all_links(
    youtube_service: YouTubeServiceDep,
) -> list[YouTubeLink]:
    """Return every stored album-level YouTube link."""
    links = await youtube_service.get_all_links()
    return links


@router.delete("/link/{album_id}", status_code=204)
async def delete_link(
    album_id: str,
    youtube_service: YouTubeServiceDep,
) -> None:
    """Delete the stored link for *album_id* (204 regardless)."""
    await youtube_service.delete_link(album_id)


@router.put("/link/{album_id}", response_model=YouTubeLink)
async def update_link(
    album_id: str,
    youtube_service: YouTubeServiceDep,
    request: YouTubeLinkUpdateRequest = MsgSpecBody(YouTubeLinkUpdateRequest),
) -> YouTubeLink:
    """Replace the stored link (and metadata) for *album_id*."""
    updated = await youtube_service.update_link(
        album_id=album_id,
        youtube_url=request.youtube_url,
        album_name=request.album_name,
        artist_name=request.artist_name,
        cover_url=request.cover_url,
    )
    return updated


@router.post("/manual", response_model=YouTubeLink)
async def save_manual_link(
    youtube_service: YouTubeServiceDep,
    request: YouTubeManualLinkRequest = MsgSpecBody(YouTubeManualLinkRequest),
) -> YouTubeLink:
    """Store a user-provided YouTube link for an album."""
    saved = await youtube_service.save_manual_link(
        album_name=request.album_name,
        artist_name=request.artist_name,
        youtube_url=request.youtube_url,
        cover_url=request.cover_url,
        album_id=request.album_id,
    )
    return saved
|
||||
|
||||
|
||||
@router.post("/generate-track", response_model=YouTubeTrackLinkResponse)
async def generate_track_link(
    youtube_service: YouTubeServiceDep,
    request: YouTubeTrackLinkGenerateRequest = MsgSpecBody(YouTubeTrackLinkGenerateRequest),
) -> YouTubeTrackLinkResponse:
    """Generate a YouTube link for one track and report current quota status."""
    result = await youtube_service.generate_track_link(
        album_id=request.album_id,
        album_name=request.album_name,
        artist_name=request.artist_name,
        track_name=request.track_name,
        track_number=request.track_number,
        disc_number=request.disc_number,
        cover_url=request.cover_url,
    )
    return YouTubeTrackLinkResponse(
        track_link=result,
        quota=youtube_service.get_quota_status(),
    )


@router.post("/generate-tracks", response_model=YouTubeTrackLinkBatchResponse)
async def generate_track_links_batch(
    youtube_service: YouTubeServiceDep,
    request: YouTubeTrackLinkBatchGenerateRequest = MsgSpecBody(YouTubeTrackLinkBatchGenerateRequest),
) -> YouTubeTrackLinkBatchResponse:
    """Generate YouTube links for many tracks of one album in a single call."""
    # Reduce each request entry to the plain fields the service expects.
    track_payload = [
        dict(
            track_name=entry.track_name,
            track_number=entry.track_number,
            disc_number=entry.disc_number,
        )
        for entry in request.tracks
    ]
    generated, failed = await youtube_service.generate_track_links_batch(
        album_id=request.album_id,
        album_name=request.album_name,
        artist_name=request.artist_name,
        tracks=track_payload,
        cover_url=request.cover_url,
    )
    return YouTubeTrackLinkBatchResponse(
        track_links=generated,
        failed=failed,
        quota=youtube_service.get_quota_status(),
    )


@router.get("/track-links/{album_id}", response_model=list[YouTubeTrackLink])
async def get_track_links(
    album_id: str,
    youtube_service: YouTubeServiceDep,
) -> list[YouTubeTrackLink]:
    """Return every stored track-level link for *album_id*."""
    stored = await youtube_service.get_track_links(album_id)
    return stored


@router.delete("/track-link/{album_id}/{track_number}", status_code=204, deprecated=True)
async def delete_track_link_legacy(
    album_id: str,
    track_number: int,
    youtube_service: YouTubeServiceDep,
) -> None:
    """Deprecated single-disc variant: deletes with disc_number pinned to 1."""
    await youtube_service.delete_track_link(album_id, 1, track_number)


@router.delete("/track-link/{album_id}/{disc_number}/{track_number}", status_code=204)
async def delete_track_link(
    album_id: str,
    disc_number: int,
    track_number: int,
    youtube_service: YouTubeServiceDep,
) -> None:
    """Delete the stored link for one track, addressed by disc and track number."""
    await youtube_service.delete_track_link(album_id, disc_number, track_number)
|
||||
|
||||
|
||||
@router.get("/quota", response_model=YouTubeQuotaResponse)
|
||||
async def get_quota(
|
||||
youtube_service: YouTubeServiceDep,
|
||||
) -> YouTubeQuotaResponse:
|
||||
return youtube_service.get_quota_status()
|
||||
0
backend/api/v1/schemas/__init__.py
Normal file
529
backend/api/v1/schemas/advanced_settings.py
Normal file
|
|
@ -0,0 +1,529 @@
|
|||
import msgspec
|
||||
|
||||
from infrastructure.msgspec_fastapi import AppStruct
|
||||
|
||||
|
||||
def _validate_range(value: int | float, field_name: str, minimum: int | float, maximum: int | float) -> None:
|
||||
if value < minimum or value > maximum:
|
||||
raise msgspec.ValidationError(f"{field_name} must be between {minimum} and {maximum}")
|
||||
|
||||
|
||||
def _coerce_positive_int(value: object, field_name: str) -> int:
|
||||
if value is None:
|
||||
raise msgspec.ValidationError(f"{field_name} cannot be null")
|
||||
try:
|
||||
result = int(float(value))
|
||||
except (TypeError, ValueError) as exc:
|
||||
raise msgspec.ValidationError(f"Invalid integer value for {field_name}: {value}") from exc
|
||||
if result <= 0:
|
||||
raise msgspec.ValidationError(f"{field_name} must be positive")
|
||||
return result
|
||||
|
||||
|
||||
def _mask_api_key(key: str) -> str:
|
||||
if len(key) > 3:
|
||||
return f"***…{key[-3:]}"
|
||||
return "***"
|
||||
|
||||
|
||||
def _is_masked_api_key(value: str) -> bool:
|
||||
# Known limitation: a real key starting with "***" would be treated as masked
|
||||
# and discarded on save, preserving the previous key instead.
|
||||
return value.startswith("***")
|
||||
|
||||
|
||||
class AdvancedSettings(AppStruct):
    """Backend-unit advanced settings (the persisted representation).

    Units follow the conversion factors used by AdvancedSettingsFrontend:
    cache_ttl_* and most intervals are seconds, frontend_ttl_* are
    milliseconds, *_hours are hours, *_days are days. Every field is
    range-checked in __post_init__.
    """

    # Metadata cache TTLs (seconds).
    cache_ttl_album_library: int = 86400
    cache_ttl_album_non_library: int = 21600
    cache_ttl_artist_library: int = 21600
    cache_ttl_artist_non_library: int = 21600
    cache_ttl_artist_discovery_library: int = 21600
    cache_ttl_artist_discovery_non_library: int = 3600
    cache_ttl_search: int = 3600
    cache_ttl_local_files_recently_added: int = 120
    cache_ttl_local_files_storage_stats: int = 300
    cache_ttl_jellyfin_recently_played: int = 300
    cache_ttl_jellyfin_favorites: int = 300
    cache_ttl_jellyfin_genres: int = 3600
    cache_ttl_jellyfin_library_stats: int = 600
    cache_ttl_navidrome_albums: int = 300
    cache_ttl_navidrome_artists: int = 300
    cache_ttl_navidrome_recent: int = 120
    cache_ttl_navidrome_favorites: int = 120
    cache_ttl_navidrome_search: int = 120
    cache_ttl_navidrome_genres: int = 3600
    cache_ttl_navidrome_stats: int = 600
    # HTTP client tuning.
    http_timeout: int = 10
    http_connect_timeout: int = 5
    http_max_connections: int = 200
    # Batch sizes and inter-batch delays (seconds) for warm-up jobs.
    batch_artist_images: int = 5
    batch_albums: int = 3
    delay_artist: float = 0.5
    delay_albums: float = 1.0
    artist_discovery_warm_interval: int = 14400
    artist_discovery_warm_delay: float = 0.5
    artist_discovery_precache_delay: float = 0.3
    # In-memory / disk cache sizing.
    memory_cache_max_entries: int = 10000
    memory_cache_cleanup_interval: int = 300
    cover_memory_cache_max_entries: int = 128
    cover_memory_cache_max_size_mb: int = 16
    disk_cache_cleanup_interval: int = 600
    recent_metadata_max_size_mb: int = 500
    recent_covers_max_size_mb: int = 1024
    persistent_metadata_ttl_hours: int = 24
    musicbrainz_concurrent_searches: int = 6
    # Discover-queue behaviour (polling interval is milliseconds).
    discover_queue_size: int = 10
    discover_queue_ttl: int = 86400
    discover_queue_auto_generate: bool = True
    discover_queue_polling_interval: int = 4000
    discover_queue_warm_cycle_build: bool = True
    discover_queue_seed_artists: int = 3
    discover_queue_wildcard_slots: int = 2
    discover_queue_similar_artists_limit: int = 15
    discover_queue_albums_per_similar: int = 5
    discover_queue_enrich_ttl: int = 86400
    discover_queue_lastfm_mbid_max_lookups: int = 10
    # Frontend cache TTLs (milliseconds).
    frontend_ttl_home: int = 300000
    frontend_ttl_discover: int = 1800000
    frontend_ttl_library: int = 300000
    frontend_ttl_recently_added: int = 300000
    frontend_ttl_discover_queue: int = 86400000
    frontend_ttl_search: int = 300000
    frontend_ttl_local_files_sidebar: int = 120000
    frontend_ttl_jellyfin_sidebar: int = 120000
    frontend_ttl_playlist_sources: int = 900000
    # TheAudioDB integration ("123" is the public shared key).
    audiodb_enabled: bool = True
    audiodb_name_search_fallback: bool = False
    direct_remote_images_enabled: bool = True
    audiodb_api_key: str = "123"
    cache_ttl_audiodb_found: int = 604800
    cache_ttl_audiodb_not_found: int = 86400
    cache_ttl_audiodb_library: int = 1209600
    cache_ttl_recently_viewed_bytes: int = 172800
    genre_section_ttl: int = 21600
    # Housekeeping/retention schedules.
    request_history_retention_days: int = 180
    ignored_releases_retention_days: int = 365
    orphan_cover_demote_interval_hours: int = 24
    store_prune_interval_hours: int = 6

    def __post_init__(self) -> None:
        # An empty/blank key falls back to TheAudioDB's public test key.
        if not self.audiodb_api_key or not self.audiodb_api_key.strip():
            self.audiodb_api_key = "123"
        # Inclusive (minimum, maximum) bounds per numeric field; any value
        # outside its range raises msgspec.ValidationError via _validate_range.
        ranges: dict[str, tuple[int | float, int | float]] = {
            "cache_ttl_album_library": (3600, 604800),
            "cache_ttl_album_non_library": (60, 86400),
            "cache_ttl_artist_library": (3600, 604800),
            "cache_ttl_artist_non_library": (3600, 604800),
            "cache_ttl_artist_discovery_library": (3600, 604800),
            "cache_ttl_artist_discovery_non_library": (3600, 604800),
            "cache_ttl_search": (60, 86400),
            "cache_ttl_local_files_recently_added": (60, 3600),
            "cache_ttl_local_files_storage_stats": (60, 3600),
            "cache_ttl_jellyfin_recently_played": (60, 3600),
            "cache_ttl_jellyfin_favorites": (60, 3600),
            "cache_ttl_jellyfin_genres": (60, 86400),
            "cache_ttl_jellyfin_library_stats": (60, 3600),
            "cache_ttl_navidrome_albums": (60, 3600),
            "cache_ttl_navidrome_artists": (60, 3600),
            "cache_ttl_navidrome_recent": (60, 3600),
            "cache_ttl_navidrome_favorites": (60, 3600),
            "cache_ttl_navidrome_search": (60, 3600),
            "cache_ttl_navidrome_genres": (60, 86400),
            "cache_ttl_navidrome_stats": (60, 3600),
            "http_timeout": (5, 60),
            "http_connect_timeout": (1, 30),
            "http_max_connections": (50, 500),
            "batch_artist_images": (1, 20),
            "batch_albums": (1, 20),
            "delay_artist": (0.0, 5.0),
            "delay_albums": (0.0, 5.0),
            "artist_discovery_warm_interval": (300, 604800),
            "artist_discovery_warm_delay": (0.0, 5.0),
            "artist_discovery_precache_delay": (0.0, 5.0),
            "memory_cache_max_entries": (1000, 100000),
            "memory_cache_cleanup_interval": (60, 3600),
            "cover_memory_cache_max_entries": (16, 2048),
            "cover_memory_cache_max_size_mb": (1, 1024),
            "disk_cache_cleanup_interval": (60, 3600),
            "recent_metadata_max_size_mb": (100, 5000),
            "recent_covers_max_size_mb": (100, 10000),
            "persistent_metadata_ttl_hours": (1, 168),
            "musicbrainz_concurrent_searches": (2, 10),
            "discover_queue_size": (1, 20),
            "discover_queue_ttl": (3600, 604800),
            "discover_queue_polling_interval": (1000, 30000),
            "discover_queue_seed_artists": (1, 10),
            "discover_queue_wildcard_slots": (0, 10),
            "discover_queue_similar_artists_limit": (5, 50),
            "discover_queue_albums_per_similar": (1, 20),
            "discover_queue_enrich_ttl": (3600, 604800),
            "discover_queue_lastfm_mbid_max_lookups": (1, 50),
            "frontend_ttl_home": (60000, 3600000),
            "frontend_ttl_discover": (60000, 86400000),
            "frontend_ttl_library": (60000, 3600000),
            "frontend_ttl_recently_added": (60000, 3600000),
            "frontend_ttl_discover_queue": (3600000, 604800000),
            "frontend_ttl_search": (60000, 3600000),
            "frontend_ttl_local_files_sidebar": (60000, 3600000),
            "frontend_ttl_jellyfin_sidebar": (60000, 3600000),
            "frontend_ttl_playlist_sources": (60000, 3600000),
            "cache_ttl_audiodb_found": (3600, 2592000),
            "cache_ttl_audiodb_not_found": (3600, 604800),
            "cache_ttl_audiodb_library": (86400, 2592000),
            "cache_ttl_recently_viewed_bytes": (3600, 604800),
            "genre_section_ttl": (3600, 604800),
            "request_history_retention_days": (30, 3650),
            "ignored_releases_retention_days": (30, 3650),
            "orphan_cover_demote_interval_hours": (1, 168),
            "store_prune_interval_hours": (1, 168),
        }
        for field_name, (minimum, maximum) in ranges.items():
            _validate_range(getattr(self, field_name), field_name, minimum, maximum)
|
||||
|
||||
|
||||
class FrontendCacheTTLs(AppStruct):
    """Cache TTLs delivered to the frontend, in milliseconds.

    Mirrors the frontend_ttl_* / discover_queue polling fields of
    AdvancedSettings in a compact payload for the client.
    """

    home: int = 300000
    discover: int = 1800000
    library: int = 300000
    recently_added: int = 300000
    discover_queue: int = 86400000
    search: int = 300000
    local_files_sidebar: int = 120000
    jellyfin_sidebar: int = 120000
    playlist_sources: int = 900000
    # Polling interval for the discover queue, milliseconds.
    discover_queue_polling_interval: int = 4000
    discover_queue_auto_generate: bool = True
|
||||
|
||||
|
||||
class AdvancedSettingsFrontend(AppStruct):
    """Human-friendly advanced settings as shown/edited in the UI.

    Same fields as AdvancedSettings but in coarser units (hours/minutes/
    seconds instead of seconds/milliseconds — see the conversion factors
    in from_backend/to_backend). Values arriving from the UI are coerced
    to positive ints where needed, then range-checked, in __post_init__.
    """

    # Metadata cache TTLs: hours for album/artist, minutes for the rest.
    cache_ttl_album_library: int = 24
    cache_ttl_album_non_library: int = 6
    cache_ttl_artist_library: int = 6
    cache_ttl_artist_non_library: int = 6
    cache_ttl_artist_discovery_library: int = 6
    cache_ttl_artist_discovery_non_library: int = 1
    cache_ttl_search: int = 60
    cache_ttl_local_files_recently_added: int = 2
    cache_ttl_local_files_storage_stats: int = 5
    cache_ttl_jellyfin_recently_played: int = 5
    cache_ttl_jellyfin_favorites: int = 5
    cache_ttl_jellyfin_genres: int = 60
    cache_ttl_jellyfin_library_stats: int = 10
    cache_ttl_navidrome_albums: int = 5
    cache_ttl_navidrome_artists: int = 5
    cache_ttl_navidrome_recent: int = 2
    cache_ttl_navidrome_favorites: int = 2
    cache_ttl_navidrome_search: int = 2
    cache_ttl_navidrome_genres: int = 60
    cache_ttl_navidrome_stats: int = 10
    # Same units as the backend (seconds / counts).
    http_timeout: int = 10
    http_connect_timeout: int = 5
    http_max_connections: int = 200
    batch_artist_images: int = 5
    batch_albums: int = 3
    delay_artist: float = 0.5
    delay_albums: float = 1.0
    # Minutes (backend stores seconds).
    artist_discovery_warm_interval: int = 240
    artist_discovery_warm_delay: float = 0.5
    artist_discovery_precache_delay: float = 0.3
    memory_cache_max_entries: int = 10000
    memory_cache_cleanup_interval: int = 300
    cover_memory_cache_max_entries: int = 128
    cover_memory_cache_max_size_mb: int = 16
    # Minutes (backend stores seconds).
    disk_cache_cleanup_interval: int = 10
    recent_metadata_max_size_mb: int = 500
    recent_covers_max_size_mb: int = 1024
    persistent_metadata_ttl_hours: int = 24
    musicbrainz_concurrent_searches: int = 6
    discover_queue_size: int = 10
    # Hours (backend stores seconds).
    discover_queue_ttl: int = 24
    discover_queue_auto_generate: bool = True
    # Seconds (backend stores milliseconds).
    discover_queue_polling_interval: int = 4
    discover_queue_warm_cycle_build: bool = True
    discover_queue_seed_artists: int = 3
    discover_queue_wildcard_slots: int = 2
    discover_queue_similar_artists_limit: int = 15
    discover_queue_albums_per_similar: int = 5
    discover_queue_enrich_ttl: int = 24
    discover_queue_lastfm_mbid_max_lookups: int = 10
    # Minutes (backend stores milliseconds).
    frontend_ttl_home: int = 5
    frontend_ttl_discover: int = 30
    frontend_ttl_library: int = 5
    frontend_ttl_recently_added: int = 5
    frontend_ttl_discover_queue: int = 1440
    frontend_ttl_search: int = 5
    frontend_ttl_local_files_sidebar: int = 2
    frontend_ttl_jellyfin_sidebar: int = 2
    frontend_ttl_playlist_sources: int = 15
    audiodb_enabled: bool = True
    audiodb_name_search_fallback: bool = False
    direct_remote_images_enabled: bool = True
    # Masked on the way out (see from_backend); "123" is the public key.
    audiodb_api_key: str = "123"
    # Hours (backend stores seconds).
    cache_ttl_audiodb_found: int = 168
    cache_ttl_audiodb_not_found: int = 24
    cache_ttl_audiodb_library: int = 336
    cache_ttl_recently_viewed_bytes: int = 48
    genre_section_ttl: int = 6
    request_history_retention_days: int = 180
    ignored_releases_retention_days: int = 365
    orphan_cover_demote_interval_hours: int = 24
    store_prune_interval_hours: int = 6

    def __post_init__(self) -> None:
        # UI form values may arrive as numeric strings or floats; coerce
        # these TTL/retention fields to positive ints before range checks.
        int_coerce_fields = [
            "cache_ttl_album_library",
            "cache_ttl_album_non_library",
            "cache_ttl_artist_library",
            "cache_ttl_artist_non_library",
            "cache_ttl_artist_discovery_library",
            "cache_ttl_artist_discovery_non_library",
            "cache_ttl_search",
            "cache_ttl_local_files_recently_added",
            "cache_ttl_local_files_storage_stats",
            "cache_ttl_jellyfin_recently_played",
            "cache_ttl_jellyfin_favorites",
            "cache_ttl_jellyfin_genres",
            "cache_ttl_jellyfin_library_stats",
            "cache_ttl_navidrome_albums",
            "cache_ttl_navidrome_artists",
            "cache_ttl_navidrome_recent",
            "cache_ttl_navidrome_favorites",
            "cache_ttl_navidrome_search",
            "cache_ttl_navidrome_genres",
            "cache_ttl_navidrome_stats",
            "cache_ttl_audiodb_found",
            "cache_ttl_audiodb_not_found",
            "cache_ttl_audiodb_library",
            "cache_ttl_recently_viewed_bytes",
            "genre_section_ttl",
            "request_history_retention_days",
            "ignored_releases_retention_days",
            "orphan_cover_demote_interval_hours",
            "store_prune_interval_hours",
        ]
        for field_name in int_coerce_fields:
            setattr(self, field_name, _coerce_positive_int(getattr(self, field_name), field_name))

        # Inclusive bounds in UI units (hours/minutes/seconds as per field).
        ranges: dict[str, tuple[int | float, int | float]] = {
            "cache_ttl_album_library": (1, 168),
            "cache_ttl_album_non_library": (1, 24),
            "cache_ttl_artist_library": (1, 168),
            "cache_ttl_artist_non_library": (1, 168),
            "cache_ttl_artist_discovery_library": (1, 168),
            "cache_ttl_artist_discovery_non_library": (1, 168),
            "cache_ttl_search": (1, 1440),
            "cache_ttl_local_files_recently_added": (1, 60),
            "cache_ttl_local_files_storage_stats": (1, 60),
            "cache_ttl_jellyfin_recently_played": (1, 60),
            "cache_ttl_jellyfin_favorites": (1, 60),
            "cache_ttl_jellyfin_genres": (1, 1440),
            "cache_ttl_jellyfin_library_stats": (1, 60),
            "cache_ttl_navidrome_albums": (1, 60),
            "cache_ttl_navidrome_artists": (1, 60),
            "cache_ttl_navidrome_recent": (1, 60),
            "cache_ttl_navidrome_favorites": (1, 60),
            "cache_ttl_navidrome_search": (1, 60),
            "cache_ttl_navidrome_genres": (1, 1440),
            "cache_ttl_navidrome_stats": (1, 60),
            "http_timeout": (5, 60),
            "http_connect_timeout": (1, 30),
            "http_max_connections": (50, 500),
            "batch_artist_images": (1, 20),
            "batch_albums": (1, 20),
            "delay_artist": (0.0, 5.0),
            "delay_albums": (0.0, 5.0),
            "artist_discovery_warm_interval": (5, 10080),
            "artist_discovery_warm_delay": (0.0, 5.0),
            "artist_discovery_precache_delay": (0.0, 5.0),
            "memory_cache_max_entries": (1000, 100000),
            "memory_cache_cleanup_interval": (60, 3600),
            "cover_memory_cache_max_entries": (16, 2048),
            "cover_memory_cache_max_size_mb": (1, 1024),
            "disk_cache_cleanup_interval": (1, 60),
            "recent_metadata_max_size_mb": (100, 5000),
            "recent_covers_max_size_mb": (100, 10000),
            "persistent_metadata_ttl_hours": (1, 168),
            "musicbrainz_concurrent_searches": (2, 10),
            "discover_queue_size": (1, 20),
            "discover_queue_ttl": (1, 168),
            "discover_queue_polling_interval": (1, 30),
            "discover_queue_seed_artists": (1, 10),
            "discover_queue_wildcard_slots": (0, 10),
            "discover_queue_similar_artists_limit": (5, 50),
            "discover_queue_albums_per_similar": (1, 20),
            "discover_queue_enrich_ttl": (1, 168),
            "discover_queue_lastfm_mbid_max_lookups": (1, 50),
            "frontend_ttl_home": (1, 60),
            "frontend_ttl_discover": (1, 1440),
            "frontend_ttl_library": (1, 60),
            "frontend_ttl_recently_added": (1, 60),
            "frontend_ttl_discover_queue": (60, 10080),
            "frontend_ttl_search": (1, 60),
            "frontend_ttl_local_files_sidebar": (1, 60),
            "frontend_ttl_jellyfin_sidebar": (1, 60),
            "frontend_ttl_playlist_sources": (1, 60),
            "cache_ttl_audiodb_found": (1, 720),
            "cache_ttl_audiodb_not_found": (1, 168),
            "cache_ttl_audiodb_library": (24, 720),
            "cache_ttl_recently_viewed_bytes": (1, 168),
            "genre_section_ttl": (1, 168),
            "request_history_retention_days": (30, 3650),
            "ignored_releases_retention_days": (30, 3650),
            "orphan_cover_demote_interval_hours": (1, 168),
            "store_prune_interval_hours": (1, 168),
        }
        for field_name, (minimum, maximum) in ranges.items():
            _validate_range(getattr(self, field_name), field_name, minimum, maximum)

    @staticmethod
    def from_backend(settings: AdvancedSettings) -> "AdvancedSettingsFrontend":
        """Convert backend units to UI units; masks the AudioDB API key.

        Note: integer floor division means round-trips lose sub-unit
        precision (e.g. 90s search TTL -> 1 minute -> 60s).
        """
        return AdvancedSettingsFrontend(
            cache_ttl_album_library=settings.cache_ttl_album_library // 3600,
            cache_ttl_album_non_library=settings.cache_ttl_album_non_library // 3600,
            cache_ttl_artist_library=settings.cache_ttl_artist_library // 3600,
            cache_ttl_artist_non_library=settings.cache_ttl_artist_non_library // 3600,
            cache_ttl_artist_discovery_library=settings.cache_ttl_artist_discovery_library // 3600,
            cache_ttl_artist_discovery_non_library=settings.cache_ttl_artist_discovery_non_library // 3600,
            cache_ttl_search=settings.cache_ttl_search // 60,
            cache_ttl_local_files_recently_added=settings.cache_ttl_local_files_recently_added // 60,
            cache_ttl_local_files_storage_stats=settings.cache_ttl_local_files_storage_stats // 60,
            cache_ttl_jellyfin_recently_played=settings.cache_ttl_jellyfin_recently_played // 60,
            cache_ttl_jellyfin_favorites=settings.cache_ttl_jellyfin_favorites // 60,
            cache_ttl_jellyfin_genres=settings.cache_ttl_jellyfin_genres // 60,
            cache_ttl_jellyfin_library_stats=settings.cache_ttl_jellyfin_library_stats // 60,
            cache_ttl_navidrome_albums=settings.cache_ttl_navidrome_albums // 60,
            cache_ttl_navidrome_artists=settings.cache_ttl_navidrome_artists // 60,
            cache_ttl_navidrome_recent=settings.cache_ttl_navidrome_recent // 60,
            cache_ttl_navidrome_favorites=settings.cache_ttl_navidrome_favorites // 60,
            cache_ttl_navidrome_search=settings.cache_ttl_navidrome_search // 60,
            cache_ttl_navidrome_genres=settings.cache_ttl_navidrome_genres // 60,
            cache_ttl_navidrome_stats=settings.cache_ttl_navidrome_stats // 60,
            http_timeout=settings.http_timeout,
            http_connect_timeout=settings.http_connect_timeout,
            http_max_connections=settings.http_max_connections,
            batch_artist_images=settings.batch_artist_images,
            batch_albums=settings.batch_albums,
            delay_artist=settings.delay_artist,
            delay_albums=settings.delay_albums,
            artist_discovery_warm_interval=settings.artist_discovery_warm_interval // 60,
            artist_discovery_warm_delay=settings.artist_discovery_warm_delay,
            artist_discovery_precache_delay=settings.artist_discovery_precache_delay,
            memory_cache_max_entries=settings.memory_cache_max_entries,
            memory_cache_cleanup_interval=settings.memory_cache_cleanup_interval,
            cover_memory_cache_max_entries=settings.cover_memory_cache_max_entries,
            cover_memory_cache_max_size_mb=settings.cover_memory_cache_max_size_mb,
            disk_cache_cleanup_interval=settings.disk_cache_cleanup_interval // 60,
            recent_metadata_max_size_mb=settings.recent_metadata_max_size_mb,
            recent_covers_max_size_mb=settings.recent_covers_max_size_mb,
            persistent_metadata_ttl_hours=settings.persistent_metadata_ttl_hours,
            musicbrainz_concurrent_searches=settings.musicbrainz_concurrent_searches,
            discover_queue_size=settings.discover_queue_size,
            discover_queue_ttl=settings.discover_queue_ttl // 3600,
            discover_queue_auto_generate=settings.discover_queue_auto_generate,
            discover_queue_polling_interval=settings.discover_queue_polling_interval // 1000,
            discover_queue_warm_cycle_build=settings.discover_queue_warm_cycle_build,
            discover_queue_seed_artists=settings.discover_queue_seed_artists,
            discover_queue_wildcard_slots=settings.discover_queue_wildcard_slots,
            discover_queue_similar_artists_limit=settings.discover_queue_similar_artists_limit,
            discover_queue_albums_per_similar=settings.discover_queue_albums_per_similar,
            discover_queue_enrich_ttl=settings.discover_queue_enrich_ttl // 3600,
            discover_queue_lastfm_mbid_max_lookups=settings.discover_queue_lastfm_mbid_max_lookups,
            frontend_ttl_home=settings.frontend_ttl_home // 60000,
            frontend_ttl_discover=settings.frontend_ttl_discover // 60000,
            frontend_ttl_library=settings.frontend_ttl_library // 60000,
            frontend_ttl_recently_added=settings.frontend_ttl_recently_added // 60000,
            frontend_ttl_discover_queue=settings.frontend_ttl_discover_queue // 60000,
            frontend_ttl_search=settings.frontend_ttl_search // 60000,
            frontend_ttl_local_files_sidebar=settings.frontend_ttl_local_files_sidebar // 60000,
            frontend_ttl_jellyfin_sidebar=settings.frontend_ttl_jellyfin_sidebar // 60000,
            frontend_ttl_playlist_sources=settings.frontend_ttl_playlist_sources // 60000,
            audiodb_enabled=settings.audiodb_enabled,
            audiodb_name_search_fallback=settings.audiodb_name_search_fallback,
            direct_remote_images_enabled=settings.direct_remote_images_enabled,
            # Never send the real key to the client.
            audiodb_api_key=_mask_api_key(settings.audiodb_api_key),
            cache_ttl_audiodb_found=settings.cache_ttl_audiodb_found // 3600,
            cache_ttl_audiodb_not_found=settings.cache_ttl_audiodb_not_found // 3600,
            cache_ttl_audiodb_library=settings.cache_ttl_audiodb_library // 3600,
            cache_ttl_recently_viewed_bytes=settings.cache_ttl_recently_viewed_bytes // 3600,
            genre_section_ttl=settings.genre_section_ttl // 3600,
            request_history_retention_days=settings.request_history_retention_days,
            ignored_releases_retention_days=settings.ignored_releases_retention_days,
            orphan_cover_demote_interval_hours=settings.orphan_cover_demote_interval_hours,
            store_prune_interval_hours=settings.store_prune_interval_hours,
        )

    def to_backend(self) -> AdvancedSettings:
        """Convert UI units back to backend units.

        The audiodb_api_key is passed through as-is here; if it is still a
        mask ("***..."), the caller is expected to handle that before save.
        """
        return AdvancedSettings(
            cache_ttl_album_library=self.cache_ttl_album_library * 3600,
            cache_ttl_album_non_library=self.cache_ttl_album_non_library * 3600,
            cache_ttl_artist_library=self.cache_ttl_artist_library * 3600,
            cache_ttl_artist_non_library=self.cache_ttl_artist_non_library * 3600,
            cache_ttl_artist_discovery_library=self.cache_ttl_artist_discovery_library * 3600,
            cache_ttl_artist_discovery_non_library=self.cache_ttl_artist_discovery_non_library * 3600,
            cache_ttl_search=self.cache_ttl_search * 60,
            cache_ttl_local_files_recently_added=self.cache_ttl_local_files_recently_added * 60,
            cache_ttl_local_files_storage_stats=self.cache_ttl_local_files_storage_stats * 60,
            cache_ttl_jellyfin_recently_played=self.cache_ttl_jellyfin_recently_played * 60,
            cache_ttl_jellyfin_favorites=self.cache_ttl_jellyfin_favorites * 60,
            cache_ttl_jellyfin_genres=self.cache_ttl_jellyfin_genres * 60,
            cache_ttl_jellyfin_library_stats=self.cache_ttl_jellyfin_library_stats * 60,
            cache_ttl_navidrome_albums=self.cache_ttl_navidrome_albums * 60,
            cache_ttl_navidrome_artists=self.cache_ttl_navidrome_artists * 60,
            cache_ttl_navidrome_recent=self.cache_ttl_navidrome_recent * 60,
            cache_ttl_navidrome_favorites=self.cache_ttl_navidrome_favorites * 60,
            cache_ttl_navidrome_search=self.cache_ttl_navidrome_search * 60,
            cache_ttl_navidrome_genres=self.cache_ttl_navidrome_genres * 60,
            cache_ttl_navidrome_stats=self.cache_ttl_navidrome_stats * 60,
            http_timeout=self.http_timeout,
            http_connect_timeout=self.http_connect_timeout,
            http_max_connections=self.http_max_connections,
            batch_artist_images=self.batch_artist_images,
            batch_albums=self.batch_albums,
            delay_artist=self.delay_artist,
            delay_albums=self.delay_albums,
            artist_discovery_warm_interval=self.artist_discovery_warm_interval * 60,
            artist_discovery_warm_delay=self.artist_discovery_warm_delay,
            artist_discovery_precache_delay=self.artist_discovery_precache_delay,
            memory_cache_max_entries=self.memory_cache_max_entries,
            memory_cache_cleanup_interval=self.memory_cache_cleanup_interval,
            cover_memory_cache_max_entries=self.cover_memory_cache_max_entries,
            cover_memory_cache_max_size_mb=self.cover_memory_cache_max_size_mb,
            disk_cache_cleanup_interval=self.disk_cache_cleanup_interval * 60,
            recent_metadata_max_size_mb=self.recent_metadata_max_size_mb,
            recent_covers_max_size_mb=self.recent_covers_max_size_mb,
            persistent_metadata_ttl_hours=self.persistent_metadata_ttl_hours,
            musicbrainz_concurrent_searches=self.musicbrainz_concurrent_searches,
            discover_queue_size=self.discover_queue_size,
            discover_queue_ttl=self.discover_queue_ttl * 3600,
            discover_queue_auto_generate=self.discover_queue_auto_generate,
            discover_queue_polling_interval=self.discover_queue_polling_interval * 1000,
            discover_queue_warm_cycle_build=self.discover_queue_warm_cycle_build,
            discover_queue_seed_artists=self.discover_queue_seed_artists,
            discover_queue_wildcard_slots=self.discover_queue_wildcard_slots,
            discover_queue_similar_artists_limit=self.discover_queue_similar_artists_limit,
            discover_queue_albums_per_similar=self.discover_queue_albums_per_similar,
            discover_queue_enrich_ttl=self.discover_queue_enrich_ttl * 3600,
            discover_queue_lastfm_mbid_max_lookups=self.discover_queue_lastfm_mbid_max_lookups,
            frontend_ttl_home=self.frontend_ttl_home * 60000,
            frontend_ttl_discover=self.frontend_ttl_discover * 60000,
            frontend_ttl_library=self.frontend_ttl_library * 60000,
            frontend_ttl_recently_added=self.frontend_ttl_recently_added * 60000,
            frontend_ttl_discover_queue=self.frontend_ttl_discover_queue * 60000,
            frontend_ttl_search=self.frontend_ttl_search * 60000,
            frontend_ttl_local_files_sidebar=self.frontend_ttl_local_files_sidebar * 60000,
            frontend_ttl_jellyfin_sidebar=self.frontend_ttl_jellyfin_sidebar * 60000,
            frontend_ttl_playlist_sources=self.frontend_ttl_playlist_sources * 60000,
            audiodb_enabled=self.audiodb_enabled,
            audiodb_name_search_fallback=self.audiodb_name_search_fallback,
            direct_remote_images_enabled=self.direct_remote_images_enabled,
            audiodb_api_key=self.audiodb_api_key,
            cache_ttl_audiodb_found=self.cache_ttl_audiodb_found * 3600,
            cache_ttl_audiodb_not_found=self.cache_ttl_audiodb_not_found * 3600,
            cache_ttl_audiodb_library=self.cache_ttl_audiodb_library * 3600,
            cache_ttl_recently_viewed_bytes=self.cache_ttl_recently_viewed_bytes * 3600,
            genre_section_ttl=self.genre_section_ttl * 3600,
            request_history_retention_days=self.request_history_retention_days,
            ignored_releases_retention_days=self.ignored_releases_retention_days,
            orphan_cover_demote_interval_hours=self.orphan_cover_demote_interval_hours,
            store_prune_interval_hours=self.store_prune_interval_hours,
        )
|
||||
38
backend/api/v1/schemas/album.py
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
from api.v1.schemas.common import LastFmTagSchema
|
||||
from models.album import AlbumInfo as AlbumInfo
|
||||
from models.album import Track as Track
|
||||
from infrastructure.msgspec_fastapi import AppStruct
|
||||
|
||||
|
||||
class AlbumBasicInfo(AppStruct):
    """Minimal album info for fast initial load - no tracks."""
    # Required identity fields.
    title: str
    musicbrainz_id: str
    artist_name: str
    artist_id: str
    release_date: str | None = None
    year: int | None = None
    # Release group type, e.g. album/single/EP — confirm against producer.
    type: str | None = None
    disambiguation: str | None = None
    # Library/request state flags used by the UI.
    in_library: bool = False
    requested: bool = False
    cover_url: str | None = None
    album_thumb_url: str | None = None
|
||||
|
||||
|
||||
class AlbumTracksInfo(AppStruct):
    """Track list and extended details - loaded asynchronously."""
    tracks: list[Track] = []
    total_tracks: int = 0
    # Total runtime; unit not established here — presumably ms/s, confirm producer.
    total_length: int | None = None
    label: str | None = None
    barcode: str | None = None
    country: str | None = None
|
||||
|
||||
|
||||
class LastFmAlbumEnrichment(AppStruct):
    """Optional Last.fm metadata attached to an album response."""
    summary: str | None = None
    tags: list[LastFmTagSchema] = []
    listeners: int = 0
    playcount: int = 0
    # Last.fm album page URL.
    url: str | None = None
|
||||
36
backend/api/v1/schemas/artist.py
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
from api.v1.schemas.common import LastFmTagSchema
|
||||
from models.artist import ArtistInfo as ArtistInfo
|
||||
from models.artist import ExternalLink as ExternalLink
|
||||
from models.artist import LifeSpan as LifeSpan
|
||||
from models.artist import ReleaseItem as ReleaseItem
|
||||
from infrastructure.msgspec_fastapi import AppStruct
|
||||
|
||||
|
||||
class ArtistExtendedInfo(AppStruct):
    """Extended artist details fetched separately from the core info."""
    description: str | None = None
    image: str | None = None
|
||||
|
||||
|
||||
class ArtistReleases(AppStruct):
    """An artist's releases grouped by type, with pagination hints."""
    albums: list[ReleaseItem] = []
    singles: list[ReleaseItem] = []
    eps: list[ReleaseItem] = []
    total_count: int = 0
    # True when more releases exist beyond the returned page.
    has_more: bool = False
|
||||
|
||||
|
||||
class LastFmSimilarArtistSchema(AppStruct):
    """One similar-artist entry from Last.fm."""
    name: str
    mbid: str | None = None
    # Similarity score as reported by Last.fm.
    match: float = 0.0
    url: str | None = None
|
||||
|
||||
|
||||
class LastFmArtistEnrichment(AppStruct):
|
||||
bio: str | None = None
|
||||
summary: str | None = None
|
||||
tags: list[LastFmTagSchema] = []
|
||||
listeners: int = 0
|
||||
playcount: int = 0
|
||||
similar_artists: list[LastFmSimilarArtistSchema] = []
|
||||
url: str | None = None
|
||||
31
backend/api/v1/schemas/cache.py
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
"""Schemas for cache statistics and cache-clearing responses."""

from infrastructure.msgspec_fastapi import AppStruct


class CacheStats(AppStruct):
    """Aggregate size/count statistics across the in-memory cache, on-disk
    caches, and the library database."""

    # In-memory cache.
    memory_entries: int
    memory_size_bytes: int
    memory_size_mb: float
    # On-disk metadata cache, broken down by entity type.
    disk_metadata_count: int
    disk_metadata_albums: int
    disk_metadata_artists: int
    # On-disk cover-art cache.
    disk_cover_count: int
    disk_cover_size_bytes: int
    disk_cover_size_mb: float
    # Library database contents and file size.
    library_db_artist_count: int
    library_db_album_count: int
    library_db_size_bytes: int
    library_db_size_mb: float
    # Grand totals across all of the above.
    total_size_bytes: int
    total_size_mb: float
    # Unix timestamp of the last library sync, if one has happened.
    library_db_last_sync: int | None = None
    # TheAudioDB cache counts (added later; default 0 for backward compatibility).
    disk_audiodb_artist_count: int = 0
    disk_audiodb_album_count: int = 0


class CacheClearResponse(AppStruct):
    """Result of a cache-clear operation, with per-store counts of what was removed."""

    success: bool
    message: str
    cleared_memory_entries: int = 0
    cleared_disk_files: int = 0
    cleared_library_artists: int = 0
    cleared_library_albums: int = 0
|
||||
16
backend/api/v1/schemas/cache_status.py
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
"""Schema reporting the progress of a background cache/library sync."""

from infrastructure.msgspec_fastapi import AppStruct


class CacheSyncStatus(AppStruct):
    """Snapshot of an in-flight (or idle) sync job, suitable for polling."""

    is_syncing: bool
    # Human/machine-readable phase name; None when idle.
    phase: str | None = None
    # Generic progress counters for the current phase.
    total_items: int = 0
    processed_items: int = 0
    progress_percent: int = 0
    # Label of the item currently being processed, if any.
    current_item: str | None = None
    # Unix timestamp (float) when the sync started.
    started_at: float | None = None
    error_message: str | None = None
    # Entity-specific counters (artists vs. albums).
    total_artists: int = 0
    processed_artists: int = 0
    total_albums: int = 0
    processed_albums: int = 0
|
||||
37
backend/api/v1/schemas/common.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
"""Shared schemas used across multiple API routers (status, integrations, tags)."""

from typing import Literal

# Re-export: ServiceStatus is part of this module's public surface.
from models.common import ServiceStatus as ServiceStatus
from infrastructure.msgspec_fastapi import AppStruct


# Maps a genre name to a representative artist MBID (or image URL in the
# *_images variant); None when no representative was found.
GenreArtistMap = dict[str, str | None]


class IntegrationStatus(AppStruct):
    """Which external services are configured/enabled for this installation."""

    listenbrainz: bool
    jellyfin: bool
    lidarr: bool
    youtube: bool
    lastfm: bool
    # Later additions default to False so older payloads still decode.
    navidrome: bool = False
    youtube_api: bool = False


class StatusReport(AppStruct):
    """Overall health report: a rollup status plus per-service detail."""

    status: Literal["ok", "degraded", "error"]
    services: dict[str, ServiceStatus]


class LastFmTagSchema(AppStruct):
    """A single Last.fm tag (genre/label) with an optional link."""

    name: str
    url: str | None = None


class StatusMessageResponse(AppStruct):
    """Generic status + human-readable message response."""

    status: str
    message: str


class VerifyConnectionResponse(AppStruct):
    """Result of testing a service connection from the settings UI."""

    valid: bool
    message: str
|
||||
159
backend/api/v1/schemas/discover.py
Normal file
|
|
@ -0,0 +1,159 @@
|
|||
"""Schemas for the Discover page: recommendation sections, the discover queue,
and YouTube preview lookups."""

# NOTE(review): HomeArtist does not appear to be referenced in this module —
# possibly an intentional re-export; confirm before removing.
from api.v1.schemas.home import HomeArtist, HomeSection, ServicePrompt
from api.v1.schemas.common import GenreArtistMap, IntegrationStatus
from api.v1.schemas.weekly_exploration import WeeklyExplorationSection
# Re-export: part of this module's public surface.
from models.youtube import YouTubeQuotaResponse as YouTubeQuotaResponse
from infrastructure.msgspec_fastapi import AppStruct


class BecauseYouListenTo(AppStruct):
    """A recommendation section seeded by one artist the user listens to."""

    seed_artist: str
    seed_artist_mbid: str
    section: HomeSection
    listen_count: int = 0
    # Optional artwork variants for the section header.
    banner_url: str | None = None
    wide_thumb_url: str | None = None
    fanart_url: str | None = None


class DiscoverQueueItemLight(AppStruct):
    """A discover-queue entry without enrichment (fast initial payload)."""

    release_group_mbid: str
    album_name: str
    artist_name: str
    artist_mbid: str
    # Human-readable explanation of why this item was recommended.
    recommendation_reason: str
    cover_url: str | None = None
    # True for "wildcard" slots that intentionally break out of the user's taste profile.
    is_wildcard: bool = False
    in_library: bool = False


class DiscoverQueueEnrichment(AppStruct):
    """Extra metadata attached to a queue item after async enrichment."""

    artist_mbid: str | None = None
    release_date: str | None = None
    country: str | None = None
    tags: list[str] = []
    # Direct video URL when a YouTube match was resolved; otherwise the
    # search URL/flag let the client trigger a lookup on demand.
    youtube_url: str | None = None
    youtube_search_url: str = ""
    youtube_search_available: bool = False
    artist_description: str | None = None
    listen_count: int | None = None


class DiscoverQueueItemFull(DiscoverQueueItemLight):
    """Queue item with its enrichment payload attached."""

    enrichment: DiscoverQueueEnrichment | None = None


class YouTubeSearchResponse(AppStruct):
    """Result of resolving a YouTube video for a track/album."""

    video_id: str | None = None
    embed_url: str | None = None
    error: str | None = None
    # True when the answer came from cache rather than a fresh API call.
    cached: bool = False


class TrackCacheCheckItem(AppStruct):
    """One (artist, track) pair to check against the YouTube cache."""

    artist: str
    track: str


class TrackCacheCheckRequest(AppStruct):
    """Batch request: which of these tracks are already cached?"""

    items: list[TrackCacheCheckItem] = []


class TrackCacheCheckResponseItem(AppStruct):
    """Per-track cache-check result."""

    artist: str
    track: str
    cached: bool = False


class TrackCacheCheckResponse(AppStruct):
    """Batch response mirroring ``TrackCacheCheckRequest``."""

    items: list[TrackCacheCheckResponseItem] = []


class DiscoverQueueResponse(AppStruct):
    """The current discover queue; items may be light or fully enriched."""

    items: list[DiscoverQueueItemLight | DiscoverQueueItemFull] = []
    queue_id: str = ""


class DiscoverQueueIgnoreRequest(AppStruct):
    """Request to hide a release from future discover queues."""

    release_group_mbid: str
    artist_mbid: str
    release_name: str
    artist_name: str


class DiscoverQueueValidateRequest(AppStruct):
    """Ask which of these release groups are already in the library."""

    release_group_mbids: list[str]


class DiscoverQueueValidateResponse(AppStruct):
    """Subset of the requested MBIDs that are present in the library."""

    in_library: list[str] = []


class QueueSettings(AppStruct):
    """Tunable parameters governing discover-queue generation."""

    queue_size: int
    queue_ttl: int
    seed_artists: int
    wildcard_slots: int
    similar_artists_limit: int
    albums_per_similar: int
    enrich_ttl: int
    lastfm_mbid_max_lookups: int


class DiscoverQueueStatusResponse(AppStruct):
    """Status of the queue builder (ready/building/stale/error)."""

    status: str
    source: str
    queue_id: str | None = None
    item_count: int | None = None
    built_at: float | None = None
    stale: bool | None = None
    error: str | None = None


class QueueGenerateRequest(AppStruct):
    """Trigger (re)generation of the discover queue."""

    source: str | None = None
    # When True, rebuild even if a fresh queue already exists.
    force: bool = False


class QueueGenerateResponse(AppStruct):
    """Outcome of a queue-generation request."""

    action: str
    status: str
    source: str
    queue_id: str | None = None
    item_count: int | None = None
    built_at: float | None = None
    stale: bool | None = None
    error: str | None = None


class DiscoverIgnoredRelease(AppStruct):
    """A release the user has chosen to hide, with when it was ignored."""

    release_group_mbid: str
    artist_mbid: str
    release_name: str
    artist_name: str
    # Unix timestamp (float) of the ignore action.
    ignored_at: float


class DiscoverIntegrationStatus(IntegrationStatus):
    """Alias of IntegrationStatus scoped to the Discover page response."""

    pass


class DiscoverResponse(AppStruct):
    """Full Discover page payload: every section is optional so the page can
    render whatever subset of integrations is available."""

    because_you_listen_to: list[BecauseYouListenTo] = []
    discover_queue_enabled: bool = True
    fresh_releases: HomeSection | None = None
    missing_essentials: HomeSection | None = None
    rediscover: HomeSection | None = None
    artists_you_might_like: HomeSection | None = None
    popular_in_your_genres: HomeSection | None = None
    genre_list: HomeSection | None = None
    globally_trending: HomeSection | None = None
    weekly_exploration: WeeklyExplorationSection | None = None
    integration_status: DiscoverIntegrationStatus | None = None
    service_prompts: list[ServicePrompt] = []
    genre_artists: GenreArtistMap = {}
    genre_artist_images: GenreArtistMap = {}
    lastfm_weekly_artist_chart: HomeSection | None = None
    lastfm_weekly_album_chart: HomeSection | None = None
    lastfm_recent_scrobbles: HomeSection | None = None
    # True while a background refresh is still filling in sections.
    refreshing: bool = False
    service_status: dict[str, str] | None = None
|
||||
72
backend/api/v1/schemas/discovery.py
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
"""Schemas for per-artist/per-album discovery endpoints (similar artists,
top songs/albums, related releases)."""

from infrastructure.msgspec_fastapi import AppStruct


class SimilarArtist(AppStruct):
    """One similar-artist result."""

    musicbrainz_id: str
    name: str
    listen_count: int = 0
    in_library: bool = False
    image_url: str | None = None


class SimilarArtistsResponse(AppStruct):
    """Similar artists plus which backend produced them."""

    similar_artists: list[SimilarArtist] = []
    source: str = "listenbrainz"
    # False when the source service is not configured for this install.
    configured: bool = True


class TopSong(AppStruct):
    """A popular recording by an artist."""

    title: str
    artist_name: str
    recording_mbid: str | None = None
    release_group_mbid: str | None = None
    original_release_mbid: str | None = None
    release_name: str | None = None
    listen_count: int = 0
    # Position within the release, when known.
    disc_number: int | None = None
    track_number: int | None = None


class TopSongsResponse(AppStruct):
    """Top songs plus the backend that produced them."""

    songs: list[TopSong] = []
    source: str = "listenbrainz"
    configured: bool = True


class TopAlbum(AppStruct):
    """A popular album by an artist."""

    title: str
    artist_name: str
    release_group_mbid: str | None = None
    year: int | None = None
    listen_count: int = 0
    in_library: bool = False
    requested: bool = False
    cover_url: str | None = None


class TopAlbumsResponse(AppStruct):
    """Top albums plus the backend that produced them."""

    albums: list[TopAlbum] = []
    source: str = "listenbrainz"
    configured: bool = True


class DiscoveryAlbum(AppStruct):
    """Generic album card used by similar/more-by-artist endpoints."""

    musicbrainz_id: str
    title: str
    artist_name: str
    artist_id: str | None = None
    year: int | None = None
    in_library: bool = False
    requested: bool = False
    cover_url: str | None = None


class SimilarAlbumsResponse(AppStruct):
    """Albums similar to a seed album."""

    albums: list[DiscoveryAlbum] = []
    source: str = "listenbrainz"
    configured: bool = True


class MoreByArtistResponse(AppStruct):
    """Other albums by the same artist."""

    albums: list[DiscoveryAlbum] = []
    artist_name: str = ""
|
||||
171
backend/api/v1/schemas/home.py
Normal file
|
|
@ -0,0 +1,171 @@
|
|||
"""Schemas for the Home page and its sections (artists, albums, tracks,
genres, trending/popular charts)."""

from api.v1.schemas.common import GenreArtistMap, IntegrationStatus
from api.v1.schemas.weekly_exploration import WeeklyExplorationSection
from infrastructure.msgspec_fastapi import AppStruct


class HomeArtist(AppStruct):
    """An artist card shown in a home section."""

    name: str
    mbid: str | None = None
    image_url: str | None = None
    listen_count: int | None = None
    in_library: bool = False
    # Which service this entry came from (e.g. listenbrainz/lastfm).
    source: str | None = None


class HomeAlbum(AppStruct):
    """An album card shown in a home section."""

    name: str
    mbid: str | None = None
    artist_name: str | None = None
    artist_mbid: str | None = None
    image_url: str | None = None
    release_date: str | None = None
    listen_count: int | None = None
    in_library: bool = False
    requested: bool = False
    source: str | None = None


class HomeTrack(AppStruct):
    """A track card shown in a home section (e.g. recently played)."""

    name: str
    mbid: str | None = None
    artist_name: str | None = None
    artist_mbid: str | None = None
    album_name: str | None = None
    listen_count: int | None = None
    # Timestamp of the listen, as a string (format set by the producer).
    listened_at: str | None = None
    image_url: str | None = None


class HomeGenre(AppStruct):
    """A genre card with aggregate listen/artist counts."""

    name: str
    listen_count: int | None = None
    artist_count: int | None = None
    # Representative artist used for the genre's artwork.
    artist_mbid: str | None = None


class HomeSection(AppStruct):
    """A titled, heterogeneous list of cards; ``type`` tells the client how to render it."""

    title: str
    type: str
    items: list[HomeArtist | HomeAlbum | HomeTrack | HomeGenre] = []
    source: str | None = None
    # Shown when the section is empty or degraded.
    fallback_message: str | None = None
    # Service the user could connect to unlock this section.
    connect_service: str | None = None


class ServicePrompt(AppStruct):
    """A call-to-action card inviting the user to connect a service."""

    service: str
    title: str
    description: str
    icon: str
    color: str
    features: list[str] = []


class HomeIntegrationStatus(IntegrationStatus):
    """IntegrationStatus extended with the local-files source."""

    localfiles: bool = False


class DiscoverPreview(AppStruct):
    """Small teaser of the Discover page embedded on Home."""

    seed_artist: str
    seed_artist_mbid: str
    items: list[HomeArtist] = []


class HomeResponse(AppStruct):
    """Full Home page payload; every section is optional so the page renders
    whatever subset of integrations is available."""

    recently_added: HomeSection | None = None
    library_artists: HomeSection | None = None
    library_albums: HomeSection | None = None
    recommended_artists: HomeSection | None = None
    trending_artists: HomeSection | None = None
    popular_albums: HomeSection | None = None
    recently_played: HomeSection | None = None
    top_genres: HomeSection | None = None
    genre_list: HomeSection | None = None
    fresh_releases: HomeSection | None = None
    favorite_artists: HomeSection | None = None
    your_top_albums: HomeSection | None = None
    weekly_exploration: WeeklyExplorationSection | None = None
    service_prompts: list[ServicePrompt] = []
    integration_status: HomeIntegrationStatus | None = None
    genre_artists: GenreArtistMap = {}
    genre_artist_images: GenreArtistMap = {}
    discover_preview: DiscoverPreview | None = None
    service_status: dict[str, str] | None = None


class GenreLibrarySection(AppStruct):
    """The user's own library filtered to one genre."""

    artists: list[HomeArtist] = []
    albums: list[HomeAlbum] = []
    artist_count: int = 0
    album_count: int = 0


class GenrePopularSection(AppStruct):
    """Globally popular artists/albums within one genre."""

    artists: list[HomeArtist] = []
    albums: list[HomeAlbum] = []
    has_more_artists: bool = False
    has_more_albums: bool = False


class GenreDetailResponse(AppStruct):
    """Detail page for a single genre."""

    genre: str
    library: GenreLibrarySection | None = None
    popular: GenrePopularSection | None = None
    # Flat artist list; kept alongside the structured sections (presumably
    # for an older client shape — confirm before removing).
    artists: list[HomeArtist] = []
    total_count: int | None = None


class TrendingTimeRange(AppStruct):
    """Trending artists for one time window."""

    range_key: str
    label: str
    # Highlighted entry shown larger than the rest.
    featured: HomeArtist | None = None
    items: list[HomeArtist] = []
    total_count: int = 0


class TrendingArtistsResponse(AppStruct):
    """Trending artists across all supported time windows."""

    this_week: TrendingTimeRange
    this_month: TrendingTimeRange
    this_year: TrendingTimeRange
    all_time: TrendingTimeRange


class PopularTimeRange(AppStruct):
    """Popular albums for one time window."""

    range_key: str
    label: str
    featured: HomeAlbum | None = None
    items: list[HomeAlbum] = []
    total_count: int = 0


class PopularAlbumsResponse(AppStruct):
    """Popular albums across all supported time windows."""

    this_week: PopularTimeRange
    this_month: PopularTimeRange
    this_year: PopularTimeRange
    all_time: PopularTimeRange


class TrendingArtistsRangeResponse(AppStruct):
    """Paginated trending artists for a single time window."""

    range_key: str
    label: str
    items: list[HomeArtist] = []
    offset: int = 0
    limit: int = 25
    has_more: bool = False


class PopularAlbumsRangeResponse(AppStruct):
    """Paginated popular albums for a single time window."""

    range_key: str
    label: str
    items: list[HomeAlbum] = []
    offset: int = 0
    limit: int = 25
    has_more: bool = False


class GenreArtistResponse(AppStruct):
    """Representative artist MBID for one genre (used for artwork)."""

    artist_mbid: str | None = None


class GenreArtistsBatchResponse(AppStruct):
    """Batch variant of GenreArtistResponse: genre name -> artist MBID."""

    genre_artists: dict[str, str | None] = {}
|
||||
69
backend/api/v1/schemas/jellyfin.py
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
"""Schemas mirroring Jellyfin library entities (tracks, albums, artists)."""

from infrastructure.msgspec_fastapi import AppStruct


class JellyfinTrackInfo(AppStruct):
    """A single playable track as reported by Jellyfin."""

    jellyfin_id: str
    title: str
    track_number: int
    duration_seconds: float
    disc_number: int = 1
    album_name: str = ""
    artist_name: str = ""
    # Stream details, when Jellyfin reports them.
    codec: str | None = None
    bitrate: int | None = None


class JellyfinAlbumSummary(AppStruct):
    """Album listing entry (no tracks)."""

    jellyfin_id: str
    name: str
    artist_name: str = ""
    year: int | None = None
    track_count: int = 0
    image_url: str | None = None
    # MusicBrainz IDs when Jellyfin has them tagged.
    musicbrainz_id: str | None = None
    artist_musicbrainz_id: str | None = None


class JellyfinAlbumDetail(AppStruct):
    """Album with its full track list (superset of JellyfinAlbumSummary's fields)."""

    jellyfin_id: str
    name: str
    artist_name: str = ""
    year: int | None = None
    track_count: int = 0
    image_url: str | None = None
    musicbrainz_id: str | None = None
    artist_musicbrainz_id: str | None = None
    tracks: list[JellyfinTrackInfo] = []


class JellyfinAlbumMatch(AppStruct):
    """Result of matching a MusicBrainz album against the Jellyfin library."""

    found: bool
    jellyfin_album_id: str | None = None
    tracks: list[JellyfinTrackInfo] = []


class JellyfinArtistSummary(AppStruct):
    """Artist listing entry."""

    jellyfin_id: str
    name: str
    image_url: str | None = None
    album_count: int = 0
    musicbrainz_id: str | None = None


class JellyfinLibraryStats(AppStruct):
    """Aggregate counts for the Jellyfin music library."""

    total_tracks: int = 0
    total_albums: int = 0
    total_artists: int = 0


class JellyfinSearchResponse(AppStruct):
    """Combined search results across albums, artists, and tracks."""

    albums: list[JellyfinAlbumSummary] = []
    artists: list[JellyfinArtistSummary] = []
    tracks: list[JellyfinTrackInfo] = []


class JellyfinPaginatedResponse(AppStruct):
    """One page of albums with pagination metadata."""

    items: list[JellyfinAlbumSummary] = []
    total: int = 0
    offset: int = 0
    limit: int = 50
|
||||
104
backend/api/v1/schemas/library.py
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
"""Schemas for the user's library: listings, pagination, stats, removal,
sync, and track source resolution."""

# ``import X as X`` is the explicit re-export idiom: these model types are
# part of this module's public surface.
from models.library import LibraryAlbum as LibraryAlbum
from models.library import LibraryGroupedAlbum as LibraryGroupedAlbum
from models.library import LibraryGroupedArtist as LibraryGroupedArtist
from infrastructure.msgspec_fastapi import AppStruct


class LibraryArtist(AppStruct):
    """An artist in the user's library."""

    mbid: str
    name: str
    album_count: int = 0
    # Unix timestamp when the artist was added, if tracked.
    date_added: int | None = None


class LibraryResponse(AppStruct):
    """The full (ungrouped) library album list."""

    library: list[LibraryAlbum]


class LibraryArtistsResponse(AppStruct):
    """All library artists with a total count."""

    artists: list[LibraryArtist]
    total: int


class LibraryAlbumsResponse(AppStruct):
    """All library albums with a total count."""

    albums: list[LibraryAlbum]
    total: int


class PaginatedLibraryAlbumsResponse(AppStruct):
    """One page of library albums."""

    albums: list[LibraryAlbum] = []
    total: int = 0
    offset: int = 0
    limit: int = 50


class PaginatedLibraryArtistsResponse(AppStruct):
    """One page of library artists."""

    artists: list[LibraryArtist] = []
    total: int = 0
    offset: int = 0
    limit: int = 50


class RecentlyAddedResponse(AppStruct):
    """Most recently added albums and artists."""

    albums: list[LibraryAlbum] = []
    artists: list[LibraryArtist] = []


class LibraryStatsResponse(AppStruct):
    """Library database counts and size."""

    artist_count: int
    album_count: int
    db_size_bytes: int
    db_size_mb: float
    # Unix timestamp of the last sync, if one has happened.
    last_sync: int | None = None


class AlbumRemoveResponse(AppStruct):
    """Result of removing an album; reports whether the artist was removed too
    (i.e. this was the artist's last album)."""

    success: bool
    artist_removed: bool = False
    artist_name: str | None = None


class AlbumRemovePreviewResponse(AppStruct):
    """Dry-run of an album removal: what *would* happen."""

    success: bool
    artist_will_be_removed: bool = False
    artist_name: str | None = None


class SyncLibraryResponse(AppStruct):
    """Outcome of a library sync with resulting entity counts."""

    status: str
    artists: int
    albums: int


class LibraryMbidsResponse(AppStruct):
    """MBIDs in the library plus MBIDs with pending requests (for bulk
    in-library checks on the client)."""

    mbids: list[str] = []
    requested_mbids: list[str] = []


class LibraryGroupedResponse(AppStruct):
    """Library grouped by artist."""

    library: list[LibraryGroupedArtist] = []


class TrackResolveItem(AppStruct):
    """Identifies one track to resolve a playable source for."""

    release_group_mbid: str | None = None
    disc_number: int | None = None
    track_number: int | None = None


class TrackResolveRequest(AppStruct):
    """Batch track-resolution request."""

    items: list[TrackResolveItem] = []


class ResolvedTrack(AppStruct):
    """A resolved playable source for a track (echoes the identifying fields)."""

    release_group_mbid: str | None = None
    disc_number: int | None = None
    track_number: int | None = None
    # Which backend provides playback (e.g. jellyfin/navidrome/local).
    source: str | None = None
    track_source_id: str | None = None
    stream_url: str | None = None
    format: str | None = None
    duration: float | None = None


class TrackResolveResponse(AppStruct):
    """Batch track-resolution response, parallel to the request items."""

    items: list[ResolvedTrack] = []
|
||||
58
backend/api/v1/schemas/local_files.py
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
"""Schemas for locally stored music files (tracked via Lidarr)."""

from infrastructure.msgspec_fastapi import AppStruct


class LocalTrackInfo(AppStruct):
    """A single local track file."""

    # Lidarr trackfile identifier.
    track_file_id: int
    title: str
    track_number: int
    disc_number: int = 1
    duration_seconds: float | None = None
    size_bytes: int = 0
    # Audio container/codec label (e.g. "FLAC", "MP3").
    format: str = ""
    bitrate: int | None = None
    date_added: str | None = None


class LocalAlbumMatch(AppStruct):
    """Result of matching an album against local files."""

    found: bool
    tracks: list[LocalTrackInfo] = []
    total_size_bytes: int = 0
    # Most common format among the matched tracks.
    primary_format: str | None = None


class LocalAlbumSummary(AppStruct):
    """An album present on local storage."""

    lidarr_album_id: int
    musicbrainz_id: str
    name: str
    artist_name: str
    artist_mbid: str | None = None
    year: int | None = None
    track_count: int = 0
    total_size_bytes: int = 0
    primary_format: str | None = None
    cover_url: str | None = None
    date_added: str | None = None


class LocalPaginatedResponse(AppStruct):
    """One page of local albums."""

    items: list[LocalAlbumSummary] = []
    total: int = 0
    offset: int = 0
    limit: int = 50


class FormatInfo(AppStruct):
    """Per-format aggregate: how many tracks and how much space."""

    count: int = 0
    size_bytes: int = 0
    # Pre-formatted human-readable size (e.g. "1.2 GB").
    size_human: str = "0 B"


class LocalStorageStats(AppStruct):
    """Overall local-storage statistics, including free disk space and a
    breakdown by audio format."""

    total_tracks: int = 0
    total_albums: int = 0
    total_artists: int = 0
    total_size_bytes: int = 0
    total_size_human: str = "0 B"
    disk_free_bytes: int = 0
    disk_free_human: str = "0 B"
    format_breakdown: dict[str, FormatInfo] = {}
|
||||
69
backend/api/v1/schemas/navidrome.py
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
"""Schemas mirroring Navidrome library entities (tracks, albums, artists).

Structurally parallel to the Jellyfin schemas in ``jellyfin.py``.
"""

from __future__ import annotations

from infrastructure.msgspec_fastapi import AppStruct


class NavidromeTrackInfo(AppStruct):
    """A single playable track as reported by Navidrome."""

    navidrome_id: str
    title: str
    track_number: int
    duration_seconds: float
    disc_number: int = 1
    album_name: str = ""
    artist_name: str = ""
    # Stream details, when Navidrome reports them.
    codec: str | None = None
    bitrate: int | None = None


class NavidromeAlbumSummary(AppStruct):
    """Album listing entry (no tracks)."""

    navidrome_id: str
    name: str
    artist_name: str = ""
    year: int | None = None
    track_count: int = 0
    image_url: str | None = None
    # MusicBrainz IDs when Navidrome has them tagged.
    musicbrainz_id: str | None = None
    artist_musicbrainz_id: str | None = None


class NavidromeAlbumDetail(AppStruct):
    """Album with its full track list (superset of NavidromeAlbumSummary's fields)."""

    navidrome_id: str
    name: str
    artist_name: str = ""
    year: int | None = None
    track_count: int = 0
    image_url: str | None = None
    musicbrainz_id: str | None = None
    artist_musicbrainz_id: str | None = None
    tracks: list[NavidromeTrackInfo] = []


class NavidromeAlbumMatch(AppStruct):
    """Result of matching a MusicBrainz album against the Navidrome library."""

    found: bool
    navidrome_album_id: str | None = None
    tracks: list[NavidromeTrackInfo] = []


class NavidromeArtistSummary(AppStruct):
    """Artist listing entry."""

    navidrome_id: str
    name: str
    image_url: str | None = None
    album_count: int = 0
    musicbrainz_id: str | None = None


class NavidromeLibraryStats(AppStruct):
    """Aggregate counts for the Navidrome music library."""

    total_tracks: int = 0
    total_albums: int = 0
    total_artists: int = 0


class NavidromeSearchResponse(AppStruct):
    """Combined search results across albums, artists, and tracks."""

    albums: list[NavidromeAlbumSummary] = []
    artists: list[NavidromeArtistSummary] = []
    tracks: list[NavidromeTrackInfo] = []


class NavidromeAlbumPage(AppStruct):
    """One page of albums with a total count."""

    items: list[NavidromeAlbumSummary] = []
    total: int = 0
|
||||
124
backend/api/v1/schemas/playlists.py
Normal file
|
|
@ -0,0 +1,124 @@
|
|||
"""Schemas for playlist CRUD, track management, and source resolution."""

import msgspec
from infrastructure.msgspec_fastapi import AppStruct


class PlaylistTrackResponse(AppStruct):
    """One track within a playlist, including its playback source info."""

    id: str
    # Zero-or-one-based ordering within the playlist (set by the backend).
    position: int
    track_name: str
    artist_name: str
    album_name: str
    album_id: str | None = None
    artist_id: str | None = None
    track_source_id: str | None = None
    cover_url: str | None = None
    # Currently selected playback backend; available_sources lists alternatives.
    source_type: str = ""
    available_sources: list[str] | None = None
    format: str | None = None
    track_number: int | None = None
    disc_number: int | None = None
    duration: int | None = None
    created_at: str = ""


class PlaylistSummaryResponse(AppStruct):
    """Playlist listing entry (no tracks)."""

    id: str
    name: str
    track_count: int = 0
    total_duration: int | None = None
    # Up to a few track covers used to build a collage when no custom cover is set.
    cover_urls: list[str] = msgspec.field(default_factory=list)
    custom_cover_url: str | None = None
    created_at: str = ""
    updated_at: str = ""


class PlaylistDetailResponse(AppStruct):
    """Playlist with its full track list."""

    # Frontend PlaylistDetail extends PlaylistSummary — keep fields in sync with PlaylistSummaryResponse
    id: str
    name: str
    cover_urls: list[str] = msgspec.field(default_factory=list)
    custom_cover_url: str | None = None
    tracks: list[PlaylistTrackResponse] = msgspec.field(default_factory=list)
    track_count: int = 0
    total_duration: int | None = None
    created_at: str = ""
    updated_at: str = ""


class PlaylistListResponse(AppStruct):
    """All playlists (summaries only)."""

    playlists: list[PlaylistSummaryResponse] = msgspec.field(default_factory=list)


class CreatePlaylistRequest(AppStruct):
    """Create a new, empty playlist."""

    name: str


class UpdatePlaylistRequest(AppStruct):
    """Partial playlist update; omitted fields are left unchanged."""

    name: str | None = None


class TrackDataRequest(AppStruct):
    """Track payload supplied by the client when adding to a playlist."""

    track_name: str
    artist_name: str
    album_name: str
    album_id: str | None = None
    artist_id: str | None = None
    track_source_id: str | None = None
    cover_url: str | None = None
    source_type: str = ""
    available_sources: list[str] | None = None
    format: str | None = None
    track_number: int | None = None
    disc_number: int | None = None
    # Accepts either float or int seconds from clients; normalized server-side (presumably).
    duration: float | int | None = None


class AddTracksRequest(AppStruct):
    """Add one or more tracks, optionally at a specific position."""

    tracks: list[TrackDataRequest]
    # None appends to the end.
    position: int | None = None


class RemoveTracksRequest(AppStruct):
    """Remove tracks by their playlist-track IDs."""

    track_ids: list[str]


class ReorderTrackRequest(AppStruct):
    """Move one track to a new position."""

    track_id: str
    new_position: int


class ReorderTrackResponse(AppStruct):
    """Result of a reorder; ``actual_position`` reflects any clamping applied."""

    status: str = "ok"
    message: str = "Track reordered"
    actual_position: int = 0


class UpdateTrackRequest(AppStruct):
    """Change a track's playback source selection."""

    source_type: str | None = None
    available_sources: list[str] | None = None


class AddTracksResponse(AppStruct):
    """The tracks as stored after an add operation."""

    tracks: list[PlaylistTrackResponse] = msgspec.field(default_factory=list)


class CoverUploadResponse(AppStruct):
    """URL of a freshly uploaded custom playlist cover."""

    cover_url: str


class TrackIdentifier(AppStruct):
    """Identifies a track by name triple (no IDs) for membership checks."""

    track_name: str
    artist_name: str
    album_name: str


class CheckTrackMembershipRequest(AppStruct):
    """Ask which playlists contain each of these tracks."""

    tracks: list[TrackIdentifier]


class CheckTrackMembershipResponse(AppStruct):
    """Maps a track key to the playlists containing it (key format set by the backend)."""

    membership: dict[str, list[int]]


class ResolveSourcesResponse(AppStruct):
    """Maps a track key to its available playback sources."""

    sources: dict[str, list[str]]
|
||||
35
backend/api/v1/schemas/profile.py
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
"""Schemas for the user profile page (display settings, connected services,
per-source library stats)."""

import msgspec
from infrastructure.msgspec_fastapi import AppStruct


class ProfileSettings(AppStruct):
    """Editable profile display settings."""

    display_name: str = ""
    avatar_url: str = ""


class ServiceConnection(AppStruct):
    """One connected external service as shown on the profile."""

    name: str
    enabled: bool = False
    username: str = ""
    url: str = ""


class LibraryStats(AppStruct):
    """Library statistics for a single source (e.g. jellyfin, local)."""

    source: str
    total_tracks: int = 0
    total_albums: int = 0
    total_artists: int = 0
    total_size_bytes: int = 0
    total_size_human: str = ""


class ProfileResponse(AppStruct):
    """Full profile payload."""

    display_name: str = ""
    avatar_url: str = ""
    services: list[ServiceConnection] = msgspec.field(default_factory=list)
    library_stats: list[LibraryStats] = msgspec.field(default_factory=list)


class ProfileUpdateRequest(AppStruct):
    """Partial profile update; omitted fields are left unchanged."""

    display_name: str | None = None
    avatar_url: str | None = None
|
||||
20
backend/api/v1/schemas/request.py
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
"""Schemas for submitting album requests to Lidarr and inspecting the request queue."""

# Re-export: QueueItem is part of this module's public surface.
from models.request import QueueItem as QueueItem
from infrastructure.msgspec_fastapi import AppStruct


class AlbumRequest(AppStruct):
    """Request an album by MBID; name/year fields are optional hints."""

    musicbrainz_id: str
    artist: str | None = None
    album: str | None = None
    year: int | None = None


class RequestResponse(AppStruct):
    """Outcome of submitting a request, with the raw Lidarr payload if available."""

    success: bool
    message: str
    lidarr_response: dict | None = None


class QueueStatusResponse(AppStruct):
    """Current size and activity of the internal request queue."""

    queue_size: int
    processing: bool
|
||||
74
backend/api/v1/schemas/requests_page.py
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
"""Schemas for the Requests page: active downloads, history, and request actions."""

from datetime import datetime
from infrastructure.msgspec_fastapi import AppStruct


class StatusMessage(AppStruct):
    """A grouped set of status lines from the download client (Lidarr shape)."""

    title: str | None = None
    messages: list[str] = []


class ActiveRequestItem(AppStruct):
    """An in-flight album request with live download progress."""

    musicbrainz_id: str
    artist_name: str
    album_title: str
    requested_at: datetime
    status: str
    artist_mbid: str | None = None
    year: int | None = None
    cover_url: str | None = None
    # Download progress/size figures as reported by the download client.
    progress: float | None = None
    eta: datetime | None = None
    size: float | None = None
    size_remaining: float | None = None
    download_status: str | None = None
    download_state: str | None = None
    status_messages: list[StatusMessage] | None = None
    error_message: str | None = None
    # Lidarr's queue record id, needed to cancel the download.
    lidarr_queue_id: int | None = None
    quality: str | None = None
    protocol: str | None = None
    download_client: str | None = None


class RequestHistoryItem(AppStruct):
    """A completed or failed past request."""

    musicbrainz_id: str
    artist_name: str
    album_title: str
    requested_at: datetime
    status: str
    artist_mbid: str | None = None
    year: int | None = None
    cover_url: str | None = None
    completed_at: datetime | None = None
    in_library: bool = False


class ActiveRequestsResponse(AppStruct):
    """All currently active requests."""

    items: list[ActiveRequestItem]
    count: int


class RequestHistoryResponse(AppStruct):
    """One page of request history."""

    items: list[RequestHistoryItem]
    total: int
    page: int
    page_size: int
    total_pages: int


class CancelRequestResponse(AppStruct):
    """Result of cancelling an active request."""

    success: bool
    message: str


class RetryRequestResponse(AppStruct):
    """Result of retrying a failed request."""

    success: bool
    message: str


class ClearHistoryResponse(AppStruct):
    """Result of clearing the request history."""

    success: bool


class ActiveCountResponse(AppStruct):
    """Just the number of active requests (for badge polling)."""

    count: int
|
||||
46
backend/api/v1/schemas/scrobble.py
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
import time
|
||||
|
||||
import msgspec
|
||||
|
||||
from infrastructure.msgspec_fastapi import AppStruct
|
||||
|
||||
|
||||
class NowPlayingRequest(AppStruct):
|
||||
track_name: str
|
||||
artist_name: str
|
||||
album_name: str = ""
|
||||
duration_ms: int = 0
|
||||
mbid: str | None = None
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
if self.duration_ms < 0:
|
||||
raise ValueError("duration_ms must be >= 0")
|
||||
|
||||
|
||||
class ScrobbleRequest(AppStruct):
    """Payload for submitting a listened track (a scrobble)."""

    track_name: str
    artist_name: str
    timestamp: int  # Unix epoch seconds (compared against time.time() below)
    album_name: str = ""
    duration_ms: int = 0
    mbid: str | None = None  # MusicBrainz ID if known — TODO confirm which MBID kind

    def __post_init__(self) -> None:
        # Validate on decode: reject negative durations and timestamps that
        # fall outside the accepted scrobble window.
        now = int(time.time())
        max_age = 14 * 24 * 60 * 60  # 14 days, in seconds
        if self.duration_ms < 0:
            raise ValueError("duration_ms must be >= 0")
        # Allow 60 s of client clock skew into the future.
        if self.timestamp > now + 60:
            raise ValueError("Timestamp cannot be in the future")
        if self.timestamp < now - max_age:
            raise ValueError("Timestamp cannot be older than 14 days")
|
||||
|
||||
|
||||
class ServiceResult(AppStruct):
    """Per-service outcome of a scrobble submission."""

    success: bool
    error: str | None = None  # error description when success is False
|
||||
|
||||
|
||||
class ScrobbleResponse(AppStruct):
    """Aggregate scrobble result across the configured scrobble targets."""

    accepted: bool
    # Keyed by service name. msgspec Structs copy mutable defaults per
    # instance, so the shared-default pitfall of plain classes does not apply.
    services: dict[str, ServiceResult] = {}
|
||||
72
backend/api/v1/schemas/search.py
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
from typing import Literal
|
||||
|
||||
from models.search import SearchResult as SearchResult
|
||||
from infrastructure.msgspec_fastapi import AppStruct
|
||||
|
||||
EnrichmentSource = Literal["listenbrainz", "lastfm", "none"]
|
||||
|
||||
|
||||
class SearchResponse(AppStruct):
    """Combined artist/album search results with optional highlighted top hits."""

    artists: list[SearchResult] = []
    albums: list[SearchResult] = []
    top_artist: SearchResult | None = None
    top_album: SearchResult | None = None
    # Per-backend status strings — presumably for degraded-service reporting; confirm in search service.
    service_status: dict[str, str] | None = None
|
||||
|
||||
|
||||
class SearchBucketResponse(AppStruct):
|
||||
bucket: str
|
||||
limit: int
|
||||
offset: int
|
||||
results: list[SearchResult] = []
|
||||
top_result: SearchResult | None = None
|
||||
|
||||
|
||||
class ArtistEnrichment(AppStruct):
|
||||
musicbrainz_id: str
|
||||
release_group_count: int | None = None
|
||||
listen_count: int | None = None
|
||||
|
||||
|
||||
class AlbumEnrichment(AppStruct):
|
||||
musicbrainz_id: str
|
||||
track_count: int | None = None
|
||||
listen_count: int | None = None
|
||||
|
||||
|
||||
class ArtistEnrichmentRequest(AppStruct):
|
||||
musicbrainz_id: str
|
||||
name: str = ""
|
||||
|
||||
|
||||
class AlbumEnrichmentRequest(AppStruct):
|
||||
musicbrainz_id: str
|
||||
artist_name: str = ""
|
||||
album_name: str = ""
|
||||
|
||||
|
||||
class EnrichmentBatchRequest(AppStruct):
|
||||
artists: list[ArtistEnrichmentRequest] = []
|
||||
albums: list[AlbumEnrichmentRequest] = []
|
||||
|
||||
|
||||
class EnrichmentResponse(AppStruct):
|
||||
artists: list[ArtistEnrichment] = []
|
||||
albums: list[AlbumEnrichment] = []
|
||||
source: EnrichmentSource = "none"
|
||||
|
||||
|
||||
class SuggestResult(AppStruct):
    """A single typeahead suggestion (artist or album)."""

    type: Literal["artist", "album"]
    title: str
    musicbrainz_id: str
    artist: str | None = None  # only meaningful for album suggestions — TODO confirm
    year: int | None = None
    in_library: bool = False
    requested: bool = False  # already has a pending/previous request
    disambiguation: str | None = None  # MusicBrainz-style disambiguation text
    score: int = 0  # ranking score; higher presumably sorts first — confirm with caller
|
||||
|
||||
|
||||
class SuggestResponse(AppStruct):
    """Typeahead response wrapper."""

    results: list[SuggestResult] = []
|
||||
205
backend/api/v1/schemas/settings.py
Normal file
|
|
@ -0,0 +1,205 @@
|
|||
from typing import Literal
|
||||
|
||||
import msgspec
|
||||
|
||||
from infrastructure.msgspec_fastapi import AppStruct
|
||||
|
||||
LASTFM_SECRET_MASK = "••••••••"
|
||||
|
||||
|
||||
def _mask_secret(value: str) -> str:
|
||||
if not value:
|
||||
return ""
|
||||
if len(value) <= 4:
|
||||
return LASTFM_SECRET_MASK
|
||||
return LASTFM_SECRET_MASK + value[-4:]
|
||||
|
||||
|
||||
class LastFmConnectionSettings(AppStruct):
    """Stored Last.fm credentials and state (secrets unmasked — internal use only)."""

    api_key: str = ""
    shared_secret: str = ""
    session_key: str = ""
    username: str = ""
    enabled: bool = False
|
||||
|
||||
|
||||
class LastFmConnectionSettingsResponse(AppStruct):
    """Last.fm settings as returned to the UI, with secrets masked."""

    api_key: str = ""
    shared_secret: str = ""  # masked via _mask_secret when built with from_settings
    session_key: str = ""    # masked via _mask_secret when built with from_settings
    username: str = ""
    enabled: bool = False

    @classmethod
    def from_settings(cls, settings: LastFmConnectionSettings) -> "LastFmConnectionSettingsResponse":
        """Build a UI-safe copy of *settings*.

        shared_secret and session_key are masked; api_key is passed through
        unmasked as-is.
        """
        return cls(
            api_key=settings.api_key,
            shared_secret=_mask_secret(settings.shared_secret),
            session_key=_mask_secret(settings.session_key),
            username=settings.username,
            enabled=settings.enabled,
        )
|
||||
|
||||
|
||||
class LastFmVerifyResponse(AppStruct):
|
||||
valid: bool
|
||||
message: str
|
||||
|
||||
|
||||
class LastFmAuthTokenResponse(AppStruct):
|
||||
token: str
|
||||
auth_url: str
|
||||
|
||||
|
||||
class LastFmAuthSessionRequest(AppStruct):
|
||||
token: str
|
||||
|
||||
|
||||
class LastFmAuthSessionResponse(AppStruct):
|
||||
success: bool
|
||||
message: str
|
||||
username: str = ""
|
||||
|
||||
|
||||
class UserPreferences(AppStruct):
|
||||
primary_types: list[str] = msgspec.field(default_factory=lambda: ["album", "ep", "single"])
|
||||
secondary_types: list[str] = msgspec.field(default_factory=lambda: ["studio"])
|
||||
release_statuses: list[str] = msgspec.field(default_factory=lambda: ["official"])
|
||||
|
||||
|
||||
class LidarrConnectionSettings(AppStruct):
    """Connection and default-profile settings for the Lidarr integration."""

    lidarr_url: str = "http://lidarr:8686"
    lidarr_api_key: str = ""
    quality_profile_id: int = 1
    metadata_profile_id: int = 1
    root_folder_path: str = "/music"

    def __post_init__(self) -> None:
        # Normalise the URL so later path joins don't produce double slashes.
        self.lidarr_url = self.lidarr_url.rstrip("/")
        # Profile IDs below 1 are rejected outright.
        if self.quality_profile_id < 1:
            raise msgspec.ValidationError("quality_profile_id must be >= 1")
        if self.metadata_profile_id < 1:
            raise msgspec.ValidationError("metadata_profile_id must be >= 1")
|
||||
|
||||
|
||||
class JellyfinConnectionSettings(AppStruct):
    """Connection settings for the Jellyfin integration."""

    jellyfin_url: str = "http://jellyfin:8096"
    api_key: str = ""
    user_id: str = ""
    enabled: bool = False

    def __post_init__(self) -> None:
        # Normalise: drop any trailing slash so URL joins stay clean.
        self.jellyfin_url = self.jellyfin_url.rstrip("/")
|
||||
|
||||
|
||||
NAVIDROME_PASSWORD_MASK = "********"
|
||||
|
||||
|
||||
class NavidromeConnectionSettings(AppStruct):
    """Connection settings for the Navidrome integration."""

    navidrome_url: str = ""
    username: str = ""
    password: str = ""
    enabled: bool = False

    def __post_init__(self) -> None:
        # Normalise: drop a trailing slash; an unset URL stays the empty string.
        self.navidrome_url = self.navidrome_url.rstrip("/") if self.navidrome_url else ""
|
||||
|
||||
|
||||
class JellyfinUserInfo(AppStruct):
|
||||
id: str
|
||||
name: str
|
||||
|
||||
|
||||
class JellyfinVerifyResponse(AppStruct):
|
||||
success: bool
|
||||
message: str
|
||||
users: list[JellyfinUserInfo] = []
|
||||
|
||||
|
||||
class ListenBrainzConnectionSettings(AppStruct):
|
||||
username: str = ""
|
||||
user_token: str = ""
|
||||
enabled: bool = False
|
||||
|
||||
|
||||
class YouTubeConnectionSettings(AppStruct):
    """YouTube Data API settings, including a user-configurable daily quota cap."""

    api_key: str = ""
    enabled: bool = False
    api_enabled: bool = False  # distinct from `enabled` — presumably gates API lookups specifically; confirm with YouTube service
    daily_quota_limit: int = 80

    def __post_init__(self) -> None:
        # Reject out-of-range quota caps at decode time.
        if self.daily_quota_limit < 1 or self.daily_quota_limit > 10000:
            raise msgspec.ValidationError("daily_quota_limit must be between 1 and 10000")

    def has_valid_api_key(self) -> bool:
        """Return True when a non-blank API key is configured."""
        return bool(self.api_key and self.api_key.strip())
|
||||
|
||||
|
||||
class HomeSettings(AppStruct):
    """Cache TTLs (seconds) for the home page's trending and personal sections."""

    cache_ttl_trending: int = 3600
    cache_ttl_personal: int = 300

    def __post_init__(self) -> None:
        # Clamp-free validation: out-of-range TTLs are rejected, not adjusted.
        if self.cache_ttl_trending < 300 or self.cache_ttl_trending > 86400:
            raise msgspec.ValidationError("cache_ttl_trending must be between 300 and 86400")
        if self.cache_ttl_personal < 60 or self.cache_ttl_personal > 3600:
            raise msgspec.ValidationError("cache_ttl_personal must be between 60 and 3600")
|
||||
|
||||
|
||||
class LocalFilesConnectionSettings(AppStruct):
|
||||
enabled: bool = False
|
||||
music_path: str = "/music"
|
||||
lidarr_root_path: str = "/music"
|
||||
|
||||
|
||||
class LocalFilesVerifyResponse(AppStruct):
|
||||
success: bool
|
||||
message: str
|
||||
track_count: int = 0
|
||||
|
||||
|
||||
class LidarrSettings(AppStruct):
|
||||
sync_frequency: Literal["manual", "5min", "10min", "30min", "1hr"] = "10min"
|
||||
last_sync: int | None = None
|
||||
last_sync_success: bool = True
|
||||
|
||||
|
||||
class LidarrProfileSummary(AppStruct):
|
||||
id: int
|
||||
name: str
|
||||
|
||||
|
||||
class LidarrRootFolderSummary(AppStruct):
|
||||
id: str
|
||||
path: str
|
||||
|
||||
|
||||
class LidarrVerifyResponse(AppStruct):
|
||||
success: bool
|
||||
message: str
|
||||
quality_profiles: list[LidarrProfileSummary] = []
|
||||
metadata_profiles: list[LidarrProfileSummary] = []
|
||||
root_folders: list[LidarrRootFolderSummary] = []
|
||||
|
||||
|
||||
class LidarrMetadataProfileSummary(AppStruct):
|
||||
id: int
|
||||
name: str
|
||||
|
||||
|
||||
class ScrobbleSettings(AppStruct):
|
||||
scrobble_to_lastfm: bool = False
|
||||
scrobble_to_listenbrainz: bool = False
|
||||
|
||||
|
||||
class PrimaryMusicSourceSettings(AppStruct):
|
||||
source: Literal["listenbrainz", "lastfm"] = "listenbrainz"
|
||||
|
||||
|
||||
class LidarrMetadataProfilePreferences(AppStruct):
|
||||
profile_id: int
|
||||
profile_name: str
|
||||
primary_types: list[str] = []
|
||||
secondary_types: list[str] = []
|
||||
release_statuses: list[str] = []
|
||||
27
backend/api/v1/schemas/stream.py
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
from infrastructure.msgspec_fastapi import AppStruct
|
||||
|
||||
|
||||
class PlaybackSessionResponse(AppStruct):
|
||||
play_session_id: str
|
||||
item_id: str
|
||||
|
||||
|
||||
class StartPlaybackRequest(AppStruct):
|
||||
play_session_id: str | None = None
|
||||
|
||||
|
||||
class JellyfinPlaybackUrlResponse(AppStruct):
|
||||
url: str
|
||||
seekable: bool
|
||||
playSessionId: str
|
||||
|
||||
|
||||
class ProgressReportRequest(AppStruct):
|
||||
play_session_id: str
|
||||
position_seconds: float
|
||||
is_paused: bool = False
|
||||
|
||||
|
||||
class StopReportRequest(AppStruct):
|
||||
play_session_id: str
|
||||
position_seconds: float
|
||||
19
backend/api/v1/schemas/weekly_exploration.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
from infrastructure.msgspec_fastapi import AppStruct
|
||||
|
||||
|
||||
class WeeklyExplorationTrack(AppStruct):
|
||||
title: str
|
||||
artist_name: str
|
||||
album_name: str
|
||||
recording_mbid: str | None = None
|
||||
artist_mbid: str | None = None
|
||||
release_group_mbid: str | None = None
|
||||
cover_url: str | None = None
|
||||
duration_ms: int | None = None
|
||||
|
||||
|
||||
class WeeklyExplorationSection(AppStruct):
|
||||
title: str
|
||||
playlist_date: str
|
||||
tracks: list[WeeklyExplorationTrack] = []
|
||||
source_url: str = ""
|
||||
97
backend/api/v1/schemas/youtube.py
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
import msgspec
|
||||
|
||||
from api.v1.schemas.discover import YouTubeQuotaResponse
|
||||
from infrastructure.msgspec_fastapi import AppStruct
|
||||
|
||||
|
||||
class YouTubeLinkGenerateRequest(AppStruct):
|
||||
artist_name: str
|
||||
album_name: str
|
||||
album_id: str
|
||||
cover_url: str | None = None
|
||||
|
||||
|
||||
class YouTubeTrackLink(AppStruct):
|
||||
album_id: str
|
||||
track_number: int
|
||||
track_name: str
|
||||
video_id: str
|
||||
artist_name: str
|
||||
embed_url: str
|
||||
created_at: str
|
||||
disc_number: int = 1
|
||||
album_name: str = ""
|
||||
|
||||
|
||||
class YouTubeLink(AppStruct):
|
||||
album_id: str
|
||||
album_name: str
|
||||
artist_name: str
|
||||
created_at: str
|
||||
video_id: str | None = None
|
||||
embed_url: str | None = None
|
||||
cover_url: str | None = None
|
||||
is_manual: bool = False
|
||||
track_count: int = 0
|
||||
|
||||
|
||||
class YouTubeLinkResponse(AppStruct):
|
||||
link: YouTubeLink
|
||||
quota: YouTubeQuotaResponse
|
||||
|
||||
|
||||
class YouTubeTrackLinkGenerateRequest(AppStruct):
|
||||
album_id: str
|
||||
album_name: str
|
||||
artist_name: str
|
||||
track_name: str
|
||||
track_number: int
|
||||
disc_number: int = 1
|
||||
cover_url: str | None = None
|
||||
|
||||
|
||||
class TrackInput(AppStruct):
|
||||
track_name: str
|
||||
track_number: int
|
||||
disc_number: int = 1
|
||||
|
||||
|
||||
class YouTubeTrackLinkBatchGenerateRequest(AppStruct):
|
||||
album_id: str
|
||||
album_name: str
|
||||
artist_name: str
|
||||
tracks: list[TrackInput]
|
||||
cover_url: str | None = None
|
||||
|
||||
|
||||
class YouTubeTrackLinkResponse(AppStruct):
|
||||
track_link: YouTubeTrackLink
|
||||
quota: YouTubeQuotaResponse
|
||||
|
||||
|
||||
class YouTubeTrackLinkFailure(AppStruct):
|
||||
track_number: int
|
||||
track_name: str
|
||||
reason: str
|
||||
disc_number: int = 1
|
||||
|
||||
|
||||
class YouTubeTrackLinkBatchResponse(AppStruct):
|
||||
track_links: list[YouTubeTrackLink]
|
||||
quota: YouTubeQuotaResponse
|
||||
failed: list[YouTubeTrackLinkFailure] = []
|
||||
|
||||
|
||||
class YouTubeManualLinkRequest(AppStruct):
|
||||
album_name: str
|
||||
artist_name: str
|
||||
youtube_url: str
|
||||
cover_url: str | None = None
|
||||
album_id: str | None = None
|
||||
|
||||
|
||||
class YouTubeLinkUpdateRequest(AppStruct):
|
||||
youtube_url: str | None = None
|
||||
album_name: str | None = None
|
||||
artist_name: str | None = None
|
||||
cover_url: str | None | msgspec.UnsetType = msgspec.UNSET
|
||||
0
backend/cache/.gitignore-check
vendored
Normal file
0
backend/core/__init__.py
Normal file
251
backend/core/config.py
Normal file
|
|
@ -0,0 +1,251 @@
|
|||
from pathlib import Path
|
||||
from pydantic import Field, TypeAdapter, ValidationError as PydanticValidationError, field_validator, model_validator
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
from typing import Self
|
||||
import logging
|
||||
import msgspec
|
||||
from core.exceptions import ConfigurationError
|
||||
from infrastructure.file_utils import atomic_write_json, read_json
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_VALID_LOG_LEVELS = frozenset({"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"})
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application settings for the backend.

    Values are layered: pydantic field defaults, then environment / ``.env``
    (via ``BaseSettings``), then the JSON config file applied explicitly by
    ``load_from_file``. ``extra="allow"`` keeps unknown env vars from failing
    startup.
    """

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
        extra="allow"
    )

    # -- External service connections --
    lidarr_url: str = Field(default="http://lidarr:8686")
    lidarr_api_key: str = Field(default="")

    jellyfin_url: str = Field(default="http://jellyfin:8096")

    contact_email: str = Field(
        default="contact@musicseerr.com",
        description="Contact email for MusicBrainz API User-Agent. Override with your own if desired."
    )

    # -- Lidarr request defaults --
    quality_profile_id: int = Field(default=1)
    metadata_profile_id: int = Field(default=1)
    root_folder_path: str = Field(default="/music")

    # -- Server --
    port: int = Field(default=8688)
    debug: bool = Field(default=False)
    log_level: str = Field(default="INFO")

    # -- Cache TTLs (seconds) and maintenance cadence --
    cache_ttl_default: int = Field(default=60)
    cache_ttl_artist: int = Field(default=3600)
    cache_ttl_album: int = Field(default=3600)
    cache_ttl_covers: int = Field(default=86400, description="Cover cache TTL in seconds (default: 24 hours)")
    cache_cleanup_interval: int = Field(default=300)

    # -- Filesystem layout --
    cache_dir: Path = Field(default=Path("/app/cache"), description="Root directory for all cache files")
    library_db_path: Path = Field(default=Path("/app/cache/library.db"), description="SQLite library database path")
    cover_cache_max_size_mb: int = Field(default=500, description="Maximum cover cache size in MB")
    queue_db_path: Path = Field(default=Path("/app/cache/queue.db"), description="SQLite queue database path")
    shutdown_grace_period: float = Field(default=10.0, description="Seconds to wait for tasks on shutdown")

    # -- HTTP client tuning --
    http_timeout: float = Field(default=10.0)
    http_connect_timeout: float = Field(default=5.0)
    http_max_connections: int = Field(default=200)
    http_max_keepalive: int = Field(default=50)

    config_file_path: Path = Field(default=Path("/app/config/config.json"))
    # "123" is TheAudioDB's public/free test key.  NOTE(review): confirm intent.
    audiodb_api_key: str = Field(default="123")
    audiodb_premium: bool = Field(default=False, description="Set to true if using a premium AudioDB API key")

    @field_validator("log_level")
    @classmethod
    def validate_log_level(cls, v: str) -> str:
        """Normalise to upper case and reject unknown logging levels."""
        normalised = v.upper()
        if normalised not in _VALID_LOG_LEVELS:
            raise ValueError(
                f"Invalid log_level '{v}'. Must be one of: {', '.join(sorted(_VALID_LOG_LEVELS))}"
            )
        return normalised

    @field_validator("lidarr_url", "jellyfin_url")
    @classmethod
    def validate_url(cls, v: str) -> str:
        """Strip trailing slashes so later path joins stay clean."""
        return v.rstrip("/")

    @model_validator(mode='after')
    def validate_config(self) -> Self:
        """Cross-field validation: hard errors raise, soft issues only warn."""
        errors = []
        warnings = []

        for url_field in ['lidarr_url', 'jellyfin_url']:
            url = getattr(self, url_field, '')
            if url and not url.startswith(('http://', 'https://')):
                errors.append(f"{url_field} must start with http:// or https://")

        # Heuristic pool-sizing sanity check — warn only, never fail startup.
        if self.http_max_connections < self.http_max_keepalive * 2:
            warnings.append(
                f"http_max_connections ({self.http_max_connections}) should be "
                f"at least 2x http_max_keepalive ({self.http_max_keepalive})"
            )

        if not self.lidarr_api_key:
            warnings.append("LIDARR_API_KEY is not set - Lidarr features will not work")

        for warning in warnings:
            logger.warning(warning)

        if errors:
            raise ConfigurationError(
                f"Critical configuration errors: {'; '.join(errors)}"
            )

        return self

    def get_user_agent(self) -> str:
        """User-Agent string for MusicBrainz API requests (includes contact email)."""
        return f"Musicseerr/1.0 ({self.contact_email}; https://www.musicseerr.com)"

    def load_from_file(self) -> None:
        """Merge the JSON config file into this instance.

        Validates every recognised key individually, re-runs the field
        validators TypeAdapter skips, dry-runs cross-field checks on the
        merged candidate state, and only then mutates self — so a bad config
        file leaves the instance untouched.  Creates a default config file
        when none exists.

        Raises:
            ConfigurationError: on per-key type errors or cross-field errors.
            ValueError: on malformed JSON or a non-object JSON root.
        """
        if not self.config_file_path.exists():
            self._create_default_config()
            return

        try:
            config_data = read_json(self.config_file_path, default={})
            if not isinstance(config_data, dict):
                raise ValueError("Config file JSON root must be an object")

            type_errors: list[str] = []
            model_fields = type(self).model_fields
            validated_values: dict[str, object] = {}
            for key, value in config_data.items():
                # Unknown keys are tolerated (forward/backward compat) but logged.
                if key not in model_fields:
                    logger.warning("Unknown config key '%s' — ignoring", key)
                    continue
                try:
                    field_info = model_fields[key]
                    adapter = TypeAdapter(field_info.annotation)
                    validated_values[key] = adapter.validate_python(value)
                except PydanticValidationError as e:
                    # Report only the first error per key to keep messages short.
                    type_errors.append(
                        f"'{key}': {e.errors()[0].get('msg', str(e))}"
                    )
                except (TypeError, ValueError) as e:
                    type_errors.append(f"'{key}': {e}")

            if type_errors:
                raise ConfigurationError(
                    f"Config file type errors: {'; '.join(type_errors)}"
                )

            # Run field validators that TypeAdapter doesn't invoke
            try:
                for url_field in ('lidarr_url', 'jellyfin_url'):
                    if url_field in validated_values:
                        validated_values[url_field] = type(self).validate_url(
                            validated_values[url_field]
                        )
                if 'log_level' in validated_values:
                    validated_values['log_level'] = type(self).validate_log_level(
                        validated_values['log_level']
                    )
            except ValueError as e:
                raise ConfigurationError(f"Config file validation error: {e}")

            # Dry-run cross-field validation on merged candidate state
            self._validate_merged(validated_values)

            # All validation passed — apply atomically
            for key, value in validated_values.items():
                setattr(self, key, value)

            logger.info(f"Loaded configuration from {self.config_file_path}")
        except (ConfigurationError, ValueError):
            raise
        except msgspec.DecodeError as e:
            # read_json presumably decodes via msgspec — TODO confirm.
            logger.error(f"Invalid JSON in config file: {e}")
            raise ValueError(f"Config file is not valid JSON: {e}")
        except Exception as e:
            logger.error(f"Failed to load config: {e}")
            raise
|
||||
|
||||
|
||||
_settings: Settings | None = None
|
||||
|
||||
|
||||
def get_settings() -> Settings:
    """Return the process-wide Settings singleton, creating it on first use.

    The config file is merged in before the instance is published so callers
    never observe a half-initialised Settings object.
    """
    global _settings
    if _settings is not None:
        return _settings
    instance = Settings()
    instance.load_from_file()
    _settings = instance
    return _settings
|
||||
122
backend/core/dependencies/__init__.py
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
"""Dependency injection providers for the MusicSeerr backend.
|
||||
|
||||
This package replaces the former monolithic ``core/dependencies.py``.
|
||||
All public names are re-exported here so that existing imports like
|
||||
``from core.dependencies import get_home_service`` continue to work.
|
||||
"""
|
||||
|
||||
from .cache_providers import ( # noqa: F401
|
||||
get_cache,
|
||||
get_disk_cache,
|
||||
get_library_db,
|
||||
get_genre_index,
|
||||
get_youtube_store,
|
||||
get_mbid_store,
|
||||
get_sync_state_store,
|
||||
get_persistence_write_lock,
|
||||
get_preferences_service,
|
||||
get_cache_service,
|
||||
get_cache_status_service,
|
||||
)
|
||||
|
||||
from .repo_providers import ( # noqa: F401
|
||||
get_lidarr_repository,
|
||||
get_musicbrainz_repository,
|
||||
get_wikidata_repository,
|
||||
get_listenbrainz_repository,
|
||||
get_jellyfin_repository,
|
||||
get_navidrome_repository,
|
||||
get_coverart_repository,
|
||||
get_youtube_repo,
|
||||
get_audiodb_repository,
|
||||
get_audiodb_image_service,
|
||||
get_audiodb_browse_queue,
|
||||
get_lastfm_repository,
|
||||
get_playlist_repository,
|
||||
get_request_history_store,
|
||||
)
|
||||
|
||||
from .service_providers import ( # noqa: F401
|
||||
get_search_service,
|
||||
get_search_enrichment_service,
|
||||
get_artist_service,
|
||||
get_album_service,
|
||||
get_request_queue,
|
||||
get_request_service,
|
||||
get_requests_page_service,
|
||||
get_playlist_service,
|
||||
get_library_service,
|
||||
get_status_service,
|
||||
get_home_service,
|
||||
get_genre_cover_prewarm_service,
|
||||
get_home_charts_service,
|
||||
get_settings_service,
|
||||
get_artist_discovery_service,
|
||||
get_artist_enrichment_service,
|
||||
get_album_enrichment_service,
|
||||
get_album_discovery_service,
|
||||
get_youtube_service,
|
||||
get_lastfm_auth_service,
|
||||
get_scrobble_service,
|
||||
get_discover_service,
|
||||
get_discover_queue_manager,
|
||||
get_jellyfin_playback_service,
|
||||
get_local_files_service,
|
||||
get_jellyfin_library_service,
|
||||
get_navidrome_library_service,
|
||||
get_navidrome_playback_service,
|
||||
)
|
||||
|
||||
from .type_aliases import ( # noqa: F401
|
||||
SettingsDep,
|
||||
CacheDep,
|
||||
DiskCacheDep,
|
||||
PreferencesServiceDep,
|
||||
LidarrRepositoryDep,
|
||||
MusicBrainzRepositoryDep,
|
||||
WikidataRepositoryDep,
|
||||
ListenBrainzRepositoryDep,
|
||||
JellyfinRepositoryDep,
|
||||
CoverArtRepositoryDep,
|
||||
SearchServiceDep,
|
||||
SearchEnrichmentServiceDep,
|
||||
ArtistServiceDep,
|
||||
AlbumServiceDep,
|
||||
RequestQueueDep,
|
||||
RequestServiceDep,
|
||||
LibraryServiceDep,
|
||||
StatusServiceDep,
|
||||
CacheServiceDep,
|
||||
HomeServiceDep,
|
||||
HomeChartsServiceDep,
|
||||
SettingsServiceDep,
|
||||
ArtistDiscoveryServiceDep,
|
||||
AlbumDiscoveryServiceDep,
|
||||
DiscoverServiceDep,
|
||||
DiscoverQueueManagerDep,
|
||||
YouTubeRepositoryDep,
|
||||
YouTubeServiceDep,
|
||||
RequestHistoryStoreDep,
|
||||
RequestsPageServiceDep,
|
||||
JellyfinPlaybackServiceDep,
|
||||
LocalFilesServiceDep,
|
||||
JellyfinLibraryServiceDep,
|
||||
LastFmRepositoryDep,
|
||||
LastFmAuthServiceDep,
|
||||
ScrobbleServiceDep,
|
||||
PlaylistRepositoryDep,
|
||||
PlaylistServiceDep,
|
||||
NavidromeRepositoryDep,
|
||||
NavidromeLibraryServiceDep,
|
||||
NavidromePlaybackServiceDep,
|
||||
CacheStatusServiceDep,
|
||||
)
|
||||
|
||||
from .cleanup import ( # noqa: F401
|
||||
init_app_state,
|
||||
cleanup_app_state,
|
||||
clear_lastfm_dependent_caches,
|
||||
clear_listenbrainz_dependent_caches,
|
||||
)
|
||||
|
||||
from ._registry import clear_all_singletons, _singleton_registry # noqa: F401
|
||||
35
backend/core/dependencies/_registry.py
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
"""Singleton decorator and automatic cleanup registry for DI providers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import lru_cache, wraps
|
||||
from typing import Callable, TypeVar
|
||||
|
||||
F = TypeVar("F", bound=Callable)
|
||||
|
||||
_singleton_registry: list[Callable] = []
|
||||
|
||||
|
||||
def singleton(fn: F) -> F:
|
||||
"""Wrap *fn* with ``@lru_cache(maxsize=1)`` and register it for automatic cleanup."""
|
||||
cached = lru_cache(maxsize=1)(fn)
|
||||
_singleton_registry.append(cached)
|
||||
|
||||
@wraps(fn)
|
||||
def wrapper(*args, **kwargs):
|
||||
return cached(*args, **kwargs)
|
||||
|
||||
# Expose cache_clear so callers can invalidate individual singletons
|
||||
wrapper.cache_clear = cached.cache_clear # type: ignore[attr-defined]
|
||||
wrapper.cache_info = cached.cache_info # type: ignore[attr-defined]
|
||||
wrapper._cached = cached # type: ignore[attr-defined]
|
||||
_singleton_registry[-1] = wrapper # replace with the wrapper so clear_all hits wrapper
|
||||
return wrapper # type: ignore[return-value]
|
||||
|
||||
|
||||
def clear_all_singletons() -> None:
|
||||
"""Call ``cache_clear()`` on every registered singleton provider."""
|
||||
for fn in _singleton_registry:
|
||||
cache_clear = getattr(fn, "cache_clear", None)
|
||||
if callable(cache_clear):
|
||||
cache_clear()
|
||||
111
backend/core/dependencies/cache_providers.py
Normal file
|
|
@ -0,0 +1,111 @@
|
|||
"""Tier 2 — Cache layer, persistence stores, and foundation providers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import threading
|
||||
|
||||
from core.config import get_settings
|
||||
from infrastructure.cache.memory_cache import InMemoryCache, CacheInterface
|
||||
from infrastructure.cache.disk_cache import DiskMetadataCache
|
||||
from infrastructure.persistence import (
|
||||
LibraryDB,
|
||||
GenreIndex,
|
||||
YouTubeStore,
|
||||
MBIDStore,
|
||||
SyncStateStore,
|
||||
)
|
||||
|
||||
from ._registry import singleton
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@singleton
def get_cache() -> CacheInterface:
    """Process-wide in-memory cache, sized from the advanced preferences."""
    preferences_service = get_preferences_service()
    advanced = preferences_service.get_advanced_settings()
    max_entries = advanced.memory_cache_max_entries
    logger.info(f"Initialized RAM cache with max {max_entries} entries")
    return InMemoryCache(max_entries=max_entries)
|
||||
|
||||
|
||||
@singleton
def get_disk_cache() -> DiskMetadataCache:
    """On-disk metadata/cover cache rooted at `<cache_dir>/metadata`, sized from advanced preferences."""
    settings = get_settings()
    preferences_service = get_preferences_service()
    advanced = preferences_service.get_advanced_settings()
    cache_dir = settings.cache_dir / "metadata"
    logger.info(f"Initialized disk metadata cache at {cache_dir}")
    return DiskMetadataCache(
        base_path=cache_dir,
        recent_metadata_max_size_mb=advanced.recent_metadata_max_size_mb,
        recent_covers_max_size_mb=advanced.recent_covers_max_size_mb,
        persistent_metadata_ttl_hours=advanced.persistent_metadata_ttl_hours,
    )
|
||||
|
||||
|
||||
# -- Persistence store providers (shared write lock + DB path) --
|
||||
|
||||
@singleton
def get_persistence_write_lock() -> threading.Lock:
    """Single write lock shared by every persistence store backed by the library DB file."""
    return threading.Lock()
|
||||
|
||||
|
||||
@singleton
def get_library_db() -> LibraryDB:
    """Library SQLite store; shares the process-wide write lock with sibling stores."""
    settings = get_settings()
    lock = get_persistence_write_lock()
    return LibraryDB(db_path=settings.library_db_path, write_lock=lock)
|
||||
|
||||
|
||||
@singleton
def get_genre_index() -> GenreIndex:
    """Genre index store on the shared library DB file, using the shared write lock."""
    settings = get_settings()
    lock = get_persistence_write_lock()
    return GenreIndex(db_path=settings.library_db_path, write_lock=lock)
|
||||
|
||||
|
||||
@singleton
def get_youtube_store() -> YouTubeStore:
    """YouTube-link store on the shared library DB file, using the shared write lock."""
    settings = get_settings()
    lock = get_persistence_write_lock()
    return YouTubeStore(db_path=settings.library_db_path, write_lock=lock)
|
||||
|
||||
|
||||
@singleton
def get_mbid_store() -> MBIDStore:
    """MBID mapping store on the shared library DB file, using the shared write lock."""
    settings = get_settings()
    lock = get_persistence_write_lock()
    return MBIDStore(db_path=settings.library_db_path, write_lock=lock)
|
||||
|
||||
|
||||
@singleton
def get_sync_state_store() -> SyncStateStore:
    """Sync-state store on the shared library DB file, using the shared write lock."""
    settings = get_settings()
    lock = get_persistence_write_lock()
    return SyncStateStore(db_path=settings.library_db_path, write_lock=lock)
|
||||
|
||||
|
||||
@singleton
def get_preferences_service() -> "PreferencesService":
    """Preferences service singleton.

    The import is deferred to call time — presumably to break an import
    cycle with the services package; confirm before hoisting it.
    """
    from services.preferences_service import PreferencesService

    settings = get_settings()
    return PreferencesService(settings)
|
||||
|
||||
|
||||
@singleton
def get_cache_service() -> "CacheService":
    """Cache-management service wired to the RAM cache, library DB, and disk cache."""
    from services.cache_service import CacheService

    cache = get_cache()
    library_db = get_library_db()
    disk_cache = get_disk_cache()
    return CacheService(cache, library_db, disk_cache)
|
||||
|
||||
|
||||
def get_cache_status_service() -> "CacheStatusService":
    """Cache-status service.

    NOTE(review): unlike its siblings this provider is NOT @singleton and
    returns a fresh instance per call — confirm that is deliberate.
    """
    from services.cache_status_service import CacheStatusService

    return CacheStatusService()
|
||||
80
backend/core/dependencies/cleanup.py
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
"""Application lifecycle and targeted cache invalidation."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from infrastructure.http.client import close_http_clients
|
||||
|
||||
from ._registry import clear_all_singletons
|
||||
from .service_providers import (
|
||||
get_artist_discovery_service,
|
||||
get_artist_enrichment_service,
|
||||
get_album_enrichment_service,
|
||||
get_album_discovery_service,
|
||||
get_search_enrichment_service,
|
||||
get_scrobble_service,
|
||||
get_home_charts_service,
|
||||
get_home_service,
|
||||
get_discover_service,
|
||||
get_discover_queue_manager,
|
||||
get_lastfm_auth_service,
|
||||
get_genre_cover_prewarm_service,
|
||||
)
|
||||
from .repo_providers import get_listenbrainz_repository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def clear_lastfm_dependent_caches() -> None:
    """Clear the Last.fm repository provider and every service that holds it.

    Called after Last.fm settings change so the next provider call rebuilds
    each object against fresh credentials.
    """
    # Imported locally to avoid a circular import at module load time.
    from .repo_providers import get_lastfm_repository

    # Clear the repository itself first so the services rebuilt below receive
    # a fresh instance (mirrors clear_listenbrainz_dependent_caches, which
    # clears its repository provider too).
    get_lastfm_repository.cache_clear()
    get_artist_discovery_service.cache_clear()
    get_artist_enrichment_service.cache_clear()
    get_album_enrichment_service.cache_clear()
    get_search_enrichment_service.cache_clear()
    get_scrobble_service.cache_clear()
    get_home_charts_service.cache_clear()
    get_home_service.cache_clear()
    get_discover_service.cache_clear()
    get_discover_queue_manager.cache_clear()
    get_lastfm_auth_service.cache_clear()
|
||||
|
||||
|
||||
def clear_listenbrainz_dependent_caches() -> None:
    """Clear LRU caches for all services that hold a reference to ListenBrainzRepository."""
    # The repository provider comes first so dependants rebuild against a
    # fresh instance.
    providers = (
        get_listenbrainz_repository,
        get_artist_discovery_service,
        get_album_discovery_service,
        get_search_enrichment_service,
        get_scrobble_service,
        get_home_charts_service,
        get_home_service,
        get_discover_service,
        get_discover_queue_manager,
    )
    for provider in providers:
        provider.cache_clear()
|
||||
|
||||
|
||||
async def init_app_state(app) -> None:
    """Startup hook for application-level state.

    Currently only logs; singletons are created lazily on first use, so no
    eager construction happens here.
    """
    logger.info("Application state initialized")
|
||||
|
||||
|
||||
async def cleanup_app_state() -> None:
    """Shutdown hook: stop background work, close clients, drop all singletons.

    Order matters — services are shut down before the singleton registry is
    cleared, so each shutdown call still sees its live dependencies.
    """
    # Graceful service shutdown
    try:
        queue_mgr = get_discover_queue_manager()
        queue_mgr.invalidate()
    except (AttributeError, RuntimeError) as exc:
        # Best-effort: a failed invalidation should not abort shutdown.
        logger.error("Failed to invalidate discover queue manager during cleanup: %s", exc)

    await close_http_clients()

    # Shutdown genre prewarm service before clearing singletons
    try:
        prewarm_svc = get_genre_cover_prewarm_service()
        await prewarm_svc.shutdown()
    except (AttributeError, RuntimeError, OSError) as exc:
        logger.error("Failed to shut down genre prewarm service during cleanup: %s", exc)

    # Automatic cleanup via registry — no manual list needed
    clear_all_singletons()

    logger.info("Application state cleaned up")
|
||||
239
backend/core/dependencies/repo_providers.py
Normal file
|
|
@ -0,0 +1,239 @@
|
|||
"""Tier 3 — Repository providers and infrastructure services."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import httpx
|
||||
|
||||
from core.config import get_settings
|
||||
from infrastructure.http.client import get_http_client, get_listenbrainz_http_client
|
||||
|
||||
from ._registry import singleton
|
||||
from .cache_providers import (
|
||||
get_cache,
|
||||
get_disk_cache,
|
||||
get_mbid_store,
|
||||
get_preferences_service,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_configured_http_client() -> httpx.AsyncClient:
    """Return the shared AsyncClient, tuned from the advanced preferences."""
    app_settings = get_settings()
    adv = get_preferences_service().get_advanced_settings()
    timeout_s = float(adv.http_timeout)
    connect_s = float(adv.http_connect_timeout)
    return get_http_client(
        app_settings,
        timeout=timeout_s,
        connect_timeout=connect_s,
        max_connections=adv.http_max_connections,
    )
|
||||
|
||||
|
||||
@singleton
def get_lidarr_repository() -> "LidarrRepository":
    """Singleton Lidarr repository using the shared HTTP client and cache."""
    # Imported lazily to keep provider modules import-cycle free.
    from repositories.lidarr import LidarrRepository

    app_settings = get_settings()
    memory_cache = get_cache()
    client = _get_configured_http_client()
    return LidarrRepository(app_settings, client, memory_cache)
|
||||
|
||||
|
||||
@singleton
def get_musicbrainz_repository() -> "MusicBrainzRepository":
    """Singleton MusicBrainz repository with cache and preferences wiring."""
    from repositories.musicbrainz_repository import MusicBrainzRepository

    memory_cache = get_cache()
    prefs = get_preferences_service()
    client = _get_configured_http_client()
    return MusicBrainzRepository(client, memory_cache, prefs)
|
||||
|
||||
|
||||
@singleton
def get_wikidata_repository() -> "WikidataRepository":
    """Singleton Wikidata repository using the shared HTTP client and cache."""
    from repositories.wikidata_repository import WikidataRepository

    memory_cache = get_cache()
    client = _get_configured_http_client()
    return WikidataRepository(client, memory_cache)
|
||||
|
||||
|
||||
@singleton
def get_listenbrainz_repository() -> "ListenBrainzRepository":
    """Singleton ListenBrainz repository using the dedicated LB HTTP client.

    Credentials are blanked when the integration is disabled so the repository
    can still be constructed (and cached) without authenticated access.
    """
    from repositories.listenbrainz_repository import ListenBrainzRepository

    cache = get_cache()
    preferences = get_preferences_service()
    # Fetch the advanced settings once instead of once per keyword argument.
    advanced = preferences.get_advanced_settings()
    http_client = get_listenbrainz_http_client(
        settings=get_settings(),
        timeout=float(advanced.http_timeout),
        connect_timeout=float(advanced.http_connect_timeout),
    )
    lb_settings = preferences.get_listenbrainz_connection()
    return ListenBrainzRepository(
        http_client=http_client,
        cache=cache,
        username=lb_settings.username if lb_settings.enabled else "",
        user_token=lb_settings.user_token if lb_settings.enabled else "",
    )
|
||||
|
||||
|
||||
@singleton
def get_jellyfin_repository() -> "JellyfinRepository":
    """Singleton Jellyfin repository; credentials are blank when disabled."""
    from repositories.jellyfin_repository import JellyfinRepository

    memory_cache = get_cache()
    mbids = get_mbid_store()
    client = _get_configured_http_client()
    jf = get_preferences_service().get_jellyfin_connection()
    enabled = jf.enabled
    return JellyfinRepository(
        http_client=client,
        cache=memory_cache,
        base_url=jf.jellyfin_url if enabled else "",
        api_key=jf.api_key if enabled else "",
        user_id=jf.user_id if enabled else "",
        mbid_store=mbids,
    )
|
||||
|
||||
|
||||
@singleton
def get_navidrome_repository() -> "NavidromeRepository":
    """Singleton Navidrome repository; credentials attached only when enabled."""
    from repositories.navidrome_repository import NavidromeRepository

    memory_cache = get_cache()
    client = _get_configured_http_client()
    prefs = get_preferences_service()
    conn = prefs.get_navidrome_connection_raw()
    repo = NavidromeRepository(http_client=client, cache=memory_cache)
    if conn.enabled:
        repo.configure(
            url=conn.navidrome_url,
            username=conn.username,
            password=conn.password,
        )
    adv = prefs.get_advanced_settings()
    albums_ttl = getattr(adv, "cache_ttl_navidrome_albums", 300)
    # Album detail reuses the album-list TTL; getattr guards older settings.
    repo.configure_cache_ttls(
        list_ttl=albums_ttl,
        search_ttl=getattr(adv, "cache_ttl_navidrome_search", 120),
        genres_ttl=getattr(adv, "cache_ttl_navidrome_genres", 3600),
        detail_ttl=albums_ttl,
    )
    return repo
|
||||
|
||||
|
||||
@singleton
def get_youtube_repo() -> "YouTubeRepository":
    """Singleton YouTube repository; API key passed only when fully enabled."""
    from repositories.youtube import YouTubeRepository

    client = _get_configured_http_client()
    yt = get_preferences_service().get_youtube_connection()
    use_api = yt.enabled and yt.api_enabled and yt.has_valid_api_key()
    return YouTubeRepository(
        http_client=client,
        api_key=yt.api_key.strip() if use_api else "",
        daily_quota_limit=yt.daily_quota_limit,
    )
|
||||
|
||||
|
||||
@singleton
def get_audiodb_repository() -> "AudioDBRepository":
    """Singleton TheAudioDB repository configured from application settings."""
    from repositories.audiodb_repository import AudioDBRepository

    app_settings = get_settings()
    client = _get_configured_http_client()
    prefs = get_preferences_service()
    return AudioDBRepository(
        http_client=client,
        preferences_service=prefs,
        api_key=app_settings.audiodb_api_key,
        premium=app_settings.audiodb_premium,
    )
|
||||
|
||||
|
||||
@singleton
def get_audiodb_image_service() -> "AudioDBImageService":
    """Singleton AudioDB image service over repo, disk cache and memory cache."""
    from services.audiodb_image_service import AudioDBImageService

    repo = get_audiodb_repository()
    on_disk = get_disk_cache()
    prefs = get_preferences_service()
    in_memory = get_cache()
    return AudioDBImageService(
        audiodb_repo=repo,
        disk_cache=on_disk,
        preferences_service=prefs,
        memory_cache=in_memory,
    )
|
||||
|
||||
|
||||
@singleton
def get_audiodb_browse_queue() -> "AudioDBBrowseQueue":
    """Singleton AudioDB browse queue (no constructor dependencies)."""
    from services.audiodb_browse_queue import AudioDBBrowseQueue

    return AudioDBBrowseQueue()
|
||||
|
||||
|
||||
@singleton
def get_lastfm_repository() -> "LastFmRepository":
    """Singleton Last.fm repository built from stored connection credentials."""
    from repositories.lastfm_repository import LastFmRepository

    client = _get_configured_http_client()
    lf = get_preferences_service().get_lastfm_connection()
    memory_cache = get_cache()
    return LastFmRepository(
        http_client=client,
        cache=memory_cache,
        api_key=lf.api_key,
        shared_secret=lf.shared_secret,
        session_key=lf.session_key,
    )
|
||||
|
||||
|
||||
@singleton
def get_playlist_repository() -> "PlaylistRepository":
    """Singleton playlist repository backed by the library database file."""
    from repositories.playlist_repository import PlaylistRepository

    return PlaylistRepository(db_path=get_settings().library_db_path)
|
||||
|
||||
|
||||
@singleton
def get_request_history_store() -> "RequestHistoryStore":
    """Singleton request-history store sharing the library DB and write lock."""
    from infrastructure.persistence.request_history import RequestHistoryStore
    from .cache_providers import get_persistence_write_lock

    write_lock = get_persistence_write_lock()
    return RequestHistoryStore(
        db_path=get_settings().library_db_path,
        write_lock=write_lock,
    )
|
||||
|
||||
|
||||
@singleton
def get_coverart_repository() -> "CoverArtRepository":
    """Singleton cover-art repository aggregating MusicBrainz, Lidarr,
    Jellyfin and AudioDB image sources, with disk and memory caching."""
    from repositories.coverart_repository import CoverArtRepository

    settings = get_settings()
    advanced = get_preferences_service().get_advanced_settings()
    cache = get_cache()
    mb_repo = get_musicbrainz_repository()
    lidarr_repo = get_lidarr_repository()
    jellyfin_repo = get_jellyfin_repository()
    audiodb_service = get_audiodb_image_service()
    http_client = _get_configured_http_client()
    # On-disk cover cache lives under the shared cache directory.
    cache_dir = settings.cache_dir / "covers"
    return CoverArtRepository(
        http_client,
        cache,
        mb_repo,
        lidarr_repo,
        jellyfin_repo,
        audiodb_service=audiodb_service,
        cache_dir=cache_dir,
        cover_cache_max_size_mb=settings.cover_cache_max_size_mb,
        cover_memory_cache_max_entries=advanced.cover_memory_cache_max_entries,
        # Preference is expressed in MB; the repository takes bytes.
        cover_memory_cache_max_bytes=advanced.cover_memory_cache_max_size_mb * 1024 * 1024,
        # NOTE(review): a *_seconds parameter is fed from a *_bytes setting —
        # looks like the wrong attribute; confirm against the advanced
        # settings model before relying on this TTL.
        cover_non_monitored_ttl_seconds=advanced.cache_ttl_recently_viewed_bytes,
    )
|
||||
514
backend/core/dependencies/service_providers.py
Normal file
|
|
@ -0,0 +1,514 @@
|
|||
"""Tier 4 — Business-logic service providers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from infrastructure.cache.cache_keys import (
|
||||
lidarr_raw_albums_key,
|
||||
lidarr_requested_mbids_key,
|
||||
HOME_RESPONSE_PREFIX,
|
||||
ALBUM_INFO_PREFIX,
|
||||
ARTIST_INFO_PREFIX,
|
||||
LIDARR_PREFIX,
|
||||
LIDARR_ALBUM_DETAILS_PREFIX,
|
||||
)
|
||||
from infrastructure.persistence.request_history import RequestHistoryRecord
|
||||
|
||||
from ._registry import singleton
|
||||
from .cache_providers import (
|
||||
get_cache,
|
||||
get_disk_cache,
|
||||
get_library_db,
|
||||
get_genre_index,
|
||||
get_youtube_store,
|
||||
get_mbid_store,
|
||||
get_sync_state_store,
|
||||
get_preferences_service,
|
||||
get_cache_status_service,
|
||||
)
|
||||
from .repo_providers import (
|
||||
get_lidarr_repository,
|
||||
get_musicbrainz_repository,
|
||||
get_wikidata_repository,
|
||||
get_listenbrainz_repository,
|
||||
get_jellyfin_repository,
|
||||
get_navidrome_repository,
|
||||
get_coverart_repository,
|
||||
get_youtube_repo,
|
||||
get_audiodb_image_service,
|
||||
get_audiodb_browse_queue,
|
||||
get_lastfm_repository,
|
||||
get_playlist_repository,
|
||||
get_request_history_store,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@singleton
def get_search_service() -> "SearchService":
    """Singleton search service over MusicBrainz, Lidarr and cover sources."""
    from services.search_service import SearchService

    musicbrainz = get_musicbrainz_repository()
    lidarr = get_lidarr_repository()
    covers = get_coverart_repository()
    prefs = get_preferences_service()
    audiodb_images = get_audiodb_image_service()
    queue = get_audiodb_browse_queue()
    return SearchService(
        musicbrainz,
        lidarr,
        covers,
        prefs,
        audiodb_images,
        queue,
    )
|
||||
|
||||
|
||||
@singleton
def get_artist_service() -> "ArtistService":
    """Singleton artist service aggregating metadata, image and cache sources."""
    from services.artist_service import ArtistService

    musicbrainz = get_musicbrainz_repository()
    lidarr = get_lidarr_repository()
    wikidata = get_wikidata_repository()
    prefs = get_preferences_service()
    in_memory = get_cache()
    on_disk = get_disk_cache()
    audiodb_images = get_audiodb_image_service()
    queue = get_audiodb_browse_queue()
    library = get_library_db()
    return ArtistService(
        musicbrainz,
        lidarr,
        wikidata,
        prefs,
        in_memory,
        on_disk,
        audiodb_images,
        queue,
        library,
    )
|
||||
|
||||
|
||||
@singleton
def get_album_service() -> "AlbumService":
    """Singleton album service over Lidarr, MusicBrainz and the local caches."""
    from services.album_service import AlbumService

    lidarr = get_lidarr_repository()
    musicbrainz = get_musicbrainz_repository()
    library = get_library_db()
    in_memory = get_cache()
    on_disk = get_disk_cache()
    prefs = get_preferences_service()
    audiodb_images = get_audiodb_image_service()
    queue = get_audiodb_browse_queue()
    return AlbumService(
        lidarr,
        musicbrainz,
        library,
        in_memory,
        on_disk,
        prefs,
        audiodb_images,
        queue,
    )
|
||||
|
||||
|
||||
@singleton
def get_request_queue() -> "RequestQueue":
    """Singleton request queue whose processor adds an album to Lidarr and,
    when Lidarr reports it as monitored, promotes the album's (and its
    artist's) cache entries to the persistent tier."""
    from infrastructure.queue.request_queue import RequestQueue
    from infrastructure.queue.queue_store import QueueStore
    from core.config import get_settings
    settings = get_settings()

    lidarr_repo = get_lidarr_repository()
    disk_cache = get_disk_cache()
    cover_repo = get_coverart_repository()

    async def processor(album_mbid: str) -> dict:
        # Forward the request to Lidarr; ``result`` carries a "payload" dict
        # mirroring Lidarr's album resource.
        result = await lidarr_repo.add_album(album_mbid)

        payload = result.get("payload", {})
        if payload and isinstance(payload, dict):
            is_monitored = payload.get("monitored", False)

            if is_monitored:
                logger.info(f"Album {album_mbid[:8]}... successfully monitored - promoting cache entries to persistent")

                try:
                    await disk_cache.promote_album_to_persistent(album_mbid)
                    await cover_repo.promote_cover_to_persistent(album_mbid, identifier_type="album")

                    # Also promote the owning artist, if Lidarr reported one.
                    artist_data = payload.get("artist", {})
                    if artist_data:
                        # The MBID field name varies across Lidarr payloads.
                        artist_mbid = artist_data.get("foreignArtistId") or artist_data.get("mbId")
                        if artist_mbid:
                            await disk_cache.promote_artist_to_persistent(artist_mbid)
                            await cover_repo.promote_cover_to_persistent(artist_mbid, identifier_type="artist")

                    logger.info(f"Cache promotion complete for album {album_mbid[:8]}...")
                except Exception as e:  # noqa: BLE001
                    # Promotion is best-effort; failure must not fail the request.
                    logger.error(f"Failed to promote cache entries for album {album_mbid[:8]}...: {e}")
            else:
                logger.warning(f"Album {album_mbid[:8]}... added but not monitored - skipping cache promotion")

        return result

    # Queue state is persisted so pending requests survive restarts.
    store = QueueStore(db_path=settings.queue_db_path)
    return RequestQueue(processor, store=store)
|
||||
|
||||
|
||||
@singleton
def get_request_service() -> "RequestService":
    """Singleton request service over Lidarr, the queue and request history."""
    from services.request_service import RequestService

    lidarr = get_lidarr_repository()
    queue = get_request_queue()
    history = get_request_history_store()
    return RequestService(lidarr, queue, history)
|
||||
|
||||
|
||||
@singleton
def get_requests_page_service() -> "RequestsPageService":
    """Singleton requests-page service.

    Its ``on_import`` callback invalidates every cache layer that could serve
    stale data after an album finishes importing, then records the album in
    the local library cache.
    """
    from services.requests_page_service import RequestsPageService

    lidarr_repo = get_lidarr_repository()
    request_history = get_request_history_store()
    memory_cache = get_cache()
    disk_cache = get_disk_cache()
    library_db = get_library_db()

    async def on_import(record: RequestHistoryRecord) -> None:
        # 1) Memory-cache invalidations, run concurrently; failures are
        #    tolerated (return_exceptions=True).
        invalidations = [
            memory_cache.delete(lidarr_raw_albums_key()),
            memory_cache.clear_prefix(f"{LIDARR_PREFIX}library:"),
            memory_cache.delete(lidarr_requested_mbids_key()),
            memory_cache.clear_prefix(HOME_RESPONSE_PREFIX),
            memory_cache.delete(f"{ALBUM_INFO_PREFIX}{record.musicbrainz_id}"),
            memory_cache.delete(f"{LIDARR_ALBUM_DETAILS_PREFIX}{record.musicbrainz_id}"),
        ]
        if record.artist_mbid:
            invalidations.append(
                memory_cache.delete(f"{ARTIST_INFO_PREFIX}{record.artist_mbid}")
            )
        await asyncio.gather(*invalidations, return_exceptions=True)
        # 2) Disk-cache invalidation; album and artist concurrently when the
        #    artist MBID is known, otherwise the album alone.
        if record.artist_mbid:
            await asyncio.gather(
                disk_cache.delete_album(record.musicbrainz_id),
                disk_cache.delete_artist(record.artist_mbid),
                return_exceptions=True,
            )
        else:
            try:
                await disk_cache.delete_album(record.musicbrainz_id)
            except OSError as exc:
                logger.warning(
                    "Failed to delete disk cache album %s during import invalidation: %s",
                    record.musicbrainz_id,
                    exc,
                )
        # 3) Record the now-monitored album in the local library cache;
        #    best-effort, failure is only logged.
        try:
            await library_db.upsert_album({
                "mbid": record.musicbrainz_id,
                "artist_mbid": record.artist_mbid or "",
                "artist_name": record.artist_name or "",
                "title": record.album_title or "",
                "year": record.year,
                "cover_url": record.cover_url or "",
                "monitored": True,
            })
        except Exception as ex:  # noqa: BLE001
            logger.warning("Failed to upsert album into library cache: %s", ex)
        logger.info(
            "Invalidated caches after import: album=%s artist=%s",
            record.musicbrainz_id[:8],
            (record.artist_mbid or "?")[:8],
        )

    return RequestsPageService(
        lidarr_repo=lidarr_repo,
        request_history=request_history,
        library_mbids_fn=lidarr_repo.get_library_mbids,
        on_import_callback=on_import,
    )
|
||||
|
||||
|
||||
@singleton
def get_playlist_service() -> "PlaylistService":
    """Singleton playlist service over the playlist repository and caches."""
    from services.playlist_service import PlaylistService
    from core.config import get_settings

    app_settings = get_settings()
    repository = get_playlist_repository()
    return PlaylistService(
        repo=repository,
        cache_dir=app_settings.cache_dir,
        cache=get_cache(),
    )
|
||||
|
||||
|
||||
@singleton
def get_library_service() -> "LibraryService":
    """Singleton library service aggregating Lidarr, media-server and cache layers."""
    from services.library_service import LibraryService

    lidarr = get_lidarr_repository()
    library = get_library_db()
    covers = get_coverart_repository()
    prefs = get_preferences_service()
    in_memory = get_cache()
    on_disk = get_disk_cache()
    discovery = get_artist_discovery_service()
    audiodb_images = get_audiodb_image_service()
    local_files = get_local_files_service()
    jellyfin_library = get_jellyfin_library_service()
    navidrome_library = get_navidrome_library_service()
    sync_state = get_sync_state_store()
    genres = get_genre_index()
    return LibraryService(
        lidarr,
        library,
        covers,
        prefs,
        in_memory,
        on_disk,
        artist_discovery_service=discovery,
        audiodb_image_service=audiodb_images,
        local_files_service=local_files,
        jellyfin_library_service=jellyfin_library,
        navidrome_library_service=navidrome_library,
        sync_state_store=sync_state,
        genre_index=genres,
    )
|
||||
|
||||
|
||||
@singleton
def get_status_service() -> "StatusService":
    """Singleton status service backed by the Lidarr repository."""
    from services.status_service import StatusService

    return StatusService(get_lidarr_repository())
|
||||
|
||||
|
||||
@singleton
def get_home_service() -> "HomeService":
    """Singleton home-page service aggregating every metadata source."""
    from services.home_service import HomeService
    from core.config import get_settings

    app_settings = get_settings()
    listenbrainz = get_listenbrainz_repository()
    jellyfin = get_jellyfin_repository()
    lidarr = get_lidarr_repository()
    musicbrainz = get_musicbrainz_repository()
    prefs = get_preferences_service()
    in_memory = get_cache()
    lastfm = get_lastfm_repository()
    audiodb_images = get_audiodb_image_service()
    return HomeService(
        listenbrainz_repo=listenbrainz,
        jellyfin_repo=jellyfin,
        lidarr_repo=lidarr,
        musicbrainz_repo=musicbrainz,
        preferences_service=prefs,
        memory_cache=in_memory,
        lastfm_repo=lastfm,
        audiodb_image_service=audiodb_images,
        cache_dir=app_settings.cache_dir,
    )
|
||||
|
||||
|
||||
@singleton
def get_genre_cover_prewarm_service() -> "GenreCoverPrewarmService":
    """Singleton genre-cover prewarm service over the cover-art repository."""
    from services.genre_cover_prewarm_service import GenreCoverPrewarmService

    return GenreCoverPrewarmService(cover_repo=get_coverart_repository())
|
||||
|
||||
|
||||
@singleton
def get_home_charts_service() -> "HomeChartsService":
    """Singleton charts service combining listen data, genres and prewarming."""
    from services.home_charts_service import HomeChartsService

    listenbrainz = get_listenbrainz_repository()
    lidarr = get_lidarr_repository()
    musicbrainz = get_musicbrainz_repository()
    genres = get_genre_index()
    lastfm = get_lastfm_repository()
    prefs = get_preferences_service()
    prewarm = get_genre_cover_prewarm_service()
    return HomeChartsService(
        listenbrainz_repo=listenbrainz,
        lidarr_repo=lidarr,
        musicbrainz_repo=musicbrainz,
        genre_index=genres,
        lastfm_repo=lastfm,
        preferences_service=prefs,
        prewarm_service=prewarm,
    )
|
||||
|
||||
|
||||
@singleton
def get_settings_service() -> "SettingsService":
    """Singleton settings service over preferences and the memory cache."""
    from services.settings_service import SettingsService

    prefs = get_preferences_service()
    memory_cache = get_cache()
    return SettingsService(prefs, memory_cache)
|
||||
|
||||
|
||||
@singleton
def get_artist_discovery_service() -> "ArtistDiscoveryService":
    """Singleton artist-discovery service over listen, metadata and library data."""
    from services.artist_discovery_service import ArtistDiscoveryService

    listenbrainz = get_listenbrainz_repository()
    musicbrainz = get_musicbrainz_repository()
    library = get_library_db()
    lidarr = get_lidarr_repository()
    lastfm = get_lastfm_repository()
    prefs = get_preferences_service()
    in_memory = get_cache()
    return ArtistDiscoveryService(
        listenbrainz_repo=listenbrainz,
        musicbrainz_repo=musicbrainz,
        library_db=library,
        lidarr_repo=lidarr,
        memory_cache=in_memory,
        lastfm_repo=lastfm,
        preferences_service=prefs,
    )
|
||||
|
||||
|
||||
@singleton
def get_artist_enrichment_service() -> "ArtistEnrichmentService":
    """Singleton artist-enrichment service over Last.fm and preferences."""
    from services.artist_enrichment_service import ArtistEnrichmentService

    return ArtistEnrichmentService(
        lastfm_repo=get_lastfm_repository(),
        preferences_service=get_preferences_service(),
    )
|
||||
|
||||
|
||||
@singleton
def get_album_enrichment_service() -> "AlbumEnrichmentService":
    """Singleton album-enrichment service over Last.fm and preferences."""
    from services.album_enrichment_service import AlbumEnrichmentService

    return AlbumEnrichmentService(
        lastfm_repo=get_lastfm_repository(),
        preferences_service=get_preferences_service(),
    )
|
||||
|
||||
|
||||
@singleton
def get_album_discovery_service() -> "AlbumDiscoveryService":
    """Singleton album-discovery service over listen, metadata and library data."""
    from services.album_discovery_service import AlbumDiscoveryService

    listenbrainz = get_listenbrainz_repository()
    musicbrainz = get_musicbrainz_repository()
    library = get_library_db()
    lidarr = get_lidarr_repository()
    return AlbumDiscoveryService(
        listenbrainz_repo=listenbrainz,
        musicbrainz_repo=musicbrainz,
        library_db=library,
        lidarr_repo=lidarr,
    )
|
||||
|
||||
|
||||
@singleton
def get_search_enrichment_service() -> "SearchEnrichmentService":
    """Singleton search-enrichment service over MB, LB, prefs and Last.fm."""
    from services.search_enrichment_service import SearchEnrichmentService

    musicbrainz = get_musicbrainz_repository()
    listenbrainz = get_listenbrainz_repository()
    prefs = get_preferences_service()
    lastfm = get_lastfm_repository()
    return SearchEnrichmentService(musicbrainz, listenbrainz, prefs, lastfm)
|
||||
|
||||
|
||||
@singleton
def get_youtube_service() -> "YouTubeService":
    """Singleton YouTube service over the repository and the local store."""
    from services.youtube_service import YouTubeService

    return YouTubeService(
        youtube_repo=get_youtube_repo(),
        youtube_store=get_youtube_store(),
    )
|
||||
|
||||
|
||||
@singleton
def get_lastfm_auth_service() -> "LastFmAuthService":
    """Singleton Last.fm auth service over the Last.fm repository."""
    from services.lastfm_auth_service import LastFmAuthService

    return LastFmAuthService(lastfm_repo=get_lastfm_repository())
|
||||
|
||||
|
||||
@singleton
def get_scrobble_service() -> "ScrobbleService":
    """Singleton scrobble service over Last.fm, ListenBrainz and preferences."""
    from services.scrobble_service import ScrobbleService

    lastfm = get_lastfm_repository()
    listenbrainz = get_listenbrainz_repository()
    prefs = get_preferences_service()
    return ScrobbleService(lastfm, listenbrainz, prefs)
|
||||
|
||||
|
||||
@singleton
def get_discover_service() -> "DiscoverService":
    """Singleton discover service aggregating every recommendation source."""
    from services.discover_service import DiscoverService

    listenbrainz = get_listenbrainz_repository()
    jellyfin = get_jellyfin_repository()
    lidarr = get_lidarr_repository()
    musicbrainz = get_musicbrainz_repository()
    prefs = get_preferences_service()
    in_memory = get_cache()
    library = get_library_db()
    mbids = get_mbid_store()
    wikidata = get_wikidata_repository()
    lastfm = get_lastfm_repository()
    audiodb_images = get_audiodb_image_service()
    return DiscoverService(
        listenbrainz_repo=listenbrainz,
        jellyfin_repo=jellyfin,
        lidarr_repo=lidarr,
        musicbrainz_repo=musicbrainz,
        preferences_service=prefs,
        memory_cache=in_memory,
        library_db=library,
        mbid_store=mbids,
        wikidata_repo=wikidata,
        lastfm_repo=lastfm,
        audiodb_image_service=audiodb_images,
    )
|
||||
|
||||
|
||||
@singleton
def get_discover_queue_manager() -> "DiscoverQueueManager":
    """Singleton discover-queue manager over the discover service and covers."""
    from services.discover_queue_manager import DiscoverQueueManager

    discover = get_discover_service()
    prefs = get_preferences_service()
    covers = get_coverart_repository()
    return DiscoverQueueManager(discover, prefs, cover_repo=covers)
|
||||
|
||||
|
||||
@singleton
def get_jellyfin_playback_service() -> "JellyfinPlaybackService":
    """Singleton Jellyfin playback service over the Jellyfin repository."""
    from services.jellyfin_playback_service import JellyfinPlaybackService

    return JellyfinPlaybackService(get_jellyfin_repository())
|
||||
|
||||
|
||||
@singleton
def get_local_files_service() -> "LocalFilesService":
    """Singleton local-files service over Lidarr, preferences and the cache."""
    from services.local_files_service import LocalFilesService

    lidarr = get_lidarr_repository()
    prefs = get_preferences_service()
    memory_cache = get_cache()
    return LocalFilesService(lidarr, prefs, memory_cache)
|
||||
|
||||
|
||||
@singleton
def get_jellyfin_library_service() -> "JellyfinLibraryService":
    """Singleton Jellyfin library service over the repository and preferences."""
    from services.jellyfin_library_service import JellyfinLibraryService

    jellyfin = get_jellyfin_repository()
    prefs = get_preferences_service()
    return JellyfinLibraryService(jellyfin, prefs)
|
||||
|
||||
|
||||
@singleton
def get_navidrome_library_service() -> "NavidromeLibraryService":
    """Singleton Navidrome library service over repo, prefs, library and MBIDs."""
    from services.navidrome_library_service import NavidromeLibraryService

    navidrome = get_navidrome_repository()
    prefs = get_preferences_service()
    library = get_library_db()
    mbids = get_mbid_store()
    return NavidromeLibraryService(navidrome, prefs, library, mbids)
|
||||
|
||||
|
||||
@singleton
def get_navidrome_playback_service() -> "NavidromePlaybackService":
    """Singleton Navidrome playback service over the Navidrome repository."""
    from services.navidrome_playback_service import NavidromePlaybackService

    return NavidromePlaybackService(get_navidrome_repository())
|
||||
142
backend/core/dependencies/type_aliases.py
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
"""FastAPI ``Annotated[..., Depends()]`` type aliases for route handlers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi import Depends
|
||||
|
||||
from core.config import Settings, get_settings
|
||||
from infrastructure.cache.memory_cache import CacheInterface
|
||||
from infrastructure.cache.disk_cache import DiskMetadataCache
|
||||
from infrastructure.queue.request_queue import RequestQueue
|
||||
from infrastructure.persistence.request_history import RequestHistoryStore
|
||||
from repositories.lidarr import LidarrRepository
|
||||
from repositories.musicbrainz_repository import MusicBrainzRepository
|
||||
from repositories.wikidata_repository import WikidataRepository
|
||||
from repositories.listenbrainz_repository import ListenBrainzRepository
|
||||
from repositories.jellyfin_repository import JellyfinRepository
|
||||
from repositories.coverart_repository import CoverArtRepository
|
||||
from repositories.youtube import YouTubeRepository
|
||||
from repositories.lastfm_repository import LastFmRepository
|
||||
from repositories.playlist_repository import PlaylistRepository
|
||||
from repositories.navidrome_repository import NavidromeRepository
|
||||
from services.preferences_service import PreferencesService
|
||||
from services.search_service import SearchService
|
||||
from services.search_enrichment_service import SearchEnrichmentService
|
||||
from services.artist_service import ArtistService
|
||||
from services.album_service import AlbumService
|
||||
from services.request_service import RequestService
|
||||
from services.library_service import LibraryService
|
||||
from services.status_service import StatusService
|
||||
from services.cache_service import CacheService
|
||||
from services.home_service import HomeService
|
||||
from services.home_charts_service import HomeChartsService
|
||||
from services.settings_service import SettingsService
|
||||
from services.artist_discovery_service import ArtistDiscoveryService
|
||||
from services.album_discovery_service import AlbumDiscoveryService
|
||||
from services.discover_service import DiscoverService
|
||||
from services.discover_queue_manager import DiscoverQueueManager
|
||||
from services.youtube_service import YouTubeService
|
||||
from services.requests_page_service import RequestsPageService
|
||||
from services.jellyfin_playback_service import JellyfinPlaybackService
|
||||
from services.local_files_service import LocalFilesService
|
||||
from services.jellyfin_library_service import JellyfinLibraryService
|
||||
from services.navidrome_library_service import NavidromeLibraryService
|
||||
from services.navidrome_playback_service import NavidromePlaybackService
|
||||
from services.playlist_service import PlaylistService
|
||||
from services.lastfm_auth_service import LastFmAuthService
|
||||
from services.scrobble_service import ScrobbleService
|
||||
from services.cache_status_service import CacheStatusService
|
||||
|
||||
from .cache_providers import (
|
||||
get_cache,
|
||||
get_disk_cache,
|
||||
get_preferences_service,
|
||||
get_cache_service,
|
||||
get_cache_status_service,
|
||||
)
|
||||
from .repo_providers import (
|
||||
get_lidarr_repository,
|
||||
get_musicbrainz_repository,
|
||||
get_wikidata_repository,
|
||||
get_listenbrainz_repository,
|
||||
get_jellyfin_repository,
|
||||
get_coverart_repository,
|
||||
get_youtube_repo,
|
||||
get_lastfm_repository,
|
||||
get_playlist_repository,
|
||||
get_request_history_store,
|
||||
get_navidrome_repository,
|
||||
)
|
||||
from .service_providers import (
|
||||
get_search_service,
|
||||
get_search_enrichment_service,
|
||||
get_artist_service,
|
||||
get_album_service,
|
||||
get_request_queue,
|
||||
get_request_service,
|
||||
get_requests_page_service,
|
||||
get_playlist_service,
|
||||
get_library_service,
|
||||
get_status_service,
|
||||
get_home_service,
|
||||
get_home_charts_service,
|
||||
get_settings_service,
|
||||
get_artist_discovery_service,
|
||||
get_album_discovery_service,
|
||||
get_discover_service,
|
||||
get_discover_queue_manager,
|
||||
get_youtube_service,
|
||||
get_lastfm_auth_service,
|
||||
get_scrobble_service,
|
||||
get_jellyfin_playback_service,
|
||||
get_local_files_service,
|
||||
get_jellyfin_library_service,
|
||||
get_navidrome_library_service,
|
||||
get_navidrome_playback_service,
|
||||
)
|
||||
|
||||
|
||||
# Route-handler dependency aliases: each wraps a provider in
# Annotated[..., Depends(...)] so handlers can declare e.g. `svc: HomeServiceDep`.

# Core configuration and caching
SettingsDep = Annotated[Settings, Depends(get_settings)]
CacheDep = Annotated[CacheInterface, Depends(get_cache)]
DiskCacheDep = Annotated[DiskMetadataCache, Depends(get_disk_cache)]
PreferencesServiceDep = Annotated[PreferencesService, Depends(get_preferences_service)]
# External data repositories
LidarrRepositoryDep = Annotated[LidarrRepository, Depends(get_lidarr_repository)]
MusicBrainzRepositoryDep = Annotated[MusicBrainzRepository, Depends(get_musicbrainz_repository)]
WikidataRepositoryDep = Annotated[WikidataRepository, Depends(get_wikidata_repository)]
ListenBrainzRepositoryDep = Annotated[ListenBrainzRepository, Depends(get_listenbrainz_repository)]
JellyfinRepositoryDep = Annotated[JellyfinRepository, Depends(get_jellyfin_repository)]
CoverArtRepositoryDep = Annotated[CoverArtRepository, Depends(get_coverart_repository)]
# Domain services
SearchServiceDep = Annotated[SearchService, Depends(get_search_service)]
SearchEnrichmentServiceDep = Annotated[SearchEnrichmentService, Depends(get_search_enrichment_service)]
ArtistServiceDep = Annotated[ArtistService, Depends(get_artist_service)]
AlbumServiceDep = Annotated[AlbumService, Depends(get_album_service)]
RequestQueueDep = Annotated[RequestQueue, Depends(get_request_queue)]
RequestServiceDep = Annotated[RequestService, Depends(get_request_service)]
LibraryServiceDep = Annotated[LibraryService, Depends(get_library_service)]
StatusServiceDep = Annotated[StatusService, Depends(get_status_service)]
CacheServiceDep = Annotated[CacheService, Depends(get_cache_service)]
HomeServiceDep = Annotated[HomeService, Depends(get_home_service)]
HomeChartsServiceDep = Annotated[HomeChartsService, Depends(get_home_charts_service)]
SettingsServiceDep = Annotated[SettingsService, Depends(get_settings_service)]
# Discovery
ArtistDiscoveryServiceDep = Annotated[ArtistDiscoveryService, Depends(get_artist_discovery_service)]
AlbumDiscoveryServiceDep = Annotated[AlbumDiscoveryService, Depends(get_album_discovery_service)]
DiscoverServiceDep = Annotated[DiscoverService, Depends(get_discover_service)]
DiscoverQueueManagerDep = Annotated[DiscoverQueueManager, Depends(get_discover_queue_manager)]
# YouTube, request history and playback
YouTubeRepositoryDep = Annotated[YouTubeRepository, Depends(get_youtube_repo)]
YouTubeServiceDep = Annotated[YouTubeService, Depends(get_youtube_service)]
RequestHistoryStoreDep = Annotated[RequestHistoryStore, Depends(get_request_history_store)]
RequestsPageServiceDep = Annotated[RequestsPageService, Depends(get_requests_page_service)]
JellyfinPlaybackServiceDep = Annotated[JellyfinPlaybackService, Depends(get_jellyfin_playback_service)]
LocalFilesServiceDep = Annotated[LocalFilesService, Depends(get_local_files_service)]
JellyfinLibraryServiceDep = Annotated[JellyfinLibraryService, Depends(get_jellyfin_library_service)]
# Last.fm / scrobbling
LastFmRepositoryDep = Annotated[LastFmRepository, Depends(get_lastfm_repository)]
LastFmAuthServiceDep = Annotated[LastFmAuthService, Depends(get_lastfm_auth_service)]
ScrobbleServiceDep = Annotated[ScrobbleService, Depends(get_scrobble_service)]
# Playlists and Navidrome
PlaylistRepositoryDep = Annotated[PlaylistRepository, Depends(get_playlist_repository)]
PlaylistServiceDep = Annotated[PlaylistService, Depends(get_playlist_service)]
NavidromeRepositoryDep = Annotated[NavidromeRepository, Depends(get_navidrome_repository)]
NavidromeLibraryServiceDep = Annotated[NavidromeLibraryService, Depends(get_navidrome_library_service)]
NavidromePlaybackServiceDep = Annotated[NavidromePlaybackService, Depends(get_navidrome_playback_service)]
CacheStatusServiceDep = Annotated[CacheStatusService, Depends(get_cache_status_service)]
|
||||
95
backend/core/exception_handlers.py
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
import logging
|
||||
from fastapi import Request, HTTPException, status
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
from starlette.exceptions import HTTPException as StarletteHTTPException
|
||||
from starlette.responses import Response
|
||||
|
||||
from core.exceptions import (
|
||||
ResourceNotFoundError,
|
||||
ExternalServiceError,
|
||||
SourceResolutionError,
|
||||
ValidationError,
|
||||
ConfigurationError,
|
||||
ClientDisconnectedError,
|
||||
)
|
||||
from infrastructure.msgspec_fastapi import MsgSpecJSONResponse
|
||||
from infrastructure.resilience.retry import CircuitOpenError
|
||||
from models.error import (
|
||||
error_response,
|
||||
VALIDATION_ERROR,
|
||||
NOT_FOUND,
|
||||
EXTERNAL_SERVICE_UNAVAILABLE,
|
||||
SERVICE_UNAVAILABLE,
|
||||
CONFIGURATION_ERROR,
|
||||
SOURCE_RESOLUTION_ERROR,
|
||||
INTERNAL_ERROR,
|
||||
STATUS_TO_CODE,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def resource_not_found_handler(request: Request, exc: ResourceNotFoundError) -> MsgSpecJSONResponse:
    """Translate a ResourceNotFoundError into a structured 404 error envelope."""
    logger.warning("Resource not found: %s - %s %s", exc, request.method, request.url.path)
    detail = str(exc)
    return error_response(status.HTTP_404_NOT_FOUND, NOT_FOUND, detail)
|
||||
|
||||
|
||||
async def external_service_error_handler(request: Request, exc: ExternalServiceError) -> MsgSpecJSONResponse:
    """Map upstream-service failures to a generic 503; details stay in the log only."""
    logger.error("External service error: %s - %s %s", exc, request.method, request.url.path)
    return error_response(status.HTTP_503_SERVICE_UNAVAILABLE, EXTERNAL_SERVICE_UNAVAILABLE, "External service unavailable")
|
||||
|
||||
|
||||
async def circuit_open_error_handler(request: Request, exc: CircuitOpenError) -> MsgSpecJSONResponse:
    """Map a tripped circuit breaker to 503 with a generic client-facing message."""
    logger.error("Circuit breaker open: %s - %s %s", exc, request.method, request.url.path)
    return error_response(status.HTTP_503_SERVICE_UNAVAILABLE, SERVICE_UNAVAILABLE, "Service temporarily unavailable")
|
||||
|
||||
|
||||
async def validation_error_handler(request: Request, exc: ValidationError) -> MsgSpecJSONResponse:
    """Map a domain ValidationError to 400; the exception text is client-visible."""
    logger.warning("Validation error: %s - %s %s", exc, request.method, request.url.path)
    return error_response(status.HTTP_400_BAD_REQUEST, VALIDATION_ERROR, str(exc))
|
||||
|
||||
|
||||
async def configuration_error_handler(request: Request, exc: ConfigurationError) -> MsgSpecJSONResponse:
    """Map a ConfigurationError to 400; the exception text is client-visible."""
    logger.warning("Configuration error: %s - %s %s", exc, request.method, request.url.path)
    return error_response(status.HTTP_400_BAD_REQUEST, CONFIGURATION_ERROR, str(exc))
|
||||
|
||||
|
||||
async def source_resolution_error_handler(request: Request, exc: SourceResolutionError) -> MsgSpecJSONResponse:
    """Map a SourceResolutionError to 422; the exception text is client-visible."""
    logger.warning("Source resolution error: %s - %s %s", exc, request.method, request.url.path)
    return error_response(status.HTTP_422_UNPROCESSABLE_ENTITY, SOURCE_RESOLUTION_ERROR, str(exc))
|
||||
|
||||
|
||||
async def general_exception_handler(request: Request, exc: Exception) -> MsgSpecJSONResponse:
    """Last-resort handler: log with traceback, return a generic 500 envelope."""
    logger.exception("Unexpected error: %s - %s %s", exc, request.method, request.url.path)
    return error_response(status.HTTP_500_INTERNAL_SERVER_ERROR, INTERNAL_ERROR, "Internal server error")
|
||||
|
||||
|
||||
async def http_exception_handler(request: Request, exc: HTTPException) -> MsgSpecJSONResponse:
    """Render a FastAPI HTTPException using the shared error-envelope format."""
    detail = exc.detail
    # Only string details are forwarded to clients; anything else is replaced.
    message = detail if isinstance(detail, str) else "Request failed"
    code = STATUS_TO_CODE.get(exc.status_code, INTERNAL_ERROR)
    return error_response(exc.status_code, code, message)
|
||||
|
||||
|
||||
async def starlette_http_exception_handler(request: Request, exc: StarletteHTTPException) -> MsgSpecJSONResponse:
    """Render a Starlette HTTPException (e.g. routing 404s) in the shared envelope format."""
    detail = exc.detail
    # Only string details are forwarded to clients; anything else is replaced.
    message = detail if isinstance(detail, str) else "Request failed"
    code = STATUS_TO_CODE.get(exc.status_code, INTERNAL_ERROR)
    return error_response(exc.status_code, code, message)
|
||||
|
||||
|
||||
async def request_validation_error_handler(request: Request, exc: RequestValidationError) -> MsgSpecJSONResponse:
    """Return a 422 envelope with per-field details for request validation failures."""
    logger.warning("Request validation error: %s %s", request.method, request.url.path)
    # "ctx" is stripped from each error entry — presumably because it can carry
    # non-JSON-serializable values; TODO confirm.
    clean_errors = []
    for err in exc.errors():
        clean_errors.append({key: value for key, value in err.items() if key != "ctx"})
    return error_response(
        status.HTTP_422_UNPROCESSABLE_ENTITY,
        VALIDATION_ERROR,
        "Validation failed",
        details=clean_errors,
    )
|
||||
|
||||
|
||||
async def client_disconnected_handler(request: Request, exc: ClientDisconnectedError) -> Response:
    """Client went away mid-request: log at debug and reply with an empty 204."""
    logger.debug("Client disconnected: %s %s", request.method, request.url.path)
    return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||
83
backend/core/exceptions.py
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
from typing import Any
|
||||
|
||||
|
||||
class MusicseerrException(Exception):
    """Base application exception: a message plus optional structured details."""

    def __init__(self, message: str, details: Any = None):
        self.message = message
        self.details = details
        super().__init__(message)

    def __str__(self) -> str:
        # Truthy details are appended after the message; falsy ones are omitted.
        return f"{self.message}: {self.details}" if self.details else self.message
|
||||
|
||||
|
||||
class ExternalServiceError(MusicseerrException):
    """Raised when a call to an external/upstream service fails."""
    pass
|
||||
|
||||
|
||||
class RateLimitedError(ExternalServiceError):
    """External service rejected the call due to rate limiting.

    Attributes:
        retry_after_seconds: optional wait suggested before retrying.
    """

    def __init__(
        self,
        message: str,
        details: Any = None,
        retry_after_seconds: float | None = None,
    ):
        super().__init__(message, details)
        # Kept for callers implementing backoff.
        self.retry_after_seconds = retry_after_seconds
|
||||
|
||||
|
||||
class ResourceNotFoundError(MusicseerrException):
    """Requested entity could not be found."""
    pass
|
||||
|
||||
|
||||
class ValidationError(MusicseerrException):
    """Raised when input data fails domain validation."""
    pass
|
||||
|
||||
|
||||
class PlaylistNotFoundError(ResourceNotFoundError):
    """Raised when a playlist lookup finds nothing."""
    pass
|
||||
|
||||
|
||||
class InvalidPlaylistDataError(ValidationError):
    """Raised when playlist data fails validation."""
    pass
|
||||
|
||||
|
||||
class SourceResolutionError(ValidationError):
    """Raised when a source reference cannot be resolved to a concrete item."""
    pass
|
||||
|
||||
|
||||
class ConfigurationError(MusicseerrException):
    """Raised for invalid or missing application configuration."""
    pass
|
||||
|
||||
|
||||
class CacheError(MusicseerrException):
    """Raised when a cache operation fails."""
    pass
|
||||
|
||||
|
||||
class PlaybackNotAllowedError(ExternalServiceError):
    """Raised when the backing service does not permit playback."""
    pass
|
||||
|
||||
|
||||
class TokenNotAuthorizedError(ExternalServiceError):
    """Raised when an external-service token is present but not authorized."""
    pass
|
||||
|
||||
|
||||
class NavidromeApiError(ExternalServiceError):
    """Navidrome (Subsonic) API call failed.

    Attributes:
        code: optional numeric error code returned by the API.
    """

    def __init__(
        self,
        message: str,
        details: Any = None,
        code: int | None = None,
    ):
        super().__init__(message, details)
        # Preserved so callers can branch on specific API error codes.
        self.code = code
|
||||
|
||||
|
||||
class NavidromeAuthError(NavidromeApiError):
    """Navidrome authentication/authorization failure."""
    pass
|
||||
|
||||
|
||||
class ClientDisconnectedError(MusicseerrException):
    """Raised when the HTTP client disconnects before the response is complete."""
    pass
|
||||
74
backend/core/task_registry.py
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
import asyncio
|
||||
import logging
|
||||
import threading
|
||||
from typing import ClassVar
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TaskRegistry:
    """Process-wide registry of named background asyncio tasks.

    A lazily-created singleton; all mutations of the internal mapping are
    guarded by a lock.  Entries are removed automatically when their task
    completes (via a done-callback set in :meth:`register`).
    """

    _instance: ClassVar["TaskRegistry | None"] = None
    _instance_lock: ClassVar[threading.Lock] = threading.Lock()

    def __init__(self) -> None:
        # name -> task; auto-pruned when a task finishes.
        self._tasks: dict[str, asyncio.Task] = {}
        self._lock = threading.Lock()

    @classmethod
    def get_instance(cls) -> "TaskRegistry":
        """Return the shared registry (double-checked locking singleton)."""
        if cls._instance is None:
            with cls._instance_lock:
                if cls._instance is None:
                    cls._instance = cls()
        return cls._instance

    def register(self, name: str, task: asyncio.Task) -> asyncio.Task:
        """Track *task* under *name* and return it.

        Raises:
            RuntimeError: if a task with the same name is still running.
        """
        with self._lock:
            existing = self._tasks.get(name)
            if existing is not None and not existing.done():
                raise RuntimeError(f"Task '{name}' is already running")
            self._tasks[name] = task
            # Prune the entry automatically once the task completes.
            task.add_done_callback(lambda _t, _name=name: self._auto_unregister(_name, _t))
        return task

    def _auto_unregister(self, name: str, task: asyncio.Task) -> None:
        """Remove *name* only if it still maps to this exact task."""
        with self._lock:
            if self._tasks.get(name) is task:
                del self._tasks[name]

    def unregister(self, name: str) -> None:
        """Forget *name* without cancelling its task; no-op if absent."""
        with self._lock:
            self._tasks.pop(name, None)

    async def cancel_all(self, grace_period: float = 10.0) -> None:
        """Cancel every registered task and wait up to *grace_period* seconds.

        Tasks that do not finish in time are logged but not awaited further.
        """
        with self._lock:
            tasks = dict(self._tasks)
            self._tasks.clear()

        if not tasks:
            return

        # Fixed: only the task objects are needed here (was iterating .items()).
        for task in tasks.values():
            if not task.done():
                task.cancel()

        # Fixed: the completed set was never used; keep only the pending set.
        _, pending = await asyncio.wait(
            tasks.values(), timeout=grace_period, return_when=asyncio.ALL_COMPLETED
        )

        for name, task in tasks.items():
            if task in pending:
                logger.warning("Task '%s' did not finish within grace period", name)

    def get_all(self) -> dict[str, asyncio.Task]:
        """Return a snapshot copy of the current name -> task mapping."""
        with self._lock:
            return dict(self._tasks)

    def is_running(self, name: str) -> bool:
        """True if *name* is registered and its task has not completed."""
        with self._lock:
            task = self._tasks.get(name)
            return task is not None and not task.done()

    def reset(self) -> None:
        """Clear the registry without cancelling any tasks."""
        with self._lock:
            self._tasks.clear()
|
||||
728
backend/core/tasks.py
Normal file
|
|
@ -0,0 +1,728 @@
|
|||
import asyncio
|
||||
import logging
|
||||
from time import time
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
from infrastructure.cache.memory_cache import CacheInterface
|
||||
from infrastructure.cache.disk_cache import DiskMetadataCache
|
||||
from infrastructure.serialization import clone_with_updates
|
||||
from infrastructure.validators import is_unknown_mbid
|
||||
from services.library_service import LibraryService
|
||||
from services.preferences_service import PreferencesService
|
||||
from core.task_registry import TaskRegistry
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from services.album_service import AlbumService
|
||||
from services.audiodb_image_service import AudioDBImageService
|
||||
from services.home_service import HomeService
|
||||
from services.discover_service import DiscoverService
|
||||
from services.discover_queue_manager import DiscoverQueueManager
|
||||
from services.artist_discovery_service import ArtistDiscoveryService
|
||||
from services.library_precache_service import LibraryPrecacheService
|
||||
from infrastructure.persistence import LibraryDB
|
||||
from infrastructure.persistence.request_history import RequestHistoryStore
|
||||
from infrastructure.persistence.mbid_store import MBIDStore
|
||||
from infrastructure.persistence.youtube_store import YouTubeStore
|
||||
from services.requests_page_service import RequestsPageService
|
||||
from repositories.coverart_disk_cache import CoverDiskCache
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def cleanup_cache_periodically(cache: CacheInterface, interval: int = 300) -> None:
    """Periodically purge expired entries from the in-memory cache.

    Sleeps *interval* seconds between sweeps; exits cleanly on cancellation
    and logs (but survives) any other error.
    """
    running = True
    while running:
        try:
            await asyncio.sleep(interval)
            await cache.cleanup_expired()
        except asyncio.CancelledError:
            running = False
        except Exception as exc:
            logger.error("Cache cleanup task failed: %s", exc, exc_info=True)
|
||||
|
||||
|
||||
def start_cache_cleanup_task(cache: CacheInterface, interval: int = 300) -> asyncio.Task:
    """Spawn and register the periodic in-memory cache cleanup task."""
    cleanup = cleanup_cache_periodically(cache, interval=interval)
    # TaskRegistry.register() returns the task it was handed.
    return TaskRegistry.get_instance().register("cache-cleanup", asyncio.create_task(cleanup))
|
||||
|
||||
|
||||
async def cleanup_disk_cache_periodically(
    disk_cache: DiskMetadataCache,
    interval: int = 600,
    cover_disk_cache: Optional["CoverDiskCache"] = None,
) -> None:
    """Periodically expire and size-bound the on-disk caches.

    Runs the recent-items and cover cleanup steps on *disk_cache* every
    *interval* seconds, plus the dedicated cover disk cache when provided.
    Exits on cancellation; other errors are logged and the loop continues.
    """
    while True:
        try:
            await asyncio.sleep(interval)
            logger.debug("Running disk cache cleanup...")
            # Expiry before size enforcement, for both recent items and covers.
            await disk_cache.cleanup_expired_recent()
            await disk_cache.enforce_recent_size_limits()
            await disk_cache.cleanup_expired_covers()
            await disk_cache.enforce_cover_size_limits()
            if cover_disk_cache:
                await cover_disk_cache.enforce_size_limit(force=True)
                # cleanup_expired is synchronous; run it off the event loop.
                expired = await asyncio.to_thread(cover_disk_cache.cleanup_expired)
                if expired:
                    logger.info("Cover expiry sweep removed %d expired covers", expired)
            logger.debug("Disk cache cleanup complete")
        except asyncio.CancelledError:
            break
        except Exception as e:
            logger.error("Disk cache cleanup task failed: %s", e, exc_info=True)
|
||||
|
||||
|
||||
def start_disk_cache_cleanup_task(
    disk_cache: DiskMetadataCache,
    interval: int = 600,
    cover_disk_cache: Optional["CoverDiskCache"] = None,
) -> asyncio.Task:
    """Spawn and register the periodic disk-cache cleanup task."""
    cleanup = cleanup_disk_cache_periodically(
        disk_cache, interval=interval, cover_disk_cache=cover_disk_cache
    )
    # TaskRegistry.register() returns the task it was handed.
    return TaskRegistry.get_instance().register(
        "disk-cache-cleanup", asyncio.create_task(cleanup)
    )
|
||||
|
||||
|
||||
async def sync_library_periodically(
    library_service: LibraryService,
    preferences_service: PreferencesService
) -> None:
    """Background loop that auto-syncs the Lidarr library on the configured cadence.

    The sync frequency is re-read from preferences on every iteration, so
    settings changes take effect without a restart.  After each attempt the
    last-sync timestamp and success flag are persisted.  Runs until cancelled.
    """
    while True:
        try:
            # NOTE(review): reaches into LibraryService's private _lidarr_repo;
            # a public is_configured() pass-through would be cleaner.
            if not library_service._lidarr_repo.is_configured():
                await asyncio.sleep(3600)
                continue

            lidarr_settings = preferences_service.get_lidarr_settings()
            sync_freq = lidarr_settings.sync_frequency

            # Map the frequency setting to a sleep interval ("manual" idles;
            # unknown values fall back to 10 minutes).
            if sync_freq == "manual":
                await asyncio.sleep(3600)
                continue
            elif sync_freq == "5min":
                interval = 300
            elif sync_freq == "10min":
                interval = 600
            elif sync_freq == "30min":
                interval = 1800
            elif sync_freq == "1hr":
                interval = 3600
            else:
                interval = 600

            await asyncio.sleep(interval)

            logger.info(f"Auto-syncing library (frequency: {sync_freq})")
            sync_success = False
            try:
                result = await library_service.sync_library()
                if result.status == "skipped":
                    logger.info("Auto-sync skipped - sync already in progress")
                    # NOTE(review): this continue still runs the finally block
                    # below, recording last_sync_success=False for a sync that
                    # never ran — confirm this is intended.
                    continue
                sync_success = True
                logger.info("Auto-sync completed successfully")

            except Exception as e:
                logger.error("Auto-sync library call failed: %s", e, exc_info=True)
                sync_success = False

            finally:
                # Persist the outcome so last-sync status can be displayed.
                lidarr_settings = preferences_service.get_lidarr_settings()
                updated_settings = clone_with_updates(lidarr_settings, {
                    'last_sync': int(time()),
                    'last_sync_success': sync_success
                })
                preferences_service.save_lidarr_settings(updated_settings)

        except asyncio.CancelledError:
            logger.info("Library sync task cancelled")
            break
        except Exception as e:
            logger.error("Library sync task failed: %s", e, exc_info=True)
            # Back off briefly after unexpected failures before retrying.
            await asyncio.sleep(60)
|
||||
|
||||
|
||||
def start_library_sync_task(
    library_service: LibraryService,
    preferences_service: PreferencesService
) -> asyncio.Task:
    """Spawn and register the background library auto-sync loop."""
    sync_loop = sync_library_periodically(library_service, preferences_service)
    # TaskRegistry.register() returns the task it was handed.
    return TaskRegistry.get_instance().register("library-sync", asyncio.create_task(sync_loop))
|
||||
|
||||
|
||||
async def warm_library_cache(
    library_service: LibraryService,
    album_service: 'AlbumService',
    library_db: 'LibraryDB'
) -> None:
    """One-shot startup warmer: pre-fetch album metadata for library albums.

    Fetches full album info for up to the first 30 library albums that are
    not already cached, pausing periodically to throttle upstream calls.
    NOTE(review): ``library_service`` is not used in this body — confirm
    whether it can be dropped from the signature.
    """
    try:
        logger.info("Warming cache with recently-added library albums...")

        # Give the app a moment to finish starting up first.
        await asyncio.sleep(5)

        albums_data = await library_db.get_albums()

        if not albums_data:
            logger.info("No library albums to warm cache with")
            return

        # Only warm a bounded prefix; the rest is fetched lazily on demand.
        max_warm = 30
        albums_to_warm = albums_data[:max_warm]

        logger.info(f"Warming cache with {len(albums_to_warm)} of {len(albums_data)} library albums (first {max_warm})")

        warmed = 0
        for i, album_data in enumerate(albums_to_warm):
            mbid = album_data.get('mbid')
            if mbid and not is_unknown_mbid(mbid):
                try:
                    if not await album_service.is_album_cached(mbid):
                        await album_service.get_album_info(mbid)
                        warmed += 1

                    # Throttle: brief pause every 5 albums.
                    if i % 5 == 0:
                        await asyncio.sleep(1)

                except Exception as e:
                    logger.error(
                        "Library cache warm item failed album=%s mbid=%s error=%s",
                        album_data.get('title'),
                        mbid,
                        e,
                        exc_info=True,
                    )
                    continue

        logger.info(f"Cache warming complete: {warmed} albums fetched, {len(albums_to_warm) - warmed} already cached")

    except Exception as e:
        logger.error("Library cache warming failed: %s", e, exc_info=True)
|
||||
|
||||
|
||||
async def warm_home_cache_periodically(
    home_service: 'HomeService',
    interval: int = 240
) -> None:
    """Periodically refresh the home-page cache for both chart sources.

    Warms "listenbrainz" and "lastfm" independently so one source failing
    does not block the other.  Runs until cancelled.
    """
    # Short startup grace period before the first warm.
    await asyncio.sleep(10)

    while True:
        try:
            for src in ("listenbrainz", "lastfm"):
                try:
                    logger.debug("Warming home page cache (source=%s)...", src)
                    await home_service.get_home_data(source=src)
                    logger.debug("Home cache warming complete (source=%s)", src)
                except Exception as e:
                    logger.error(
                        "Home cache warming failed (source=%s): %s",
                        src,
                        e,
                        exc_info=True,
                    )
        except asyncio.CancelledError:
            logger.info("Home cache warming task cancelled")
            break

        # NOTE(review): cancellation during this sleep is not caught above, so
        # the task then ends cancelled without the log line — confirm intended.
        await asyncio.sleep(interval)
|
||||
|
||||
|
||||
def start_home_cache_warming_task(home_service: 'HomeService') -> asyncio.Task:
    """Spawn and register the periodic home-page cache warmer."""
    warmer = asyncio.create_task(warm_home_cache_periodically(home_service))
    # TaskRegistry.register() returns the task it was handed.
    return TaskRegistry.get_instance().register("home-cache-warming", warmer)
|
||||
|
||||
|
||||
async def warm_genre_cache_periodically(
    home_service: 'HomeService',
    interval: int = 21600,
) -> None:
    """Periodically rebuild per-genre home sections from cached home data.

    Depends on home data already being cached (see the home cache warmer);
    while no source can be warmed it retries every minute, otherwise it
    sleeps for the genre-section TTL (falling back to *interval*).
    """
    from api.v1.schemas.home import HomeGenre

    RETRY_INTERVAL = 60

    await asyncio.sleep(30)

    while True:
        warmed = 0
        try:
            for src in ("listenbrainz", "lastfm"):
                try:
                    cached_home = await home_service.get_cached_home_data(source=src)
                    if not cached_home or not cached_home.genre_list or not cached_home.genre_list.items:
                        logger.debug("No cached home data for genre warming (source=%s), skipping", src)
                        continue
                    # Only the top 20 genres are pre-built.
                    genre_names = [
                        g.name for g in cached_home.genre_list.items[:20]
                        if isinstance(g, HomeGenre)
                    ]
                    if genre_names:
                        logger.debug("Warming genre cache (source=%s, %d genres)...", src, len(genre_names))
                        # NOTE(review): uses HomeService's private _genre helper.
                        await home_service._genre.build_and_cache_genre_section(src, genre_names)
                        logger.debug("Genre cache warming complete (source=%s)", src)
                        warmed += 1
                except Exception as e:
                    logger.error(
                        "Genre cache warming failed (source=%s): %s",
                        src,
                        e,
                        exc_info=True,
                    )
        except asyncio.CancelledError:
            logger.info("Genre cache warming task cancelled")
            break

        if warmed == 0:
            # Nothing warmed yet (home cache likely still empty) — retry soon.
            await asyncio.sleep(RETRY_INTERVAL)
        else:
            try:
                ttl = home_service._genre._get_genre_section_ttl()
            except Exception:  # noqa: BLE001
                ttl = interval
            await asyncio.sleep(ttl)
|
||||
|
||||
|
||||
def start_genre_cache_warming_task(home_service: 'HomeService') -> asyncio.Task:
    """Spawn and register the periodic genre-section cache warmer."""
    warmer = asyncio.create_task(warm_genre_cache_periodically(home_service))
    # TaskRegistry.register() returns the task it was handed.
    return TaskRegistry.get_instance().register("genre-cache-warming", warmer)
|
||||
|
||||
|
||||
async def warm_discover_cache_periodically(
    discover_service: 'DiscoverService',
    interval: int = 43200,
    queue_manager: 'DiscoverQueueManager | None' = None,
    preferences_service: 'PreferencesService | None' = None,
) -> None:
    """Periodically warm the discover cache and optionally pre-build the queue.

    Queue pre-building happens only when both *queue_manager* and
    *preferences_service* are supplied and the relevant advanced settings
    are enabled.  Runs until cancelled.
    """
    await asyncio.sleep(30)

    while True:
        try:
            for src in ("listenbrainz", "lastfm"):
                try:
                    logger.info("Warming discover cache (source=%s)...", src)
                    await discover_service.warm_cache(source=src)
                    logger.info("Discover cache warming complete (source=%s)", src)
                except Exception as e:
                    logger.error(
                        "Discover cache warming failed (source=%s): %s",
                        src,
                        e,
                        exc_info=True,
                    )

            if queue_manager and preferences_service:
                try:
                    adv = preferences_service.get_advanced_settings()
                    if adv.discover_queue_auto_generate and adv.discover_queue_warm_cycle_build:
                        # None lets the service pick the default source.
                        resolved = discover_service.resolve_source(None)
                        logger.info("Pre-building discover queue (source=%s)...", resolved)
                        await queue_manager.start_build(resolved)
                except Exception as e:
                    logger.error("Discover queue pre-build failed: %s", e, exc_info=True)

        except asyncio.CancelledError:
            logger.info("Discover cache warming task cancelled")
            break

        await asyncio.sleep(interval)
|
||||
|
||||
|
||||
def start_discover_cache_warming_task(
    discover_service: 'DiscoverService',
    queue_manager: 'DiscoverQueueManager | None' = None,
    preferences_service: 'PreferencesService | None' = None,
) -> asyncio.Task:
    """Spawn and register the periodic discover-cache warmer."""
    warmer = warm_discover_cache_periodically(
        discover_service,
        queue_manager=queue_manager,
        preferences_service=preferences_service,
    )
    # TaskRegistry.register() returns the task it was handed.
    return TaskRegistry.get_instance().register(
        "discover-cache-warming", asyncio.create_task(warmer)
    )
|
||||
|
||||
|
||||
async def warm_jellyfin_mbid_index(
    jellyfin_repo: 'JellyfinRepository',
    initial_delay: float = 8.0,
) -> None:
    """One-shot startup warmer: build the Jellyfin MBID index.

    Waits *initial_delay* seconds (default preserves the previous hard-coded
    8 s startup grace period) before building; failures are logged, never
    raised.  Fixed: removed an unused local import
    (``JellyfinRepository as _JR``) that served no purpose.
    """
    await asyncio.sleep(initial_delay)
    try:
        index = await jellyfin_repo.build_mbid_index()
        logger.info("Jellyfin MBID index warmed with %d entries", len(index))
    except Exception as e:
        logger.error("Jellyfin MBID index warming failed: %s", e, exc_info=True)
|
||||
|
||||
|
||||
async def warm_navidrome_mbid_cache() -> None:
    """Warm the Navidrome MBID cache 12 s after startup, then every 4 hours.

    Errors are logged and the loop continues; cancellation propagates out of
    the sleep and ends the task.
    """
    # Deferred import — presumably to avoid an import cycle with the
    # dependencies module; TODO confirm.
    from core.dependencies import get_navidrome_library_service

    await asyncio.sleep(12)
    while True:
        try:
            # Service is resolved inside the loop on every cycle.
            service = get_navidrome_library_service()
            await service.warm_mbid_cache()
        except Exception as e:
            logger.error("Navidrome MBID cache warming failed: %s", e, exc_info=True)
        await asyncio.sleep(14400)  # Re-warm every 4 hours
|
||||
|
||||
|
||||
async def warm_artist_discovery_cache_periodically(
    artist_discovery_service: 'ArtistDiscoveryService',
    library_db: 'LibraryDB',
    interval: int = 14400,
    delay: float = 0.5,
) -> None:
    """Periodically refresh discovery data for all library artists with usable MBIDs.

    *delay* is forwarded to precache_artist_discovery as the inter-artist
    pause, throttling upstream calls.  Runs until cancelled.
    """
    await asyncio.sleep(60)

    while True:
        try:
            artists = await library_db.get_artists()
            if not artists:
                logger.debug("No library artists for discovery cache warming")
                await asyncio.sleep(interval)
                continue

            # Keep only artists with a real MusicBrainz ID.
            mbids = [
                a['mbid'] for a in artists
                if a.get('mbid') and not is_unknown_mbid(a['mbid'])
            ]
            if not mbids:
                await asyncio.sleep(interval)
                continue

            logger.info(
                "Warming artist discovery cache for %d library artists...", len(mbids)
            )
            cached = await artist_discovery_service.precache_artist_discovery(
                mbids, delay=delay
            )
            logger.info(
                "Artist discovery cache warming complete: %d/%d artists refreshed",
                cached, len(mbids),
            )
        except asyncio.CancelledError:
            logger.info("Artist discovery cache warming task cancelled")
            break
        except Exception as e:
            logger.error("Artist discovery cache warming failed: %s", e, exc_info=True)

        await asyncio.sleep(interval)
|
||||
|
||||
|
||||
def start_artist_discovery_cache_warming_task(
    artist_discovery_service: 'ArtistDiscoveryService',
    library_db: 'LibraryDB',
    interval: int = 14400,
    delay: float = 0.5,
) -> asyncio.Task:
    """Spawn and register the periodic artist-discovery cache warmer."""
    warmer = warm_artist_discovery_cache_periodically(
        artist_discovery_service,
        library_db,
        interval=interval,
        delay=delay,
    )
    # TaskRegistry.register() returns the task it was handed.
    return TaskRegistry.get_instance().register(
        "artist-discovery-warming", asyncio.create_task(warmer)
    )
|
||||
|
||||
|
||||
# Tuning knobs for the periodic AudioDB sweep (warm_audiodb_cache_periodically).
_AUDIODB_SWEEP_INTERVAL = 86400  # seconds between sweeps (24 h)
_AUDIODB_SWEEP_INITIAL_DELAY = 120  # startup grace period before the first sweep (s)
_AUDIODB_SWEEP_MAX_ITEMS = 5000  # cap on items considered per sweep
_AUDIODB_SWEEP_INTER_ITEM_DELAY = 2.0  # pause between items (s) — presumably API-politeness
_AUDIODB_SWEEP_CURSOR_PERSIST_INTERVAL = 50  # persist resume cursor every N items (presumably)
_AUDIODB_SWEEP_LOG_INTERVAL = 100  # progress log every N items (presumably)
|
||||
|
||||
|
||||
async def warm_audiodb_cache_periodically(
    audiodb_image_service: 'AudioDBImageService',
    library_db: 'LibraryDB',
    preferences_service: 'PreferencesService',
    precache_service: 'LibraryPrecacheService | None' = None,
) -> None:
    """Daily background sweep that refreshes AudioDB image caches.

    Walks all library artists/albums (sorted by MBID), refreshes any item
    whose AudioDB image cache is missing, and optionally downloads the image
    bytes via ``precache_service``. Progress is resumable: the last processed
    MBID is persisted as ``audiodb_sweep_cursor`` so an interrupted sweep
    continues where it left off. Runs until the task is cancelled.
    """
    if precache_service is None:
        logger.warning("AudioDB sweep: precache_service not available, byte downloads disabled")
    await asyncio.sleep(_AUDIODB_SWEEP_INITIAL_DELAY)

    while True:
        try:
            # Sleep first: the initial delay above already spaced out startup.
            await asyncio.sleep(_AUDIODB_SWEEP_INTERVAL)

            settings = preferences_service.get_advanced_settings()
            if not settings.audiodb_enabled:
                logger.debug("AudioDB sweep skipped (audiodb_enabled=false)")
                continue

            artists = await library_db.get_artists()
            albums = await library_db.get_albums()
            if not artists and not albums:
                logger.debug("AudioDB sweep: no library items")
                continue

            cursor = preferences_service.get_setting('audiodb_sweep_cursor')
            # (entity_type, mbid, raw library record) triples.
            all_items: list[tuple[str, str, dict]] = []

            for a in (artists or []):
                mbid = a.get('mbid')
                if mbid and not is_unknown_mbid(mbid):
                    all_items.append(("artist", mbid, a))
            for a in (albums or []):
                # Albums may arrive as dicts or model objects — handle both.
                mbid = a.get('mbid') if isinstance(a, dict) else getattr(a, 'musicbrainz_id', None)
                if mbid and not is_unknown_mbid(mbid):
                    all_items.append(("album", mbid, a))

            # Stable MBID ordering makes the persisted cursor meaningful.
            all_items.sort(key=lambda x: x[1])

            if cursor:
                # Resume strictly after the cursor; if everything is <= cursor
                # the sweep wraps around and starts from the beginning.
                start_idx = 0
                for i, (_, mbid, _) in enumerate(all_items):
                    if mbid > cursor:
                        start_idx = i
                        break
            else:
                start_idx = 0
                cursor = None
            all_items = all_items[start_idx:]

            # Only items with no cached AudioDB entry are refreshed, capped
            # per cycle to bound sweep duration.
            items_needing_refresh: list[tuple[str, str, dict]] = []
            for entity_type, mbid, data in all_items:
                if len(items_needing_refresh) >= _AUDIODB_SWEEP_MAX_ITEMS:
                    break
                if entity_type == "artist":
                    cached = await audiodb_image_service.get_cached_artist_images(mbid)
                else:
                    cached = await audiodb_image_service.get_cached_album_images(mbid)
                if cached is None:
                    items_needing_refresh.append((entity_type, mbid, data))

            if not items_needing_refresh:
                # Nothing to do — clear the cursor and record completion time.
                preferences_service.save_setting('audiodb_sweep_cursor', None)
                preferences_service.save_setting('audiodb_sweep_last_completed', time())
                logger.info("AudioDB sweep complete: all items up to date")
                continue

            logger.info(
                "audiodb.sweep action=start items=%d cursor=%s",
                len(items_needing_refresh), cursor[:8] if cursor else 'start',
            )

            processed = 0
            bytes_ok = 0
            bytes_fail = 0
            for entity_type, mbid, data in items_needing_refresh:
                # Re-check the toggle each item so the sweep stops promptly
                # when AudioDB is disabled mid-run.
                if not preferences_service.get_advanced_settings().audiodb_enabled:
                    logger.info("AudioDB disabled during sweep, stopping")
                    break

                try:
                    if entity_type == "artist":
                        name = data.get('name') if isinstance(data, dict) else None
                        result = await audiodb_image_service.fetch_and_cache_artist_images(
                            mbid, name, is_monitored=True,
                        )
                        # Download actual image bytes only for positive results.
                        if result and not result.is_negative and result.thumb_url and precache_service:
                            if await precache_service._download_audiodb_bytes(result.thumb_url, "artist", mbid):
                                bytes_ok += 1
                            else:
                                bytes_fail += 1
                    else:
                        artist_name = data.get('artist_name') if isinstance(data, dict) else getattr(data, 'artist_name', None)
                        album_name = data.get('title') if isinstance(data, dict) else getattr(data, 'title', None)
                        result = await audiodb_image_service.fetch_and_cache_album_images(
                            mbid, artist_name=artist_name,
                            album_name=album_name, is_monitored=True,
                        )
                        if result and not result.is_negative and result.album_thumb_url and precache_service:
                            if await precache_service._download_audiodb_bytes(result.album_thumb_url, "album", mbid):
                                bytes_ok += 1
                            else:
                                bytes_fail += 1
                except Exception as e:
                    # One bad item must not abort the whole sweep.
                    logger.error(
                        "audiodb.sweep action=item_error entity_type=%s mbid=%s error=%s",
                        entity_type,
                        mbid[:8],
                        e,
                        exc_info=True,
                    )

                processed += 1
                # Persist the cursor periodically so a crash loses at most
                # _AUDIODB_SWEEP_CURSOR_PERSIST_INTERVAL items of progress.
                if processed % _AUDIODB_SWEEP_CURSOR_PERSIST_INTERVAL == 0:
                    preferences_service.save_setting('audiodb_sweep_cursor', mbid)

                if processed % _AUDIODB_SWEEP_LOG_INTERVAL == 0:
                    logger.info(
                        "audiodb.sweep processed=%d total=%d cursor=%s bytes_ok=%d bytes_fail=%d remaining=%d",
                        processed, len(items_needing_refresh), mbid[:8],
                        bytes_ok, bytes_fail, len(items_needing_refresh) - processed,
                    )

                await asyncio.sleep(_AUDIODB_SWEEP_INTER_ITEM_DELAY)

            if processed >= len(items_needing_refresh):
                # Full pass done — reset the cursor for the next cycle.
                preferences_service.save_setting('audiodb_sweep_cursor', None)
                preferences_service.save_setting('audiodb_sweep_last_completed', time())
                logger.info(
                    "audiodb.sweep action=complete refreshed=%d bytes_ok=%d bytes_fail=%d",
                    processed, bytes_ok, bytes_fail,
                )
            else:
                # Interrupted (AudioDB disabled) — save where we stopped.
                preferences_service.save_setting('audiodb_sweep_cursor', mbid)
                logger.info(
                    "audiodb.sweep action=interrupted processed=%d total=%d bytes_ok=%d bytes_fail=%d",
                    processed, len(items_needing_refresh), bytes_ok, bytes_fail,
                )

        except asyncio.CancelledError:
            logger.info("AudioDB sweep task cancelled")
            break
        except Exception as e:
            logger.error("AudioDB sweep cycle failed: %s", e, exc_info=True)
|
||||
|
||||
|
||||
def start_audiodb_sweep_task(
    audiodb_image_service: 'AudioDBImageService',
    library_db: 'LibraryDB',
    preferences_service: 'PreferencesService',
    precache_service: 'LibraryPrecacheService | None' = None,
) -> asyncio.Task:
    """Spawn and register the daily AudioDB image sweep task."""
    coro = warm_audiodb_cache_periodically(
        audiodb_image_service,
        library_db,
        preferences_service,
        precache_service=precache_service,
    )
    task = asyncio.create_task(coro)
    TaskRegistry.get_instance().register("audiodb-sweep", task)
    return task
|
||||
|
||||
|
||||
# How often request statuses are reconciled (seconds).
_REQUEST_SYNC_INTERVAL = 60
# Startup grace period before the first sync (seconds).
_REQUEST_SYNC_INITIAL_DELAY = 15
|
||||
|
||||
|
||||
async def sync_request_statuses_periodically(
    requests_page_service: 'RequestsPageService',
    interval: int = _REQUEST_SYNC_INTERVAL,
) -> None:
    """Reconcile request statuses on a fixed interval until cancelled.

    Failures are logged and the loop keeps running; cancellation stops it.
    """
    await asyncio.sleep(_REQUEST_SYNC_INITIAL_DELAY)

    while True:
        try:
            await requests_page_service.sync_request_statuses()
        except asyncio.CancelledError:
            logger.info("Request status sync task cancelled")
            return
        except Exception as e:
            logger.error("Periodic request status sync failed: %s", e, exc_info=True)

        await asyncio.sleep(interval)
|
||||
|
||||
|
||||
def start_request_status_sync_task(
    requests_page_service: 'RequestsPageService',
) -> asyncio.Task:
    """Spawn and register the periodic request-status sync task."""
    coro = sync_request_statuses_periodically(requests_page_service)
    task = asyncio.create_task(coro)
    TaskRegistry.get_instance().register("request-status-sync", task)
    return task
|
||||
|
||||
|
||||
# --- Orphan cover demotion ---
|
||||
|
||||
async def demote_orphaned_covers_periodically(
    cover_disk_cache: 'CoverDiskCache',
    library_db: 'LibraryDB',
    interval: int = 86400,
) -> None:
    """Periodic job: demote cached covers no longer referenced by the library.

    Builds the full set of cache filenames the current library can still
    reference, then asks the disk cache to demote everything else to the
    expiring tier. Runs until cancelled.
    """
    from repositories.coverart_disk_cache import get_cache_filename

    await asyncio.sleep(300)  # startup grace period
    while True:
        try:
            album_mbids = await library_db.get_all_album_mbids()
            artist_mbids = await library_db.get_all_artist_mbids()

            # Every filename a current library item may legitimately occupy.
            valid_hashes: set[str] = {
                get_cache_filename(f"rg_{mbid}", suffix)
                for mbid in album_mbids
                for suffix in ("500", "250", "1200", "orig")
            }
            valid_hashes.update(
                get_cache_filename(f"artist_{mbid}_{size}", "img")
                for mbid in artist_mbids
                for size in ("250", "500")
            )
            valid_hashes.update(
                get_cache_filename(f"artist_{mbid}", "img")
                for mbid in artist_mbids
            )

            demoted = await asyncio.to_thread(cover_disk_cache.demote_orphaned, valid_hashes)
            if demoted:
                logger.info("Orphan cover demotion: %d covers demoted to expiring", demoted)
        except asyncio.CancelledError:
            logger.info("Orphan cover demotion task cancelled")
            break
        except Exception as e:
            logger.error("Orphan cover demotion failed: %s", e, exc_info=True)

        await asyncio.sleep(interval)
|
||||
|
||||
|
||||
def start_orphan_cover_demotion_task(
    cover_disk_cache: 'CoverDiskCache',
    library_db: 'LibraryDB',
    interval: int = 86400,
) -> asyncio.Task:
    """Spawn and register the periodic orphan cover demotion task."""
    coro = demote_orphaned_covers_periodically(
        cover_disk_cache, library_db, interval=interval
    )
    task = asyncio.create_task(coro)
    TaskRegistry.get_instance().register("orphan-cover-demotion", task)
    return task
|
||||
|
||||
|
||||
# --- Store pruning (request history + ignored releases + youtube orphans) ---
|
||||
|
||||
async def prune_stores_periodically(
    request_history: 'RequestHistoryStore',
    mbid_store: 'MBIDStore',
    youtube_store: 'YouTubeStore',
    request_retention_days: int = 180,
    ignored_retention_days: int = 365,
    interval: int = 21600,
) -> None:
    """Retention job: prune old terminal requests, old ignored releases and
    orphaned YouTube track links on a fixed interval until cancelled."""
    await asyncio.sleep(600)  # startup grace period
    while True:
        try:
            requests_pruned = await request_history.prune_old_terminal_requests(request_retention_days)
            ignored_pruned = await mbid_store.prune_old_ignored_releases(ignored_retention_days)
            yt_orphans = await youtube_store.delete_orphaned_track_links()
            # Only log when something was actually removed.
            if requests_pruned or ignored_pruned or yt_orphans:
                logger.info(
                    "Store prune: requests=%d ignored_releases=%d youtube_orphans=%d",
                    requests_pruned, ignored_pruned, yt_orphans,
                )
        except asyncio.CancelledError:
            logger.info("Store prune task cancelled")
            break
        except Exception as e:
            logger.error("Store prune task failed: %s", e, exc_info=True)

        await asyncio.sleep(interval)
|
||||
|
||||
|
||||
def start_store_prune_task(
    request_history: 'RequestHistoryStore',
    mbid_store: 'MBIDStore',
    youtube_store: 'YouTubeStore',
    request_retention_days: int = 180,
    ignored_retention_days: int = 365,
    interval: int = 21600,
) -> asyncio.Task:
    """Spawn and register the periodic store pruning task."""
    coro = prune_stores_periodically(
        request_history,
        mbid_store,
        youtube_store,
        request_retention_days=request_retention_days,
        ignored_retention_days=ignored_retention_days,
        interval=interval,
    )
    task = asyncio.create_task(coro)
    TaskRegistry.get_instance().register("store-prune", task)
    return task
|
||||
0
backend/infrastructure/__init__.py
Normal file
15
backend/infrastructure/cache/__init__.py
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
"""Ephemeral caching infrastructure — all data here can be cleared without data loss.
|
||||
|
||||
For durable storage, see ``infrastructure.persistence``.
|
||||
"""
|
||||
|
||||
from infrastructure.cache.memory_cache import CacheInterface, InMemoryCache
|
||||
from infrastructure.cache.disk_cache import DiskMetadataCache
|
||||
from infrastructure.cache.protocol import CacheProtocol
|
||||
|
||||
__all__ = [
|
||||
"CacheInterface",
|
||||
"CacheProtocol",
|
||||
"InMemoryCache",
|
||||
"DiskMetadataCache",
|
||||
]
|
||||
194
backend/infrastructure/cache/cache_keys.py
vendored
Normal file
|
|
@ -0,0 +1,194 @@
|
|||
"""Centralized cache key generation for consistent, sorted, testable cache keys."""
|
||||
from typing import Optional
|
||||
|
||||
|
||||
|
||||
# --- MusicBrainz ---
MB_ARTIST_SEARCH_PREFIX = "mb:artist:search:"
MB_ARTIST_DETAIL_PREFIX = "mb:artist:detail:"
MB_ALBUM_SEARCH_PREFIX = "mb:album:search:"
MB_RG_DETAIL_PREFIX = "mb:rg:detail:"
MB_RELEASE_DETAIL_PREFIX = "mb:release:detail:"
MB_RELEASE_TO_RG_PREFIX = "mb:release_to_rg:"
MB_RELEASE_REC_PREFIX = "mb:release_rec_positions:"
MB_RECORDING_PREFIX = "mb:recording:"
MB_ARTIST_RELS_PREFIX = "mb:artist_rels:"
MB_ARTISTS_BY_TAG_PREFIX = "mb_artists_by_tag:"
MB_RG_BY_TAG_PREFIX = "mb_rg_by_tag:"

# --- ListenBrainz ---
LB_PREFIX = "lb_"

# --- Last.fm ---
LFM_PREFIX = "lfm_"

# --- Jellyfin ---
JELLYFIN_PREFIX = "jellyfin_"

# --- Navidrome ---
NAVIDROME_PREFIX = "navidrome:"

# --- Lidarr ---
LIDARR_PREFIX = "lidarr:"
LIDARR_REQUESTED_PREFIX = "lidarr_requested"
LIDARR_ARTIST_IMAGE_PREFIX = "lidarr_artist_image:"
LIDARR_ARTIST_DETAILS_PREFIX = "lidarr_artist_details:"
LIDARR_ARTIST_ALBUMS_PREFIX = "lidarr_artist_albums:"
LIDARR_ALBUM_IMAGE_PREFIX = "lidarr_album_image:"
LIDARR_ALBUM_DETAILS_PREFIX = "lidarr_album_details:"
LIDARR_ALBUM_TRACKS_PREFIX = "lidarr_album_tracks:"
LIDARR_TRACKFILE_PREFIX = "lidarr_trackfile:"
LIDARR_ALBUM_TRACKFILES_PREFIX = "lidarr_album_trackfiles_raw:"

# --- Local file scans ---
LOCAL_FILES_PREFIX = "local_files_"

# --- Composite page responses ---
HOME_RESPONSE_PREFIX = "home_response:"
DISCOVER_RESPONSE_PREFIX = "discover_response:"
GENRE_ARTIST_PREFIX = "genre_artist:"
GENRE_SECTION_PREFIX = "genre_section:"

SOURCE_RESOLUTION_PREFIX = "source_resolution"

# --- Enriched entity info ---
ARTIST_INFO_PREFIX = "artist_info:"
ALBUM_INFO_PREFIX = "album_info:"

ARTIST_DISCOVERY_PREFIX = "artist_discovery:"
DISCOVER_QUEUE_ENRICH_PREFIX = "discover_queue_enrich:"

# --- Wikidata / Wikipedia ---
ARTIST_WIKIDATA_PREFIX = "artist_wikidata:"
WIKIDATA_IMAGE_PREFIX = "wikidata:image:"
WIKIDATA_URL_PREFIX = "wikidata:url:"
WIKIPEDIA_PREFIX = "wikipedia:extract:"

# --- App preferences ---
PREFERENCES_PREFIX = "preferences:"

# --- TheAudioDB ---
AUDIODB_PREFIX = "audiodb_"
|
||||
|
||||
|
||||
|
||||
def musicbrainz_prefixes() -> list[str]:
    """All MusicBrainz cache key prefixes — for bulk invalidation.

    Returns a fresh list each call, so callers may mutate it freely.
    """
    return [
        MB_ARTIST_SEARCH_PREFIX,
        MB_ARTIST_DETAIL_PREFIX,
        MB_ALBUM_SEARCH_PREFIX,
        MB_RG_DETAIL_PREFIX,
        MB_RELEASE_DETAIL_PREFIX,
        MB_RELEASE_TO_RG_PREFIX,
        MB_RELEASE_REC_PREFIX,
        MB_RECORDING_PREFIX,
        MB_ARTIST_RELS_PREFIX,
        MB_ARTISTS_BY_TAG_PREFIX,
        MB_RG_BY_TAG_PREFIX,
    ]
|
||||
|
||||
|
||||
def listenbrainz_prefixes() -> list[str]:
    """All ListenBrainz cache key prefixes — for bulk invalidation."""
    return [LB_PREFIX]
|
||||
|
||||
|
||||
def lastfm_prefixes() -> list[str]:
    """All Last.fm cache key prefixes — for bulk invalidation."""
    return [LFM_PREFIX]
|
||||
|
||||
|
||||
def home_prefixes() -> list[str]:
    """Cache prefixes cleared on home/discover invalidation."""
    return [HOME_RESPONSE_PREFIX, DISCOVER_RESPONSE_PREFIX, GENRE_ARTIST_PREFIX, GENRE_SECTION_PREFIX]
|
||||
|
||||
|
||||
|
||||
def _sort_params(**kwargs) -> str:
|
||||
"""Sort parameters for consistent key generation."""
|
||||
return ":".join(f"{k}={v}" for k, v in sorted(kwargs.items()) if v is not None)
|
||||
|
||||
|
||||
def mb_artist_search_key(query: str, limit: int, offset: int) -> str:
    """Generate cache key for MusicBrainz artist search."""
    return MB_ARTIST_SEARCH_PREFIX + f"{query}:{limit}:{offset}"
|
||||
|
||||
|
||||
def mb_album_search_key(
    query: str,
    limit: int,
    offset: int,
    included_secondary_types: Optional[set[str]] = None
) -> str:
    """Generate cache key for MusicBrainz album search.

    Secondary types are sorted so the same set always yields the same key.
    """
    if included_secondary_types:
        types_str = ",".join(sorted(included_secondary_types))
    else:
        types_str = "none"
    return MB_ALBUM_SEARCH_PREFIX + f"{query}:{limit}:{offset}:{types_str}"
|
||||
|
||||
|
||||
def mb_artist_detail_key(mbid: str) -> str:
    """Generate cache key for MusicBrainz artist details."""
    return MB_ARTIST_DETAIL_PREFIX + mbid
|
||||
|
||||
|
||||
def mb_release_group_key(mbid: str, includes: Optional[list[str]] = None) -> str:
    """Generate cache key for MusicBrainz release group.

    Includes are sorted so the same include set always yields the same key.
    """
    if includes:
        includes_str = ",".join(sorted(includes))
    else:
        includes_str = "default"
    return MB_RG_DETAIL_PREFIX + f"{mbid}:{includes_str}"
|
||||
|
||||
|
||||
def mb_release_key(release_id: str, includes: Optional[list[str]] = None) -> str:
    """Generate cache key for MusicBrainz release."""
    if includes:
        includes_str = ",".join(sorted(includes))
    else:
        includes_str = "default"
    return MB_RELEASE_DETAIL_PREFIX + f"{release_id}:{includes_str}"
|
||||
|
||||
|
||||
def lidarr_library_albums_key(include_unmonitored: bool = False) -> str:
    """Generate cache key for full Lidarr library album list."""
    scope = "all" if include_unmonitored else "monitored"
    return LIDARR_PREFIX + "library:albums:" + scope
|
||||
|
||||
|
||||
def lidarr_library_artists_key(include_unmonitored: bool = False) -> str:
    """Generate cache key for Lidarr library artist list."""
    scope = "all" if include_unmonitored else "monitored"
    return LIDARR_PREFIX + "library:artists:" + scope
|
||||
|
||||
|
||||
def lidarr_library_mbids_key(include_release_ids: bool = False) -> str:
    """Generate cache key for Lidarr library MBIDs."""
    scope = "with_releases" if include_release_ids else "albums_only"
    return LIDARR_PREFIX + "library:mbids:" + scope
|
||||
|
||||
|
||||
def lidarr_artist_mbids_key() -> str:
    """Generate cache key for Lidarr artist MBIDs."""
    return LIDARR_PREFIX + "artists:mbids"
|
||||
|
||||
|
||||
def lidarr_raw_albums_key() -> str:
    """Generate cache key for the raw Lidarr album payload."""
    return LIDARR_PREFIX + "raw:albums"
|
||||
|
||||
|
||||
def lidarr_library_grouped_key() -> str:
    """Generate cache key for grouped Lidarr library albums."""
    return LIDARR_PREFIX + "library:grouped"
|
||||
|
||||
|
||||
def lidarr_requested_mbids_key() -> str:
    """Generate cache key for Lidarr requested (pending download) MBIDs."""
    return LIDARR_REQUESTED_PREFIX + "_mbids"
|
||||
|
||||
|
||||
def lidarr_status_key() -> str:
    """Generate cache key for Lidarr status."""
    return LIDARR_PREFIX + "status"
|
||||
|
||||
|
||||
def wikidata_artist_image_key(wikidata_id: str) -> str:
    """Generate cache key for a Wikidata artist image."""
    return WIKIDATA_IMAGE_PREFIX + wikidata_id
|
||||
|
||||
|
||||
def wikidata_url_key(artist_id: str) -> str:
    """Generate cache key for an artist's Wikidata URL."""
    return WIKIDATA_URL_PREFIX + artist_id
|
||||
|
||||
|
||||
def wikipedia_extract_key(url: str) -> str:
    """Generate cache key for a Wikipedia extract."""
    return WIKIPEDIA_PREFIX + url
|
||||
|
||||
|
||||
def preferences_key() -> str:
    """Generate cache key for the current preferences snapshot."""
    return PREFERENCES_PREFIX + "current"
|
||||
475
backend/infrastructure/cache/disk_cache.py
vendored
Normal file
|
|
@ -0,0 +1,475 @@
|
|||
import asyncio
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import shutil
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from infrastructure.serialization import to_jsonable
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _encode_json(value: Any) -> str:
|
||||
return json.dumps(value, ensure_ascii=True, separators=(",", ":"))
|
||||
|
||||
|
||||
def _decode_json(text: str) -> Any:
|
||||
return json.loads(text)
|
||||
|
||||
|
||||
class DiskMetadataCache:
|
||||
def __init__(
|
||||
self,
|
||||
base_path: Path,
|
||||
recent_metadata_max_size_mb: int = 128,
|
||||
recent_covers_max_size_mb: int = 0,
|
||||
persistent_metadata_ttl_hours: int = 24,
|
||||
):
|
||||
self.base_path = Path(base_path)
|
||||
self.recent_metadata_max_size_bytes = max(recent_metadata_max_size_mb, 0) * 1024 * 1024
|
||||
self.recent_covers_max_size_bytes = max(recent_covers_max_size_mb, 0) * 1024 * 1024
|
||||
self.default_ttl_seconds = max(persistent_metadata_ttl_hours, 1) * 3600
|
||||
|
||||
self._recent_albums_dir = self.base_path / "recent" / "albums"
|
||||
self._recent_artists_dir = self.base_path / "recent" / "artists"
|
||||
self._recent_covers_dir = self.base_path / "recent" / "covers"
|
||||
self._persistent_albums_dir = self.base_path / "persistent" / "albums"
|
||||
self._persistent_artists_dir = self.base_path / "persistent" / "artists"
|
||||
self._recent_audiodb_artists_dir = self.base_path / "recent" / "audiodb_artists"
|
||||
self._recent_audiodb_albums_dir = self.base_path / "recent" / "audiodb_albums"
|
||||
self._persistent_audiodb_artists_dir = self.base_path / "persistent" / "audiodb_artists"
|
||||
self._persistent_audiodb_albums_dir = self.base_path / "persistent" / "audiodb_albums"
|
||||
self._ensure_dirs()
|
||||
|
||||
def _ensure_dirs(self) -> None:
|
||||
for path in (
|
||||
self._recent_albums_dir,
|
||||
self._recent_artists_dir,
|
||||
self._recent_covers_dir,
|
||||
self._persistent_albums_dir,
|
||||
self._persistent_artists_dir,
|
||||
self._recent_audiodb_artists_dir,
|
||||
self._recent_audiodb_albums_dir,
|
||||
self._persistent_audiodb_artists_dir,
|
||||
self._persistent_audiodb_albums_dir,
|
||||
):
|
||||
path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
@staticmethod
|
||||
def _cache_hash(identifier: str) -> str:
|
||||
return hashlib.sha1(identifier.encode()).hexdigest()
|
||||
|
||||
@staticmethod
|
||||
def _meta_path(file_path: Path) -> Path:
|
||||
return file_path.with_suffix(".meta.json")
|
||||
|
||||
def _entity_paths(self, entity_type: str, identifier: str) -> tuple[Path, Path]:
|
||||
cache_hash = self._cache_hash(identifier)
|
||||
if entity_type == "album":
|
||||
return (
|
||||
self._recent_albums_dir / f"{cache_hash}.json",
|
||||
self._persistent_albums_dir / f"{cache_hash}.json",
|
||||
)
|
||||
if entity_type == "artist":
|
||||
return (
|
||||
self._recent_artists_dir / f"{cache_hash}.json",
|
||||
self._persistent_artists_dir / f"{cache_hash}.json",
|
||||
)
|
||||
if entity_type == "audiodb_artist":
|
||||
return (
|
||||
self._recent_audiodb_artists_dir / f"{cache_hash}.json",
|
||||
self._persistent_audiodb_artists_dir / f"{cache_hash}.json",
|
||||
)
|
||||
if entity_type == "audiodb_album":
|
||||
return (
|
||||
self._recent_audiodb_albums_dir / f"{cache_hash}.json",
|
||||
self._persistent_audiodb_albums_dir / f"{cache_hash}.json",
|
||||
)
|
||||
raise ValueError(f"Unsupported entity type: {entity_type}")
|
||||
|
||||
def _delete_file_pair(self, file_path: Path) -> None:
|
||||
file_path.unlink(missing_ok=True)
|
||||
self._meta_path(file_path).unlink(missing_ok=True)
|
||||
|
||||
def _load_meta(self, meta_path: Path) -> dict[str, Any]:
|
||||
if not meta_path.exists():
|
||||
return {}
|
||||
try:
|
||||
payload = _decode_json(meta_path.read_text())
|
||||
except (json.JSONDecodeError, OSError, TypeError):
|
||||
return {}
|
||||
return payload if isinstance(payload, dict) else {}
|
||||
|
||||
@staticmethod
|
||||
def _is_expired(meta: dict[str, Any]) -> bool:
|
||||
expires_at = meta.get("expires_at")
|
||||
return isinstance(expires_at, (int, float)) and time.time() > float(expires_at)
|
||||
|
||||
def _cleanup_expired_directory(self, directory: Path) -> int:
|
||||
removed = 0
|
||||
handled_meta_paths: set[Path] = set()
|
||||
|
||||
for data_path in directory.iterdir():
|
||||
if not data_path.is_file() or data_path.name.endswith(".meta.json"):
|
||||
continue
|
||||
meta_path = self._meta_path(data_path)
|
||||
handled_meta_paths.add(meta_path)
|
||||
if self._is_expired(self._load_meta(meta_path)):
|
||||
self._delete_file_pair(data_path)
|
||||
removed += 1
|
||||
|
||||
for meta_path in directory.glob("*.meta.json"):
|
||||
if meta_path in handled_meta_paths:
|
||||
continue
|
||||
if self._is_expired(self._load_meta(meta_path)):
|
||||
meta_path.unlink(missing_ok=True)
|
||||
removed += 1
|
||||
|
||||
return removed
|
||||
|
||||
    def _enforce_size_limit_for_directory(self, directory: Path, max_size_bytes: int) -> int:
        """Evict least-recently-accessed entries until *directory* fits the cap.

        Returns the number of bytes freed; 0 when the cap is disabled (<= 0)
        or already satisfied.
        """
        if max_size_bytes <= 0:
            return 0

        # (last_accessed, data_path, size) per data file; oldest evicted first.
        candidates: list[tuple[float, Path, int]] = []
        total_size = 0
        for data_path in directory.iterdir():
            if not data_path.is_file() or data_path.name.endswith(".meta.json"):
                continue
            try:
                size_bytes = data_path.stat().st_size
            except FileNotFoundError:
                # Entry vanished concurrently; skip it.
                continue
            meta = self._load_meta(self._meta_path(data_path))
            # Fall back to created_at, then 0.0 (evicted first) when missing.
            last_accessed = float(meta.get("last_accessed", meta.get("created_at", 0.0)) or 0.0)
            total_size += size_bytes
            candidates.append((last_accessed, data_path, size_bytes))

        if total_size <= max_size_bytes:
            return 0

        bytes_to_free = total_size - max_size_bytes
        freed = 0
        # Evict in LRU order until we are back under the cap.
        for _, data_path, size_bytes in sorted(candidates, key=lambda item: item[0]):
            self._delete_file_pair(data_path)
            freed += size_bytes
            if freed >= bytes_to_free:
                break
        return freed
|
||||
|
||||
def _write_json_entry(self, file_path: Path, payload: dict[str, Any], expires_at: float | None) -> None:
|
||||
file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
now = time.time()
|
||||
file_path.write_text(_encode_json(payload))
|
||||
meta = {
|
||||
"created_at": now,
|
||||
"last_accessed": now,
|
||||
}
|
||||
if expires_at is not None:
|
||||
meta["expires_at"] = expires_at
|
||||
self._meta_path(file_path).write_text(_encode_json(meta))
|
||||
|
||||
    def _read_json_entry(self, file_path: Path, honor_expiry: bool) -> dict[str, Any] | None:
        """Read a cached JSON entry, enforcing expiry and self-healing corruption.

        Returns the payload dict, or None when the file is absent, expired
        (only when *honor_expiry* is True), unreadable, or not a JSON object.
        Corrupt or expired entries are deleted on sight; a successful read
        refreshes ``last_accessed`` in the sidecar for LRU eviction.
        """
        if not file_path.exists():
            return None

        meta_path = self._meta_path(file_path)
        meta: dict[str, Any] = {}
        if meta_path.exists():
            try:
                meta = _decode_json(meta_path.read_text())
            except (json.JSONDecodeError, OSError, TypeError):
                # Unreadable sidecar: treat the entry as metadata-less.
                meta = {}

        if honor_expiry:
            expires_at = meta.get("expires_at")
            if isinstance(expires_at, (int, float)) and time.time() > float(expires_at):
                # Expired: remove both files and report a miss.
                self._delete_file_pair(file_path)
                return None

        try:
            payload = _decode_json(file_path.read_text())
        except (json.JSONDecodeError, OSError, TypeError):
            # Corrupt data file: purge it so the next write starts clean.
            self._delete_file_pair(file_path)
            return None

        if not isinstance(payload, dict):
            self._delete_file_pair(file_path)
            return None

        if meta_path.exists():
            # Touch the access time; failure here is non-fatal.
            meta["last_accessed"] = time.time()
            try:
                meta_path.write_text(_encode_json(meta))
            except OSError as exc:
                logger.debug("Failed to update disk cache access time for %s: %s", meta_path, exc)

        return payload
|
||||
|
||||
    async def _set_entity(
        self,
        entity_type: str,
        identifier: str,
        payload: Any,
        is_monitored: bool,
        ttl_seconds: int | None,
    ) -> None:
        """Serialize *payload* and write it to the appropriate cache tier.

        Monitored entities land in the persistent tier; unmonitored ones in
        the recent tier, which always gets a TTL (explicit *ttl_seconds* or
        the instance default). The other tier's copy is deleted so exactly
        one copy exists. Raises TypeError when the payload does not
        serialize to a mapping. File I/O runs in a worker thread.
        """
        builtins = to_jsonable(payload)
        if not isinstance(builtins, dict):
            raise TypeError(f"Expected mapping payload for {entity_type} cache, got {type(builtins)!r}")

        recent_path, persistent_path = self._entity_paths(entity_type, identifier)

        def operation() -> None:
            target_path = persistent_path if is_monitored else recent_path
            other_path = recent_path if is_monitored else persistent_path
            # Keep a single authoritative copy across the two tiers.
            self._delete_file_pair(other_path)
            expires_at = None
            if ttl_seconds is not None:
                expires_at = time.time() + max(ttl_seconds, 1)
            elif not is_monitored:
                # Recent-tier entries always expire, even without explicit TTL.
                expires_at = time.time() + max(self.default_ttl_seconds, 1)
            self._write_json_entry(target_path, builtins, expires_at)

        await asyncio.to_thread(operation)
|
||||
|
||||
async def _get_entity(self, entity_type: str, identifier: str) -> dict[str, Any] | None:
|
||||
recent_path, persistent_path = self._entity_paths(entity_type, identifier)
|
||||
|
||||
def operation() -> dict[str, Any] | None:
|
||||
persistent_payload = self._read_json_entry(persistent_path, honor_expiry=True)
|
||||
if persistent_payload is not None:
|
||||
return persistent_payload
|
||||
return self._read_json_entry(recent_path, honor_expiry=True)
|
||||
|
||||
return await asyncio.to_thread(operation)
|
||||
|
||||
    async def set_album(
        self,
        musicbrainz_id: str,
        album_info: Any,
        is_monitored: bool = False,
        ttl_seconds: int | None = None,
    ) -> None:
        """Cache album metadata on disk (persistent tier when monitored)."""
        await self._set_entity("album", musicbrainz_id, album_info, is_monitored, ttl_seconds)
|
||||
|
||||
    async def get_album(self, musicbrainz_id: str) -> dict[str, Any] | None:
        """Return cached album metadata, or None when absent or expired."""
        return await self._get_entity("album", musicbrainz_id)
|
||||
|
||||
    async def set_artist(
        self,
        musicbrainz_id: str,
        artist_info: Any,
        is_monitored: bool = False,
        ttl_seconds: int | None = None,
    ) -> None:
        """Cache artist metadata on disk (persistent tier when monitored)."""
        await self._set_entity("artist", musicbrainz_id, artist_info, is_monitored, ttl_seconds)
|
||||
|
||||
    async def get_artist(self, musicbrainz_id: str) -> dict[str, Any] | None:
        """Return cached artist metadata, or None when absent or expired."""
        return await self._get_entity("artist", musicbrainz_id)
|
||||
|
||||
    async def set_audiodb_artist(
        self,
        identifier: str,
        payload: Any,
        is_monitored: bool = False,
        ttl_seconds: int | None = None,
    ) -> None:
        """Cache an AudioDB artist payload (persistent tier when monitored)."""
        await self._set_entity("audiodb_artist", identifier, payload, is_monitored, ttl_seconds)
|
||||
|
||||
    async def get_audiodb_artist(self, identifier: str) -> dict[str, Any] | None:
        """Return a cached AudioDB artist payload, or None when absent/expired."""
        return await self._get_entity("audiodb_artist", identifier)
|
||||
|
||||
    async def set_audiodb_album(
        self,
        identifier: str,
        payload: Any,
        is_monitored: bool = False,
        ttl_seconds: int | None = None,
    ) -> None:
        """Cache an AudioDB album payload (persistent tier when monitored)."""
        await self._set_entity("audiodb_album", identifier, payload, is_monitored, ttl_seconds)
|
||||
|
||||
    async def get_audiodb_album(self, identifier: str) -> dict[str, Any] | None:
        """Return a cached AudioDB album payload, or None when absent/expired."""
        return await self._get_entity("audiodb_album", identifier)
|
||||
|
||||
async def delete_album(self, musicbrainz_id: str) -> None:
|
||||
recent_path, persistent_path = self._entity_paths("album", musicbrainz_id)
|
||||
await asyncio.to_thread(self._delete_file_pair, recent_path)
|
||||
await asyncio.to_thread(self._delete_file_pair, persistent_path)
|
||||
|
||||
async def delete_artist(self, musicbrainz_id: str) -> None:
|
||||
recent_path, persistent_path = self._entity_paths("artist", musicbrainz_id)
|
||||
await asyncio.to_thread(self._delete_file_pair, recent_path)
|
||||
await asyncio.to_thread(self._delete_file_pair, persistent_path)
|
||||
|
||||
async def delete_entity(self, entity_type: str, identifier: str) -> None:
|
||||
recent_path, persistent_path = self._entity_paths(entity_type, identifier)
|
||||
await asyncio.to_thread(self._delete_file_pair, recent_path)
|
||||
await asyncio.to_thread(self._delete_file_pair, persistent_path)
|
||||
|
||||
async def promote_to_persistent(self, identifier: str, identifier_type: str) -> bool:
|
||||
entity_type = "artist" if identifier_type == "artist" else "album"
|
||||
recent_path, persistent_path = self._entity_paths(entity_type, identifier)
|
||||
|
||||
def operation() -> bool:
|
||||
if persistent_path.exists():
|
||||
return True
|
||||
if not recent_path.exists():
|
||||
return False
|
||||
persistent_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
shutil.move(str(recent_path), str(persistent_path))
|
||||
recent_meta = self._meta_path(recent_path)
|
||||
persistent_meta = self._meta_path(persistent_path)
|
||||
if recent_meta.exists():
|
||||
meta = _decode_json(recent_meta.read_text())
|
||||
meta.pop("expires_at", None)
|
||||
meta["last_accessed"] = time.time()
|
||||
persistent_meta.write_text(_encode_json(meta))
|
||||
recent_meta.unlink(missing_ok=True)
|
||||
return True
|
||||
|
||||
return await asyncio.to_thread(operation)
|
||||
|
||||
async def promote_album_to_persistent(self, musicbrainz_id: str) -> bool:
|
||||
return await self.promote_to_persistent(musicbrainz_id, "album")
|
||||
|
||||
async def promote_artist_to_persistent(self, musicbrainz_id: str) -> bool:
|
||||
return await self.promote_to_persistent(musicbrainz_id, "artist")
|
||||
|
||||
async def cleanup_expired_recent(self) -> int:
|
||||
def operation() -> int:
|
||||
removed = 0
|
||||
for base_dir in (
|
||||
self._recent_albums_dir,
|
||||
self._recent_artists_dir,
|
||||
self._recent_audiodb_artists_dir,
|
||||
self._recent_audiodb_albums_dir,
|
||||
):
|
||||
removed += self._cleanup_expired_directory(base_dir)
|
||||
return removed
|
||||
|
||||
return await asyncio.to_thread(operation)
|
||||
|
||||
    async def enforce_recent_size_limits(self) -> int:
        """Evict least-recently-accessed recent-tier entries over the size cap.

        Returns the number of bytes freed (0 when already under the cap, or
        when enforcement is disabled).
        """
        # A non-positive cap disables enforcement entirely.
        if self.recent_metadata_max_size_bytes <= 0:
            return 0

        def operation() -> int:
            # (last_accessed, data file, size) for every recent-tier entry.
            candidates: list[tuple[float, Path, int]] = []
            total_size = 0
            for base_dir in (
                self._recent_albums_dir,
                self._recent_artists_dir,
                self._recent_audiodb_artists_dir,
                self._recent_audiodb_albums_dir,
            ):
                for data_path in base_dir.glob("*.json"):
                    # Sidecar metadata files also end in .json; skip them.
                    if data_path.name.endswith(".meta.json"):
                        continue
                    try:
                        size_bytes = data_path.stat().st_size
                    except FileNotFoundError:
                        # Entry removed between glob and stat; ignore.
                        continue
                    meta_path = self._meta_path(data_path)
                    meta: dict[str, Any] = {}
                    if meta_path.exists():
                        try:
                            meta = _decode_json(meta_path.read_text())
                        except Exception:  # noqa: BLE001
                            # Corrupt sidecar: treat the entry as never accessed.
                            meta = {}
                    # Fall back to created_at, then 0.0 (evicted first).
                    last_accessed = float(meta.get("last_accessed", meta.get("created_at", 0.0)) or 0.0)
                    total_size += size_bytes
                    candidates.append((last_accessed, data_path, size_bytes))

            if total_size <= self.recent_metadata_max_size_bytes:
                return 0

            # Evict oldest-accessed entries first until enough bytes are freed.
            bytes_to_free = total_size - self.recent_metadata_max_size_bytes
            freed = 0
            for _, data_path, size_bytes in sorted(candidates, key=lambda item: item[0]):
                self._delete_file_pair(data_path)
                freed += size_bytes
                if freed >= bytes_to_free:
                    break
            return freed

        return await asyncio.to_thread(operation)
async def cleanup_expired_covers(self) -> int:
|
||||
return await asyncio.to_thread(self._cleanup_expired_directory, self._recent_covers_dir)
|
||||
|
||||
async def enforce_cover_size_limits(self) -> int:
|
||||
return await asyncio.to_thread(
|
||||
self._enforce_size_limit_for_directory,
|
||||
self._recent_covers_dir,
|
||||
self.recent_covers_max_size_bytes,
|
||||
)
|
||||
|
||||
def get_stats(self) -> dict[str, Any]:
|
||||
total_count = 0
|
||||
album_count = 0
|
||||
artist_count = 0
|
||||
audiodb_artist_count = 0
|
||||
audiodb_album_count = 0
|
||||
total_size_bytes = 0
|
||||
|
||||
for base_dir, counter_name in (
|
||||
(self._recent_albums_dir, "album"),
|
||||
(self._persistent_albums_dir, "album"),
|
||||
(self._recent_artists_dir, "artist"),
|
||||
(self._persistent_artists_dir, "artist"),
|
||||
(self._recent_audiodb_artists_dir, "audiodb_artist"),
|
||||
(self._persistent_audiodb_artists_dir, "audiodb_artist"),
|
||||
(self._recent_audiodb_albums_dir, "audiodb_album"),
|
||||
(self._persistent_audiodb_albums_dir, "audiodb_album"),
|
||||
):
|
||||
for data_path in base_dir.glob("*.json"):
|
||||
if data_path.name.endswith(".meta.json"):
|
||||
continue
|
||||
total_count += 1
|
||||
if counter_name == "album":
|
||||
album_count += 1
|
||||
elif counter_name == "artist":
|
||||
artist_count += 1
|
||||
elif counter_name == "audiodb_artist":
|
||||
audiodb_artist_count += 1
|
||||
elif counter_name == "audiodb_album":
|
||||
audiodb_album_count += 1
|
||||
try:
|
||||
total_size_bytes += data_path.stat().st_size
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
return {
|
||||
"total_count": total_count,
|
||||
"album_count": album_count,
|
||||
"artist_count": artist_count,
|
||||
"audiodb_artist_count": audiodb_artist_count,
|
||||
"audiodb_album_count": audiodb_album_count,
|
||||
"total_size_bytes": total_size_bytes,
|
||||
}
|
||||
|
||||
async def clear_all(self) -> None:
|
||||
def operation() -> None:
|
||||
if self.base_path.exists():
|
||||
shutil.rmtree(self.base_path)
|
||||
self._ensure_dirs()
|
||||
|
||||
await asyncio.to_thread(operation)
|
||||
|
||||
async def clear_audiodb(self) -> None:
|
||||
def operation() -> None:
|
||||
for d in (
|
||||
self._recent_audiodb_artists_dir,
|
||||
self._recent_audiodb_albums_dir,
|
||||
self._persistent_audiodb_artists_dir,
|
||||
self._persistent_audiodb_albums_dir,
|
||||
):
|
||||
if d.exists():
|
||||
shutil.rmtree(d)
|
||||
self._ensure_dirs()
|
||||
|
||||
await asyncio.to_thread(operation)
|
||||
154
backend/infrastructure/cache/memory_cache.py
vendored
Normal file
|
|
@ -0,0 +1,154 @@
|
|||
import asyncio
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
from typing import Any, Optional
|
||||
from abc import ABC, abstractmethod
|
||||
from collections import OrderedDict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CacheInterface(ABC):
    """Abstract contract for an async TTL cache."""

    @abstractmethod
    async def get(self, key: str) -> Optional[Any]:
        """Return the cached value for *key*, or None if absent/expired."""

    @abstractmethod
    async def set(self, key: str, value: Any, ttl_seconds: int = 60) -> None:
        """Store *value* under *key* with a TTL in seconds."""

    @abstractmethod
    async def delete(self, key: str) -> None:
        """Remove *key* if present."""

    @abstractmethod
    async def clear(self) -> None:
        """Remove every entry."""

    @abstractmethod
    async def clear_prefix(self, prefix: str) -> int:
        """Remove all keys starting with *prefix*; return how many."""

    @abstractmethod
    async def cleanup_expired(self) -> int:
        """Drop expired entries; return how many were removed."""

    @abstractmethod
    def size(self) -> int:
        """Current number of entries."""

    @abstractmethod
    def estimate_memory_bytes(self) -> int:
        """Rough (shallow) memory footprint of the cache in bytes."""
class CacheEntry:
    """A cached value paired with its absolute expiry timestamp."""

    # __slots__ keeps per-entry overhead small for large caches.
    __slots__ = ('value', 'expires_at')

    def __init__(self, value: Any, ttl_seconds: int):
        self.value = value
        # Store an absolute deadline so expiry checks are one comparison.
        self.expires_at = time.time() + ttl_seconds

    def is_expired(self) -> bool:
        """True once the wall clock has passed this entry's deadline."""
        return time.time() > self.expires_at
class InMemoryCache(CacheInterface):
    """Asyncio-safe in-process LRU cache with per-entry TTLs.

    Entries live in an OrderedDict ordered least- to most-recently used;
    when full, the LRU entry is evicted. All mutating operations serialise
    on a single asyncio.Lock, so this is safe for concurrent coroutines
    (but not for use across threads or processes).
    """

    def __init__(self, max_entries: int = 10000):
        self._cache: OrderedDict[str, CacheEntry] = OrderedDict()
        self._lock = asyncio.Lock()
        self._max_entries = max_entries
        # Counters surfaced by get_stats().
        self._evictions = 0
        self._hits = 0
        self._misses = 0

    async def get(self, key: str) -> Optional[Any]:
        """Return the live value for *key*, or None on miss or expiry."""
        async with self._lock:
            entry = self._cache.get(key)
            if entry is None:
                self._misses += 1
                return None

            if entry.is_expired():
                # Expired entries are dropped lazily on access.
                self._cache.pop(key, None)
                self._misses += 1
                return None

            # Mark as most recently used for LRU ordering.
            self._cache.move_to_end(key)
            self._hits += 1
            return entry.value

    async def set(self, key: str, value: Any, ttl_seconds: int = 60) -> None:
        """Store *value* under *key*, evicting the LRU entry when full."""
        async with self._lock:
            if key not in self._cache and len(self._cache) >= self._max_entries:
                self._cache.popitem(last=False)
                self._evictions += 1
                if self._evictions % 100 == 0:
                    # Lazy %-args: the message is only built when emitted.
                    logger.info(
                        "Cache LRU evictions: %s, current size: %s",
                        self._evictions,
                        len(self._cache),
                    )

            self._cache[key] = CacheEntry(value, ttl_seconds)
            # Re-assigning an existing key keeps its old position; force MRU.
            self._cache.move_to_end(key)

    async def delete(self, key: str) -> None:
        """Remove *key* if present (no-op otherwise)."""
        async with self._lock:
            self._cache.pop(key, None)

    async def clear(self) -> None:
        """Drop every entry."""
        async with self._lock:
            self._cache.clear()

    async def clear_prefix(self, prefix: str) -> int:
        """Remove all keys starting with *prefix*; return how many."""
        async with self._lock:
            keys_to_remove = [k for k in self._cache.keys() if k.startswith(prefix)]
            for key in keys_to_remove:
                self._cache.pop(key, None)

            if keys_to_remove:
                logger.info(
                    "Cleared %s cache entries with prefix '%s'",
                    len(keys_to_remove),
                    prefix,
                )

            return len(keys_to_remove)

    async def cleanup_expired(self) -> int:
        """Drop every expired entry; return how many were removed."""
        # Sample the clock once for a consistent cutoff across the scan.
        now = time.time()

        async with self._lock:
            expired_keys = [
                key for key, entry in self._cache.items()
                if now > entry.expires_at
            ]
            for key in expired_keys:
                self._cache.pop(key, None)

            if expired_keys:
                logger.debug("Cleaned up %s expired cache entries", len(expired_keys))

            return len(expired_keys)

    def size(self) -> int:
        """Current number of entries (expired-but-unswept included)."""
        return len(self._cache)

    def estimate_memory_bytes(self) -> int:
        """Rough footprint in bytes.

        sys.getsizeof is shallow — nested containers inside cached values
        are not followed, so this is a lower bound.
        """
        total_size = sys.getsizeof(self._cache)
        for key, entry in self._cache.items():
            total_size += sys.getsizeof(key)
            total_size += sys.getsizeof(entry)
            total_size += sys.getsizeof(entry.value)
        return total_size

    def get_stats(self) -> dict[str, Any]:
        """Hit/miss/eviction counters plus size and memory estimate."""
        total = self._hits + self._misses
        hit_rate = (self._hits / total * 100) if total > 0 else 0.0
        return {
            "size": len(self._cache),
            "max_entries": self._max_entries,
            "hits": self._hits,
            "misses": self._misses,
            "hit_rate_percent": round(hit_rate, 2),
            "evictions": self._evictions,
            "memory_bytes": self.estimate_memory_bytes(),
        }
15
backend/infrastructure/cache/protocol.py
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
"""Cache protocol — structural subtyping for ephemeral caches."""
|
||||
|
||||
from typing import Any, Optional, Protocol, runtime_checkable
|
||||
|
||||
|
||||
@runtime_checkable
class CacheProtocol(Protocol):
    """Structural interface matched by any async TTL cache implementation.

    runtime_checkable permits isinstance() checks against this protocol;
    note that only method presence is verified at runtime, not signatures.
    """

    async def get(self, key: str) -> Optional[Any]: ...
    async def set(self, key: str, value: Any, ttl_seconds: int = 60) -> None: ...
    async def delete(self, key: str) -> None: ...
    async def clear(self) -> None: ...
    async def clear_prefix(self, prefix: str) -> int: ...
    async def cleanup_expired(self) -> int: ...
    def size(self) -> int: ...
    def estimate_memory_bytes(self) -> int: ...
4
backend/infrastructure/cache/request_history.py
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
"""Backward-compat shim — re-exports from infrastructure.persistence."""
|
||||
from infrastructure.persistence.request_history import RequestHistoryRecord, RequestHistoryStore
|
||||
|
||||
__all__ = ["RequestHistoryRecord", "RequestHistoryStore"]
|
||||
31
backend/infrastructure/constants.py
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
# Chunk size (bytes) used when streaming audio/file responses.
STREAM_CHUNK_SIZE = 64 * 1024

# Jellyfin expresses positions/durations in "ticks" of 100 ns each.
JELLYFIN_TICKS_PER_SECOND = 10_000_000

# Device profile advertised to Jellyfin for in-browser playback: which
# containers/codecs the web client can direct-play, and the transcoding
# target (Opus over HTTP) used when direct play is not possible.
BROWSER_AUDIO_DEVICE_PROFILE: dict[str, object] = {
    "MaxStreamingBitrate": 8000000,
    "MaxStaticBitrate": 8000000,
    "MusicStreamingTranscodingBitrate": 128000,
    "MaxStaticMusicBitrate": 8000000,
    "DirectPlayProfiles": [
        {"Container": "opus", "Type": "Audio"},
        {"Container": "webm", "AudioCodec": "opus", "Type": "Audio"},
        {"Container": "mp3", "Type": "Audio"},
        {"Container": "aac", "Type": "Audio"},
        {"Container": "m4a", "AudioCodec": "aac", "Type": "Audio"},
        {"Container": "m4b", "AudioCodec": "aac", "Type": "Audio"},
        {"Container": "flac", "Type": "Audio"},
        {"Container": "wav", "Type": "Audio"},
        {"Container": "ts", "AudioCodec": "mp3", "Type": "Audio"},
    ],
    "TranscodingProfiles": [
        {
            "Container": "opus",
            "Type": "Audio",
            "AudioCodec": "opus",
            "Context": "Streaming",
            "Protocol": "http",
            "MaxAudioChannels": "2",
        }
    ],
}
|
||||