Mirror of https://github.com/navidrome/navidrome.git (synced 2025-12-31 19:08:06 -05:00)
Compare commits
269 Commits
Commit SHAs (abbreviated):

e0f1ddecbe 1e4e3eac6e 19d443ec7f db92cf9e47 ec9f9aa243 0d1f2bcc8a dfa217ab51 3d6a2380bc
53aa640f35 e4d65a7828 b41123f75e 6f52c0201c 4944f8035a 0d5097d888 ed7ee3d9f8 74803bb43e
0159cf73e2 ac1d51f9d0 91eb661db5 524d508916 a6f1f7b7e3 49b8cfc261 bcea8b832a 58367afaea
6b59f5f73a 5f0c1e7387 a057a680f1 f9081bbe6b 73eb0e254b 2b84c574ba 88f87e6c4f cf100c4eb4
5ab345c83e 46a2ec0ba1 3394580413 112ea281d9 c837838d58 9e9465567d 651ce163c7 55ce28b2c6
d331ee904b 3a0ce6aafa 1806552ef6 223e88d481 57e0f6d3ea 1c691ac0e6 264d73d73e 296259d781
3f9d173495 b386981b7f be7cb59dc5 63dc0e2062 1e1dce92b6 d78c6f6a04 59ece40393 491210ac12
cd552a55ef ee2c2b19e9 0147bb5f12 1ed8930107 e457f21306 b04647309f 2adb098f32 212887214c
beb768cd9c ed1109ddb2 98808e4b6d 422ba2284e 938c3d44cc 2838ac36df b952672877 5c0b6fb9b7
5fb1db6031 226be78bf5 7c13878075 0bb4b881e9 70f536e04d 2a15a217de a28462a7ab 5c67297dce
365df5220b b2b5c00331 ee18489b85 57d3be8604 0d42b9a4a5 a1a6047c37 2171c44503 fac01ccecb
98a6819390 4156602158 21a5528f5e 31e003e6f3 e467e32c06 36ed880e61 1c192d8a6d 5869f7caaf
8732fc7226 0372339e1b a04167672c dc4e091622 8ab2a11d22 637c909e93 453873fa26 de37e0f720
f3cb85cb0d 0c4c223127 3892f70c35 1468a56808 d6ec52b9d4 5fa19f9cfa 15a3d2ca66 efab198d4a
5ad9f546b2 20297c2aea f6eee65955 aee19e747c f34f15ba1c 74348a340f 09ae41a2da 70487a09f4
d4147c2330 dd4802c0c6 efed7f1b40 6cc95d53a9 c795bcfcf7 46a963a02a 195ae56001 f9db449e7e
657fe11f53 47e3fdb1b8 c37583fa9f 9d86f63f15 73ccfbd839 920fd53e58 3179966270 537e2fc033
f1478d40f5 beff1afad7 ba2623e3f1 d60e83176c acce3c97d5 734eb30ac5 1eedee9086 51eed74a0e
3942275689 98b038c1fb f0302525a7 0299e488b5 630c304080 0bebd396df 0b18489327 8880f67035
99dfb832eb 51c16aa69f 972229d1e8 c8f174ea84 d4dc8180a2 851f54ea57 72a0f59be3 c11fd9ce28
3e47819f7a 906ac635c2 6bc4c0317f 04f296cc73 21dd04cb7d 2d8507cfd7 6c11649b06 4f8cd5307c
32afe9698c 3e7c4b6f70 dcc84e29d9 0d520dea2d 2bb918f8a1 8e2052ff95 bc3576e092 44bc70b269
297f72ff1a 181c29613f 1a36f06147 9cbdb20a31 7f030b0859 177a1f853f 627417dae3 2b0bfbd75a
8fb09e71b6 c94def801e cbf5e3d51b 1c0ebb9460 94bc1a1d41 9ae898d071 054946dc42 ccce1c0f6d
81edef925c 2d4f483812 d229ff39e5 3982ba7258 1bf94531fd 6c38dc234f c1adf407a1 c952dc343a
3671598121 cd0cf7c12b 6c6223f2f9 6ff7ab52f4 faed2ea8d7 cf69df877a 075a7e2640 3fda7445b0
fcb5e1b806 154e13f7c9 69e2a6d620 15b2dc6b48 1c48a55759 a9b301dfc5 b86a69567d 67474b776c
9d8c49750e a557f37834 0a650de357 9c3b456165 23bebe4e06 8808eaddda bbb3182bc9 82633d7490
28668782c6 97c06aba1a 9c46e2b262 3713032f57 a358d107aa 5f6a90e5aa 0232afd98d 270ae3549d
8b5af67647 00c6a0ed1f ff79ac4336 8d37781a47 a6fb7fd705 fd81039f1b bc4aa55de3 6ec6ac1595
06e38a8024 6e5eea980d 943b456d3f 16d1314a68 ae6499b941 214287e00d af1add4312 b14c790641
d9fa19dab3 0281d06b01 de04393b47 55730514ea 768160b05e b7285b28cf eab6aadc0f 640a734896
a9334b7787 5e8085bf3c b2eb533082 936af2d895 b1c18a428b 1fac9cc3ee 92a1f19271 06c9c1e64a
ed3ab5385d fcdd30ba8f dd48a23f92 6040a50297 9e5849e4dc
@@ -4,7 +4,7 @@
|
||||
"dockerfile": "Dockerfile",
|
||||
"args": {
|
||||
// Update the VARIANT arg to pick a version of Go: 1, 1.15, 1.14
|
||||
"VARIANT": "1.23",
|
||||
"VARIANT": "1.24",
|
||||
// Options
|
||||
"INSTALL_NODE": "true",
|
||||
"NODE_VERSION": "v20"
|
||||
|
||||
@@ -1,10 +1,18 @@
|
||||
.DS_Store
|
||||
ui/node_modules
|
||||
ui/build
|
||||
!ui/build/.gitkeep
|
||||
Dockerfile
|
||||
docker-compose*.yml
|
||||
data
|
||||
*.db
|
||||
testDB
|
||||
navidrome
|
||||
navidrome.db
|
||||
navidrome.toml
|
||||
tmp
|
||||
!tmp/taglib
|
||||
dist
|
||||
binaries
|
||||
cache
|
||||
music
|
||||
!Dockerfile
|
||||
.github/actions/download-taglib/action.yml (new file, 23 lines, vendored)
@@ -0,0 +1,23 @@
|
||||
name: 'Download TagLib'
|
||||
description: 'Downloads and extracts the TagLib library, adding it to PKG_CONFIG_PATH'
|
||||
inputs:
|
||||
version:
|
||||
description: 'Version of TagLib to download'
|
||||
required: true
|
||||
platform:
|
||||
description: 'Platform to download TagLib for'
|
||||
default: 'linux-amd64'
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Download TagLib
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p /tmp/taglib
|
||||
cd /tmp
|
||||
FILE=taglib-${{ inputs.platform }}.tar.gz
|
||||
wget https://github.com/navidrome/cross-taglib/releases/download/v${{ inputs.version }}/${FILE}
|
||||
tar -xzf ${FILE} -C taglib
|
||||
PKG_CONFIG_PREFIX=/tmp/taglib
|
||||
echo "PKG_CONFIG_PREFIX=${PKG_CONFIG_PREFIX}" >> $GITHUB_ENV
|
||||
echo "PKG_CONFIG_PATH=${PKG_CONFIG_PATH}:${PKG_CONFIG_PREFIX}/lib/pkgconfig" >> $GITHUB_ENV
|
||||
.github/actions/prepare-docker/action.yml (new file, 84 lines, vendored)
@@ -0,0 +1,84 @@
|
||||
name: 'Prepare Docker Buildx environment'
|
||||
description: 'Prepares the Docker Buildx environment: logs in to the registries and extracts image metadata'
|
||||
inputs:
|
||||
github_token:
|
||||
description: 'GitHub token'
|
||||
required: true
|
||||
default: ''
|
||||
hub_repository:
|
||||
description: 'Docker Hub repository to push images to'
|
||||
required: false
|
||||
default: ''
|
||||
hub_username:
|
||||
description: 'Docker Hub username'
|
||||
required: false
|
||||
default: ''
|
||||
hub_password:
|
||||
description: 'Docker Hub password'
|
||||
required: false
|
||||
default: ''
|
||||
outputs:
|
||||
tags:
|
||||
description: 'Docker image tags'
|
||||
value: ${{ steps.meta.outputs.tags }}
|
||||
labels:
|
||||
description: 'Docker image labels'
|
||||
value: ${{ steps.meta.outputs.labels }}
|
||||
annotations:
|
||||
description: 'Docker image annotations'
|
||||
value: ${{ steps.meta.outputs.annotations }}
|
||||
version:
|
||||
description: 'Docker image version'
|
||||
value: ${{ steps.meta.outputs.version }}
|
||||
hub_repository:
|
||||
description: 'Docker Hub repository'
|
||||
value: ${{ env.DOCKER_HUB_REPO }}
|
||||
hub_enabled:
|
||||
description: 'Is Docker Hub enabled'
|
||||
value: ${{ env.DOCKER_HUB_ENABLED }}
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Check Docker Hub configuration
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -z "${{inputs.hub_repository}}" ]; then
|
||||
echo "DOCKER_HUB_REPO=none" >> $GITHUB_ENV
|
||||
echo "DOCKER_HUB_ENABLED=false" >> $GITHUB_ENV
|
||||
else
|
||||
echo "DOCKER_HUB_REPO=${{inputs.hub_repository}}" >> $GITHUB_ENV
|
||||
echo "DOCKER_HUB_ENABLED=true" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: inputs.hub_username != '' && inputs.hub_password != ''
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ inputs.hub_username }}
|
||||
password: ${{ inputs.hub_password }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ inputs.github_token }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Extract metadata for Docker image
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
labels: |
|
||||
maintainer=deluan@navidrome.org
|
||||
images: |
|
||||
name=${{env.DOCKER_HUB_REPO}},enable=${{env.DOCKER_HUB_ENABLED}}
|
||||
name=ghcr.io/${{ github.repository }}
|
||||
tags: |
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=raw,value=develop,enable={{is_default_branch}}
|
||||
.github/dependabot.yml (10 changed lines, vendored)
@@ -10,3 +10,13 @@ updates:
|
||||
schedule:
|
||||
interval: weekly
|
||||
open-pull-requests-limit: 10
|
||||
- package-ecosystem: docker
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: weekly
|
||||
open-pull-requests-limit: 10
|
||||
- package-ecosystem: github-actions
|
||||
directory: "/.github/workflows"
|
||||
schedule:
|
||||
interval: weekly
|
||||
open-pull-requests-limit: 10
|
||||
.github/workflows/pipeline.dockerfile (deleted, 40 lines, vendored)
@@ -1,40 +0,0 @@
|
||||
#####################################################
|
||||
### Copy platform specific binary
|
||||
FROM bash as copy-binary
|
||||
ARG TARGETPLATFORM
|
||||
|
||||
RUN echo "Target Platform = ${TARGETPLATFORM}"
|
||||
|
||||
COPY dist .
|
||||
RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then cp navidrome_linux_amd64_linux_amd64_v1/navidrome /navidrome; fi
|
||||
RUN if [ "$TARGETPLATFORM" = "linux/386" ]; then cp navidrome_linux_386_linux_386/navidrome /navidrome; fi
|
||||
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then cp navidrome_linux_arm64_linux_arm64/navidrome /navidrome; fi
|
||||
RUN if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then cp navidrome_linux_arm_linux_arm_6/navidrome /navidrome; fi
|
||||
RUN if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then cp navidrome_linux_arm_linux_arm_7/navidrome /navidrome; fi
|
||||
RUN chmod +x /navidrome
|
||||
|
||||
|
||||
#####################################################
|
||||
### Build Final Image
|
||||
FROM alpine:3.18
|
||||
LABEL maintainer="deluan@navidrome.org"
|
||||
|
||||
# Install ffmpeg and mpv
|
||||
RUN apk add -U --no-cache ffmpeg mpv
|
||||
|
||||
# Show ffmpeg build info, for troubleshooting purposes
|
||||
RUN ffmpeg -buildconf
|
||||
|
||||
COPY --from=copy-binary /navidrome /app/
|
||||
|
||||
VOLUME ["/data", "/music"]
|
||||
ENV ND_MUSICFOLDER /music
|
||||
ENV ND_DATAFOLDER /data
|
||||
ENV ND_PORT 4533
|
||||
ENV GODEBUG "asyncpreemptoff=1"
|
||||
|
||||
EXPOSE ${ND_PORT}
|
||||
HEALTHCHECK CMD wget -O- http://localhost:${ND_PORT}/ping || exit 1
|
||||
WORKDIR /app
|
||||
|
||||
ENTRYPOINT ["/app/navidrome"]
|
||||
.github/workflows/pipeline.yml (434 changed lines, vendored)
@@ -9,28 +9,76 @@ on:
|
||||
branches:
|
||||
- master
|
||||
|
||||
concurrency:
|
||||
group: ${{ startsWith(github.ref, 'refs/tags/v') && 'tag' || 'branch' }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CROSS_TAGLIB_VERSION: "2.0.2-1"
|
||||
IS_RELEASE: ${{ startsWith(github.ref, 'refs/tags/') && 'true' || 'false' }}
|
||||
|
||||
jobs:
|
||||
git-version:
|
||||
name: Get version info
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
git_tag: ${{ steps.git-version.outputs.GIT_TAG }}
|
||||
git_sha: ${{ steps.git-version.outputs.GIT_SHA }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
fetch-tags: true
|
||||
|
||||
- name: Show git version info
|
||||
run: |
|
||||
echo "git describe (dirty): $(git describe --dirty --always --tags)"
|
||||
echo "git describe --tags: $(git describe --tags `git rev-list --tags --max-count=1`)"
|
||||
echo "git tag: $(git tag --sort=-committerdate | head -n 1)"
|
||||
echo "github_ref: $GITHUB_REF"
|
||||
echo "github_head_sha: ${{ github.event.pull_request.head.sha }}"
|
||||
git tag -l
|
||||
- name: Determine git current SHA and latest tag
|
||||
id: git-version
|
||||
run: |
|
||||
GIT_TAG=$(git tag --sort=-committerdate | head -n 1)
|
||||
if [ -n "$GIT_TAG" ]; then
|
||||
if [[ "$GITHUB_REF" != refs/tags/* ]]; then
|
||||
GIT_TAG=${GIT_TAG}-SNAPSHOT
|
||||
fi
|
||||
echo "GIT_TAG=$GIT_TAG" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
GIT_SHA=$(git rev-parse --short HEAD)
|
||||
PR_NUM=$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH")
|
||||
if [[ $PR_NUM != "null" ]]; then
|
||||
GIT_SHA=$(echo "${{ github.event.pull_request.head.sha }}" | cut -c1-8)
|
||||
GIT_SHA="pr-${PR_NUM}/${GIT_SHA}"
|
||||
fi
|
||||
echo "GIT_SHA=$GIT_SHA" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "GIT_TAG=$GIT_TAG"
|
||||
echo "GIT_SHA=$GIT_SHA"
|
||||
|
||||
go-lint:
|
||||
name: Lint Go code
|
||||
runs-on: ubuntu-latest
|
||||
container: deluan/ci-goreleaser:1.23.0-1
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Config workspace folder as trusted
|
||||
run: git config --global --add safe.directory $GITHUB_WORKSPACE; git describe --dirty --always --tags
|
||||
- name: Download TagLib
|
||||
uses: ./.github/actions/download-taglib
|
||||
with:
|
||||
version: ${{ env.CROSS_TAGLIB_VERSION }}
|
||||
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v6
|
||||
uses: golangci/golangci-lint-action@v7
|
||||
with:
|
||||
version: latest
|
||||
problem-matchers: true
|
||||
args: --timeout 2m
|
||||
|
||||
- name: Install goimports
|
||||
run: go install golang.org/x/tools/cmd/goimports@latest
|
||||
|
||||
- run: goimports -w `find . -name '*.go' | grep -v '_gen.go$'`
|
||||
- name: Run go goimports
|
||||
run: go run golang.org/x/tools/cmd/goimports@latest -w `find . -name '*.go' | grep -v '_gen.go$'`
|
||||
- run: go mod tidy
|
||||
- name: Verify no changes from goimports and go mod tidy
|
||||
run: |
|
||||
@@ -43,25 +91,25 @@ jobs:
|
||||
go:
|
||||
name: Test Go code
|
||||
runs-on: ubuntu-latest
|
||||
container: deluan/ci-goreleaser:1.23.0-1
|
||||
steps:
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Config workspace folder as trusted
|
||||
run: git config --global --add safe.directory $GITHUB_WORKSPACE; git describe --dirty --always --tags
|
||||
- name: Download TagLib
|
||||
uses: ./.github/actions/download-taglib
|
||||
with:
|
||||
version: ${{ env.CROSS_TAGLIB_VERSION }}
|
||||
|
||||
- name: Download dependencies
|
||||
if: steps.cache-go.outputs.cache-hit != 'true'
|
||||
continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
|
||||
run: go mod download
|
||||
|
||||
- name: Test
|
||||
continue-on-error: ${{contains(matrix.go_version, 'beta') || contains(matrix.go_version, 'rc')}}
|
||||
run: go test -shuffle=on -race -cover ./... -v
|
||||
run: |
|
||||
pkg-config --define-prefix --cflags --libs taglib # for debugging
|
||||
go test -shuffle=on -tags netgo -race -cover ./... -v
|
||||
|
||||
js:
|
||||
name: Build JS bundle
|
||||
name: Test JS code
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
NODE_OPTIONS: "--max_old_space_size=4096"
|
||||
@@ -93,12 +141,6 @@ jobs:
|
||||
cd ui
|
||||
npm run build
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: js-bundle
|
||||
path: ui/build
|
||||
retention-days: 7
|
||||
|
||||
i18n-lint:
|
||||
name: Lint i18n files
|
||||
runs-on: ubuntu-latest
|
||||
@@ -116,108 +158,272 @@ jobs:
|
||||
fi
|
||||
done
|
||||
|
||||
binaries:
|
||||
name: Build binaries
|
||||
needs: [js, go, go-lint, i18n-lint]
|
||||
check-push-enabled:
|
||||
name: Check Docker configuration
|
||||
runs-on: ubuntu-latest
|
||||
container: deluan/ci-goreleaser:1.23.0-1
|
||||
outputs:
|
||||
is_enabled: ${{ steps.check.outputs.is_enabled }}
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Check if Docker push is configured
|
||||
id: check
|
||||
run: echo "is_enabled=${{ secrets.DOCKER_HUB_USERNAME != '' }}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Config workspace folder as trusted
|
||||
run: git config --global --add safe.directory $GITHUB_WORKSPACE; git describe --dirty --always --tags
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: js-bundle
|
||||
path: ui/build
|
||||
|
||||
- name: Run GoReleaser - SNAPSHOT
|
||||
if: startsWith(github.ref, 'refs/tags/') != true
|
||||
run: goreleaser release --clean --skip=publish --snapshot
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run GoReleaser - RELEASE
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
run: goreleaser release --clean
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: binaries
|
||||
path: |
|
||||
dist
|
||||
!dist/*.tar.gz
|
||||
!dist/*.zip
|
||||
retention-days: 7
|
||||
|
||||
docker:
|
||||
name: Build and publish Docker images
|
||||
needs: [binaries]
|
||||
build:
|
||||
name: Build
|
||||
needs: [js, go, go-lint, i18n-lint, git-version, check-push-enabled]
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ linux/amd64, linux/arm64, linux/arm/v5, linux/arm/v6, linux/arm/v7, linux/386, darwin/amd64, darwin/arm64, windows/amd64, windows/386 ]
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DOCKER_IMAGE: ${{secrets.DOCKER_IMAGE}}
|
||||
IS_LINUX: ${{ startsWith(matrix.platform, 'linux/') && 'true' || 'false' }}
|
||||
IS_ARMV5: ${{ matrix.platform == 'linux/arm/v5' && 'true' || 'false' }}
|
||||
IS_DOCKER_PUSH_CONFIGURED: ${{ needs.check-push-enabled.outputs.is_enabled == 'true' }}
|
||||
DOCKER_BUILD_SUMMARY: false
|
||||
GIT_SHA: ${{ needs.git-version.outputs.git_sha }}
|
||||
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
|
||||
steps:
|
||||
- name: Set up QEMU
|
||||
id: qemu
|
||||
uses: docker/setup-qemu-action@v3
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
- name: Sanitize platform name
|
||||
id: set-platform
|
||||
run: |
|
||||
PLATFORM=$(echo ${{ matrix.platform }} | tr '/' '_')
|
||||
echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
- name: Prepare Docker Buildx
|
||||
uses: ./.github/actions/prepare-docker
|
||||
id: docker
|
||||
with:
|
||||
name: binaries
|
||||
path: dist
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
|
||||
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata for Docker
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
labels: |
|
||||
maintainer=deluan
|
||||
images: |
|
||||
name=${{secrets.DOCKER_IMAGE}}
|
||||
name=ghcr.io/${{ github.repository }}
|
||||
tags: |
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=raw,value=develop,enable={{is_default_branch}}
|
||||
|
||||
- name: Build and Push
|
||||
if: env.DOCKER_IMAGE != ''
|
||||
uses: docker/build-push-action@v5
|
||||
- name: Build Binaries
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: .github/workflows/pipeline.dockerfile
|
||||
platforms: linux/amd64,linux/386,linux/arm/v6,linux/arm/v7,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
file: Dockerfile
|
||||
platforms: ${{ matrix.platform }}
|
||||
outputs: |
|
||||
type=local,dest=./output/${{ env.PLATFORM }}
|
||||
target: binary
|
||||
build-args: |
|
||||
GIT_SHA=${{ env.GIT_SHA }}
|
||||
GIT_TAG=${{ env.GIT_TAG }}
|
||||
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
|
||||
|
||||
- name: Upload Binaries
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: navidrome-${{ env.PLATFORM }}
|
||||
path: ./output
|
||||
retention-days: 7
|
||||
|
||||
- name: Build and push image by digest
|
||||
id: push-image
|
||||
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
platforms: ${{ matrix.platform }}
|
||||
labels: ${{ steps.docker.outputs.labels }}
|
||||
build-args: |
|
||||
GIT_SHA=${{ env.GIT_SHA }}
|
||||
GIT_TAG=${{ env.GIT_TAG }}
|
||||
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
|
||||
outputs: |
|
||||
type=image,name=${{ steps.docker.outputs.hub_repository }},push-by-digest=true,name-canonical=true,push=${{ steps.docker.outputs.hub_enabled }}
|
||||
type=image,name=ghcr.io/${{ github.repository }},push-by-digest=true,name-canonical=true,push=true
|
||||
|
||||
- name: Export digest
|
||||
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
digest="${{ steps.push-image.outputs.digest }}"
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@v4
|
||||
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||
with:
|
||||
name: digests-${{ env.PLATFORM }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
push-manifest:
|
||||
name: Push Docker manifest
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build, check-push-enabled]
|
||||
if: needs.check-push-enabled.outputs.is_enabled == 'true'
|
||||
env:
|
||||
REGISTRY_IMAGE: ghcr.io/${{ github.repository }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Prepare Docker Buildx
|
||||
uses: ./.github/actions/prepare-docker
|
||||
id: docker
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
hub_repository: ${{ vars.DOCKER_HUB_REPO }}
|
||||
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
|
||||
|
||||
- name: Create manifest list and push to ghcr.io
|
||||
working-directory: /tmp/digests
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
|
||||
|
||||
- name: Create manifest list and push to Docker Hub
|
||||
working-directory: /tmp/digests
|
||||
if: vars.DOCKER_HUB_REPO != ''
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ vars.DOCKER_HUB_REPO }}@sha256:%s ' *)
|
||||
|
||||
- name: Inspect image in ghcr.io
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}
|
||||
|
||||
- name: Inspect image in Docker Hub
|
||||
if: vars.DOCKER_HUB_REPO != ''
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ vars.DOCKER_HUB_REPO }}:${{ steps.docker.outputs.version }}
|
||||
|
||||
- name: Delete unnecessary digest artifacts
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("digests-")) | .id'); do
|
||||
gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
|
||||
done
|
||||
|
||||
msi:
|
||||
name: Build Windows installers
|
||||
needs: [build, git-version]
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: ./binaries
|
||||
pattern: navidrome-windows*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Install Wix
|
||||
run: sudo apt-get install -y wixl jq
|
||||
|
||||
- name: Build MSI
|
||||
env:
|
||||
GIT_TAG: ${{ needs.git-version.outputs.git_tag }}
|
||||
run: |
|
||||
rm -rf binaries/msi
|
||||
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} 386
|
||||
sudo GIT_TAG=$GIT_TAG release/wix/build_msi.sh ${GITHUB_WORKSPACE} amd64
|
||||
du -h binaries/msi/*.msi
|
||||
|
||||
- name: Upload MSI files
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: navidrome-windows-installers
|
||||
path: binaries/msi/*.msi
|
||||
retention-days: 7
|
||||
|
||||
release:
|
||||
name: Package/Release
|
||||
needs: [build, msi]
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
package_list: ${{ steps.set-package-list.outputs.package_list }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
fetch-tags: true
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: ./binaries
|
||||
pattern: navidrome-*
|
||||
merge-multiple: true
|
||||
|
||||
- run: ls -lR ./binaries
|
||||
|
||||
- name: Set RELEASE_FLAGS for snapshot releases
|
||||
if: env.IS_RELEASE == 'false'
|
||||
run: echo 'RELEASE_FLAGS=--skip=publish --snapshot' >> $GITHUB_ENV
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v6
|
||||
with:
|
||||
version: '~> v2'
|
||||
args: "release --clean -f release/goreleaser.yml ${{ env.RELEASE_FLAGS }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Remove build artifacts
|
||||
run: |
|
||||
ls -l ./dist
|
||||
rm ./dist/*.tar.gz ./dist/*.zip
|
||||
|
||||
- name: Upload all-packages artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: packages
|
||||
path: dist/navidrome_0*
|
||||
|
||||
- id: set-package-list
|
||||
name: Export list of generated packages
|
||||
run: |
|
||||
cd dist
|
||||
set +x
|
||||
ITEMS=$(ls navidrome_0* | sed 's/^navidrome_0[^_]*_linux_//' | jq -R -s -c 'split("\n")[:-1]')
|
||||
echo $ITEMS
|
||||
echo "package_list=${ITEMS}" >> $GITHUB_OUTPUT
|
||||
|
||||
upload-packages:
|
||||
name: Upload Linux PKG
|
||||
runs-on: ubuntu-latest
|
||||
needs: [release]
|
||||
strategy:
|
||||
matrix:
|
||||
item: ${{ fromJson(needs.release.outputs.package_list) }}
|
||||
steps:
|
||||
- name: Download all-packages artifact
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: packages
|
||||
path: ./dist
|
||||
|
||||
- name: Upload all-packages artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: navidrome_linux_${{ matrix.item }}
|
||||
path: dist/navidrome_0*_linux_${{ matrix.item }}
|
||||
|
||||
# delete-artifacts:
|
||||
# name: Delete unused artifacts
|
||||
# runs-on: ubuntu-latest
|
||||
# needs: [upload-packages]
|
||||
# steps:
|
||||
# - name: Delete all-packages artifact
|
||||
# env:
|
||||
# GH_TOKEN: ${{ github.token }}
|
||||
# run: |
|
||||
# for artifact in $(gh api repos/${{ github.repository }}/actions/artifacts | jq -r '.artifacts[] | select(.name | startswith("packages")) | .id'); do
|
||||
# gh api --method DELETE repos/${{ github.repository }}/actions/artifacts/$artifact
|
||||
# done
|
||||
.github/workflows/update-translations.sh (55 changed lines, vendored)
@@ -9,6 +9,7 @@ process_json() {
|
||||
jq 'walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)' "$1"
|
||||
}
|
||||
|
||||
# Function to check differences between local and remote translations
|
||||
check_lang_diff() {
|
||||
filename=${I18N_DIR}/"$1".json
|
||||
url=$(curl -s -X POST https://poeditor.com/api/ \
|
||||
@@ -35,10 +36,58 @@ check_lang_diff() {
|
||||
rm -f poeditor.json poeditor.tmp "$filename".tmp
|
||||
}
|
||||
|
||||
# Function to get the list of languages
|
||||
get_language_list() {
|
||||
response=$(curl -s -X POST https://api.poeditor.com/v2/languages/list \
|
||||
-d api_token="${POEDITOR_APIKEY}" \
|
||||
-d id="${POEDITOR_PROJECTID}")
|
||||
|
||||
echo $response
|
||||
}
|
||||
|
||||
# Function to get the language name from the language code
|
||||
get_language_name() {
|
||||
lang_code="$1"
|
||||
lang_list="$2"
|
||||
|
||||
lang_name=$(echo "$lang_list" | jq -r ".result.languages[] | select(.code == \"$lang_code\") | .name")
|
||||
|
||||
if [ -z "$lang_name" ]; then
|
||||
echo "Error: Language code '$lang_code' not found" >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
echo "$lang_name"
|
||||
}
|
||||
|
||||
# Function to get the language code from the file path
|
||||
get_lang_code() {
|
||||
filepath="$1"
|
||||
# Extract just the filename
|
||||
filename=$(basename "$filepath")
|
||||
|
||||
# Remove the extension
|
||||
lang_code="${filename%.*}"
|
||||
|
||||
echo "$lang_code"
|
||||
}
|
||||
|
||||
lang_list=$(get_language_list)
|
||||
|
||||
# Check differences for each language
|
||||
for file in ${I18N_DIR}/*.json; do
|
||||
name=$(basename "$file")
|
||||
code=$(echo "$name" | cut -f1 -d.)
|
||||
code=$(get_lang_code "$file")
|
||||
lang=$(jq -r .languageName < "$file")
|
||||
echo "Downloading $lang ($code)"
|
||||
lang_name=$(get_language_name "$code" "$lang_list")
|
||||
echo "Downloading $lang_name - $lang ($code)"
|
||||
check_lang_diff "$code"
|
||||
done
|
||||
|
||||
# List changed languages to stderr
|
||||
languages=""
|
||||
for file in $(git diff --name-only --exit-code | grep json); do
|
||||
lang_code=$(get_lang_code "$file")
|
||||
lang_name=$(get_language_name "$lang_code" "$lang_list")
|
||||
languages="${languages}$(echo "$lang_name" | tr -d '\n'), "
|
||||
done
|
||||
echo "${languages%??}" 1>&2
|
||||
.github/workflows/update-translations.yml (13 changed lines, vendored)
@@ -10,19 +10,24 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Get updated translations
|
||||
id: poeditor
|
||||
env:
|
||||
POEDITOR_PROJECTID: ${{ secrets.POEDITOR_PROJECTID }}
|
||||
POEDITOR_APIKEY: ${{ secrets.POEDITOR_APIKEY }}
|
||||
run: |
|
||||
.github/workflows/update-translations.sh
|
||||
.github/workflows/update-translations.sh 2> title.tmp
|
||||
title=$(cat title.tmp)
|
||||
echo "::set-output name=title::$title"
|
||||
rm title.tmp
|
||||
- name: Show changes, if any
|
||||
run: |
|
||||
git status --porcelain
|
||||
git diff
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.PAT }}
|
||||
commit-message: Update translations
|
||||
title: "fix(ui): update translations from POEditor"
|
||||
author: "navidrome-bot <navidrome-bot@navidrome.org>"
|
||||
commit-message: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
|
||||
title: "fix(ui): update ${{ steps.poeditor.outputs.title }} translations from POEditor"
|
||||
branch: update-translations
|
||||
|
||||
.gitignore (11 changed lines, vendored)
@@ -11,18 +11,17 @@ wiki
|
||||
TODO.md
|
||||
var
|
||||
navidrome.toml
|
||||
!release/linux/navidrome.toml
|
||||
master.zip
|
||||
testDB
|
||||
navidrome.db
|
||||
cache/*
|
||||
*.swp
|
||||
embedded_gen.go
|
||||
dist
|
||||
music
|
||||
navidrome.db-shm
|
||||
navidrome.db-wal
|
||||
tags
|
||||
*.db*
|
||||
.gitinfo
|
||||
docker-compose.yml
|
||||
!contrib/docker-compose.yml
|
||||
test-123.db
|
||||
binaries
|
||||
navidrome-master
|
||||
*.exe
|
||||
@@ -1,3 +1,7 @@
|
||||
version: "2"
|
||||
run:
|
||||
build-tags:
|
||||
- netgo
|
||||
linters:
|
||||
enable:
|
||||
- asasalint
|
||||
@@ -7,31 +11,48 @@ linters:
|
||||
- copyloopvar
|
||||
- dogsled
|
||||
- durationcheck
|
||||
- errcheck
|
||||
- errorlint
|
||||
- gocritic
|
||||
- gocyclo
|
||||
- goprintffuncname
|
||||
- gosec
|
||||
- gosimple
|
||||
- govet
|
||||
- ineffassign
|
||||
- misspell
|
||||
- nakedret
|
||||
- nilerr
|
||||
- rowserrcheck
|
||||
- staticcheck
|
||||
- typecheck
|
||||
- unconvert
|
||||
- unused
|
||||
- whitespace
|
||||
|
||||
linters-settings:
|
||||
govet:
|
||||
enable:
|
||||
- nilness
|
||||
gosec:
|
||||
excludes:
|
||||
- G501
|
||||
- G401
|
||||
- G505
|
||||
- G115 # Can't check context, where the warning is clearly a false positive. See discussion in https://github.com/securego/gosec/pull/1149
|
||||
disable:
|
||||
- staticcheck
|
||||
settings:
|
||||
gocritic:
|
||||
disable-all: true
|
||||
enabled-checks:
|
||||
- deprecatedComment
|
||||
gosec:
|
||||
excludes:
|
||||
- G501
|
||||
- G401
|
||||
- G505
|
||||
- G115
|
||||
govet:
|
||||
enable:
|
||||
- nilness
|
||||
exclusions:
|
||||
generated: lax
|
||||
presets:
|
||||
- comments
|
||||
- common-false-positives
|
||||
- legacy
|
||||
- std-error-handling
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
formatters:
|
||||
exclusions:
|
||||
generated: lax
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
|
||||
.goreleaser.yml (deleted, 172 lines)
@@ -1,172 +0,0 @@
|
||||
# GoReleaser config
|
||||
project_name: navidrome
|
||||
version: 2
|
||||
|
||||
builds:
|
||||
- id: navidrome_linux_amd64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- amd64
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static -lz'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_linux_386
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- PKG_CONFIG_PATH=/i386/lib/pkgconfig
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- "386"
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_linux_arm
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=arm-linux-gnueabi-gcc
|
||||
- CXX=arm-linux-gnueabi-g++
|
||||
- PKG_CONFIG_PATH=/arm/lib/pkgconfig
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- arm
|
||||
goarm:
|
||||
- "5"
|
||||
- "6"
|
||||
- "7"
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_linux_arm64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=aarch64-linux-gnu-gcc
|
||||
- CXX=aarch64-linux-gnu-g++
|
||||
- PKG_CONFIG_PATH=/arm64/lib/pkgconfig
|
||||
goos:
|
||||
- linux
|
||||
goarch:
|
||||
- arm64
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_windows_386
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=i686-w64-mingw32-gcc
|
||||
- CXX=i686-w64-mingw32-g++
|
||||
- PKG_CONFIG_PATH=/mingw32/lib/pkgconfig
|
||||
goos:
|
||||
- windows
|
||||
goarch:
|
||||
- "386"
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_windows_amd64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=x86_64-w64-mingw32-gcc
|
||||
- CXX=x86_64-w64-mingw32-g++
|
||||
- PKG_CONFIG_PATH=/mingw64/lib/pkgconfig
|
||||
goos:
|
||||
- windows
|
||||
goarch:
|
||||
- amd64
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- "-extldflags '-static'"
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
- id: navidrome_darwin_amd64
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- CC=o64-clang
|
||||
- CXX=o64-clang++
|
||||
- PKG_CONFIG_PATH=/darwin/lib/pkgconfig
|
||||
goos:
|
||||
- darwin
|
||||
goarch:
|
||||
- amd64
|
||||
flags:
|
||||
- -tags=netgo
|
||||
ldflags:
|
||||
- -s -w -X github.com/navidrome/navidrome/consts.gitSha={{.ShortCommit}} -X github.com/navidrome/navidrome/consts.gitTag={{.Version}}
|
||||
|
||||
archives:
|
||||
- format_overrides:
|
||||
- goos: windows
|
||||
format: zip
|
||||
|
||||
checksum:
|
||||
name_template: "{{ .ProjectName }}_checksums.txt"
|
||||
|
||||
snapshot:
|
||||
version_template: "{{ .Tag }}-SNAPSHOT"
|
||||
|
||||
release:
|
||||
draft: true
|
||||
mode: append
|
||||
footer: |
|
||||
**Full Changelog**: https://github.com/navidrome/navidrome/compare/{{ .PreviousTag }}...{{ .Tag }}
|
||||
|
||||
## Helping out
|
||||
|
||||
This release is only possible thanks to the support of some **awesome people**!
|
||||
|
||||
Want to be one of them?
|
||||
You can [sponsor](https://github.com/sponsors/deluan), pay me a [Ko-fi](https://ko-fi.com/deluan) or [contribute with code](https://www.navidrome.org/docs/developers/).
|
||||
|
||||
## Where to go next?
|
||||
|
||||
* Read installation instructions on our [website](https://www.navidrome.org/docs/installation/).
|
||||
* Reach out on [Discord](https://discord.gg/xh7j7yF), [Reddit](https://www.reddit.com/r/navidrome/) and [Twitter](https://twitter.com/navidrome)!
|
||||
|
||||
changelog:
|
||||
sort: asc
|
||||
use: github
|
||||
filters:
|
||||
exclude:
|
||||
- "^test:"
|
||||
- Merge pull request
|
||||
- Merge remote-tracking branch
|
||||
- Merge branch
|
||||
- go mod tidy
|
||||
groups:
|
||||
- title: "New Features"
|
||||
regexp: '^.*?feat(\(.+\))??!?:.+$'
|
||||
order: 100
|
||||
- title: "Security updates"
|
||||
regexp: '^.*?sec(\(.+\))??!?:.+$'
|
||||
order: 150
|
||||
- title: "Bug fixes"
|
||||
regexp: '^.*?(fix|refactor)(\(.+\))??!?:.+$'
|
||||
order: 200
|
||||
- title: "Documentation updates"
|
||||
regexp: ^.*?docs?(\(.+\))??!?:.+$
|
||||
order: 400
|
||||
- title: "Build process updates"
|
||||
regexp: ^.*?(build|ci)(\(.+\))??!?:.+$
|
||||
order: 400
|
||||
- title: Other work
|
||||
order: 9999
|
||||
AGENTS.md (new file, 110 lines)
@@ -0,0 +1,110 @@
# Testing Instructions

- **No implementation task is considered complete until it includes thorough, passing tests that cover the new or changed functionality. All new code must be accompanied by Ginkgo/Gomega tests, and PRs/commits without tests should be considered incomplete.**
- All Go tests in this project **MUST** be written using the **Ginkgo v2** and **Gomega** frameworks.
- To run all tests, use `make test`.
- To run tests for a specific package, use `make test PKG=./pkgname/...`
- Do not run tests in parallel
- Don't use `--fail-on-pending`

## Mocking Convention

- Always try to use the mocks provided in the `tests` package before creating a new mock implementation (see the sketch after this list).
- Only create a new mock if the required functionality is not covered by the existing mocks in `tests`.
- Never mock a real implementation when testing. Remember: there is no value in testing an interface, only the real implementation.
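To make the convention concrete, here is a minimal sketch of a spec that reuses a shared mock rather than defining a new one. The `tests.MockDataStore` name and the `model.DataStore` wiring are assumptions for illustration only; check the `tests` package for the mocks that actually exist.

```go
package core_test

import (
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

var _ = Describe("Reusing shared mocks (illustrative)", func() {
	var ds model.DataStore

	BeforeEach(func() {
		// Reuse the shared mock from the tests package instead of writing a new one.
		// tests.MockDataStore is an assumed name, used only for this sketch.
		ds = &tests.MockDataStore{}
	})

	It("provides a datastore for the code under test", func() {
		Expect(ds).NotTo(BeNil())
	})
})
```
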
## Example

Every package that you write tests for should have a `*_suite_test.go` file to hook up the Ginkgo test suite. Example:
```
package core

import (
	"testing"

	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestCore(t *testing.T) {
	tests.Init(t, false)
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Core Suite")
}
```
Never put a `func Test*` in regular *_test.go files, only in `*_suite_test.go` files.

Refer to existing test suites for examples of proper setup and usage, such as the one defined in @core_suite_test.go

## Exceptions

There should be no exceptions to this rule. If you encounter tests written with the standard `testing` package or other frameworks, they should be refactored to use Ginkgo/Gomega. If you need a new mock, first confirm that it does not already exist in the `tests` package.

### Configuration

You can set config values in the BeforeEach/BeforeAll blocks. If you do so, remember to add `DeferCleanup(configtest.SetupConfig())` to reset the values. Example:

```go
BeforeEach(func() {
	DeferCleanup(configtest.SetupConfig())
	conf.Server.EnableDownloads = true
})
```

# Logging System Usage Guide

This project uses a custom logging system built on top of logrus, `log/log.go`. Follow these conventions for all logging:

## Logging API
- Use the provided functions for logging at different levels:
  - `Error(...)`, `Warn(...)`, `Info(...)`, `Debug(...)`, `Trace(...)`, `Fatal(...)`
- These functions accept flexible arguments:
  - The first argument can be a context (`context.Context`), an HTTP request, or `nil`.
  - The next argument is the log message (string or error).
  - Additional arguments are key-value pairs (e.g., `"key", value`).
  - If the last argument is an error, it is logged under the `error` key.

**Examples:**
```go
log.Error("A message")
log.Error(ctx, "A message with context")
log.Error("Failed to save", "id", 123, err)
log.Info(req, "Request received", "user", userID)
```

## Logging errors
- You don't need to add the `"err"` key when logging an error; it is added automatically.
- The error must always be the last parameter in the log call.
Examples:
```go
log.Error("Failed to save", "id", 123, err) // GOOD
log.Error("Failed to save", "id", 123, "err", err) // BAD
log.Error("Failed to save", err, "id", 123) // BAD
```

## Context and Request Logging
- If a context or HTTP request is passed as the first argument, any logger fields in the context are included in the log entry.
- Use `log.NewContext(ctx, "key", value, ...)` to add fields to a context for logging, as shown in the sketch below.
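A minimal sketch of attaching request-scoped fields with `log.NewContext` inside an HTTP handler; the handler, package name, and `doWork` are placeholders, not real Navidrome code.

```go
package server

import (
	"context"
	"net/http"

	"github.com/navidrome/navidrome/log"
)

func doWork(ctx context.Context) error { return nil } // placeholder

func handleExample(w http.ResponseWriter, r *http.Request) {
	// Attach request-scoped fields once; every later log call that receives
	// this context includes them automatically.
	ctx := log.NewContext(r.Context(), "endpoint", r.URL.Path, "remote", r.RemoteAddr)

	if err := doWork(ctx); err != nil {
		log.Error(ctx, "Request failed", err) // error goes last, per the convention above
		http.Error(w, "internal error", http.StatusInternalServerError)
		return
	}
	log.Info(ctx, "Request completed")
}
```
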

## Log Levels
- Set the global log level with `log.SetLevel(log.LevelInfo)` or `log.SetLevelString("info")`.
- Per-path log levels can be set with `log.SetLogLevels(map[string]string{"path": "level"})`.
- Use `log.IsGreaterOrEqualTo(level)` to check if a log level is enabled for the current code path (see the sketch below).
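Where building the log payload is expensive, the level check can gate the work. A small sketch, assuming a `log.LevelDebug` constant alongside the levels mentioned above; `ctx` and `expensiveStateDump` are placeholders:

```go
// Only pay for the debug payload when debug logging is actually
// enabled for this code path.
if log.IsGreaterOrEqualTo(log.LevelDebug) {
	log.Debug(ctx, "Scanner state", "state", expensiveStateDump())
}
```
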
## Source Line Logging
- Enable source file/line logging with `log.SetLogSourceLine(true)`.

## Best Practices
- Always use the logging API, never log directly with logrus or fmt.
- Prefer structured logging (key-value pairs) for important data.
- Use context/request logging for traceability in web handlers.
- For tests, use Ginkgo/Gomega and set up a test logger as in `log/log_test.go`.

## See Also
- `log/log.go` for implementation details
- `log/log_test.go` for usage examples and test patterns

Dockerfile (new file, 145 lines)
@@ -0,0 +1,145 @@
|
||||
FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcross
|
||||
|
||||
########################################################################################################################
|
||||
### Build xx (original image: tonistiigi/xx)
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.21 AS xx-build
|
||||
|
||||
# v1.5.0
|
||||
ENV XX_VERSION=b4e4c451c778822e6742bfc9d9a91d7c7d885c8a
|
||||
|
||||
RUN apk add -U --no-cache git
|
||||
RUN git clone https://github.com/tonistiigi/xx && \
|
||||
cd xx && \
|
||||
git checkout ${XX_VERSION} && \
|
||||
mkdir -p /out && \
|
||||
cp src/xx-* /out/
|
||||
|
||||
RUN cd /out && \
|
||||
ln -s xx-cc /out/xx-clang && \
|
||||
ln -s xx-cc /out/xx-clang++ && \
|
||||
ln -s xx-cc /out/xx-c++ && \
|
||||
ln -s xx-apt /out/xx-apt-get
|
||||
|
||||
# xx mimics the original tonistiigi/xx image
|
||||
FROM scratch AS xx
|
||||
COPY --from=xx-build /out/ /usr/bin/
|
||||
|
||||
########################################################################################################################
|
||||
### Get TagLib
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.21 AS taglib-build
|
||||
ARG TARGETPLATFORM
|
||||
ARG CROSS_TAGLIB_VERSION=2.0.2-1
|
||||
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/
|
||||
|
||||
RUN <<EOT
|
||||
PLATFORM=$(echo ${TARGETPLATFORM} | tr '/' '-')
|
||||
FILE=taglib-${PLATFORM}.tar.gz
|
||||
|
||||
DOWNLOAD_URL=${CROSS_TAGLIB_RELEASES_URL}${FILE}
|
||||
wget ${DOWNLOAD_URL}
|
||||
|
||||
mkdir /taglib
|
||||
tar -xzf ${FILE} -C /taglib
|
||||
EOT
|
||||
|
||||
########################################################################################################################
|
||||
### Build Navidrome UI
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/node:lts-alpine AS ui
|
||||
WORKDIR /app
|
||||
|
||||
# Install node dependencies
|
||||
COPY ui/package.json ui/package-lock.json ./
|
||||
COPY ui/bin/ ./bin/
|
||||
RUN npm ci
|
||||
|
||||
# Build bundle
|
||||
COPY ui/ ./
|
||||
RUN npm run build -- --outDir=/build
|
||||
|
||||
FROM scratch AS ui-bundle
|
||||
COPY --from=ui /build /build
|
||||
|
||||
########################################################################################################################
|
||||
### Build Navidrome binary
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.24-bookworm AS base
|
||||
RUN apt-get update && apt-get install -y clang lld
|
||||
COPY --from=xx / /
|
||||
WORKDIR /workspace
|
||||
|
||||
FROM --platform=$BUILDPLATFORM base AS build
|
||||
|
||||
# Install build dependencies for the target platform
|
||||
ARG TARGETPLATFORM
|
||||
|
||||
RUN xx-apt install -y binutils gcc g++ libc6-dev zlib1g-dev
|
||||
RUN xx-verify --setup
|
||||
|
||||
RUN --mount=type=bind,source=. \
|
||||
--mount=type=cache,target=/root/.cache \
|
||||
--mount=type=cache,target=/go/pkg/mod \
|
||||
go mod download
|
||||
|
||||
ARG GIT_SHA
|
||||
ARG GIT_TAG
|
||||
|
||||
RUN --mount=type=bind,source=. \
|
||||
--mount=from=ui,source=/build,target=./ui/build,ro \
|
||||
--mount=from=osxcross,src=/osxcross/SDK,target=/xx-sdk,ro \
|
||||
--mount=type=cache,target=/root/.cache \
|
||||
--mount=type=cache,target=/go/pkg/mod \
|
||||
--mount=from=taglib-build,target=/taglib,src=/taglib,ro <<EOT
|
||||
|
||||
# Setup CGO cross-compilation environment
|
||||
xx-go --wrap
|
||||
export CGO_ENABLED=1
|
||||
export PKG_CONFIG_PATH=/taglib/lib/pkgconfig
|
||||
cat $(go env GOENV)
|
||||
|
||||
# Only Darwin (macOS) requires clang (default), Windows requires gcc, everything else can use any compiler.
|
||||
# So let's use gcc for everything except Darwin.
|
||||
if [ "$(xx-info os)" != "darwin" ]; then
|
||||
export CC=$(xx-info)-gcc
|
||||
export CXX=$(xx-info)-g++
|
||||
export LD_EXTRA="-extldflags '-static -latomic'"
|
||||
fi
|
||||
if [ "$(xx-info os)" = "windows" ]; then
|
||||
export EXT=".exe"
|
||||
fi
|
||||
|
||||
go build -tags=netgo -ldflags="${LD_EXTRA} -w -s \
|
||||
-X github.com/navidrome/navidrome/consts.gitSha=${GIT_SHA} \
|
||||
-X github.com/navidrome/navidrome/consts.gitTag=${GIT_TAG}" \
|
||||
-o /out/navidrome${EXT} .
|
||||
EOT
|
||||
|
||||
# Verify if the binary was built for the correct platform and it is statically linked
|
||||
RUN xx-verify --static /out/navidrome*
|
||||
|
||||
FROM scratch AS binary
|
||||
COPY --from=build /out /
|
||||
|
||||
########################################################################################################################
|
||||
### Build Final Image
|
||||
FROM public.ecr.aws/docker/library/alpine:3.21 AS final
|
||||
LABEL maintainer="deluan@navidrome.org"
|
||||
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"
|
||||
|
||||
# Install ffmpeg and mpv
|
||||
RUN apk add -U --no-cache ffmpeg mpv sqlite
|
||||
|
||||
# Copy navidrome binary
|
||||
COPY --from=build /out/navidrome /app/
|
||||
|
||||
VOLUME ["/data", "/music"]
|
||||
ENV ND_MUSICFOLDER=/music
|
||||
ENV ND_DATAFOLDER=/data
|
||||
ENV ND_CONFIGFILE=/data/navidrome.toml
|
||||
ENV ND_PORT=4533
|
||||
ENV GODEBUG="asyncpreemptoff=1"
|
||||
RUN touch /.nddockerenv
|
||||
|
||||
EXPOSE ${ND_PORT}
|
||||
WORKDIR /app
|
||||
|
||||
ENTRYPOINT ["/app/navidrome"]
|
||||
|
||||
Makefile (118 changed lines)
@@ -3,13 +3,19 @@ NODE_VERSION=$(shell cat .nvmrc)
|
||||
|
||||
ifneq ("$(wildcard .git/HEAD)","")
|
||||
GIT_SHA=$(shell git rev-parse --short HEAD)
|
||||
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)
|
||||
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)-SNAPSHOT
|
||||
else
|
||||
GIT_SHA=source_archive
|
||||
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))
|
||||
GIT_TAG=$(patsubst navidrome-%,v%,$(notdir $(PWD)))-SNAPSHOT
|
||||
endif
|
||||
|
||||
CI_RELEASER_VERSION ?= 1.23.0-1 ## https://github.com/navidrome/ci-goreleaser
|
||||
SUPPORTED_PLATFORMS ?= linux/amd64,linux/arm64,linux/arm/v5,linux/arm/v6,linux/arm/v7,linux/386,darwin/amd64,darwin/arm64,windows/amd64,windows/386
|
||||
IMAGE_PLATFORMS ?= $(shell echo $(SUPPORTED_PLATFORMS) | tr ',' '\n' | grep "linux" | grep -v "arm/v5" | tr '\n' ',' | sed 's/,$$//')
|
||||
PLATFORMS ?= $(SUPPORTED_PLATFORMS)
|
||||
DOCKER_TAG ?= deluan/navidrome:develop
|
||||
|
||||
# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
|
||||
CROSS_TAGLIB_VERSION ?= 2.0.2-1
|
||||
|
||||
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")
|
||||
|
||||
@@ -19,27 +25,32 @@ setup: check_env download-deps setup-git ##@1_Run_First Install dependencies and
|
||||
.PHONY: setup
|
||||
|
||||
dev: check_env ##@Development Start Navidrome in development mode, with hot-reload for both frontend and backend
|
||||
npx foreman -j Procfile.dev -p 4533 start
|
||||
ND_ENABLEINSIGHTSCOLLECTOR="false" npx foreman -j Procfile.dev -p 4533 start
|
||||
.PHONY: dev
|
||||
|
||||
server: check_go_env buildjs ##@Development Start the backend in development mode
|
||||
@go run github.com/cespare/reflex@latest -d none -c reflex.conf
|
||||
@ND_ENABLEINSIGHTSCOLLECTOR="false" go tool reflex -d none -c reflex.conf
|
||||
.PHONY: server
|
||||
|
||||
watch: ##@Development Start Go tests in watch mode (re-run when code changes)
|
||||
go run github.com/onsi/ginkgo/v2/ginkgo@latest watch -notify ./...
|
||||
go tool ginkgo watch -tags=netgo -notify ./...
|
||||
.PHONY: watch
|
||||
|
||||
PKG ?= ./...
|
||||
test: ##@Development Run Go tests
|
||||
go test -race -shuffle=on ./...
|
||||
go test -tags netgo $(PKG)
|
||||
.PHONY: test
|
||||
|
||||
testall: test ##@Development Run Go and JS tests
|
||||
@(cd ./ui && npm test -- --watchAll=false)
|
||||
testrace: ##@Development Run Go tests with race detector
|
||||
go test -tags netgo -race -shuffle=on ./...
|
||||
.PHONY: test
|
||||
|
||||
testall: testrace ##@Development Run Go and JS tests
|
||||
@(cd ./ui && npm run test:ci)
|
||||
.PHONY: testall
|
||||
|
||||
lint: ##@Development Lint Go code
|
||||
go run github.com/golangci/golangci-lint/cmd/golangci-lint@latest run -v --timeout 5m
|
||||
go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest run -v --timeout 5m
|
||||
.PHONY: lint
|
||||
|
||||
lintall: lint ##@Development Lint Go and JS code
|
||||
@@ -49,16 +60,16 @@ lintall: lint ##@Development Lint Go and JS code
|
||||
|
||||
format: ##@Development Format code
|
||||
@(cd ./ui && npm run prettier)
|
||||
@go run golang.org/x/tools/cmd/goimports@latest -w `find . -name '*.go' | grep -v _gen.go$$`
|
||||
@go tool goimports -w `find . -name '*.go' | grep -v _gen.go$$`
|
||||
@go mod tidy
|
||||
.PHONY: format
|
||||
|
||||
wire: check_go_env ##@Development Update Dependency Injection
|
||||
go run github.com/google/wire/cmd/wire@latest ./...
|
||||
go tool wire gen -tags=netgo ./...
|
||||
.PHONY: wire
|
||||
|
||||
snapshots: ##@Development Update (GoLang) Snapshot tests
|
||||
UPDATE_SNAPSHOTS=true go run github.com/onsi/ginkgo/v2/ginkgo@latest ./server/subsonic/...
|
||||
UPDATE_SNAPSHOTS=true go tool ginkgo ./server/subsonic/responses/...
|
||||
.PHONY: snapshots
|
||||
|
||||
migration-sql: ##@Development Create an empty SQL migration file
|
||||
@@ -81,53 +92,75 @@ setup-git: ##@Development Setup Git hooks (pre-commit and pre-push)
|
||||
.PHONY: setup-git
|
||||
|
||||
build: check_go_env buildjs ##@Build Build the project
|
||||
go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)-SNAPSHOT" -tags=netgo
|
||||
go build -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
|
||||
.PHONY: build
|
||||
|
||||
buildall: deprecated build
|
||||
.PHONY: buildall
|
||||
|
||||
debug-build: check_go_env buildjs ##@Build Build the project (with remote debug on)
|
||||
go build -gcflags="all=-N -l" -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)-SNAPSHOT" -tags=netgo
|
||||
go build -gcflags="all=-N -l" -ldflags="-X github.com/navidrome/navidrome/consts.gitSha=$(GIT_SHA) -X github.com/navidrome/navidrome/consts.gitTag=$(GIT_TAG)" -tags=netgo
|
||||
.PHONY: debug-build
|
||||
|
||||
buildjs: check_node_env ui/build/index.html ##@Build Build only frontend
|
||||
.PHONY: buildjs
|
||||
|
||||
docker-buildjs: ##@Build Build only frontend using Docker
|
||||
docker build --output "./ui" --target ui-bundle .
|
||||
.PHONY: docker-buildjs
|
||||
|
||||
ui/build/index.html: $(UI_SRC_FILES)
|
||||
@(cd ./ui && npm run build)
|
||||
|
||||
all: buildjs ##@Cross_Compilation Build binaries for all supported platforms.
|
||||
@echo "Building binaries for all platforms using builder ${CI_RELEASER_VERSION}"
|
||||
docker run -t -v $(PWD):/workspace -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
|
||||
goreleaser release --clean --skip=publish --snapshot
|
||||
.PHONY: all
|
||||
docker-platforms: ##@Cross_Compilation List supported platforms
|
||||
@echo "Supported platforms:"
|
||||
@echo "$(SUPPORTED_PLATFORMS)" | tr ',' '\n' | sort | sed 's/^/ /'
|
||||
@echo "\nUsage: make PLATFORMS=\"linux/amd64\" docker-build"
|
||||
@echo " make IMAGE_PLATFORMS=\"linux/amd64\" docker-image"
|
||||
.PHONY: docker-platforms
|
||||
|
||||
single: buildjs ##@Cross_Compilation Build binaries for a single supported platform.
|
||||
@if [ -z "${GOOS}" -o -z "${GOARCH}" ]; then \
|
||||
echo "Usage: GOOS=<os> GOARCH=<arch> make single"; \
|
||||
echo "Options:"; \
|
||||
grep -- "- id: navidrome_" .goreleaser.yml | sed 's/- id: navidrome_//g'; \
|
||||
exit 1; \
|
||||
fi
|
||||
@echo "Building binaries for ${GOOS}/${GOARCH} using builder ${CI_RELEASER_VERSION}"
|
||||
docker run -t -v $(PWD):/workspace -e GOOS -e GOARCH -w /workspace deluan/ci-goreleaser:$(CI_RELEASER_VERSION) \
|
||||
goreleaser build --clean --snapshot -p 2 --single-target --id navidrome_${GOOS}_${GOARCH}
|
||||
.PHONY: single
|
||||
docker-build: ##@Cross_Compilation Cross-compile for any supported platform (check `make docker-platforms`)
|
||||
docker buildx build \
|
||||
--platform $(PLATFORMS) \
|
||||
--build-arg GIT_TAG=${GIT_TAG} \
|
||||
--build-arg GIT_SHA=${GIT_SHA} \
|
||||
--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
|
||||
--output "./binaries" --target binary .
|
||||
.PHONY: docker-build
|
||||
|
||||
docker: buildjs ##@Build Build Docker linux/amd64 image (tagged as `deluan/navidrome:develop`)
|
||||
GOOS=linux GOARCH=amd64 make single
|
||||
@echo "Building Docker image"
|
||||
docker build . --platform linux/amd64 -t deluan/navidrome:develop -f .github/workflows/pipeline.dockerfile
|
||||
.PHONY: docker
|
||||
docker-image: ##@Cross_Compilation Build Docker image, tagged as `deluan/navidrome:develop`, override with DOCKER_TAG var. Use IMAGE_PLATFORMS to specify target platforms
|
||||
@echo $(IMAGE_PLATFORMS) | grep -q "windows" && echo "ERROR: Windows is not supported for Docker builds" && exit 1 || true
|
||||
@echo $(IMAGE_PLATFORMS) | grep -q "darwin" && echo "ERROR: macOS is not supported for Docker builds" && exit 1 || true
|
||||
@echo $(IMAGE_PLATFORMS) | grep -q "arm/v5" && echo "ERROR: Linux ARMv5 is not supported for Docker builds" && exit 1 || true
|
||||
docker buildx build \
|
||||
--platform $(IMAGE_PLATFORMS) \
|
||||
--build-arg GIT_TAG=${GIT_TAG} \
|
||||
--build-arg GIT_SHA=${GIT_SHA} \
|
||||
--build-arg CROSS_TAGLIB_VERSION=${CROSS_TAGLIB_VERSION} \
|
||||
--tag $(DOCKER_TAG) .
|
||||
.PHONY: docker-image
|
||||
|
||||
docker-msi: ##@Cross_Compilation Build MSI installer for Windows
|
||||
make docker-build PLATFORMS=windows/386,windows/amd64
|
||||
DOCKER_CLI_HINTS=false docker build -q -t navidrome-msi-builder -f release/wix/msitools.dockerfile .
|
||||
@rm -rf binaries/msi
|
||||
docker run -it --rm -v $(PWD):/workspace -v $(PWD)/binaries:/workspace/binaries -e GIT_TAG=${GIT_TAG} \
|
||||
navidrome-msi-builder sh -c "release/wix/build_msi.sh /workspace 386 && release/wix/build_msi.sh /workspace amd64"
|
||||
@du -h binaries/msi/*.msi
|
||||
.PHONY: docker-msi
|
||||
|
||||
package: docker-build ##@Cross_Compilation Create binaries and packages for ALL supported platforms
|
||||
@if [ -z `which goreleaser` ]; then echo "Please install goreleaser first: https://goreleaser.com/install/"; exit 1; fi
|
||||
goreleaser release -f release/goreleaser.yml --clean --skip=publish --snapshot
|
||||
.PHONY: package
|
||||
|
||||
get-music: ##@Development Download some free music from Navidrome's demo instance
|
||||
mkdir -p music
|
||||
( cd music; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=ec2093ec4801402f1e17cc462195cdbb" > brock.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=b376eeb4652d2498aa2b25ba0696725e" > back_on_earth.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=e49c609b542fc51899ee8b53aa858cb4" > ugress.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=350bcab3a4c1d93869e39ce496464f03" > voodoocuts.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=2Y3qQA6zJC3ObbBrF9ZBoV" > brock.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=04HrSORpypcLGNUdQp37gn" > back_on_earth.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=5xcMPJdeEgNrGtnzYbzAqb" > ugress.zip; \
|
||||
curl "https://demo.navidrome.org/rest/download?u=demo&p=demo&f=json&v=1.8.0&c=dev_download&id=1jjQMAZrG3lUsJ0YH6ZRS0" > voodoocuts.zip; \
|
||||
for file in *.zip; do unzip -n $${file}; done )
|
||||
@echo "Done. Remember to set your MusicFolder to ./music"
|
||||
.PHONY: get-music
|
||||
@@ -136,6 +169,11 @@ get-music: ##@Development Download some free music from Navidrome's demo instanc
|
||||
##########################################
|
||||
#### Miscellaneous
|
||||
|
||||
clean:
|
||||
@rm -rf ./binaries ./dist ./ui/build/*
|
||||
@touch ./ui/build/.gitkeep
|
||||
.PHONY: clean
|
||||
|
||||
release:
|
||||
@if [[ ! "${V}" =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$$ ]]; then echo "Usage: make release V=X.X.X"; exit 1; fi
|
||||
go mod tidy
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
JS: sh -c "cd ./ui && npm start"
|
||||
GO: go run github.com/cespare/reflex@latest -d none -c reflex.conf
|
||||
GO: go tool reflex -d none -c reflex.conf
|
||||
|
||||
10
README.md
@@ -9,6 +9,7 @@
|
||||
[](https://discord.gg/xh7j7yF)
|
||||
[](https://www.reddit.com/r/navidrome/)
|
||||
[](CODE_OF_CONDUCT.md)
|
||||
[](https://gurubase.io/g/navidrome)
|
||||
|
||||
Navidrome is an open source web-based music collection server and streamer. It gives you freedom to listen to your
|
||||
music collection from any browser or mobile device. It's like your personal Spotify!
|
||||
@@ -56,6 +57,15 @@ A share of the revenue helps fund the development of Navidrome at no additional
|
||||
- **Transcoding** on the fly. Can be set per user/player. **Opus encoding is supported**
|
||||
- Translated to **various languages**
|
||||
|
||||
## Translations
|
||||
|
||||
Navidrome uses [POEditor](https://poeditor.com/) for translations, and we are always looking
|
||||
for [more contributors](https://www.navidrome.org/docs/developers/translations/)
|
||||
|
||||
<a href="https://poeditor.com/">
|
||||
<img height="32" src="https://github.com/user-attachments/assets/c19b1d2b-01e1-4682-a007-12356c42147c">
|
||||
</a>
|
||||
|
||||
## Documentation
|
||||
All documentation can be found on the project's website: https://www.navidrome.org/docs.
|
||||
Here are some useful direct links:
|
||||
|
||||
154
adapters/taglib/end_to_end_test.go
Normal file
@@ -0,0 +1,154 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/djherbis/times"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type testFileInfo struct {
|
||||
fs.FileInfo
|
||||
}
|
||||
|
||||
func (t testFileInfo) BirthTime() time.Time {
|
||||
if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
|
||||
return ts.BirthTime()
|
||||
}
|
||||
return t.FileInfo.ModTime()
|
||||
}
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
toP := func(name, sortName, mbid string) model.Participant {
|
||||
return model.Participant{
|
||||
Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
|
||||
}
|
||||
}
|
||||
|
||||
roles := []struct {
|
||||
model.Role
|
||||
model.ParticipantList
|
||||
}{
|
||||
{model.RoleComposer, model.ParticipantList{
|
||||
toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
|
||||
toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
|
||||
}},
|
||||
{model.RoleLyricist, model.ParticipantList{
|
||||
toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
|
||||
toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
|
||||
}},
|
||||
{model.RoleArranger, model.ParticipantList{
|
||||
toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
|
||||
toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
|
||||
}},
|
||||
{model.RoleConductor, model.ParticipantList{
|
||||
toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
|
||||
toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
|
||||
}},
|
||||
{model.RoleDirector, model.ParticipantList{
|
||||
toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
|
||||
toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
|
||||
}},
|
||||
{model.RoleEngineer, model.ParticipantList{
|
||||
toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
|
||||
toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
|
||||
}},
|
||||
{model.RoleProducer, model.ParticipantList{
|
||||
toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
|
||||
toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
|
||||
}},
|
||||
{model.RoleRemixer, model.ParticipantList{
|
||||
toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
|
||||
toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
|
||||
}},
|
||||
{model.RoleDJMixer, model.ParticipantList{
|
||||
toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
|
||||
toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
|
||||
}},
|
||||
{model.RoleMixer, model.ParticipantList{
|
||||
toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
|
||||
toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
|
||||
}},
|
||||
}
|
||||
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("Participants", func() {
|
||||
DescribeTable("test tags consistent across formats", func(format string) {
|
||||
path := "tests/fixtures/test." + format
|
||||
mds, err := e.Parse(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
info := mds[path]
|
||||
fileInfo, _ := os.Stat(path)
|
||||
info.FileInfo = testFileInfo{FileInfo: fileInfo}
|
||||
|
||||
metadata := metadata.New(path, info)
|
||||
mf := metadata.ToMediaFile(1, "folderID")
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
|
||||
actual := mf.Participants[role]
|
||||
Expect(actual).To(HaveLen(len(artists)))
|
||||
|
||||
for i := range artists {
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[i]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
|
||||
Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
|
||||
}
|
||||
}
|
||||
|
||||
if format != "m4a" {
|
||||
performers := mf.Participants[model.RolePerformer]
|
||||
Expect(performers).To(HaveLen(8))
|
||||
|
||||
rules := map[string][]string{
|
||||
"pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
|
||||
"pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
|
||||
"pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
|
||||
"pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
|
||||
"pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
|
||||
}
|
||||
|
||||
for name, rule := range rules {
|
||||
mbid := rule[0]
|
||||
for i := 1; i < len(rule); i++ {
|
||||
found := false
|
||||
|
||||
for _, mapped := range performers {
|
||||
if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
Expect(found).To(BeTrue(), "Could not find matching artist")
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Entry("FLAC format", "flac"),
|
||||
Entry("M4a format", "m4a"),
|
||||
Entry("OGG format", "ogg"),
|
||||
Entry("WMA format", "wv"),
|
||||
|
||||
Entry("MP3 format", "mp3"),
|
||||
Entry("WAV format", "wav"),
|
||||
Entry("AIFF format", "aiff"),
|
||||
)
|
||||
})
|
||||
})
|
||||
151
adapters/taglib/taglib.go
Normal file
@@ -0,0 +1,151 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/core/storage/local"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
)
|
||||
|
||||
type extractor struct {
|
||||
baseDir string
|
||||
}
|
||||
|
||||
func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
|
||||
results := make(map[string]metadata.Info)
|
||||
for _, path := range files {
|
||||
props, err := e.extractMetadata(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
results[path] = *props
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (e extractor) Version() string {
|
||||
return Version()
|
||||
}
|
||||
|
||||
func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
|
||||
fullPath := filepath.Join(e.baseDir, filePath)
|
||||
tags, err := Read(fullPath)
|
||||
if err != nil {
|
||||
log.Warn("extractor: Error reading metadata from file. Skipping", "filePath", fullPath, err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse audio properties
|
||||
ap := metadata.AudioProperties{}
|
||||
if length, ok := tags["_lengthinmilliseconds"]; ok && len(length) > 0 {
|
||||
millis, _ := strconv.Atoi(length[0])
|
||||
if millis > 0 {
|
||||
ap.Duration = (time.Millisecond * time.Duration(millis)).Round(time.Millisecond * 10)
|
||||
}
|
||||
delete(tags, "_lengthinmilliseconds")
|
||||
}
|
||||
parseProp := func(prop string, target *int) {
|
||||
if value, ok := tags[prop]; ok && len(value) > 0 {
|
||||
*target, _ = strconv.Atoi(value[0])
|
||||
delete(tags, prop)
|
||||
}
|
||||
}
|
||||
parseProp("_bitrate", &ap.BitRate)
|
||||
parseProp("_channels", &ap.Channels)
|
||||
parseProp("_samplerate", &ap.SampleRate)
|
||||
parseProp("_bitspersample", &ap.BitDepth)
|
||||
|
||||
// Parse track/disc totals
|
||||
parseTuple := func(prop string) {
|
||||
tagName := prop + "number"
|
||||
tagTotal := prop + "total"
|
||||
if value, ok := tags[tagName]; ok && len(value) > 0 {
|
||||
parts := strings.Split(value[0], "/")
|
||||
tags[tagName] = []string{parts[0]}
|
||||
if len(parts) == 2 {
|
||||
tags[tagTotal] = []string{parts[1]}
|
||||
}
|
||||
}
|
||||
}
|
||||
parseTuple("track")
|
||||
parseTuple("disc")
|
||||
|
||||
// Adjust some ID3 tags
|
||||
parseLyrics(tags)
|
||||
parseTIPL(tags)
|
||||
delete(tags, "tmcl") // TMCL is already parsed by TagLib
|
||||
|
||||
return &metadata.Info{
|
||||
Tags: tags,
|
||||
AudioProperties: ap,
|
||||
HasPicture: tags["has_picture"] != nil && len(tags["has_picture"]) > 0 && tags["has_picture"][0] == "true",
|
||||
}, nil
|
||||
}
|
||||
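// splitTrackTupleExample is an illustrative sketch of the parseTuple logic in
// extractMetadata above: a combined "number/total" value such as "3/10" is split
// into tracknumber "3" and tracktotal "10" (the input value here is hypothetical,
// chosen to match the test fixtures).
func splitTrackTupleExample() map[string][]string {
    tags := map[string][]string{"tracknumber": {"3/10"}}
    parts := strings.Split(tags["tracknumber"][0], "/")
    tags["tracknumber"] = []string{parts[0]} // "3"
    if len(parts) == 2 {
        tags["tracktotal"] = []string{parts[1]} // "10"
    }
    return tags
}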
|
||||
// parseLyrics makes sure lyrics tags have a language
|
||||
func parseLyrics(tags map[string][]string) {
|
||||
lyrics := tags["lyrics"]
|
||||
if len(lyrics) > 0 {
|
||||
tags["lyrics:xxx"] = lyrics
|
||||
delete(tags, "lyrics")
|
||||
}
|
||||
}
|
||||
|
||||
// These are the only roles we support, based on Picard's tag map:
|
||||
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
var tiplMapping = map[string]string{
|
||||
"arranger": "arranger",
|
||||
"engineer": "engineer",
|
||||
"producer": "producer",
|
||||
"mix": "mixer",
|
||||
"DJ-mix": "djmixer",
|
||||
}
|
||||
|
||||
// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
|
||||
//
|
||||
// "arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
|
||||
//
|
||||
// and breaks it down into a map of roles and names, e.g.:
|
||||
//
|
||||
// {"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
|
||||
func parseTIPL(tags map[string][]string) {
|
||||
tipl := tags["tipl"]
|
||||
if len(tipl) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
addRole := func(currentRole string, currentValue []string) {
|
||||
if currentRole != "" && len(currentValue) > 0 {
|
||||
role := tiplMapping[currentRole]
|
||||
tags[role] = append(tags[role], strings.Join(currentValue, " "))
|
||||
}
|
||||
}
|
||||
|
||||
var currentRole string
|
||||
var currentValue []string
|
||||
for _, part := range strings.Split(tipl[0], " ") {
|
||||
if _, ok := tiplMapping[part]; ok {
|
||||
addRole(currentRole, currentValue)
|
||||
currentRole = part
|
||||
currentValue = nil
|
||||
continue
|
||||
}
|
||||
currentValue = append(currentValue, part)
|
||||
}
|
||||
addRole(currentRole, currentValue)
|
||||
delete(tags, "tipl")
|
||||
}
|
||||
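// parseTIPLExample is an illustrative sketch of parseTIPL applied to the example
// string from the comment above; the resulting keys follow the documented mapping.
func parseTIPLExample() map[string][]string {
    tags := map[string][]string{
        "tipl": {"arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson"},
    }
    parseTIPL(tags)
    // tags now holds:
    //   "arranger": ["Andrew Powell"]
    //   "engineer": ["Chris Blair", "Pat Stapley"]
    //   "producer": ["Eric Woolfson"]
    // and the "tipl" key has been removed.
    return tags
}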
|
||||
var _ local.Extractor = (*extractor)(nil)
|
||||
|
||||
func init() {
|
||||
local.RegisterExtractor("taglib", func(_ fs.FS, baseDir string) local.Extractor {
|
||||
// ignores fs, as taglib extractor only works with local files
|
||||
return &extractor{baseDir}
|
||||
})
|
||||
}
|
||||
296
adapters/taglib/taglib_test.go
Normal file
@@ -0,0 +1,296 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("Parse", func() {
|
||||
It("correctly parses metadata from all files in folder", func() {
|
||||
mds, err := e.Parse(
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
|
||||
// Test MP3
|
||||
m := mds["tests/fixtures/test.mp3"]
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
|
||||
Expect(m.AudioProperties.BitRate).To(Equal(192))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(44100))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("tcmp", []string{"1"})),
|
||||
)
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
|
||||
Expect(m.Tags).ToNot(HaveKey("lyrics"))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}), HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
})))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}), HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
})))
|
||||
|
||||
// Test OGG
|
||||
m = mds["tests/fixtures/test.ogg"]
|
||||
Expect(err).To(BeNil())
|
||||
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))
|
||||
|
||||
// TagLib 1.12 returns 18, previous versions return 39.
|
||||
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
|
||||
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
|
||||
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
})
|
||||
|
||||
DescribeTable("Format-Specific tests",
|
||||
func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool, image bool) {
|
||||
file = "tests/fixtures/" + file
|
||||
mds, err := e.Parse(file)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(1))
|
||||
|
||||
m := mds[file]
|
||||
|
||||
Expect(m.HasPicture).To(Equal(image))
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(channels))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
|
||||
Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{albumGain}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("tracknumber", []string{"3"}),
|
||||
HaveKeyWithValue("tracknumber", []string{"3/10"}),
|
||||
))
|
||||
if !strings.HasSuffix(file, "test.wma") {
|
||||
// TODO Not sure why this is not working for WMA
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
}
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("discnumber", []string{"1"}),
|
||||
HaveKeyWithValue("discnumber", []string{"1/2"}),
|
||||
))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
|
||||
// WMA does not have a "compilation" tag, but "wm/iscompilation"
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("wm/iscompilation", []string{"1"})),
|
||||
)
|
||||
|
||||
if id3Lyrics {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}))
|
||||
} else {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
}
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
},
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
|
||||
Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false, true),
|
||||
|
||||
Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
|
||||
// Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
|
||||
Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
|
||||
Entry("correctly parses wv (wavpak) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false, false),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
|
||||
Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
|
||||
Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true, true),
|
||||
)
|
||||
|
||||
// Skip these tests when running as root
|
||||
Context("Access Forbidden", func() {
|
||||
var accessForbiddenFile string
|
||||
var RegularUserContext = XContext
|
||||
var isRegularUser = os.Getuid() != 0
|
||||
if isRegularUser {
|
||||
RegularUserContext = Context
|
||||
}
|
||||
|
||||
// Only run permission tests if we are not root
|
||||
RegularUserContext("when run without root privileges", func() {
|
||||
BeforeEach(func() {
|
||||
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
|
||||
|
||||
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
DeferCleanup(func() {
|
||||
Expect(f.Close()).To(Succeed())
|
||||
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
|
||||
})
|
||||
})
|
||||
|
||||
It("correctly handle unreadable file due to insufficient read permission", func() {
|
||||
_, err := e.extractMetadata(accessForbiddenFile)
|
||||
Expect(err).To(MatchError(os.ErrPermission))
|
||||
})
|
||||
|
||||
It("skips the file if it cannot be read", func() {
|
||||
files := []string{
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
accessForbiddenFile,
|
||||
}
|
||||
mds, err := e.Parse(files...)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
Expect(mds).ToNot(HaveKey(accessForbiddenFile))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
Describe("Error Checking", func() {
|
||||
It("returns a generic ErrPath if file does not exist", func() {
|
||||
testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
|
||||
_, err := e.extractMetadata(testFilePath)
|
||||
Expect(err).To(MatchError(fs.ErrNotExist))
|
||||
})
|
||||
It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
|
||||
// File has an empty TDAT frame
|
||||
md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("parseTIPL", func() {
|
||||
var tags map[string][]string
|
||||
|
||||
BeforeEach(func() {
|
||||
tags = make(map[string][]string)
|
||||
})
|
||||
|
||||
Context("when the TIPL string is populated", func() {
|
||||
It("correctly parses roles and names", func() {
|
||||
tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
|
||||
Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
|
||||
})
|
||||
|
||||
It("handles multiple names for a single role", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
|
||||
It("discards roles without names", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).ToNot(HaveKey("producer"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL string is empty", func() {
|
||||
It("does nothing", func() {
|
||||
tags["tipl"] = []string{""}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL is not present", func() {
|
||||
It("does nothing", func() {
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
@@ -3,8 +3,11 @@
|
||||
#include <typeinfo>
|
||||
|
||||
#define TAGLIB_STATIC
|
||||
#include <apeproperties.h>
|
||||
#include <apetag.h>
|
||||
#include <aifffile.h>
|
||||
#include <asffile.h>
|
||||
#include <dsffile.h>
|
||||
#include <fileref.h>
|
||||
#include <flacfile.h>
|
||||
#include <id3v2tag.h>
|
||||
@@ -16,6 +19,8 @@
|
||||
#include <tpropertymap.h>
|
||||
#include <vorbisfile.h>
|
||||
#include <wavfile.h>
|
||||
#include <wavfile.h>
|
||||
#include <wavpackfile.h>
|
||||
|
||||
#include "taglib_wrapper.h"
|
||||
|
||||
@@ -41,35 +46,31 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
|
||||
// Add audio properties to the tags
|
||||
const TagLib::AudioProperties *props(f.audioProperties());
|
||||
go_map_put_int(id, (char *)"duration", props->lengthInSeconds());
|
||||
go_map_put_int(id, (char *)"lengthinmilliseconds", props->lengthInMilliseconds());
|
||||
go_map_put_int(id, (char *)"bitrate", props->bitrate());
|
||||
go_map_put_int(id, (char *)"channels", props->channels());
|
||||
go_map_put_int(id, (char *)"samplerate", props->sampleRate());
|
||||
goPutInt(id, (char *)"_lengthinmilliseconds", props->lengthInMilliseconds());
|
||||
goPutInt(id, (char *)"_bitrate", props->bitrate());
|
||||
goPutInt(id, (char *)"_channels", props->channels());
|
||||
goPutInt(id, (char *)"_samplerate", props->sampleRate());
|
||||
|
||||
// Create a map to collect all the tags
|
||||
if (const auto* apeProperties{ dynamic_cast<const TagLib::APE::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", apeProperties->bitsPerSample());
|
||||
if (const auto* asfProperties{ dynamic_cast<const TagLib::ASF::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", asfProperties->bitsPerSample());
|
||||
else if (const auto* flacProperties{ dynamic_cast<const TagLib::FLAC::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", flacProperties->bitsPerSample());
|
||||
else if (const auto* mp4Properties{ dynamic_cast<const TagLib::MP4::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", mp4Properties->bitsPerSample());
|
||||
else if (const auto* wavePackProperties{ dynamic_cast<const TagLib::WavPack::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", wavePackProperties->bitsPerSample());
|
||||
else if (const auto* aiffProperties{ dynamic_cast<const TagLib::RIFF::AIFF::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", aiffProperties->bitsPerSample());
|
||||
else if (const auto* wavProperties{ dynamic_cast<const TagLib::RIFF::WAV::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", wavProperties->bitsPerSample());
|
||||
else if (const auto* dsfProperties{ dynamic_cast<const TagLib::DSF::Properties*>(props) })
|
||||
goPutInt(id, (char *)"_bitspersample", dsfProperties->bitsPerSample());
|
||||
|
||||
// Send all properties to the Go map
|
||||
TagLib::PropertyMap tags = f.file()->properties();
|
||||
|
||||
// Make sure at least the basic properties are extracted
|
||||
TagLib::Tag *basic = f.file()->tag();
|
||||
if (!basic->isEmpty()) {
|
||||
if (!basic->title().isEmpty()) {
|
||||
tags.insert("title", basic->title());
|
||||
}
|
||||
if (!basic->artist().isEmpty()) {
|
||||
tags.insert("artist", basic->artist());
|
||||
}
|
||||
if (!basic->album().isEmpty()) {
|
||||
tags.insert("album", basic->album());
|
||||
}
|
||||
if (basic->year() > 0) {
|
||||
tags.insert("date", TagLib::String::number(basic->year()));
|
||||
}
|
||||
if (basic->track() > 0) {
|
||||
tags.insert("_track", TagLib::String::number(basic->track()));
|
||||
}
|
||||
}
|
||||
|
||||
TagLib::ID3v2::Tag *id3Tags = NULL;
|
||||
|
||||
// Get some extended/non-standard ID3-only tags (ex: iTunes extended frames)
|
||||
@@ -114,7 +115,7 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
|
||||
char *val = (char *)frame->text().toCString(true);
|
||||
|
||||
go_map_put_lyrics(id, language, val);
|
||||
goPutLyrics(id, language, val);
|
||||
}
|
||||
} else if (kv.first == "SYLT") {
|
||||
for (const auto &tag: kv.second) {
|
||||
@@ -132,7 +133,7 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
|
||||
for (const auto &line: frame->synchedText()) {
|
||||
char *text = (char *)line.text.toCString(true);
|
||||
go_map_put_lyric_line(id, language, text, line.time);
|
||||
goPutLyricLine(id, language, text, line.time);
|
||||
}
|
||||
} else if (format == TagLib::ID3v2::SynchronizedLyricsFrame::AbsoluteMpegFrames) {
|
||||
const int sampleRate = props->sampleRate();
|
||||
@@ -141,12 +142,12 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
for (const auto &line: frame->synchedText()) {
|
||||
const int timeInMs = (line.time * 1000) / sampleRate;
|
||||
char *text = (char *)line.text.toCString(true);
|
||||
go_map_put_lyric_line(id, language, text, timeInMs);
|
||||
goPutLyricLine(id, language, text, timeInMs);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
} else if (kv.first == "TIPL"){
|
||||
if (!kv.second.isEmpty()) {
|
||||
tags.insert(kv.first, kv.second.front()->toString());
|
||||
}
|
||||
@@ -154,7 +155,7 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
}
|
||||
}
|
||||
|
||||
// M4A may have some iTunes specific tags
|
||||
// M4A may have some iTunes specific tags not captured by the PropertyMap interface
|
||||
TagLib::MP4::File *m4afile(dynamic_cast<TagLib::MP4::File *>(f.file()));
|
||||
if (m4afile != NULL) {
|
||||
const auto itemListMap = m4afile->tag()->itemMap();
|
||||
@@ -162,12 +163,12 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
char *key = (char *)item.first.toCString(true);
|
||||
for (const auto value: item.second.toStringList()) {
|
||||
char *val = (char *)value.toCString(true);
|
||||
go_map_put_m4a_str(id, key, val);
|
||||
goPutM4AStr(id, key, val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WMA/ASF files may have additional tags not captured by the general iterator
|
||||
// WMA/ASF files may have additional tags not captured by the PropertyMap interface
|
||||
TagLib::ASF::File *asfFile(dynamic_cast<TagLib::ASF::File *>(f.file()));
|
||||
if (asfFile != NULL) {
|
||||
const TagLib::ASF::Tag *asfTags{asfFile->tag()};
|
||||
@@ -184,13 +185,13 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
for (TagLib::StringList::ConstIterator j = i->second.begin();
|
||||
j != i->second.end(); ++j) {
|
||||
char *val = (char *)(*j).toCString(true);
|
||||
go_map_put_str(id, key, val);
|
||||
goPutStr(id, key, val);
|
||||
}
|
||||
}
|
||||
|
||||
// Cover art has to be handled separately
|
||||
if (has_cover(f)) {
|
||||
go_map_put_str(id, (char *)"has_picture", (char *)"true");
|
||||
goPutStr(id, (char *)"has_picture", (char *)"true");
|
||||
}
|
||||
|
||||
return 0;
|
||||
@@ -200,40 +201,48 @@ int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id) {
|
||||
char has_cover(const TagLib::FileRef f) {
|
||||
char hasCover = 0;
|
||||
// ----- MP3
|
||||
if (TagLib::MPEG::File *
|
||||
mp3File{dynamic_cast<TagLib::MPEG::File *>(f.file())}) {
|
||||
if (TagLib::MPEG::File * mp3File{dynamic_cast<TagLib::MPEG::File *>(f.file())}) {
|
||||
if (mp3File->ID3v2Tag()) {
|
||||
const auto &frameListMap{mp3File->ID3v2Tag()->frameListMap()};
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- FLAC
|
||||
else if (TagLib::FLAC::File *
|
||||
flacFile{dynamic_cast<TagLib::FLAC::File *>(f.file())}) {
|
||||
else if (TagLib::FLAC::File * flacFile{dynamic_cast<TagLib::FLAC::File *>(f.file())}) {
|
||||
hasCover = !flacFile->pictureList().isEmpty();
|
||||
}
|
||||
// ----- MP4
|
||||
else if (TagLib::MP4::File *
|
||||
mp4File{dynamic_cast<TagLib::MP4::File *>(f.file())}) {
|
||||
else if (TagLib::MP4::File * mp4File{dynamic_cast<TagLib::MP4::File *>(f.file())}) {
|
||||
auto &coverItem{mp4File->tag()->itemMap()["covr"]};
|
||||
TagLib::MP4::CoverArtList coverArtList{coverItem.toCoverArtList()};
|
||||
hasCover = !coverArtList.isEmpty();
|
||||
}
|
||||
// ----- Ogg
|
||||
else if (TagLib::Ogg::Vorbis::File *
|
||||
vorbisFile{dynamic_cast<TagLib::Ogg::Vorbis::File *>(f.file())}) {
|
||||
else if (TagLib::Ogg::Vorbis::File * vorbisFile{dynamic_cast<TagLib::Ogg::Vorbis::File *>(f.file())}) {
|
||||
hasCover = !vorbisFile->tag()->pictureList().isEmpty();
|
||||
}
|
||||
// ----- Opus
|
||||
else if (TagLib::Ogg::Opus::File *
|
||||
opusFile{dynamic_cast<TagLib::Ogg::Opus::File *>(f.file())}) {
|
||||
else if (TagLib::Ogg::Opus::File * opusFile{dynamic_cast<TagLib::Ogg::Opus::File *>(f.file())}) {
|
||||
hasCover = !opusFile->tag()->pictureList().isEmpty();
|
||||
}
|
||||
// ----- WAV
|
||||
else if (TagLib::RIFF::WAV::File * wavFile{ dynamic_cast<TagLib::RIFF::WAV::File*>(f.file()) }) {
|
||||
if (wavFile->hasID3v2Tag()) {
|
||||
const auto& frameListMap{ wavFile->ID3v2Tag()->frameListMap() };
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- AIFF
|
||||
else if (TagLib::RIFF::AIFF::File * aiffFile{ dynamic_cast<TagLib::RIFF::AIFF::File *>(f.file())}) {
|
||||
if (aiffFile->hasID3v2Tag()) {
|
||||
const auto& frameListMap{ aiffFile->tag()->frameListMap() };
|
||||
hasCover = !frameListMap["APIC"].isEmpty();
|
||||
}
|
||||
}
|
||||
// ----- WMA
|
||||
if (TagLib::ASF::File *
|
||||
asfFile{dynamic_cast<TagLib::ASF::File *>(f.file())}) {
|
||||
const TagLib::ASF::Tag *tag{asfFile->tag()};
|
||||
hasCover = tag && tag->attributeListMap().contains("WM/Picture");
|
||||
else if (TagLib::ASF::File * asfFile{dynamic_cast<TagLib::ASF::File *>(f.file())}) {
|
||||
const TagLib::ASF::Tag *tag{ asfFile->tag() };
|
||||
hasCover = tag && asfFile->tag()->attributeListMap().contains("WM/Picture");
|
||||
}
|
||||
|
||||
return hasCover;
|
||||
157
adapters/taglib/taglib_wrapper.go
Normal file
@@ -0,0 +1,157 @@
|
||||
package taglib
|
||||
|
||||
/*
|
||||
#cgo !windows pkg-config: --define-prefix taglib
|
||||
#cgo windows pkg-config: taglib
|
||||
#cgo illumos LDFLAGS: -lstdc++ -lsendfile
|
||||
#cgo linux darwin CXXFLAGS: -std=c++11
|
||||
#cgo darwin LDFLAGS: -L/opt/homebrew/opt/taglib/lib
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include "taglib_wrapper.h"
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime/debug"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"unsafe"
|
||||
|
||||
"github.com/navidrome/navidrome/log"
|
||||
)
|
||||
|
||||
const iTunesKeyPrefix = "----:com.apple.itunes:"
|
||||
|
||||
func Version() string {
|
||||
return C.GoString(C.taglib_version())
|
||||
}
|
||||
|
||||
func Read(filename string) (tags map[string][]string, err error) {
|
||||
// Do not crash on failures in the C code/library
|
||||
debug.SetPanicOnFault(true)
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
log.Error("extractor: recovered from panic when reading tags", "file", filename, "error", r)
|
||||
err = fmt.Errorf("extractor: recovered from panic: %s", r)
|
||||
}
|
||||
}()
|
||||
|
||||
fp := getFilename(filename)
|
||||
defer C.free(unsafe.Pointer(fp))
|
||||
id, m, release := newMap()
|
||||
defer release()
|
||||
|
||||
log.Trace("extractor: reading tags", "filename", filename, "map_id", id)
|
||||
res := C.taglib_read(fp, C.ulong(id))
|
||||
switch res {
|
||||
case C.TAGLIB_ERR_PARSE:
|
||||
// Check additional case whether the file is unreadable due to permission
|
||||
file, fileErr := os.OpenFile(filename, os.O_RDONLY, 0600)
|
||||
defer file.Close()
|
||||
|
||||
if os.IsPermission(fileErr) {
|
||||
return nil, fmt.Errorf("navidrome does not have permission: %w", fileErr)
|
||||
} else if fileErr != nil {
|
||||
return nil, fmt.Errorf("cannot parse file media file: %w", fileErr)
|
||||
} else {
|
||||
return nil, fmt.Errorf("cannot parse file media file")
|
||||
}
|
||||
case C.TAGLIB_ERR_AUDIO_PROPS:
|
||||
return nil, fmt.Errorf("can't get audio properties from file")
|
||||
}
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||
j, _ := json.Marshal(m)
|
||||
log.Trace("extractor: read tags", "tags", string(j), "filename", filename, "id", id)
|
||||
} else {
|
||||
log.Trace("extractor: read tags", "tags", m, "filename", filename, "id", id)
|
||||
}
|
||||
|
||||
return m, nil
|
||||
}
|
||||
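// readExample is a minimal usage sketch for Read, using one of the test fixture
// files referenced elsewhere in this package; the tag keys shown are the
// lower-cased names produced by doPutTag below.
func readExample() {
    tags, err := Read("tests/fixtures/test.mp3")
    if err != nil {
        log.Error("extractor: could not read tags", err)
        return
    }
    log.Debug("extractor: read tags", "title", tags["title"], "artist", tags["artist"])
}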
|
||||
type tagMap map[string][]string
|
||||
|
||||
var allMaps sync.Map
|
||||
var mapsNextID atomic.Uint32
|
||||
|
||||
func newMap() (uint32, tagMap, func()) {
|
||||
id := mapsNextID.Add(1)
|
||||
|
||||
m := tagMap{}
|
||||
allMaps.Store(id, m)
|
||||
|
||||
return id, m, func() {
|
||||
allMaps.Delete(id)
|
||||
}
|
||||
}
|
||||
|
||||
func doPutTag(id C.ulong, key string, val *C.char) {
|
||||
if key == "" {
|
||||
return
|
||||
}
|
||||
|
||||
r, _ := allMaps.Load(uint32(id))
|
||||
m := r.(tagMap)
|
||||
k := strings.ToLower(key)
|
||||
v := strings.TrimSpace(C.GoString(val))
|
||||
m[k] = append(m[k], v)
|
||||
}
|
||||
|
||||
//export goPutM4AStr
|
||||
func goPutM4AStr(id C.ulong, key *C.char, val *C.char) {
|
||||
k := C.GoString(key)
|
||||
|
||||
// Special for M4A, do not catch keys that have no actual name
|
||||
k = strings.TrimPrefix(k, iTunesKeyPrefix)
|
||||
doPutTag(id, k, val)
|
||||
}
|
||||
|
||||
//export goPutStr
|
||||
func goPutStr(id C.ulong, key *C.char, val *C.char) {
|
||||
doPutTag(id, C.GoString(key), val)
|
||||
}
|
||||
|
||||
//export goPutInt
|
||||
func goPutInt(id C.ulong, key *C.char, val C.int) {
|
||||
valStr := strconv.Itoa(int(val))
|
||||
vp := C.CString(valStr)
|
||||
defer C.free(unsafe.Pointer(vp))
|
||||
goPutStr(id, key, vp)
|
||||
}
|
||||
|
||||
//export goPutLyrics
|
||||
func goPutLyrics(id C.ulong, lang *C.char, val *C.char) {
|
||||
doPutTag(id, "lyrics:"+C.GoString(lang), val)
|
||||
}
|
||||
|
||||
//export goPutLyricLine
|
||||
func goPutLyricLine(id C.ulong, lang *C.char, text *C.char, time C.int) {
|
||||
language := C.GoString(lang)
|
||||
line := C.GoString(text)
|
||||
timeGo := int64(time)
|
||||
|
||||
ms := timeGo % 1000
|
||||
timeGo /= 1000
|
||||
sec := timeGo % 60
|
||||
timeGo /= 60
|
||||
minimum := timeGo % 60
|
||||
formattedLine := fmt.Sprintf("[%02d:%02d.%02d]%s\n", minimum, sec, ms/10, line)
|
||||
|
||||
key := "lyrics:" + language
|
||||
|
||||
r, _ := allMaps.Load(uint32(id))
|
||||
m := r.(tagMap)
|
||||
k := strings.ToLower(key)
|
||||
existing, ok := m[k]
|
||||
if ok {
|
||||
existing[0] += formattedLine
|
||||
} else {
|
||||
m[k] = []string{formattedLine}
|
||||
}
|
||||
}
|
||||
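// formatLyricTimeExample is an illustrative sketch of the timestamp arithmetic in
// goPutLyricLine above: 2500ms becomes the LRC-style prefix "[00:02.50]", matching
// the 2.5s lines in the test fixtures (the lyric text here is hypothetical).
func formatLyricTimeExample() string {
    timeGo := int64(2500)
    ms := timeGo % 1000 // 500
    timeGo /= 1000
    sec := timeGo % 60 // 2
    timeGo /= 60
    minutes := timeGo % 60 // 0
    return fmt.Sprintf("[%02d:%02d.%02d]%s\n", minutes, sec, ms/10, "English")
}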
24
adapters/taglib/taglib_wrapper.h
Normal file
@@ -0,0 +1,24 @@
|
||||
#define TAGLIB_ERR_PARSE -1
|
||||
#define TAGLIB_ERR_AUDIO_PROPS -2
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#ifdef WIN32
|
||||
#define FILENAME_CHAR_T wchar_t
|
||||
#else
|
||||
#define FILENAME_CHAR_T char
|
||||
#endif
|
||||
|
||||
extern void goPutM4AStr(unsigned long id, char *key, char *val);
|
||||
extern void goPutStr(unsigned long id, char *key, char *val);
|
||||
extern void goPutInt(unsigned long id, char *key, int val);
|
||||
extern void goPutLyrics(unsigned long id, char *lang, char *val);
|
||||
extern void goPutLyricLine(unsigned long id, char *lang, char *text, int time);
|
||||
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id);
|
||||
char* taglib_version();
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
186
cmd/backup.go
Normal file
@@ -0,0 +1,186 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
backupCount int
|
||||
backupDir string
|
||||
force bool
|
||||
restorePath string
|
||||
)
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(backupRoot)
|
||||
|
||||
backupCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory to manually make backup")
|
||||
backupRoot.AddCommand(backupCmd)
|
||||
|
||||
pruneCmd.Flags().StringVarP(&backupDir, "backup-dir", "d", "", "directory holding Navidrome backups")
|
||||
pruneCmd.Flags().IntVarP(&backupCount, "keep-count", "k", -1, "specify the number of backups to keep. 0 remove ALL backups, and negative values mean to use the default from configuration")
|
||||
pruneCmd.Flags().BoolVarP(&force, "force", "f", false, "bypass warning when backup count is zero")
|
||||
backupRoot.AddCommand(pruneCmd)
|
||||
|
||||
restoreCommand.Flags().StringVarP(&restorePath, "backup-file", "b", "", "path of backup database to restore")
|
||||
restoreCommand.Flags().BoolVarP(&force, "force", "f", false, "bypass restore warning")
|
||||
_ = restoreCommand.MarkFlagRequired("backup-file")
|
||||
backupRoot.AddCommand(restoreCommand)
|
||||
}
|
||||
|
||||
var (
|
||||
backupRoot = &cobra.Command{
|
||||
Use: "backup",
|
||||
Aliases: []string{"bkp"},
|
||||
Short: "Create, restore and prune database backups",
|
||||
Long: "Create, restore and prune database backups",
|
||||
}
|
||||
|
||||
backupCmd = &cobra.Command{
|
||||
Use: "create",
|
||||
Short: "Create a backup database",
|
||||
Long: "Manually backup Navidrome database. This will ignore BackupCount",
|
||||
Run: func(cmd *cobra.Command, _ []string) {
|
||||
runBackup(cmd.Context())
|
||||
},
|
||||
}
|
||||
|
||||
pruneCmd = &cobra.Command{
|
||||
Use: "prune",
|
||||
Short: "Prune database backups",
|
||||
Long: "Manually prune database backups according to backup rules",
|
||||
Run: func(cmd *cobra.Command, _ []string) {
|
||||
runPrune(cmd.Context())
|
||||
},
|
||||
}
|
||||
|
||||
restoreCommand = &cobra.Command{
|
||||
Use: "restore",
|
||||
Short: "Restore Navidrome database",
|
||||
Long: "Restore Navidrome database from a backup. This must be done offline",
|
||||
Run: func(cmd *cobra.Command, _ []string) {
|
||||
runRestore(cmd.Context())
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func runBackup(ctx context.Context) {
|
||||
if backupDir != "" {
|
||||
conf.Server.Backup.Path = backupDir
|
||||
}
|
||||
|
||||
idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||
var path string
|
||||
|
||||
if idx == -1 {
|
||||
path = conf.Server.DbPath
|
||||
} else {
|
||||
path = conf.Server.DbPath[:idx]
|
||||
}
|
||||
|
||||
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
log.Fatal("No existing database", "path", path)
|
||||
return
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
path, err := db.Backup(ctx)
|
||||
if err != nil {
|
||||
log.Fatal("Error backing up database", "backup path", conf.Server.BasePath, err)
|
||||
}
|
||||
|
||||
elapsed := time.Since(start)
|
||||
log.Info("Backup complete", "elapsed", elapsed, "path", path)
|
||||
}
|
||||
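// dbFilePathExample is an illustrative sketch of the DbPath handling in the
// commands above: any connection options after "?" are stripped before checking
// that the database file exists. The path and options are hypothetical examples.
func dbFilePathExample() string {
    dbPath := "/data/navidrome.db?cache=shared&_busy_timeout=15000" // hypothetical
    if idx := strings.LastIndex(dbPath, "?"); idx != -1 {
        return dbPath[:idx] // "/data/navidrome.db"
    }
    return dbPath
}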
|
||||
func runPrune(ctx context.Context) {
|
||||
if backupDir != "" {
|
||||
conf.Server.Backup.Path = backupDir
|
||||
}
|
||||
|
||||
if backupCount != -1 {
|
||||
conf.Server.Backup.Count = backupCount
|
||||
}
|
||||
|
||||
if conf.Server.Backup.Count == 0 && !force {
|
||||
fmt.Println("Warning: pruning ALL backups")
|
||||
fmt.Printf("Please enter YES (all caps) to continue: ")
|
||||
var input string
|
||||
_, err := fmt.Scanln(&input)
|
||||
|
||||
if input != "YES" || err != nil {
|
||||
log.Warn("Prune cancelled")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||
var path string
|
||||
|
||||
if idx == -1 {
|
||||
path = conf.Server.DbPath
|
||||
} else {
|
||||
path = conf.Server.DbPath[:idx]
|
||||
}
|
||||
|
||||
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
log.Fatal("No existing database", "path", path)
|
||||
return
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
count, err := db.Prune(ctx)
|
||||
if err != nil {
|
||||
log.Fatal("Error pruning up database", "backup path", conf.Server.BasePath, err)
|
||||
}
|
||||
|
||||
elapsed := time.Since(start)
|
||||
|
||||
log.Info("Prune complete", "elapsed", elapsed, "successfully pruned", count)
|
||||
}
|
||||
|
||||
func runRestore(ctx context.Context) {
|
||||
idx := strings.LastIndex(conf.Server.DbPath, "?")
|
||||
var path string
|
||||
|
||||
if idx == -1 {
|
||||
path = conf.Server.DbPath
|
||||
} else {
|
||||
path = conf.Server.DbPath[:idx]
|
||||
}
|
||||
|
||||
if _, err := os.Stat(path); os.IsNotExist(err) {
|
||||
log.Fatal("No existing database", "path", path)
|
||||
return
|
||||
}
|
||||
|
||||
if !force {
|
||||
fmt.Println("Warning: restoring the Navidrome database should only be done offline, especially if your backup is very old.")
|
||||
fmt.Printf("Please enter YES (all caps) to continue: ")
|
||||
var input string
|
||||
_, err := fmt.Scanln(&input)
|
||||
|
||||
if input != "YES" || err != nil {
|
||||
log.Warn("Restore cancelled")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
err := db.Restore(ctx, restorePath)
|
||||
if err != nil {
|
||||
log.Fatal("Error restoring database", "backup path", conf.Server.BasePath, err)
|
||||
}
|
||||
|
||||
elapsed := time.Since(start)
|
||||
log.Info("Restore complete", "elapsed", elapsed)
|
||||
}
|
||||
@@ -5,25 +5,20 @@ import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/scanner/metadata"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var (
|
||||
extractor string
|
||||
format string
|
||||
format string
|
||||
)
|
||||
|
||||
func init() {
|
||||
inspectCmd.Flags().StringVarP(&extractor, "extractor", "x", "", "extractor to use (ffmpeg or taglib, default: auto)")
|
||||
inspectCmd.Flags().StringVarP(&format, "format", "f", "pretty", "output format (pretty, toml, yaml, json, jsonindent)")
|
||||
inspectCmd.Flags().StringVarP(&format, "format", "f", "jsonindent", "output format (pretty, toml, yaml, json, jsonindent)")
|
||||
rootCmd.AddCommand(inspectCmd)
|
||||
}
|
||||
|
||||
@@ -48,7 +43,7 @@ var marshalers = map[string]func(interface{}) ([]byte, error){
|
||||
}
|
||||
|
||||
func prettyMarshal(v interface{}) ([]byte, error) {
|
||||
out := v.([]inspectorOutput)
|
||||
out := v.([]core.InspectOutput)
|
||||
var res strings.Builder
|
||||
for i := range out {
|
||||
res.WriteString(fmt.Sprintf("====================\nFile: %s\n\n", out[i].File))
|
||||
@@ -60,39 +55,24 @@ func prettyMarshal(v interface{}) ([]byte, error) {
|
||||
return []byte(res.String()), nil
|
||||
}
|
||||
|
||||
type inspectorOutput struct {
|
||||
File string
|
||||
RawTags metadata.ParsedTags
|
||||
MappedTags model.MediaFile
|
||||
}
|
||||
|
||||
func runInspector(args []string) {
|
||||
if extractor != "" {
|
||||
conf.Server.Scanner.Extractor = extractor
|
||||
}
|
||||
log.Info("Using extractor", "extractor", conf.Server.Scanner.Extractor)
|
||||
md, err := metadata.Extract(args...)
|
||||
if err != nil {
|
||||
log.Fatal("Error extracting tags", err)
|
||||
}
|
||||
mapper := scanner.NewMediaFileMapper(conf.Server.MusicFolder, &tests.MockedGenreRepo{})
|
||||
marshal := marshalers[format]
|
||||
if marshal == nil {
|
||||
log.Fatal("Invalid format", "format", format)
|
||||
}
|
||||
var out []inspectorOutput
|
||||
for k, v := range md {
|
||||
if !model.IsAudioFile(k) {
|
||||
var out []core.InspectOutput
|
||||
for _, filePath := range args {
|
||||
if !model.IsAudioFile(filePath) {
|
||||
log.Warn("Not an audio file", "file", filePath)
|
||||
continue
|
||||
}
|
||||
if len(v.Tags) == 0 {
|
||||
output, err := core.Inspect(filePath, 1, "")
|
||||
if err != nil {
|
||||
log.Warn("Unable to process file", "file", filePath, "error", err)
|
||||
continue
|
||||
}
|
||||
out = append(out, inspectorOutput{
|
||||
File: k,
|
||||
RawTags: v.Tags,
|
||||
MappedTags: mapper.ToMediaFile(v),
|
||||
})
|
||||
|
||||
out = append(out, *output)
|
||||
}
|
||||
data, _ := marshal(out)
|
||||
fmt.Println(string(data))
|
||||
|
||||
109
cmd/pls.go
@@ -2,8 +2,12 @@ package cmd

import (
"context"
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"os"
"strconv"

"github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/core/auth"
@@ -15,31 +19,57 @@ import (
)

var (
playlistID string
outputFile string
playlistID   string
outputFile   string
userID       string
outputFormat string
)

type displayPlaylist struct {
Id        string `json:"id"`
Name      string `json:"name"`
OwnerName string `json:"ownerName"`
OwnerId   string `json:"ownerId"`
Public    bool   `json:"public"`
}

type displayPlaylists []displayPlaylist

func init() {
plsCmd.Flags().StringVarP(&playlistID, "playlist", "p", "", "playlist name or ID")
plsCmd.Flags().StringVarP(&outputFile, "output", "o", "", "output file (default stdout)")
_ = plsCmd.MarkFlagRequired("playlist")
rootCmd.AddCommand(plsCmd)

listCommand.Flags().StringVarP(&userID, "user", "u", "", "username or ID")
listCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
plsCmd.AddCommand(listCommand)
}

var plsCmd = &cobra.Command{
Use: "pls",
Short: "Export playlists",
Long: "Export Navidrome playlists to M3U files",
Run: func(cmd *cobra.Command, args []string) {
runExporter()
},
}
var (
plsCmd = &cobra.Command{
Use: "pls",
Short: "Export playlists",
Long: "Export Navidrome playlists to M3U files",
Run: func(cmd *cobra.Command, args []string) {
runExporter()
},
}

listCommand = &cobra.Command{
Use: "list",
Short: "List playlists",
Run: func(cmd *cobra.Command, args []string) {
runList()
},
}
)

func runExporter() {
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx := auth.WithAdminUser(context.Background(), ds)
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true)
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
if err != nil && !errors.Is(err, model.ErrNotFound) {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
}
@@ -49,7 +79,7 @@ func runExporter() {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
}
if len(playlists) > 0 {
playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true)
playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true, false)
if err != nil {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
}
@@ -69,3 +99,58 @@ func runExporter() {
log.Fatal("Error writing to the output file", "file", outputFile, err)
}
}

func runList() {
if outputFormat != "csv" && outputFormat != "json" {
log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
}

sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx := auth.WithAdminUser(context.Background(), ds)

options := model.QueryOptions{Sort: "owner_name"}

if userID != "" {
user, err := ds.User(ctx).FindByUsername(userID)

if err != nil && !errors.Is(err, model.ErrNotFound) {
log.Fatal("Error retrieving user by name", "name", userID, err)
}

if errors.Is(err, model.ErrNotFound) {
user, err = ds.User(ctx).Get(userID)
if err != nil {
log.Fatal("Error retrieving user by id", "id", userID, err)
}
}

options.Filters = squirrel.Eq{"owner_id": user.ID}
}

playlists, err := ds.Playlist(ctx).GetAll(options)
if err != nil {
log.Fatal(ctx, "Failed to retrieve playlists", err)
}

if outputFormat == "csv" {
w := csv.NewWriter(os.Stdout)
_ = w.Write([]string{"playlist id", "playlist name", "owner id", "owner name", "public"})
for _, playlist := range playlists {
_ = w.Write([]string{playlist.ID, playlist.Name, playlist.OwnerID, playlist.OwnerName, strconv.FormatBool(playlist.Public)})
}
w.Flush()
} else {
display := make(displayPlaylists, len(playlists))
for idx, playlist := range playlists {
display[idx].Id = playlist.ID
display[idx].Name = playlist.Name
display[idx].OwnerId = playlist.OwnerID
display[idx].OwnerName = playlist.OwnerName
display[idx].Public = playlist.Public
}

j, _ := json.Marshal(display)
fmt.Printf("%s\n", j)
}
}
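The new list subcommand reuses the flags registered in init() above. A usage sketch, assuming the binary is invoked as navidrome (the flag names and the csv/json formats are the ones defined in this diff):

navidrome pls -p "Road Trip" -o roadtrip.m3u    # export a single playlist to an M3U file
navidrome pls list                              # list every playlist as CSV on stdout
navidrome pls list -u admin -f json             # playlists owned by "admin", as JSON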
214
cmd/root.go
@@ -2,7 +2,6 @@ package cmd

import (
"context"
"fmt"
"os"
"os/signal"
"strings"
@@ -10,15 +9,16 @@ import (
"time"

"github.com/go-chi/chi/v5/middleware"
_ "github.com/navidrome/navidrome/adapters/taglib"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/resources"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/scheduler"
"github.com/navidrome/navidrome/server/backgrounds"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"golang.org/x/sync/errgroup"
@@ -37,7 +37,7 @@ Complete documentation is available at https://www.navidrome.org/docs`,
preRun()
},
Run: func(cmd *cobra.Command, args []string) {
runNavidrome()
runNavidrome(cmd.Context())
},
PostRun: func(cmd *cobra.Command, args []string) {
postRun()
@@ -48,10 +48,12 @@ Complete documentation is available at https://www.navidrome.org/docs`,

// Execute runs the root cobra command, which will start the Navidrome server by calling the runNavidrome function.
func Execute() {
ctx, cancel := mainContext(context.Background())
defer cancel()

rootCmd.SetVersionTemplate(`{{println .Version}}`)
if err := rootCmd.Execute(); err != nil {
fmt.Println(err)
os.Exit(1)
if err := rootCmd.ExecuteContext(ctx); err != nil {
log.Fatal(err)
}
}

@@ -59,7 +61,7 @@ func preRun() {
if !noBanner {
println(resources.Banner())
}
conf.Load()
conf.Load(noBanner)
}

func postRun() {
@@ -69,18 +71,24 @@ func postRun() {
// runNavidrome is the main entry point for the Navidrome server. It starts all the services and blocks.
// If any of the services returns an error, it will log it and exit. If the process receives a signal to exit,
// it will cancel the context and exit gracefully.
func runNavidrome() {
defer db.Init()()

ctx, cancel := mainContext()
defer cancel()
func runNavidrome(ctx context.Context) {
defer db.Init(ctx)()

g, ctx := errgroup.WithContext(ctx)
g.Go(startServer(ctx))
g.Go(startSignaller(ctx))
g.Go(startScheduler(ctx))
g.Go(startPlaybackServer(ctx))
g.Go(schedulePeriodicScan(ctx))
g.Go(schedulePeriodicBackup(ctx))
g.Go(startInsightsCollector(ctx))
g.Go(scheduleDBOptimizer(ctx))
if conf.Server.Scanner.Enabled {
g.Go(runInitialScan(ctx))
g.Go(startScanWatcher(ctx))
g.Go(schedulePeriodicScan(ctx))
} else {
log.Warn(ctx, "Automatic Scanning is DISABLED")
}

if err := g.Wait(); err != nil {
log.Error("Fatal error in Navidrome. Aborting", err)
@@ -88,8 +96,8 @@ func runNavidrome() {
}

// mainContext returns a context that is cancelled when the process receives a signal to exit.
func mainContext() (context.Context, context.CancelFunc) {
return signal.NotifyContext(context.Background(),
func mainContext(ctx context.Context) (context.Context, context.CancelFunc) {
return signal.NotifyContext(ctx,
os.Interrupt,
syscall.SIGHUP,
syscall.SIGTERM,
@@ -100,9 +108,9 @@ func mainContext() (context.Context, context.CancelFunc) {
// startServer starts the Navidrome web server, adding all the necessary routers.
func startServer(ctx context.Context) func() error {
return func() error {
a := CreateServer(conf.Server.MusicFolder)
a := CreateServer()
a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter())
a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter())
a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter(ctx))
a.MountRouter("Public Endpoints", consts.URLPathPublic, CreatePublicRouter())
if conf.Server.LastFM.Enabled {
a.MountRouter("LastFM Auth", consts.URLPathNativeAPI+"/lastfm", CreateLastFMRouter())
@@ -111,9 +119,10 @@ func startServer(ctx context.Context) func() error {
a.MountRouter("ListenBrainz Auth", consts.URLPathNativeAPI+"/listenbrainz", CreateListenBrainzRouter())
}
if conf.Server.Prometheus.Enabled {
// blocking call because takes <1ms but useful if fails
core.WriteInitialMetrics()
a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, promhttp.Handler())
p := CreatePrometheus()
// blocking call because takes <100ms but useful if fails
p.WriteInitialMetrics(ctx)
a.MountRouter("Prometheus metrics", conf.Server.Prometheus.MetricsPath, p.GetHandler())
}
if conf.Server.DevEnableProfiler {
a.MountRouter("Profiling", "/debug", middleware.Profiler())
@@ -128,30 +137,148 @@ func startServer(ctx context.Context) func() error {
// schedulePeriodicScan schedules a periodic scan of the music library, if configured.
func schedulePeriodicScan(ctx context.Context) func() error {
return func() error {
schedule := conf.Server.ScanSchedule
schedule := conf.Server.Scanner.Schedule
if schedule == "" {
log.Warn("Periodic scan is DISABLED")
log.Info(ctx, "Periodic scan is DISABLED")
return nil
}

scanner := GetScanner()
s := CreateScanner(ctx)
schedulerInstance := scheduler.GetInstance()

log.Info("Scheduling periodic scan", "schedule", schedule)
err := schedulerInstance.Add(schedule, func() {
_ = scanner.RescanAll(ctx, false)
_, err := s.ScanAll(ctx, false)
if err != nil {
log.Error(ctx, "Error executing periodic scan", err)
}
})
if err != nil {
log.Error("Error scheduling periodic scan", err)
log.Error(ctx, "Error scheduling periodic scan", err)
}
return nil
}
}

func pidHashChanged(ds model.DataStore) (bool, error) {
pidAlbum, err := ds.Property(context.Background()).DefaultGet(consts.PIDAlbumKey, "")
if err != nil {
return false, err
}
pidTrack, err := ds.Property(context.Background()).DefaultGet(consts.PIDTrackKey, "")
if err != nil {
return false, err
}
return !strings.EqualFold(pidAlbum, conf.Server.PID.Album) || !strings.EqualFold(pidTrack, conf.Server.PID.Track), nil
}

func runInitialScan(ctx context.Context) func() error {
return func() error {
ds := CreateDataStore()
fullScanRequired, err := ds.Property(ctx).DefaultGet(consts.FullScanAfterMigrationFlagKey, "0")
if err != nil {
return err
}
inProgress, err := ds.Library(ctx).ScanInProgress()
if err != nil {
return err
}
pidHasChanged, err := pidHashChanged(ds)
if err != nil {
return err
}
scanNeeded := conf.Server.Scanner.ScanOnStartup || inProgress || fullScanRequired == "1" || pidHasChanged
time.Sleep(2 * time.Second) // Wait 2 seconds before the initial scan
if scanNeeded {
scanner := CreateScanner(ctx)
switch {
case fullScanRequired == "1":
log.Warn(ctx, "Full scan required after migration")
_ = ds.Property(ctx).Delete(consts.FullScanAfterMigrationFlagKey)
case pidHasChanged:
log.Warn(ctx, "PID config changed, performing full scan")
fullScanRequired = "1"
case inProgress:
log.Warn(ctx, "Resuming interrupted scan")
default:
log.Info("Executing initial scan")
}

_, err = scanner.ScanAll(ctx, fullScanRequired == "1")
if err != nil {
log.Error(ctx, "Scan failed", err)
} else {
log.Info(ctx, "Scan completed")
}
} else {
log.Debug(ctx, "Initial scan not needed")
}
return nil
}
}

func startScanWatcher(ctx context.Context) func() error {
return func() error {
if conf.Server.Scanner.WatcherWait == 0 {
log.Debug("Folder watcher is DISABLED")
return nil
}
w := CreateScanWatcher(ctx)
err := w.Run(ctx)
if err != nil {
log.Error("Error starting watcher", err)
}
return nil
}
}

func schedulePeriodicBackup(ctx context.Context) func() error {
return func() error {
schedule := conf.Server.Backup.Schedule
if schedule == "" {
log.Info(ctx, "Periodic backup is DISABLED")
return nil
}

time.Sleep(2 * time.Second) // Wait 2 seconds before the initial scan
log.Debug("Executing initial scan")
if err := scanner.RescanAll(ctx, false); err != nil {
log.Error("Error executing initial scan", err)
}
log.Debug("Finished initial scan")
return nil
schedulerInstance := scheduler.GetInstance()

log.Info("Scheduling periodic backup", "schedule", schedule)
err := schedulerInstance.Add(schedule, func() {
start := time.Now()
path, err := db.Backup(ctx)
elapsed := time.Since(start)
if err != nil {
log.Error(ctx, "Error backing up database", "elapsed", elapsed, err)
return
}
log.Info(ctx, "Backup complete", "elapsed", elapsed, "path", path)

count, err := db.Prune(ctx)
if err != nil {
log.Error(ctx, "Error pruning database", "error", err)
} else if count > 0 {
log.Info(ctx, "Successfully pruned old files", "count", count)
} else {
log.Info(ctx, "No backups pruned")
}
})

return err
}
}

func scheduleDBOptimizer(ctx context.Context) func() error {
return func() error {
log.Info(ctx, "Scheduling DB optimizer", "schedule", consts.OptimizeDBSchedule)
schedulerInstance := scheduler.GetInstance()
err := schedulerInstance.Add(consts.OptimizeDBSchedule, func() {
if scanner.IsScanning() {
log.Debug(ctx, "Skipping DB optimization because a scan is in progress")
return
}
db.Optimize(ctx)
})
return err
}
}

@@ -165,6 +292,25 @@ func startScheduler(ctx context.Context) func() error {
}
}

// startInsightsCollector starts the Navidrome Insight Collector, if configured.
func startInsightsCollector(ctx context.Context) func() error {
return func() error {
if !conf.Server.EnableInsightsCollector {
log.Info(ctx, "Insight Collector is DISABLED")
return nil
}
log.Info(ctx, "Starting Insight Collector")
select {
case <-time.After(conf.Server.DevInsightsInitialDelay):
case <-ctx.Done():
return nil
}
ic := CreateInsights()
ic.Run(ctx)
return nil
}
}

// startPlaybackServer starts the Navidrome playback server, if configured.
// It is responsible for the Jukebox functionality
func startPlaybackServer(ctx context.Context) func() error {
@@ -191,11 +337,13 @@ func init() {
rootCmd.PersistentFlags().String("datafolder", viper.GetString("datafolder"), "folder to store application data (DB), needs write access")
rootCmd.PersistentFlags().String("cachefolder", viper.GetString("cachefolder"), "folder to store cache data (transcoding, images...), needs write access")
rootCmd.PersistentFlags().StringP("loglevel", "l", viper.GetString("loglevel"), "log level, possible values: error, info, debug, trace")
rootCmd.PersistentFlags().String("logfile", viper.GetString("logfile"), "log file path, if not set logs will be printed to stderr")

_ = viper.BindPFlag("musicfolder", rootCmd.PersistentFlags().Lookup("musicfolder"))
_ = viper.BindPFlag("datafolder", rootCmd.PersistentFlags().Lookup("datafolder"))
_ = viper.BindPFlag("cachefolder", rootCmd.PersistentFlags().Lookup("cachefolder"))
_ = viper.BindPFlag("loglevel", rootCmd.PersistentFlags().Lookup("loglevel"))
_ = viper.BindPFlag("logfile", rootCmd.PersistentFlags().Lookup("logfile"))

rootCmd.Flags().StringP("address", "a", viper.GetString("address"), "IP address to bind to")
rootCmd.Flags().IntP("port", "p", viper.GetInt("port"), "HTTP port Navidrome will listen to")
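The old runNavidrome built its own context and called the singleton scanner; the new version receives the context created once in Execute() and lets an errgroup supervise every service. A minimal sketch of the same lifecycle pattern in isolation (the names and the placeholder service are illustrative, not Navidrome APIs), assuming only the standard library and golang.org/x/sync/errgroup:

package main

import (
	"context"
	"errors"
	"log"
	"os"
	"os/signal"
	"syscall"

	"golang.org/x/sync/errgroup"
)

func main() {
	// Cancelled when the process receives an exit signal, mirroring mainContext above.
	ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGHUP, syscall.SIGTERM)
	defer cancel()

	g, ctx := errgroup.WithContext(ctx)
	// Each long-running service gets its own goroutine and returns once ctx is done.
	g.Go(func() error { <-ctx.Done(); return nil })
	if err := g.Wait(); err != nil && !errors.Is(err, context.Canceled) {
		log.Fatal(err)
	}
}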
64
cmd/scan.go
@@ -2,15 +2,28 @@ package cmd

import (
"context"
"encoding/gob"
"os"

"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/core/artwork"
"github.com/navidrome/navidrome/core/metrics"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/persistence"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/utils/pl"
"github.com/spf13/cobra"
)

var fullRescan bool
var (
fullScan   bool
subprocess bool
)

func init() {
scanCmd.Flags().BoolVarP(&fullRescan, "full", "f", false, "check all subfolders, ignoring timestamps")
scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
rootCmd.AddCommand(scanCmd)
}

@@ -19,16 +32,53 @@ var scanCmd = &cobra.Command{
Short: "Scan music folder",
Long: "Scan music folder for updates",
Run: func(cmd *cobra.Command, args []string) {
runScanner()
runScanner(cmd.Context())
},
}

func runScanner() {
scanner := GetScanner()
_ = scanner.RescanAll(context.Background(), fullRescan)
if fullRescan {
func trackScanInteractively(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
for status := range pl.ReadOrDone(ctx, progress) {
if status.Warning != "" {
log.Warn(ctx, "Scan warning", "error", status.Warning)
}
if status.Error != "" {
log.Error(ctx, "Scan error", "error", status.Error)
}
// Discard the progress status, we only care about errors
}

if fullScan {
log.Info("Finished full rescan")
} else {
log.Info("Finished rescan")
}
}

func trackScanAsSubprocess(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
encoder := gob.NewEncoder(os.Stdout)
for status := range pl.ReadOrDone(ctx, progress) {
err := encoder.Encode(status)
if err != nil {
log.Error(ctx, "Failed to encode status", err)
}
}
}

func runScanner(ctx context.Context) {
sqlDB := db.Db()
defer db.Db().Close()
ds := persistence.New(sqlDB)
pls := core.NewPlaylists(ds)

progress, err := scanner.CallScan(ctx, ds, artwork.NoopCacheWarmer(), pls, metrics.NewNoopInstance(), fullScan)
if err != nil {
log.Fatal(ctx, "Failed to scan", err)
}

// Wait for the scanner to finish
if subprocess {
trackScanAsSubprocess(ctx, progress)
} else {
trackScanInteractively(ctx, progress)
}
}
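trackScanAsSubprocess above streams each scanner.ProgressInfo as a gob value on stdout, which is what makes the --subprocess flag consumable from another process. A hedged sketch of the reading side, assuming the parent launches the same binary (the exec wiring here is illustrative, not Navidrome's actual parent code; only scanner.ProgressInfo and encoding/gob come from the diff):

cmd := exec.CommandContext(ctx, os.Args[0], "scan", "--subprocess")
stdout, err := cmd.StdoutPipe()
if err != nil {
	return
}
if err := cmd.Start(); err != nil {
	return
}
decoder := gob.NewDecoder(stdout)
for {
	var status scanner.ProgressInfo
	if err := decoder.Decode(&status); err != nil {
		break // io.EOF once the child process exits
	}
	// Only warnings and errors matter here, mirroring trackScanInteractively
	if status.Warning != "" || status.Error != "" {
		log.Warn(ctx, "Scan reported a problem", "warning", status.Warning, "error", status.Error)
	}
}
_ = cmd.Wait()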
@@ -16,7 +16,7 @@ const triggerScanSignal = syscall.SIGUSR1

func startSignaller(ctx context.Context) func() error {
log.Info(ctx, "Starting signaler")
scanner := GetScanner()
scanner := CreateScanner(ctx)

return func() error {
var sigChan = make(chan os.Signal, 1)
@@ -27,11 +27,11 @@ func startSignaller(ctx context.Context) func() error {
case sig := <-sigChan:
log.Info(ctx, "Received signal, triggering a new scan", "signal", sig)
start := time.Now()
err := scanner.RescanAll(ctx, false)
_, err := scanner.ScanAll(ctx, false)
if err != nil {
log.Error(ctx, "Error scanning", err)
}
log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start).Round(100*time.Millisecond))
log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start))
case <-ctx.Done():
return nil
}
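triggerScanSignal is still syscall.SIGUSR1, so a running server can be asked to rescan without a restart; only the scanner behind the handler changed from GetScanner to CreateScanner. On Linux, for example:

kill -USR1 $(pidof navidrome)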
267
cmd/svc.go
Normal file
@@ -0,0 +1,267 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/kardianos/service"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
svcStatusLabels = map[service.Status]string{
|
||||
service.StatusUnknown: "Unknown",
|
||||
service.StatusStopped: "Stopped",
|
||||
service.StatusRunning: "Running",
|
||||
}
|
||||
|
||||
installUser string
|
||||
workingDirectory string
|
||||
)
|
||||
|
||||
func init() {
|
||||
svcCmd.AddCommand(buildInstallCmd())
|
||||
svcCmd.AddCommand(buildUninstallCmd())
|
||||
svcCmd.AddCommand(buildStartCmd())
|
||||
svcCmd.AddCommand(buildStopCmd())
|
||||
svcCmd.AddCommand(buildStatusCmd())
|
||||
svcCmd.AddCommand(buildExecuteCmd())
|
||||
rootCmd.AddCommand(svcCmd)
|
||||
}
|
||||
|
||||
var svcCmd = &cobra.Command{
|
||||
Use: "service",
|
||||
Aliases: []string{"svc"},
|
||||
Short: "Manage Navidrome as a service",
|
||||
Long: fmt.Sprintf("Manage Navidrome as a service, using the OS service manager (%s)", service.Platform()),
|
||||
Run: runServiceCmd,
|
||||
}
|
||||
|
||||
type svcControl struct {
|
||||
ctx context.Context
|
||||
cancel context.CancelFunc
|
||||
done chan struct{}
|
||||
}
|
||||
|
||||
func (p *svcControl) Start(service.Service) error {
|
||||
p.done = make(chan struct{})
|
||||
p.ctx, p.cancel = context.WithCancel(context.Background())
|
||||
go func() {
|
||||
runNavidrome(p.ctx)
|
||||
close(p.done)
|
||||
}()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *svcControl) Stop(service.Service) error {
|
||||
log.Info("Stopping service")
|
||||
p.cancel()
|
||||
select {
|
||||
case <-p.done:
|
||||
log.Info("Service stopped gracefully")
|
||||
case <-time.After(10 * time.Second):
|
||||
log.Error("Service did not stop in time. Killing it.")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var svcInstance = sync.OnceValue(func() service.Service {
|
||||
options := make(service.KeyValue)
|
||||
options["Restart"] = "on-failure"
|
||||
options["SuccessExitStatus"] = "1 2 8 SIGKILL"
|
||||
options["UserService"] = false
|
||||
options["LogDirectory"] = conf.Server.DataFolder
|
||||
options["SystemdScript"] = systemdScript
|
||||
if conf.Server.LogFile != "" {
|
||||
options["LogOutput"] = false
|
||||
} else {
|
||||
options["LogOutput"] = true
|
||||
options["LogDirectory"] = conf.Server.DataFolder
|
||||
}
|
||||
svcConfig := &service.Config{
|
||||
UserName: installUser,
|
||||
Name: "navidrome",
|
||||
DisplayName: "Navidrome",
|
||||
Description: "Your Personal Streaming Service",
|
||||
Dependencies: []string{
|
||||
"After=remote-fs.target network.target",
|
||||
},
|
||||
WorkingDirectory: executablePath(),
|
||||
Option: options,
|
||||
}
|
||||
arguments := []string{"service", "execute"}
|
||||
if conf.Server.ConfigFile != "" {
|
||||
arguments = append(arguments, "-c", conf.Server.ConfigFile)
|
||||
}
|
||||
svcConfig.Arguments = arguments
|
||||
|
||||
prg := &svcControl{}
|
||||
svc, err := service.New(prg, svcConfig)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return svc
|
||||
})
|
||||
|
||||
func runServiceCmd(cmd *cobra.Command, _ []string) {
|
||||
_ = cmd.Help()
|
||||
}
|
||||
|
||||
func executablePath() string {
|
||||
if workingDirectory != "" {
|
||||
return workingDirectory
|
||||
}
|
||||
|
||||
ex, err := os.Executable()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return filepath.Dir(ex)
|
||||
}
|
||||
|
||||
func buildInstallCmd() *cobra.Command {
|
||||
runInstallCmd := func(_ *cobra.Command, _ []string) {
|
||||
var err error
|
||||
println("Installing service with:")
|
||||
println(" working directory: " + executablePath())
|
||||
println(" music folder: " + conf.Server.MusicFolder)
|
||||
println(" data folder: " + conf.Server.DataFolder)
|
||||
if conf.Server.LogFile != "" {
|
||||
println(" log file: " + conf.Server.LogFile)
|
||||
} else {
|
||||
println(" logs folder: " + conf.Server.DataFolder)
|
||||
}
|
||||
if cfgFile != "" {
|
||||
conf.Server.ConfigFile, err = filepath.Abs(cfgFile)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println(" config file: " + conf.Server.ConfigFile)
|
||||
}
|
||||
err = svcInstance().Install()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println("Service installed. Use 'navidrome svc start' to start it.")
|
||||
}
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "install",
|
||||
Short: "Install Navidrome service.",
|
||||
Run: runInstallCmd,
|
||||
}
|
||||
cmd.Flags().StringVarP(&installUser, "user", "u", "", "user to run service")
|
||||
cmd.Flags().StringVarP(&workingDirectory, "working-directory", "w", "", "working directory of service")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func buildUninstallCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "uninstall",
|
||||
Short: "Uninstall Navidrome service. Does not delete the music or data folders",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
err := svcInstance().Uninstall()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println("Service uninstalled. Music and data folders are still intact.")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildStartCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "start",
|
||||
Short: "Start Navidrome service",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
err := svcInstance().Start()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println("Service started. Use 'navidrome svc status' to check its status.")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildStopCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "stop",
|
||||
Short: "Stop Navidrome service",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
err := svcInstance().Stop()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
println("Service stopped. Use 'navidrome svc status' to check its status.")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildStatusCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "status",
|
||||
Short: "Show Navidrome service status",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
status, err := svcInstance().Status()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("Navidrome is %s.\n", svcStatusLabels[status])
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildExecuteCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "execute",
|
||||
Short: "Run navidrome as a service in the foreground (it is very unlikely you want to run this, you are better off running just navidrome)",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
err := svcInstance().Run()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const systemdScript = `[Unit]
|
||||
Description={{.Description}}
|
||||
ConditionFileIsExecutable={{.Path|cmdEscape}}
|
||||
{{range $i, $dep := .Dependencies}}
|
||||
{{$dep}} {{end}}
|
||||
|
||||
[Service]
|
||||
StartLimitInterval=5
|
||||
StartLimitBurst=10
|
||||
ExecStart={{.Path|cmdEscape}}{{range .Arguments}} {{.|cmd}}{{end}}
|
||||
{{if .WorkingDirectory}}WorkingDirectory={{.WorkingDirectory|cmdEscape}}{{end}}
|
||||
{{if .UserName}}User={{.UserName}}{{end}}
|
||||
{{if .Restart}}Restart={{.Restart}}{{end}}
|
||||
{{if .SuccessExitStatus}}SuccessExitStatus={{.SuccessExitStatus}}{{end}}
|
||||
TimeoutStopSec=20
|
||||
RestartSec=120
|
||||
EnvironmentFile=-/etc/sysconfig/{{.Name}}
|
||||
|
||||
DevicePolicy=closed
|
||||
NoNewPrivileges=yes
|
||||
PrivateTmp=yes
|
||||
ProtectControlGroups=yes
|
||||
ProtectKernelModules=yes
|
||||
ProtectKernelTunables=yes
|
||||
RestrictAddressFamilies=AF_UNIX AF_INET AF_INET6
|
||||
RestrictNamespaces=yes
|
||||
RestrictRealtime=yes
|
||||
SystemCallFilter=~@clock @debug @module @mount @obsolete @reboot @setuid @swap
|
||||
{{if .WorkingDirectory}}ReadWritePaths={{.WorkingDirectory|cmdEscape}}{{end}}
|
||||
ProtectSystem=full
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
`
|
||||
123
cmd/wire_gen.go
@@ -1,22 +1,26 @@
|
||||
// Code generated by Wire. DO NOT EDIT.
|
||||
|
||||
//go:generate go run -mod=mod github.com/google/wire/cmd/wire
|
||||
//go:generate go run -mod=mod github.com/google/wire/cmd/wire gen -tags "netgo"
|
||||
//go:build !wireinject
|
||||
// +build !wireinject
|
||||
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
"github.com/navidrome/navidrome/core/artwork"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
"github.com/navidrome/navidrome/core/scrobbler"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/persistence"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/server"
|
||||
@@ -26,56 +30,69 @@ import (
|
||||
"github.com/navidrome/navidrome/server/subsonic"
|
||||
)
|
||||
|
||||
import (
|
||||
_ "github.com/navidrome/navidrome/adapters/taglib"
|
||||
)
|
||||
|
||||
// Injectors from wire_injectors.go:
|
||||
|
||||
func CreateServer(musicFolder string) *server.Server {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
func CreateDataStore() model.DataStore {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
return dataStore
|
||||
}
|
||||
|
||||
func CreateServer() *server.Server {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
broker := events.GetBroker()
|
||||
serverServer := server.New(dataStore, broker)
|
||||
insights := metrics.GetInstance(dataStore)
|
||||
serverServer := server.New(dataStore, broker, insights)
|
||||
return serverServer
|
||||
}
|
||||
|
||||
func CreateNativeAPIRouter() *nativeapi.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
share := core.NewShare(dataStore)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
router := nativeapi.New(dataStore, share, playlists)
|
||||
insights := metrics.GetInstance(dataStore)
|
||||
router := nativeapi.New(dataStore, share, playlists, insights)
|
||||
return router
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter() *subsonic.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
transcodingCache := core.GetTranscodingCache()
|
||||
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
|
||||
share := core.NewShare(dataStore)
|
||||
archiver := core.NewArchiver(mediaStreamer, dataStore, share)
|
||||
players := core.NewPlayers(dataStore)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
scannerScanner := scanner.GetInstance(dataStore, playlists, cacheWarmer, broker)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
playTracker := scrobbler.GetPlayTracker(dataStore, broker)
|
||||
playbackServer := playback.GetInstance(dataStore)
|
||||
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, externalMetadata, scannerScanner, broker, playlists, playTracker, share, playbackServer)
|
||||
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, scannerScanner, broker, playlists, playTracker, share, playbackServer)
|
||||
return router
|
||||
}
|
||||
|
||||
func CreatePublicRouter() *public.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
transcodingCache := core.GetTranscodingCache()
|
||||
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
|
||||
share := core.NewShare(dataStore)
|
||||
@@ -85,41 +102,73 @@ func CreatePublicRouter() *public.Router {
|
||||
}
|
||||
|
||||
func CreateLastFMRouter() *lastfm.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
router := lastfm.NewRouter(dataStore)
|
||||
return router
|
||||
}
|
||||
|
||||
func CreateListenBrainzRouter() *listenbrainz.Router {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
router := listenbrainz.NewRouter(dataStore)
|
||||
return router
|
||||
}
|
||||
|
||||
func GetScanner() scanner.Scanner {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
func CreateInsights() metrics.Insights {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
insights := metrics.GetInstance(dataStore)
|
||||
return insights
|
||||
}
|
||||
|
||||
func CreatePrometheus() metrics.Metrics {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
return metricsMetrics
|
||||
}
|
||||
|
||||
func CreateScanner(ctx context.Context) scanner.Scanner {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
scannerScanner := scanner.GetInstance(dataStore, playlists, cacheWarmer, broker)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
return scannerScanner
|
||||
}
|
||||
|
||||
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
watcher := scanner.NewWatcher(dataStore, scannerScanner)
|
||||
return watcher
|
||||
}
|
||||
|
||||
func GetPlaybackServer() playback.PlaybackServer {
|
||||
dbDB := db.Db()
|
||||
dataStore := persistence.New(dbDB)
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
playbackServer := playback.GetInstance(dataStore)
|
||||
return playbackServer
|
||||
}
|
||||
|
||||
// wire_injectors.go:
|
||||
|
||||
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.GetInstance, db.Db)
|
||||
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.NewWatcher, metrics.NewPrometheusInstance, db.Db)
|
||||
|
||||
@@ -3,13 +3,17 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
"github.com/navidrome/navidrome/core/artwork"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/persistence"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/server"
|
||||
@@ -30,11 +34,19 @@ var allProviders = wire.NewSet(
|
||||
lastfm.NewRouter,
|
||||
listenbrainz.NewRouter,
|
||||
events.GetBroker,
|
||||
scanner.GetInstance,
|
||||
scanner.New,
|
||||
scanner.NewWatcher,
|
||||
metrics.NewPrometheusInstance,
|
||||
db.Db,
|
||||
)
|
||||
|
||||
func CreateServer(musicFolder string) *server.Server {
|
||||
func CreateDataStore() model.DataStore {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateServer() *server.Server {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
@@ -46,7 +58,7 @@ func CreateNativeAPIRouter() *nativeapi.Router {
|
||||
))
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter() *subsonic.Router {
|
||||
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
@@ -70,7 +82,25 @@ func CreateListenBrainzRouter() *listenbrainz.Router {
|
||||
))
|
||||
}
|
||||
|
||||
func GetScanner() scanner.Scanner {
|
||||
func CreateInsights() metrics.Insights {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreatePrometheus() metrics.Metrics {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateScanner(ctx context.Context) scanner.Scanner {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
|
||||
4
conf/buildtags/buildtags.go
Normal file
@@ -0,0 +1,4 @@
package buildtags

// This file is left intentionally empty. It is used to make sure the package is not empty, in the case all
// required build tags are disabled.
11
conf/buildtags/netgo.go
Normal file
@@ -0,0 +1,11 @@
//go:build netgo

package buildtags

// NOTICE: This file was created to force the inclusion of the `netgo` tag when compiling the project.
// If the tag is not included, the compilation will fail because this variable won't be defined, and the `main.go`
// file requires it.

// Why this tag is required? See https://github.com/navidrome/navidrome/issues/700

var NETGO = true
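Because main.go references buildtags.NETGO, a build that omits the tag now fails to compile instead of silently producing a different binary (the rationale is in the linked issue). A sketch of how the tag is passed, assuming a plain go toolchain invocation; the updated go:generate directive in cmd/wire_gen.go above uses the same flag:

go build -tags netgo ./...
go run -mod=mod github.com/google/wire/cmd/wire gen -tags "netgo"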
@@ -9,9 +9,12 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"github.com/go-viper/encoding/ini"
|
||||
"github.com/kr/pretty"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/utils/chain"
|
||||
"github.com/robfig/cron/v3"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
@@ -26,8 +29,7 @@ type configOptions struct {
|
||||
CacheFolder string
|
||||
DbPath string
|
||||
LogLevel string
|
||||
ScanInterval time.Duration
|
||||
ScanSchedule string
|
||||
LogFile string
|
||||
SessionTimeout time.Duration
|
||||
BaseURL string
|
||||
BasePath string
|
||||
@@ -41,6 +43,7 @@ type configOptions struct {
|
||||
EnableTranscodingConfig bool
|
||||
EnableDownloads bool
|
||||
EnableExternalServices bool
|
||||
EnableInsightsCollector bool
|
||||
EnableMediaFileCoverArt bool
|
||||
TranscodingCacheSize string
|
||||
ImageCacheSize string
|
||||
@@ -57,7 +60,6 @@ type configOptions struct {
|
||||
PreferSortTags bool
|
||||
IgnoredArticles string
|
||||
IndexGroups string
|
||||
SubsonicArtistParticipations bool
|
||||
FFmpegPath string
|
||||
MPVPath string
|
||||
MPVCmdTemplate string
|
||||
@@ -87,11 +89,17 @@ type configOptions struct {
|
||||
Prometheus prometheusOptions
|
||||
Scanner scannerOptions
|
||||
Jukebox jukeboxOptions
|
||||
Backup backupOptions
|
||||
PID pidOptions
|
||||
Inspect inspectOptions
|
||||
Subsonic subsonicOptions
|
||||
LyricsPriority string
|
||||
|
||||
Agents string
|
||||
LastFM lastfmOptions
|
||||
Spotify spotifyOptions
|
||||
ListenBrainz listenBrainzOptions
|
||||
Tags map[string]TagConf
|
||||
|
||||
// DevFlags. These are used to enable/disable debugging and incomplete features
|
||||
DevLogSourceLine bool
|
||||
@@ -100,8 +108,8 @@ type configOptions struct {
|
||||
DevAutoCreateAdminPassword string
|
||||
DevAutoLoginUsername string
|
||||
DevActivityPanel bool
|
||||
DevActivityPanelUpdateRate time.Duration
|
||||
DevSidebarPlaylists bool
|
||||
DevEnableBufferedScrobble bool
|
||||
DevShowArtistPage bool
|
||||
DevOffsetOptimize int
|
||||
DevArtworkMaxRequests int
|
||||
@@ -109,12 +117,38 @@ type configOptions struct {
|
||||
DevArtworkThrottleBacklogTimeout time.Duration
|
||||
DevArtistInfoTimeToLive time.Duration
|
||||
DevAlbumInfoTimeToLive time.Duration
|
||||
DevExternalScanner bool
|
||||
DevScannerThreads uint
|
||||
DevInsightsInitialDelay time.Duration
|
||||
DevEnablePlayerInsights bool
|
||||
}
|
||||
|
||||
type scannerOptions struct {
|
||||
Enabled bool
|
||||
Schedule string
|
||||
WatcherWait time.Duration
|
||||
ScanOnStartup bool
|
||||
Extractor string
|
||||
GenreSeparators string
|
||||
GroupAlbumReleases bool
|
||||
ArtistJoiner string
|
||||
GenreSeparators string // Deprecated: Use Tags.genre.Split instead
|
||||
GroupAlbumReleases bool // Deprecated: Use PID.Album instead
|
||||
FollowSymlinks bool // Whether to follow symlinks when scanning directories
|
||||
}
|
||||
|
||||
type subsonicOptions struct {
|
||||
AppendSubtitle bool
|
||||
ArtistParticipations bool
|
||||
DefaultReportRealPath bool
|
||||
LegacyClients string
|
||||
}
|
||||
|
||||
type TagConf struct {
|
||||
Ignore bool `yaml:"ignore"`
|
||||
Aliases []string `yaml:"aliases"`
|
||||
Type string `yaml:"type"`
|
||||
MaxLength int `yaml:"maxLength"`
|
||||
Split []string `yaml:"split"`
|
||||
Album bool `yaml:"album"`
|
||||
}
|
||||
|
||||
type lastfmOptions struct {
|
||||
@@ -141,6 +175,7 @@ type secureOptions struct {
|
||||
type prometheusOptions struct {
|
||||
Enabled bool
|
||||
MetricsPath string
|
||||
Password string
|
||||
}
|
||||
|
||||
type AudioDeviceDefinition []string
|
||||
@@ -152,6 +187,24 @@ type jukeboxOptions struct {
|
||||
AdminOnly bool
|
||||
}
|
||||
|
||||
type backupOptions struct {
|
||||
Count int
|
||||
Path string
|
||||
Schedule string
|
||||
}
|
||||
|
||||
type pidOptions struct {
|
||||
Track string
|
||||
Album string
|
||||
}
|
||||
|
||||
type inspectOptions struct {
|
||||
Enabled bool
|
||||
MaxRequests int
|
||||
BacklogLimit int
|
||||
BacklogTimeout int
|
||||
}
|
||||
|
||||
var (
|
||||
Server = &configOptions{}
|
||||
hooks []func()
|
||||
@@ -164,18 +217,21 @@ func LoadFromFile(confFile string) {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error reading config file:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
Load()
|
||||
Load(true)
|
||||
}
|
||||
|
||||
func Load() {
|
||||
func Load(noConfigDump bool) {
|
||||
parseIniFileConfiguration()
|
||||
|
||||
err := viper.Unmarshal(&Server)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
err = os.MkdirAll(Server.DataFolder, os.ModePerm)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating data path:", "path", Server.DataFolder, err)
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating data path:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@@ -184,7 +240,7 @@ func Load() {
|
||||
}
|
||||
err = os.MkdirAll(Server.CacheFolder, os.ModePerm)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating cache path:", "path", Server.CacheFolder, err)
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating cache path:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@@ -193,19 +249,42 @@ func Load() {
|
||||
Server.DbPath = filepath.Join(Server.DataFolder, consts.DefaultDbPath)
|
||||
}
|
||||
|
||||
if Server.Backup.Path != "" {
|
||||
err = os.MkdirAll(Server.Backup.Path, os.ModePerm)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error creating backup path:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
out := os.Stderr
|
||||
if Server.LogFile != "" {
|
||||
out, err = os.OpenFile(Server.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintf(os.Stderr, "FATAL: Error opening log file %s: %s\n", Server.LogFile, err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
log.SetOutput(out)
|
||||
}
|
||||
|
||||
log.SetLevelString(Server.LogLevel)
|
||||
log.SetLogLevels(Server.DevLogLevels)
|
||||
log.SetLogSourceLine(Server.DevLogSourceLine)
|
||||
log.SetRedacting(Server.EnableLogRedacting)
|
||||
|
||||
if err := validateScanSchedule(); err != nil {
|
||||
err = chain.RunSequentially(
|
||||
validateScanSchedule,
|
||||
validateBackupSchedule,
|
||||
validatePlaylistsPath,
|
||||
)
|
||||
if err != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if Server.BaseURL != "" {
|
||||
u, err := url.Parse(Server.BaseURL)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintf(os.Stderr, "FATAL: Invalid BaseURL %s: %s\n", Server.BaseURL, err.Error())
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Invalid BaseURL:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
Server.BasePath = u.Path
|
||||
@@ -216,26 +295,72 @@ func Load() {
|
||||
}
|
||||
|
||||
// Print current configuration if log level is Debug
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) && !noConfigDump {
|
||||
prettyConf := pretty.Sprintf("Loaded configuration from '%s': %# v", Server.ConfigFile, Server)
|
||||
if Server.EnableLogRedacting {
|
||||
prettyConf = log.Redact(prettyConf)
|
||||
}
|
||||
_, _ = fmt.Fprintln(os.Stderr, prettyConf)
|
||||
_, _ = fmt.Fprintln(out, prettyConf)
|
||||
}
|
||||
|
||||
if !Server.EnableExternalServices {
|
||||
disableExternalServices()
|
||||
}
|
||||
|
||||
if Server.Scanner.Extractor != consts.DefaultScannerExtractor {
|
||||
log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
|
||||
Server.Scanner.Extractor = consts.DefaultScannerExtractor
|
||||
}
|
||||
logDeprecatedOptions("Scanner.GenreSeparators")
|
||||
logDeprecatedOptions("Scanner.GroupAlbumReleases")
|
||||
logDeprecatedOptions("DevEnableBufferedScrobble") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
|
||||
|
||||
// Call init hooks
|
||||
for _, hook := range hooks {
|
||||
hook()
|
||||
}
|
||||
}
|
||||
|
||||
func logDeprecatedOptions(options ...string) {
|
||||
for _, option := range options {
|
||||
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
|
||||
if os.Getenv(envVar) != "" {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
|
||||
}
|
||||
if viper.InConfig(option) {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parseIniFileConfiguration is used to parse the config file when it is in INI format. For INI files, it
|
||||
// would require a nested structure, so instead we unmarshal it to a map and then merge the nested [default]
|
||||
// section into the root level.
|
||||
func parseIniFileConfiguration() {
|
||||
cfgFile := viper.ConfigFileUsed()
|
||||
if strings.ToLower(filepath.Ext(cfgFile)) == ".ini" {
|
||||
var iniConfig map[string]interface{}
|
||||
err := viper.Unmarshal(&iniConfig)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
cfg, ok := iniConfig["default"].(map[string]any)
|
||||
if !ok {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config: missing [default] section:", iniConfig)
|
||||
os.Exit(1)
|
||||
}
|
||||
err = viper.MergeConfigMap(cfg)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func disableExternalServices() {
|
||||
log.Info("All external integrations are DISABLED!")
|
||||
Server.EnableInsightsCollector = false
|
||||
Server.LastFM.Enabled = false
|
||||
Server.Spotify.ID = ""
|
||||
Server.ListenBrainz.Enabled = false
|
||||
@@ -245,33 +370,49 @@ func disableExternalServices() {
|
||||
}
|
||||
}
|
||||
|
||||
func validateScanSchedule() error {
|
||||
if Server.ScanInterval != -1 {
|
||||
log.Warn("ScanInterval is DEPRECATED. Please use ScanSchedule. See docs at https://navidrome.org/docs/usage/configuration-options/")
|
||||
if Server.ScanSchedule != "@every 1m" {
|
||||
log.Error("You cannot specify both ScanInterval and ScanSchedule, ignoring ScanInterval")
|
||||
} else {
|
||||
if Server.ScanInterval == 0 {
|
||||
Server.ScanSchedule = ""
|
||||
} else {
|
||||
Server.ScanSchedule = fmt.Sprintf("@every %s", Server.ScanInterval)
|
||||
}
|
||||
log.Warn("Setting ScanSchedule", "schedule", Server.ScanSchedule)
|
||||
func validatePlaylistsPath() error {
|
||||
for _, path := range strings.Split(Server.PlaylistsPath, string(filepath.ListSeparator)) {
|
||||
_, err := doublestar.Match(path, "")
|
||||
if err != nil {
|
||||
log.Error("Invalid PlaylistsPath", "path", path, err)
|
||||
return err
|
||||
}
|
||||
}
|
||||
if Server.ScanSchedule == "0" || Server.ScanSchedule == "" {
|
||||
Server.ScanSchedule = ""
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateScanSchedule() error {
|
||||
if Server.Scanner.Schedule == "0" || Server.Scanner.Schedule == "" {
|
||||
Server.Scanner.Schedule = ""
|
||||
return nil
|
||||
}
|
||||
if _, err := time.ParseDuration(Server.ScanSchedule); err == nil {
|
||||
Server.ScanSchedule = "@every " + Server.ScanSchedule
|
||||
var err error
|
||||
Server.Scanner.Schedule, err = validateSchedule(Server.Scanner.Schedule, "Scanner.Schedule")
|
||||
return err
|
||||
}
|
||||
|
||||
func validateBackupSchedule() error {
|
||||
if Server.Backup.Path == "" || Server.Backup.Schedule == "" || Server.Backup.Count == 0 {
|
||||
Server.Backup.Schedule = ""
|
||||
return nil
|
||||
}
|
||||
var err error
|
||||
Server.Backup.Schedule, err = validateSchedule(Server.Backup.Schedule, "Backup.Schedule")
|
||||
return err
|
||||
}
|
||||
|
||||
func validateSchedule(schedule, field string) (string, error) {
|
||||
if _, err := time.ParseDuration(schedule); err == nil {
|
||||
schedule = "@every " + schedule
|
||||
}
|
||||
c := cron.New()
|
||||
_, err := c.AddFunc(Server.ScanSchedule, func() {})
|
||||
id, err := c.AddFunc(schedule, func() {})
|
||||
if err != nil {
|
||||
log.Error("Invalid ScanSchedule. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", "schedule", Server.ScanSchedule, err)
|
||||
log.Error(fmt.Sprintf("Invalid %s. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", field), "schedule", schedule, err)
|
||||
} else {
|
||||
c.Remove(id)
|
||||
}
|
||||
return err
|
||||
return schedule, err
|
||||
}
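validateSchedule accepts either a plain Go duration, which it normalizes to an @every expression, or a cron spec understood by robfig/cron, and the same helper now backs both Scanner.Schedule and Backup.Schedule. Illustrative, equivalent ways to request an hourly scan in the config file (TOML-style dotted keys shown; the exact file format is whatever viper loads):

Scanner.Schedule = "1h"          # rewritten to "@every 1h"
Scanner.Schedule = "@every 1h"
Scanner.Schedule = "0 * * * *"   # standard cron expression, top of every hour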
|
||||
|
||||
// AddHook is used to register initialization code that should run as soon as the config is loaded
|
||||
@@ -284,12 +425,11 @@ func init() {
|
||||
viper.SetDefault("cachefolder", "")
|
||||
viper.SetDefault("datafolder", ".")
|
||||
viper.SetDefault("loglevel", "info")
|
||||
viper.SetDefault("logfile", "")
|
||||
viper.SetDefault("address", "0.0.0.0")
|
||||
viper.SetDefault("port", 4533)
|
||||
viper.SetDefault("unixsocketperm", "0660")
|
||||
viper.SetDefault("sessiontimeout", consts.DefaultSessionTimeout)
viper.SetDefault("scaninterval", -1)
viper.SetDefault("scanschedule", "@every 1m")
viper.SetDefault("baseurl", "")
viper.SetDefault("tlscert", "")
viper.SetDefault("tlskey", "")
@@ -303,7 +443,7 @@ func init() {
viper.SetDefault("enableartworkprecache", true)
viper.SetDefault("autoimportplaylists", true)
viper.SetDefault("defaultplaylistpublicvisibility", false)
viper.SetDefault("playlistspath", consts.DefaultPlaylistsPath)
viper.SetDefault("playlistspath", "")
viper.SetDefault("smartPlaylistRefreshDelay", 5*time.Second)
viper.SetDefault("enabledownloads", true)
viper.SetDefault("enableexternalservices", true)
@@ -315,7 +455,6 @@ func init() {
viper.SetDefault("prefersorttags", false)
viper.SetDefault("ignoredarticles", "The El La Los Las Le Les Os As O A")
viper.SetDefault("indexgroups", "A B C D E F G H I J K L M N O P Q R S T U V W X-Z(XYZ) [Unknown]([)")
viper.SetDefault("subsonicartistparticipations", false)
viper.SetDefault("ffmpegpath", "")
viper.SetDefault("mpvcmdtemplate", "mpv --audio-device=%d --no-audio-display --pause %f --input-ipc-server=%s")

@@ -331,7 +470,11 @@ func init() {
viper.SetDefault("defaultuivolume", consts.DefaultUIVolume)
viper.SetDefault("enablereplaygain", true)
viper.SetDefault("enablecoveranimation", true)
viper.SetDefault("enablesharing", false)
viper.SetDefault("shareurl", "")
viper.SetDefault("defaultdownloadableshare", false)
viper.SetDefault("gatrackingid", "")
viper.SetDefault("enableinsightscollector", true)
viper.SetDefault("enablelogredacting", true)
viper.SetDefault("authrequestlimit", 5)
viper.SetDefault("authwindowlength", 20*time.Second)
@@ -341,16 +484,28 @@ func init() {
viper.SetDefault("reverseproxywhitelist", "")

viper.SetDefault("prometheus.enabled", false)
viper.SetDefault("prometheus.metricspath", "/metrics")
viper.SetDefault("prometheus.metricspath", consts.PrometheusDefaultPath)
viper.SetDefault("prometheus.password", "")

viper.SetDefault("jukebox.enabled", false)
viper.SetDefault("jukebox.devices", []AudioDeviceDefinition{})
viper.SetDefault("jukebox.default", "")
viper.SetDefault("jukebox.adminonly", true)

viper.SetDefault("scanner.enabled", true)
viper.SetDefault("scanner.schedule", "0")
viper.SetDefault("scanner.extractor", consts.DefaultScannerExtractor)
viper.SetDefault("scanner.genreseparators", ";/,")
viper.SetDefault("scanner.watcherwait", consts.DefaultWatcherWait)
viper.SetDefault("scanner.scanonstartup", true)
viper.SetDefault("scanner.artistjoiner", consts.ArtistJoiner)
viper.SetDefault("scanner.genreseparators", "")
viper.SetDefault("scanner.groupalbumreleases", false)
viper.SetDefault("scanner.followsymlinks", true)

viper.SetDefault("subsonic.appendsubtitle", true)
viper.SetDefault("subsonic.artistparticipations", false)
viper.SetDefault("subsonic.defaultreportrealpath", false)
viper.SetDefault("subsonic.legacyclients", "DSub")

viper.SetDefault("agents", "lastfm,spotify")
viper.SetDefault("lastfm.enabled", true)
@@ -364,16 +519,27 @@ func init() {

viper.SetDefault("httpsecurityheaders.customframeoptionsvalue", "DENY")

viper.SetDefault("backup.path", "")
viper.SetDefault("backup.schedule", "")
viper.SetDefault("backup.count", 0)

viper.SetDefault("pid.track", consts.DefaultTrackPID)
viper.SetDefault("pid.album", consts.DefaultAlbumPID)

viper.SetDefault("inspect.enabled", true)
viper.SetDefault("inspect.maxrequests", 1)
viper.SetDefault("inspect.backloglimit", consts.RequestThrottleBacklogLimit)
viper.SetDefault("inspect.backlogtimeout", consts.RequestThrottleBacklogTimeout)

viper.SetDefault("lyricspriority", ".lrc,.txt,embedded")

// DevFlags. These are used to enable/disable debugging and incomplete features
viper.SetDefault("devlogsourceline", false)
viper.SetDefault("devenableprofiler", false)
viper.SetDefault("devautocreateadminpassword", "")
viper.SetDefault("devautologinusername", "")
viper.SetDefault("devactivitypanel", true)
viper.SetDefault("enablesharing", false)
viper.SetDefault("shareurl", "")
viper.SetDefault("defaultdownloadableshare", false)
viper.SetDefault("devenablebufferedscrobble", true)
viper.SetDefault("devactivitypanelupdaterate", 300*time.Millisecond)
viper.SetDefault("devsidebarplaylists", true)
viper.SetDefault("devshowartistpage", true)
viper.SetDefault("devoffsetoptimize", 50000)
@@ -382,9 +548,17 @@ func init() {
viper.SetDefault("devartworkthrottlebacklogtimeout", consts.RequestThrottleBacklogTimeout)
viper.SetDefault("devartistinfotimetolive", consts.ArtistInfoTimeToLive)
viper.SetDefault("devalbuminfotimetolive", consts.AlbumInfoTimeToLive)
viper.SetDefault("devexternalscanner", true)
viper.SetDefault("devscannerthreads", 5)
viper.SetDefault("devinsightsinitialdelay", consts.InsightsInitialDelay)
viper.SetDefault("devenableplayerinsights", true)
}

func InitConfig(cfgFile string) {
codecRegistry := viper.NewCodecRegistry()
_ = codecRegistry.RegisterCodec("ini", ini.Codec{})
viper.SetOptions(viper.WithCodecRegistry(codecRegistry))

cfgFile = getConfigFile(cfgFile)
if cfgFile != "" {
// Use config file from the flag.
@@ -408,9 +582,17 @@ func InitConfig(cfgFile string) {
}
}

// getConfigFile returns the path to the config file, either from the flag or from the environment variable.
// If it is defined in the environment variable, it will check if the file exists.
func getConfigFile(cfgFile string) string {
if cfgFile != "" {
return cfgFile
}
return os.Getenv("ND_CONFIGFILE")
cfgFile = os.Getenv("ND_CONFIGFILE")
if cfgFile != "" {
if _, err := os.Stat(cfgFile); err == nil {
return cfgFile
}
}
return ""
}
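A note on the InitConfig/getConfigFile changes above: the config file given via the flag always wins, ND_CONFIGFILE is only honored when the file it points to actually exists, and an INI codec is registered with Viper so .ini files become a supported config format. Below is a minimal, self-contained sketch of that codec registration (assuming Viper v1.20+ and the go-viper/encoding/ini codec package; illustrative only, not code from this change):

// Sketch: register the INI codec the same way InitConfig does above, then read an INI file.
package main

import (
	"fmt"

	"github.com/go-viper/encoding/ini" // assumed codec package; the import is elided in the diff above
	"github.com/spf13/viper"
)

func main() {
	codecRegistry := viper.NewCodecRegistry()
	_ = codecRegistry.RegisterCodec("ini", ini.Codec{})
	viper.SetOptions(viper.WithCodecRegistry(codecRegistry))

	viper.SetConfigFile("cfg.ini") // e.g. the conf/testdata/cfg.ini file added below
	if err := viper.ReadInConfig(); err != nil {
		panic(err)
	}
	fmt.Println(viper.AllSettings()) // prints the decoded settings map
}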
50 conf/configuration_test.go Normal file
@@ -0,0 +1,50 @@
package conf_test

import (
"fmt"
"path/filepath"
"testing"

. "github.com/navidrome/navidrome/conf"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/spf13/viper"
)

func TestConfiguration(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Configuration Suite")
}

var _ = Describe("Configuration", func() {
BeforeEach(func() {
// Reset viper configuration
viper.Reset()
viper.SetDefault("datafolder", GinkgoT().TempDir())
viper.SetDefault("loglevel", "error")
ResetConf()
})

DescribeTable("should load configuration from",
func(format string) {
filename := filepath.Join("testdata", "cfg."+format)

// Initialize config with the test file
InitConfig(filename)
// Load the configuration (with noConfigDump=true)
Load(true)

// Execute the format-specific assertions
Expect(Server.MusicFolder).To(Equal(fmt.Sprintf("/%s/music", format)))
Expect(Server.UIWelcomeMessage).To(Equal("Welcome " + format))
Expect(Server.Tags["custom"].Aliases).To(Equal([]string{format, "test"}))

// The config file used should be the one we created
Expect(Server.ConfigFile).To(Equal(filename))
},
Entry("TOML format", "toml"),
Entry("YAML format", "yaml"),
Entry("INI format", "ini"),
Entry("JSON format", "json"),
)
})
5 conf/export_test.go Normal file
@@ -0,0 +1,5 @@
package conf

func ResetConf() {
Server = &configOptions{}
}
@@ -21,6 +21,7 @@ func initMimeTypes() {
// In some circumstances, Windows sets JS mime-type to `text/plain`!
_ = mime.AddExtensionType(".js", "text/javascript")
_ = mime.AddExtensionType(".css", "text/css")
_ = mime.AddExtensionType(".webmanifest", "application/manifest+json")

f, err := resources.FS().Open("mime_types.yaml")
if err != nil {
6 conf/testdata/cfg.ini vendored Normal file
@@ -0,0 +1,6 @@
[default]
MusicFolder = /ini/music
UIWelcomeMessage = Welcome ini

[Tags]
Custom.Aliases = ini,test
12 conf/testdata/cfg.json vendored Normal file
@@ -0,0 +1,12 @@
{
"musicFolder": "/json/music",
"uiWelcomeMessage": "Welcome json",
"Tags": {
"custom": {
"aliases": [
"json",
"test"
]
}
}
}
5 conf/testdata/cfg.toml vendored Normal file
@@ -0,0 +1,5 @@
musicFolder = "/toml/music"
uiWelcomeMessage = "Welcome toml"

[Tags.custom]
aliases = ["toml", "test"]
7 conf/testdata/cfg.yaml vendored Normal file
@@ -0,0 +1,7 @@
musicFolder: "/yaml/music"
uiWelcomeMessage: "Welcome yaml"
Tags:
custom:
aliases:
- yaml
- test
@@ -1,26 +1,29 @@
package consts

import (
"crypto/md5"
"fmt"
"path/filepath"
"os"
"strings"
"time"

"github.com/navidrome/navidrome/model/id"
)

const (
AppName = "navidrome"

DefaultDbPath = "navidrome.db?cache=shared&_cache_size=1000000000&_busy_timeout=5000&_journal_mode=WAL&_synchronous=NORMAL&_foreign_keys=on&_txlock=immediate"
InitialSetupFlagKey = "InitialSetup"
DefaultDbPath = "navidrome.db?cache=shared&_busy_timeout=15000&_journal_mode=WAL&_foreign_keys=on&synchronous=normal"
InitialSetupFlagKey = "InitialSetup"
FullScanAfterMigrationFlagKey = "FullScanAfterMigration"

UIAuthorizationHeader = "X-ND-Authorization"
UIClientUniqueIDHeader = "X-ND-Client-Unique-Id"
JWTSecretKey = "JWTSecret"
JWTIssuer = "ND"
DefaultSessionTimeout = 24 * time.Hour
DefaultSessionTimeout = 48 * time.Hour
CookieExpiry = 365 * 24 * 3600 // One year

OptimizeDBSchedule = "@every 24h"

// DefaultEncryptionKey This is the encryption key used if none is specified in the `PasswordEncryptionKey` option
// Never ever change this! Or it will break all Navidrome installations that don't set the config option
DefaultEncryptionKey = "just for obfuscation"
@@ -50,14 +53,16 @@ const (

ServerReadHeaderTimeout = 3 * time.Second

ArtistInfoTimeToLive = 24 * time.Hour
AlbumInfoTimeToLive = 7 * 24 * time.Hour
ArtistInfoTimeToLive = 24 * time.Hour
AlbumInfoTimeToLive = 7 * 24 * time.Hour
UpdateLastAccessFrequency = time.Minute
UpdatePlayerFrequency = time.Minute

I18nFolder = "i18n"
SkipScanFile = ".ndignore"
I18nFolder = "i18n"
ScanIgnoreFile = ".ndignore"

PlaceholderArtistArt = "artist-placeholder.webp"
PlaceholderAlbumArt = "placeholder.png"
PlaceholderAlbumArt = "album-placeholder.webp"
PlaceholderAvatar = "logo-192x192.png"
UICoverArtSize = 300
DefaultUIVolume = 100
@@ -65,8 +70,14 @@ const (
DefaultHttpClientTimeOut = 10 * time.Second

DefaultScannerExtractor = "taglib"
DefaultWatcherWait = 5 * time.Second
Zwsp = string('\u200b')
)

Zwsp = string('\u200b')
// Prometheus options
const (
PrometheusDefaultPath = "/metrics"
PrometheusAuthUser = "navidrome"
)

// Cache options
@@ -86,6 +97,21 @@ const (
AlbumPlayCountModeNormalized = "normalized"
)

const (
//DefaultAlbumPID = "album_legacy"
DefaultAlbumPID = "musicbrainz_albumid|albumartistid,album,albumversion,releasedate"
DefaultTrackPID = "musicbrainz_trackid|albumid,discnumber,tracknumber,title"
PIDAlbumKey = "PIDAlbum"
PIDTrackKey = "PIDTrack"
)

const (
InsightsIDKey = "InsightsID"
InsightsEndpoint = "https://insights.navidrome.org/collect"
InsightsUpdateInterval = 24 * time.Hour
InsightsInitialDelay = 30 * time.Minute
)

var (
DefaultDownsamplingFormat = "opus"
DefaultTranscodings = []struct {
@@ -113,17 +139,29 @@ var (
Command: "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a aac -f adts -",
},
}

DefaultPlaylistsPath = strings.Join([]string{".", "**/**"}, string(filepath.ListSeparator))
)

var (
VariousArtists = "Various Artists"
VariousArtistsID = fmt.Sprintf("%x", md5.Sum([]byte(strings.ToLower(VariousArtists))))
UnknownAlbum = "[Unknown Album]"
UnknownArtist = "[Unknown Artist]"
UnknownArtistID = fmt.Sprintf("%x", md5.Sum([]byte(strings.ToLower(UnknownArtist))))
VariousArtists = "Various Artists"
// TODO This will be dynamic when using disambiguation
VariousArtistsID = "63sqASlAfjbGMuLP4JhnZU"
UnknownAlbum = "[Unknown Album]"
UnknownArtist = "[Unknown Artist]"
// TODO This will be dynamic when using disambiguation
UnknownArtistID = id.NewHash(strings.ToLower(UnknownArtist))
VariousArtistsMbzId = "89ad4ac3-39f7-470e-963a-56509c546377"

ServerStart = time.Now()
ArtistJoiner = " • "
)

var (
ServerStart = time.Now()

InContainer = func() bool {
// Check if the /.nddockerenv file exists
if _, err := os.Stat("/.nddockerenv"); err == nil {
return true
}
return false
}()
)
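Worth noting about DefaultPlaylistsPath above: it joins the two patterns with the OS path-list separator, so the default resolves to ".:**/**" on Linux/macOS and ".;**/**" on Windows. A tiny sketch (not part of the change) showing the value and how such a list is split back into individual patterns:

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	// Same expression as DefaultPlaylistsPath in the consts diff above.
	defaultPlaylistsPath := strings.Join([]string{".", "**/**"}, string(filepath.ListSeparator))
	fmt.Println(defaultPlaylistsPath)                     // ".:**/**" on Linux/macOS, ".;**/**" on Windows
	fmt.Println(filepath.SplitList(defaultPlaylistsPath)) // [. **/**]
}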
@@ -11,15 +11,13 @@ WantedBy=multi-user.target
User=navidrome
Group=navidrome
Type=simple
ExecStart=/usr/bin/navidrome
ExecStart=/usr/bin/navidrome --configfile "/etc/navidrome/navidrome.toml"
StateDirectory=navidrome
WorkingDirectory=/var/lib/navidrome
TimeoutStopSec=20
KillMode=process
Restart=on-failure

EnvironmentFile=-/etc/sysconfig/navidrome

# See https://www.freedesktop.org/software/systemd/man/systemd.exec.html
CapabilityBoundingSet=
DevicePolicy=closed
@@ -10,6 +10,7 @@ import (
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/utils"
"github.com/navidrome/navidrome/utils/singleton"
)

type Agents struct {
@@ -17,22 +18,36 @@ type Agents struct {
agents []Interface
}

func New(ds model.DataStore) *Agents {
func GetAgents(ds model.DataStore) *Agents {
return singleton.GetInstance(func() *Agents {
return createAgents(ds)
})
}

func createAgents(ds model.DataStore) *Agents {
var order []string
if conf.Server.Agents != "" {
order = strings.Split(conf.Server.Agents, ",")
}
order = append(order, LocalAgentName)
var res []Interface
var enabled []string
for _, name := range order {
init, ok := Map[name]
if !ok {
log.Error("Agent not available. Check configuration", "name", name)
log.Error("Invalid agent. Check `Agents` configuration", "name", name, "conf", conf.Server.Agents)
continue
}

res = append(res, init(ds))
agent := init(ds)
if agent == nil {
log.Debug("Agent not available. Missing configuration?", "name", name)
continue
}
enabled = append(enabled, name)
res = append(res, agent)
}
log.Debug("List of agents enabled", "names", enabled)

return &Agents{ds: ds, agents: res}
}
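The rewritten createAgents above changes the contract for agent constructors: returning nil now means "agent not available" (for example, a missing API key) and the agent is skipped with a debug log, while unknown names in the Agents option are logged as errors and ignored; the local agent is always appended last. A minimal sketch of that pattern with illustrative names, not Navidrome's actual types:

package main

import "fmt"

type Agent interface{ AgentName() string }

type constructor func() Agent

type fakeAgent struct{}

func (fakeAgent) AgentName() string { return "fake" }

var registry = map[string]constructor{
	"fake":     func() Agent { return fakeAgent{} },
	"disabled": func() Agent { return nil }, // constructor signals "not configured" by returning nil
}

func main() {
	var enabled []Agent
	for _, name := range []string{"fake", "disabled", "unknown"} {
		init, ok := registry[name]
		if !ok {
			continue // unknown name in the config: logged and skipped in the real code
		}
		if a := init(); a != nil {
			enabled = append(enabled, a)
		}
	}
	fmt.Println(len(enabled)) // 1: the nil constructor and the unknown name are both skipped
}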
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
@@ -28,7 +29,7 @@ var _ = Describe("Agents", func() {
|
||||
var ag *Agents
|
||||
BeforeEach(func() {
|
||||
conf.Server.Agents = ""
|
||||
ag = New(ds)
|
||||
ag = createAgents(ds)
|
||||
})
|
||||
|
||||
It("calls the placeholder GetArtistImages", func() {
|
||||
@@ -44,19 +45,21 @@ var _ = Describe("Agents", func() {
|
||||
var mock *mockAgent
|
||||
BeforeEach(func() {
|
||||
mock = &mockAgent{}
|
||||
Register("fake", func(ds model.DataStore) Interface {
|
||||
return mock
|
||||
})
|
||||
Register("empty", func(ds model.DataStore) Interface {
|
||||
return struct {
|
||||
Interface
|
||||
}{}
|
||||
})
|
||||
conf.Server.Agents = "empty,fake"
|
||||
ag = New(ds)
|
||||
Register("fake", func(model.DataStore) Interface { return mock })
|
||||
Register("disabled", func(model.DataStore) Interface { return nil })
|
||||
Register("empty", func(model.DataStore) Interface { return &emptyAgent{} })
|
||||
conf.Server.Agents = "empty,fake,disabled"
|
||||
ag = createAgents(ds)
|
||||
Expect(ag.AgentName()).To(Equal("agents"))
|
||||
})
|
||||
|
||||
It("does not register disabled agents", func() {
|
||||
ags := slice.Map(ag.agents, func(a Interface) string { return a.AgentName() })
|
||||
// local agent is always appended to the end of the agents list
|
||||
Expect(ags).To(HaveExactElements("empty", "fake", "local"))
|
||||
Expect(ags).ToNot(ContainElement("disabled"))
|
||||
})
|
||||
|
||||
Describe("GetArtistMBID", func() {
|
||||
It("returns on first match", func() {
|
||||
Expect(ag.GetArtistMBID(ctx, "123", "test")).To(Equal("mbid"))
|
||||
@@ -344,3 +347,11 @@ func (a *mockAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string)
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
type emptyAgent struct {
|
||||
Interface
|
||||
}
|
||||
|
||||
func (e *emptyAgent) AgentName() string {
|
||||
return "empty"
|
||||
}
|
||||
|
||||
@@ -3,11 +3,14 @@ package lastfm
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/andybalholm/cascadia"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
@@ -15,6 +18,7 @@ import (
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/cache"
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -28,15 +32,19 @@ var ignoredBiographies = []string{
|
||||
}
|
||||
|
||||
type lastfmAgent struct {
|
||||
ds model.DataStore
|
||||
sessionKeys *agents.SessionKeys
|
||||
apiKey string
|
||||
secret string
|
||||
lang string
|
||||
client *client
|
||||
ds model.DataStore
|
||||
sessionKeys *agents.SessionKeys
|
||||
apiKey string
|
||||
secret string
|
||||
lang string
|
||||
client *client
|
||||
getInfoMutex sync.Mutex
|
||||
}
|
||||
|
||||
func lastFMConstructor(ds model.DataStore) *lastfmAgent {
|
||||
if !conf.Server.LastFM.Enabled || conf.Server.LastFM.ApiKey == "" || conf.Server.LastFM.Secret == "" {
|
||||
return nil
|
||||
}
|
||||
l := &lastfmAgent{
|
||||
ds: ds,
|
||||
lang: conf.Server.LastFM.Language,
|
||||
@@ -104,7 +112,7 @@ func (l *lastfmAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid strin
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
|
||||
a, err := l.callArtistGetInfo(ctx, name, "")
|
||||
a, err := l.callArtistGetInfo(ctx, name)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -115,7 +123,7 @@ func (l *lastfmAgent) GetArtistMBID(ctx context.Context, id string, name string)
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetArtistURL(ctx context.Context, id, name, mbid string) (string, error) {
|
||||
a, err := l.callArtistGetInfo(ctx, name, mbid)
|
||||
a, err := l.callArtistGetInfo(ctx, name)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -126,7 +134,7 @@ func (l *lastfmAgent) GetArtistURL(ctx context.Context, id, name, mbid string) (
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error) {
|
||||
a, err := l.callArtistGetInfo(ctx, name, mbid)
|
||||
a, err := l.callArtistGetInfo(ctx, name)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -143,7 +151,7 @@ func (l *lastfmAgent) GetArtistBiography(ctx context.Context, id, name, mbid str
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
|
||||
resp, err := l.callArtistGetSimilar(ctx, name, mbid, limit)
|
||||
resp, err := l.callArtistGetSimilar(ctx, name, limit)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -161,7 +169,7 @@ func (l *lastfmAgent) GetSimilarArtists(ctx context.Context, id, name, mbid stri
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
|
||||
resp, err := l.callArtistGetTopTracks(ctx, artistName, mbid, count)
|
||||
resp, err := l.callArtistGetTopTracks(ctx, artistName, count)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -178,13 +186,55 @@ func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbi
|
||||
return res, nil
|
||||
}
|
||||
|
||||
var artistOpenGraphQuery = cascadia.MustCompile(`html > head > meta[property="og:image"]`)
|
||||
|
||||
func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string) ([]agents.ExternalImage, error) {
|
||||
log.Debug(ctx, "Getting artist images from Last.fm", "name", name)
|
||||
hc := http.Client{
|
||||
Timeout: consts.DefaultHttpClientTimeOut,
|
||||
}
|
||||
a, err := l.callArtistGetInfo(ctx, name)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get artist info: %w", err)
|
||||
}
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, a.URL, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("create artist image request: %w", err)
|
||||
}
|
||||
resp, err := hc.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get artist url: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
node, err := html.Parse(resp.Body)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("parse html: %w", err)
|
||||
}
|
||||
|
||||
var res []agents.ExternalImage
|
||||
n := cascadia.Query(node, artistOpenGraphQuery)
|
||||
if n == nil {
|
||||
return res, nil
|
||||
}
|
||||
for _, attr := range n.Attr {
|
||||
if attr.Key == "content" {
|
||||
res = []agents.ExternalImage{
|
||||
{URL: attr.Val},
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) callAlbumGetInfo(ctx context.Context, name, artist, mbid string) (*Album, error) {
|
||||
a, err := l.client.albumGetInfo(ctx, name, artist, mbid)
|
||||
var lfErr *lastFMError
|
||||
isLastFMError := errors.As(err, &lfErr)
|
||||
|
||||
if mbid != "" && (isLastFMError && lfErr.Code == 6) {
|
||||
log.Warn(ctx, "LastFM/album.getInfo could not find album by mbid, trying again", "album", name, "mbid", mbid)
|
||||
log.Debug(ctx, "LastFM/album.getInfo could not find album by mbid, trying again", "album", name, "mbid", mbid)
|
||||
return l.callAlbumGetInfo(ctx, name, artist, "")
|
||||
}
|
||||
|
||||
@@ -199,48 +249,31 @@ func (l *lastfmAgent) callAlbumGetInfo(ctx context.Context, name, artist, mbid s
|
||||
return a, nil
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) callArtistGetInfo(ctx context.Context, name string, mbid string) (*Artist, error) {
|
||||
a, err := l.client.artistGetInfo(ctx, name, mbid)
|
||||
var lfErr *lastFMError
|
||||
isLastFMError := errors.As(err, &lfErr)
|
||||
|
||||
if mbid != "" && ((err == nil && a.Name == "[unknown]") || (isLastFMError && lfErr.Code == 6)) {
|
||||
log.Warn(ctx, "LastFM/artist.getInfo could not find artist by mbid, trying again", "artist", name, "mbid", mbid)
|
||||
return l.callArtistGetInfo(ctx, name, "")
|
||||
}
|
||||
func (l *lastfmAgent) callArtistGetInfo(ctx context.Context, name string) (*Artist, error) {
|
||||
l.getInfoMutex.Lock()
|
||||
defer l.getInfoMutex.Unlock()
|
||||
|
||||
a, err := l.client.artistGetInfo(ctx, name)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error calling LastFM/artist.getInfo", "artist", name, "mbid", mbid, err)
|
||||
log.Error(ctx, "Error calling LastFM/artist.getInfo", "artist", name, err)
|
||||
return nil, err
|
||||
}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) callArtistGetSimilar(ctx context.Context, name string, mbid string, limit int) ([]Artist, error) {
|
||||
s, err := l.client.artistGetSimilar(ctx, name, mbid, limit)
|
||||
var lfErr *lastFMError
|
||||
isLastFMError := errors.As(err, &lfErr)
|
||||
if mbid != "" && ((err == nil && s.Attr.Artist == "[unknown]") || (isLastFMError && lfErr.Code == 6)) {
|
||||
log.Warn(ctx, "LastFM/artist.getSimilar could not find artist by mbid, trying again", "artist", name, "mbid", mbid)
|
||||
return l.callArtistGetSimilar(ctx, name, "", limit)
|
||||
}
|
||||
func (l *lastfmAgent) callArtistGetSimilar(ctx context.Context, name string, limit int) ([]Artist, error) {
|
||||
s, err := l.client.artistGetSimilar(ctx, name, limit)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error calling LastFM/artist.getSimilar", "artist", name, "mbid", mbid, err)
|
||||
log.Error(ctx, "Error calling LastFM/artist.getSimilar", "artist", name, err)
|
||||
return nil, err
|
||||
}
|
||||
return s.Artists, nil
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName, mbid string, count int) ([]Track, error) {
|
||||
t, err := l.client.artistGetTopTracks(ctx, artistName, mbid, count)
|
||||
var lfErr *lastFMError
|
||||
isLastFMError := errors.As(err, &lfErr)
|
||||
if mbid != "" && ((err == nil && t.Attr.Artist == "[unknown]") || (isLastFMError && lfErr.Code == 6)) {
|
||||
log.Warn(ctx, "LastFM/artist.getTopTracks could not find artist by mbid, trying again", "artist", artistName, "mbid", mbid)
|
||||
return l.callArtistGetTopTracks(ctx, artistName, "", count)
|
||||
}
|
||||
func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName string, count int) ([]Track, error) {
|
||||
t, err := l.client.artistGetTopTracks(ctx, artistName, count)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error calling LastFM/artist.getTopTracks", "artist", artistName, "mbid", mbid, err)
|
||||
log.Error(ctx, "Error calling LastFM/artist.getTopTracks", "artist", artistName, err)
|
||||
return nil, err
|
||||
}
|
||||
return t.Track, nil
|
||||
@@ -263,7 +296,7 @@ func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *mode
|
||||
})
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Last.fm client.updateNowPlaying returned error", "track", track.Title, err)
|
||||
return scrobbler.ErrUnrecoverable
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -271,7 +304,7 @@ func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *mode
|
||||
func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.Scrobble) error {
|
||||
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||
if err != nil || sk == "" {
|
||||
return scrobbler.ErrNotAuthorized
|
||||
return errors.Join(err, scrobbler.ErrNotAuthorized)
|
||||
}
|
||||
|
||||
if s.Duration <= 30 {
|
||||
@@ -295,12 +328,12 @@ func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.S
|
||||
isLastFMError := errors.As(err, &lfErr)
|
||||
if !isLastFMError {
|
||||
log.Warn(ctx, "Last.fm client.scrobble returned error", "track", s.Title, err)
|
||||
return scrobbler.ErrRetryLater
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
}
|
||||
if lfErr.Code == 11 || lfErr.Code == 16 {
|
||||
return scrobbler.ErrRetryLater
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
}
|
||||
return scrobbler.ErrUnrecoverable
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) IsAuthorized(ctx context.Context, userId string) bool {
|
||||
@@ -310,15 +343,11 @@ func (l *lastfmAgent) IsAuthorized(ctx context.Context, userId string) bool {
|
||||
|
||||
func init() {
|
||||
conf.AddHook(func() {
|
||||
if conf.Server.LastFM.Enabled {
|
||||
if conf.Server.LastFM.ApiKey != "" && conf.Server.LastFM.Secret != "" {
|
||||
agents.Register(lastFMAgentName, func(ds model.DataStore) agents.Interface {
|
||||
return lastFMConstructor(ds)
|
||||
})
|
||||
scrobbler.Register(lastFMAgentName, func(ds model.DataStore) scrobbler.Scrobbler {
|
||||
return lastFMConstructor(ds)
|
||||
})
|
||||
}
|
||||
}
|
||||
agents.Register(lastFMAgentName, func(ds model.DataStore) agents.Interface {
|
||||
return lastFMConstructor(ds)
|
||||
})
|
||||
scrobbler.Register(lastFMAgentName, func(ds model.DataStore) scrobbler.Scrobbler {
|
||||
return lastFMConstructor(ds)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/scrobbler"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
@@ -30,16 +31,38 @@ var _ = Describe("lastfmAgent", func() {
|
||||
BeforeEach(func() {
|
||||
ds = &tests.MockDataStore{}
|
||||
ctx = context.Background()
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.LastFM.Enabled = true
|
||||
conf.Server.LastFM.ApiKey = "123"
|
||||
conf.Server.LastFM.Secret = "secret"
|
||||
})
|
||||
Describe("lastFMConstructor", func() {
|
||||
It("uses configured api key and language", func() {
|
||||
conf.Server.LastFM.ApiKey = "123"
|
||||
conf.Server.LastFM.Secret = "secret"
|
||||
conf.Server.LastFM.Language = "pt"
|
||||
agent := lastFMConstructor(ds)
|
||||
Expect(agent.apiKey).To(Equal("123"))
|
||||
Expect(agent.secret).To(Equal("secret"))
|
||||
Expect(agent.lang).To(Equal("pt"))
|
||||
When("Agent is properly configured", func() {
|
||||
It("uses configured api key and language", func() {
|
||||
conf.Server.LastFM.Language = "pt"
|
||||
agent := lastFMConstructor(ds)
|
||||
Expect(agent.apiKey).To(Equal("123"))
|
||||
Expect(agent.secret).To(Equal("secret"))
|
||||
Expect(agent.lang).To(Equal("pt"))
|
||||
})
|
||||
})
|
||||
When("Agent is disabled", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.Enabled = false
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
When("ApiKey is empty", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.ApiKey = ""
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
When("Secret is empty", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.Secret = ""
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -56,48 +79,25 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("returns the biography", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")).To(Equal("U2 é uma das mais importantes bandas de rock de todos os tempos. Formada em 1976 em Dublin, composta por Bono (vocalista e guitarrista), The Edge (guitarrista, pianista e backing vocal), Adam Clayton (baixista), Larry Mullen, Jr. (baterista e percussionista).\n\nDesde a década de 80, U2 é uma das bandas mais populares no mundo. Seus shows são únicos e um verdadeiro festival de efeitos especiais, além de serem um dos que mais arrecadam anualmente. <a href=\"https://www.last.fm/music/U2\">Read more on Last.fm</a>"))
|
||||
Expect(agent.GetArtistBiography(ctx, "123", "U2", "")).To(Equal("U2 é uma das mais importantes bandas de rock de todos os tempos. Formada em 1976 em Dublin, composta por Bono (vocalista e guitarrista), The Edge (guitarrista, pianista e backing vocal), Adam Clayton (baixista), Larry Mullen, Jr. (baterista e percussionista).\n\nDesde a década de 80, U2 é uma das bandas mais populares no mundo. Seus shows são únicos e um verdadeiro festival de efeitos especiais, além de serem um dos que mais arrecadam anualmente. <a href=\"https://www.last.fm/music/U2\">Read more on Last.fm</a>"))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when the response is artist == [unknown]", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, _ = agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -114,51 +114,28 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("returns similar artists", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)).To(Equal([]agents.Artist{
|
||||
Expect(agent.GetSimilarArtists(ctx, "123", "U2", "", 2)).To(Equal([]agents.Artist{
|
||||
{Name: "Passengers", MBID: "e110c11f-1c94-4471-a350-c38f46b29389"},
|
||||
{Name: "INXS", MBID: "481bf5f9-2e7c-4c44-b08a-05b32bc7c00d"},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when the response is artist == [unknown]", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, _ = agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -175,51 +152,28 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("returns top songs", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)).To(Equal([]agents.Song{
|
||||
Expect(agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)).To(Equal([]agents.Song{
|
||||
{Name: "Beautiful Day", MBID: "f7f264d0-a89b-4682-9cd7-a4e7c37637af"},
|
||||
{Name: "With or Without You", MBID: "6b9a509f-6907-4a6e-9345-2f12da09ba4b"},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when the response is artist == [unknown]", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, _ = agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -65,7 +65,9 @@ func (s *Router) routes() http.Handler {
|
||||
}
|
||||
|
||||
func (s *Router) getLinkStatus(w http.ResponseWriter, r *http.Request) {
|
||||
resp := map[string]interface{}{}
|
||||
resp := map[string]interface{}{
|
||||
"apiKey": s.apiKey,
|
||||
}
|
||||
u, _ := request.UserFrom(r.Context())
|
||||
key, err := s.sessionKeys.Get(r.Context(), u.ID)
|
||||
if err != nil && !errors.Is(err, model.ErrNotFound) {
|
||||
|
||||
@@ -59,11 +59,10 @@ func (c *client) albumGetInfo(ctx context.Context, name string, artist string, m
|
||||
return &response.Album, nil
|
||||
}
|
||||
|
||||
func (c *client) artistGetInfo(ctx context.Context, name string, mbid string) (*Artist, error) {
|
||||
func (c *client) artistGetInfo(ctx context.Context, name string) (*Artist, error) {
|
||||
params := url.Values{}
|
||||
params.Add("method", "artist.getInfo")
|
||||
params.Add("artist", name)
|
||||
params.Add("mbid", mbid)
|
||||
params.Add("lang", c.lang)
|
||||
response, err := c.makeRequest(ctx, http.MethodGet, params, false)
|
||||
if err != nil {
|
||||
@@ -72,11 +71,10 @@ func (c *client) artistGetInfo(ctx context.Context, name string, mbid string) (*
|
||||
return &response.Artist, nil
|
||||
}
|
||||
|
||||
func (c *client) artistGetSimilar(ctx context.Context, name string, mbid string, limit int) (*SimilarArtists, error) {
|
||||
func (c *client) artistGetSimilar(ctx context.Context, name string, limit int) (*SimilarArtists, error) {
|
||||
params := url.Values{}
|
||||
params.Add("method", "artist.getSimilar")
|
||||
params.Add("artist", name)
|
||||
params.Add("mbid", mbid)
|
||||
params.Add("limit", strconv.Itoa(limit))
|
||||
response, err := c.makeRequest(ctx, http.MethodGet, params, false)
|
||||
if err != nil {
|
||||
@@ -85,11 +83,10 @@ func (c *client) artistGetSimilar(ctx context.Context, name string, mbid string,
|
||||
return &response.SimilarArtists, nil
|
||||
}
|
||||
|
||||
func (c *client) artistGetTopTracks(ctx context.Context, name string, mbid string, limit int) (*TopTracks, error) {
|
||||
func (c *client) artistGetTopTracks(ctx context.Context, name string, limit int) (*TopTracks, error) {
|
||||
params := url.Values{}
|
||||
params.Add("method", "artist.getTopTracks")
|
||||
params.Add("artist", name)
|
||||
params.Add("mbid", mbid)
|
||||
params.Add("limit", strconv.Itoa(limit))
|
||||
response, err := c.makeRequest(ctx, http.MethodGet, params, false)
|
||||
if err != nil {
|
||||
|
||||
@@ -42,10 +42,10 @@ var _ = Describe("client", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
artist, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
artist, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(BeNil())
|
||||
Expect(artist.Name).To(Equal("U2"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&lang=pt&mbid=123&method=artist.getInfo"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&lang=pt&method=artist.getInfo"))
|
||||
})
|
||||
|
||||
It("fails if Last.fm returns an http status != 200", func() {
|
||||
@@ -54,7 +54,7 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 500,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError("last.fm http status: (500)"))
|
||||
})
|
||||
|
||||
@@ -64,7 +64,7 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 400,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError(&lastFMError{Code: 3, Message: "Invalid Method - No method with that name in this package"}))
|
||||
})
|
||||
|
||||
@@ -74,14 +74,14 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 200,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError(&lastFMError{Code: 6, Message: "The artist you supplied could not be found"}))
|
||||
})
|
||||
|
||||
It("fails if HttpClient.Do() returns error", func() {
|
||||
httpClient.Err = errors.New("generic error")
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError("generic error"))
|
||||
})
|
||||
|
||||
@@ -91,7 +91,7 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 200,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
Expect(err).To(MatchError("invalid character '<' looking for beginning of value"))
|
||||
})
|
||||
|
||||
@@ -102,10 +102,10 @@ var _ = Describe("client", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
similar, err := client.artistGetSimilar(context.Background(), "U2", "123", 2)
|
||||
similar, err := client.artistGetSimilar(context.Background(), "U2", 2)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(len(similar.Artists)).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&mbid=123&method=artist.getSimilar"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&method=artist.getSimilar"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -114,10 +114,10 @@ var _ = Describe("client", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
top, err := client.artistGetTopTracks(context.Background(), "U2", "123", 2)
|
||||
top, err := client.artistGetTopTracks(context.Background(), "U2", 2)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(len(top.Track)).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&mbid=123&method=artist.getTopTracks"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&method=artist.getTopTracks"))
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/cache"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -45,6 +46,12 @@ func (l *listenBrainzAgent) AgentName() string {
|
||||
}
|
||||
|
||||
func (l *listenBrainzAgent) formatListen(track *model.MediaFile) listenInfo {
|
||||
artistMBIDs := slice.Map(track.Participants[model.RoleArtist], func(p model.Participant) string {
|
||||
return p.MbzArtistID
|
||||
})
|
||||
artistNames := slice.Map(track.Participants[model.RoleArtist], func(p model.Participant) string {
|
||||
return p.Name
|
||||
})
|
||||
li := listenInfo{
|
||||
TrackMetadata: trackMetadata{
|
||||
ArtistName: track.Artist,
|
||||
@@ -54,9 +61,11 @@ func (l *listenBrainzAgent) formatListen(track *model.MediaFile) listenInfo {
|
||||
SubmissionClient: consts.AppName,
|
||||
SubmissionClientVersion: consts.Version,
|
||||
TrackNumber: track.TrackNumber,
|
||||
ArtistMbzIDs: []string{track.MbzArtistID},
|
||||
RecordingMbzID: track.MbzRecordingID,
|
||||
ReleaseMbID: track.MbzAlbumID,
|
||||
ArtistNames: artistNames,
|
||||
ArtistMBIDs: artistMBIDs,
|
||||
RecordingMBID: track.MbzRecordingID,
|
||||
ReleaseMBID: track.MbzAlbumID,
|
||||
ReleaseGroupMBID: track.MbzReleaseGroupID,
|
||||
DurationMs: int(track.Duration * 1000),
|
||||
},
|
||||
},
|
||||
@@ -67,14 +76,14 @@ func (l *listenBrainzAgent) formatListen(track *model.MediaFile) listenInfo {
|
||||
func (l *listenBrainzAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile) error {
|
||||
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||
if err != nil || sk == "" {
|
||||
return scrobbler.ErrNotAuthorized
|
||||
return errors.Join(err, scrobbler.ErrNotAuthorized)
|
||||
}
|
||||
|
||||
li := l.formatListen(track)
|
||||
err = l.client.updateNowPlaying(ctx, sk, li)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "ListenBrainz updateNowPlaying returned error", "track", track.Title, err)
|
||||
return scrobbler.ErrUnrecoverable
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -82,7 +91,7 @@ func (l *listenBrainzAgent) NowPlaying(ctx context.Context, userId string, track
|
||||
func (l *listenBrainzAgent) Scrobble(ctx context.Context, userId string, s scrobbler.Scrobble) error {
|
||||
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||
if err != nil || sk == "" {
|
||||
return scrobbler.ErrNotAuthorized
|
||||
return errors.Join(err, scrobbler.ErrNotAuthorized)
|
||||
}
|
||||
|
||||
li := l.formatListen(&s.MediaFile)
|
||||
@@ -96,12 +105,12 @@ func (l *listenBrainzAgent) Scrobble(ctx context.Context, userId string, s scrob
|
||||
isListenBrainzError := errors.As(err, &lbErr)
|
||||
if !isListenBrainzError {
|
||||
log.Warn(ctx, "ListenBrainz Scrobble returned HTTP error", "track", s.Title, err)
|
||||
return scrobbler.ErrRetryLater
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
}
|
||||
if lbErr.Code == 500 || lbErr.Code == 503 {
|
||||
return scrobbler.ErrRetryLater
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
}
|
||||
return scrobbler.ErrUnrecoverable
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
}
|
||||
|
||||
func (l *listenBrainzAgent) IsAuthorized(ctx context.Context, userId string) bool {
|
||||
|
||||
@@ -32,24 +32,26 @@ var _ = Describe("listenBrainzAgent", func() {
|
||||
agent = listenBrainzConstructor(ds)
|
||||
agent.client = newClient("http://localhost:8080", httpClient)
|
||||
track = &model.MediaFile{
|
||||
ID: "123",
|
||||
Title: "Track Title",
|
||||
Album: "Track Album",
|
||||
Artist: "Track Artist",
|
||||
TrackNumber: 1,
|
||||
MbzRecordingID: "mbz-123",
|
||||
MbzAlbumID: "mbz-456",
|
||||
MbzArtistID: "mbz-789",
|
||||
Duration: 142.2,
|
||||
ID: "123",
|
||||
Title: "Track Title",
|
||||
Album: "Track Album",
|
||||
Artist: "Track Artist",
|
||||
TrackNumber: 1,
|
||||
MbzRecordingID: "mbz-123",
|
||||
MbzAlbumID: "mbz-456",
|
||||
MbzReleaseGroupID: "mbz-789",
|
||||
Duration: 142.2,
|
||||
Participants: map[model.Role]model.ParticipantList{
|
||||
model.RoleArtist: []model.Participant{
|
||||
{Artist: model.Artist{ID: "ar-1", Name: "Artist 1", MbzArtistID: "mbz-111"}},
|
||||
{Artist: model.Artist{ID: "ar-2", Name: "Artist 2", MbzArtistID: "mbz-222"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
Describe("formatListen", func() {
|
||||
It("constructs the listenInfo properly", func() {
|
||||
var idArtistId = func(element interface{}) string {
|
||||
return element.(string)
|
||||
}
|
||||
|
||||
lr := agent.formatListen(track)
|
||||
Expect(lr).To(MatchAllFields(Fields{
|
||||
"ListenedAt": Equal(0),
|
||||
@@ -61,12 +63,12 @@ var _ = Describe("listenBrainzAgent", func() {
|
||||
"SubmissionClient": Equal(consts.AppName),
|
||||
"SubmissionClientVersion": Equal(consts.Version),
|
||||
"TrackNumber": Equal(track.TrackNumber),
|
||||
"RecordingMbzID": Equal(track.MbzRecordingID),
|
||||
"ReleaseMbID": Equal(track.MbzAlbumID),
|
||||
"ArtistMbzIDs": MatchAllElements(idArtistId, Elements{
|
||||
"mbz-789": Equal(track.MbzArtistID),
|
||||
}),
|
||||
"DurationMs": Equal(142200),
|
||||
"RecordingMBID": Equal(track.MbzRecordingID),
|
||||
"ReleaseMBID": Equal(track.MbzAlbumID),
|
||||
"ReleaseGroupMBID": Equal(track.MbzReleaseGroupID),
|
||||
"ArtistNames": ConsistOf("Artist 1", "Artist 2"),
|
||||
"ArtistMBIDs": ConsistOf("mbz-111", "mbz-222"),
|
||||
"DurationMs": Equal(142200),
|
||||
}),
|
||||
}),
|
||||
}))
|
||||
|
||||
@@ -76,9 +76,11 @@ type additionalInfo struct {
|
||||
SubmissionClient string `json:"submission_client,omitempty"`
|
||||
SubmissionClientVersion string `json:"submission_client_version,omitempty"`
|
||||
TrackNumber int `json:"tracknumber,omitempty"`
|
||||
RecordingMbzID string `json:"recording_mbid,omitempty"`
|
||||
ArtistMbzIDs []string `json:"artist_mbids,omitempty"`
|
||||
ReleaseMbID string `json:"release_mbid,omitempty"`
|
||||
ArtistNames []string `json:"artist_names,omitempty"`
|
||||
ArtistMBIDs []string `json:"artist_mbids,omitempty"`
|
||||
RecordingMBID string `json:"recording_mbid,omitempty"`
|
||||
ReleaseMBID string `json:"release_mbid,omitempty"`
|
||||
ReleaseGroupMBID string `json:"release_group_mbid,omitempty"`
|
||||
DurationMs int `json:"duration_ms,omitempty"`
|
||||
}
|
||||
|
||||
|
||||
@@ -74,11 +74,12 @@ var _ = Describe("client", func() {
|
||||
TrackName: "Track Title",
|
||||
ReleaseName: "Track Album",
|
||||
AdditionalInfo: additionalInfo{
|
||||
TrackNumber: 1,
|
||||
RecordingMbzID: "mbz-123",
|
||||
ArtistMbzIDs: []string{"mbz-789"},
|
||||
ReleaseMbID: "mbz-456",
|
||||
DurationMs: 142200,
|
||||
TrackNumber: 1,
|
||||
ArtistNames: []string{"Artist 1", "Artist 2"},
|
||||
ArtistMBIDs: []string{"mbz-789", "mbz-012"},
|
||||
RecordingMBID: "mbz-123",
|
||||
ReleaseMBID: "mbz-456",
|
||||
DurationMs: 142200,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -27,6 +27,9 @@ type spotifyAgent struct {
|
||||
}
|
||||
|
||||
func spotifyConstructor(ds model.DataStore) agents.Interface {
|
||||
if conf.Server.Spotify.ID == "" || conf.Server.Spotify.Secret == "" {
|
||||
return nil
|
||||
}
|
||||
l := &spotifyAgent{
|
||||
ds: ds,
|
||||
id: conf.Server.Spotify.ID,
|
||||
@@ -88,8 +91,6 @@ func (s *spotifyAgent) searchArtist(ctx context.Context, name string) (*Artist,
|
||||
|
||||
func init() {
|
||||
conf.AddHook(func() {
|
||||
if conf.Server.Spotify.ID != "" && conf.Server.Spotify.Secret != "" {
|
||||
agents.Register(spotifyAgentName, spotifyConstructor)
|
||||
}
|
||||
agents.Register(spotifyAgentName, spotifyConstructor)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -53,11 +53,11 @@ func (a *archiver) zipAlbums(ctx context.Context, id string, format string, bitr
|
||||
})
|
||||
for _, album := range albums {
|
||||
discs := slice.Group(album, func(mf model.MediaFile) int { return mf.DiscNumber })
|
||||
isMultDisc := len(discs) > 1
|
||||
isMultiDisc := len(discs) > 1
|
||||
log.Debug(ctx, "Zipping album", "name", album[0].Album, "artist", album[0].AlbumArtist,
|
||||
"format", format, "bitrate", bitrate, "isMultDisc", isMultDisc, "numTracks", len(album))
|
||||
"format", format, "bitrate", bitrate, "isMultiDisc", isMultiDisc, "numTracks", len(album))
|
||||
for _, mf := range album {
|
||||
file := a.albumFilename(mf, format, isMultDisc)
|
||||
file := a.albumFilename(mf, format, isMultiDisc)
|
||||
_ = a.addFileToZip(ctx, z, mf, format, bitrate, file)
|
||||
}
|
||||
}
|
||||
@@ -78,12 +78,12 @@ func createZipWriter(out io.Writer, format string, bitrate int) *zip.Writer {
|
||||
return z
|
||||
}
|
||||
|
||||
func (a *archiver) albumFilename(mf model.MediaFile, format string, isMultDisc bool) string {
|
||||
func (a *archiver) albumFilename(mf model.MediaFile, format string, isMultiDisc bool) string {
|
||||
_, file := filepath.Split(mf.Path)
|
||||
if format != "raw" {
|
||||
file = strings.TrimSuffix(file, mf.Suffix) + format
|
||||
}
|
||||
if isMultDisc {
|
||||
if isMultiDisc {
|
||||
file = fmt.Sprintf("Disc %02d/%s", mf.DiscNumber, file)
|
||||
}
|
||||
return fmt.Sprintf("%s/%s", sanitizeName(mf.Album), file)
|
||||
@@ -91,18 +91,18 @@ func (a *archiver) albumFilename(mf model.MediaFile, format string, isMultDisc b
|
||||
|
||||
func (a *archiver) ZipShare(ctx context.Context, id string, out io.Writer) error {
|
||||
s, err := a.shares.Load(ctx, id)
|
||||
if !s.Downloadable {
|
||||
return model.ErrNotAuthorized
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !s.Downloadable {
|
||||
return model.ErrNotAuthorized
|
||||
}
|
||||
log.Debug(ctx, "Zipping share", "name", s.ID, "format", s.Format, "bitrate", s.MaxBitRate, "numTracks", len(s.Tracks))
|
||||
return a.zipMediaFiles(ctx, id, s.Format, s.MaxBitRate, out, s.Tracks)
|
||||
}
|
||||
|
||||
func (a *archiver) ZipPlaylist(ctx context.Context, id string, format string, bitrate int, out io.Writer) error {
|
||||
pls, err := a.ds.Playlist(ctx).GetWithTracks(id, true)
|
||||
pls, err := a.ds.Playlist(ctx).GetWithTracks(id, true, false)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error loading mediafiles from playlist", "id", id, err)
|
||||
return err
|
||||
@@ -138,13 +138,14 @@ func sanitizeName(target string) string {
}

func (a *archiver) addFileToZip(ctx context.Context, z *zip.Writer, mf model.MediaFile, format string, bitrate int, filename string) error {
path := mf.AbsolutePath()
w, err := z.CreateHeader(&zip.FileHeader{
Name: filename,
Modified: mf.UpdatedAt,
Method: zip.Store,
})
if err != nil {
log.Error(ctx, "Error creating zip entry", "file", mf.Path, err)
log.Error(ctx, "Error creating zip entry", "file", path, err)
return err
}

@@ -152,22 +153,22 @@ func (a *archiver) addFileToZip(ctx context.Context, z *zip.Writer, mf model.Med
if format != "raw" && format != "" {
r, err = a.ms.DoStream(ctx, &mf, format, bitrate, 0)
} else {
r, err = os.Open(mf.Path)
r, err = os.Open(path)
}
if err != nil {
log.Error(ctx, "Error opening file for zipping", "file", mf.Path, "format", format, err)
log.Error(ctx, "Error opening file for zipping", "file", path, "format", format, err)
return err
}

defer func() {
if err := r.Close(); err != nil && log.IsGreaterOrEqualTo(log.LevelDebug) {
log.Error(ctx, "Error closing stream", "id", mf.ID, "file", mf.Path, err)
log.Error(ctx, "Error closing stream", "id", mf.ID, "file", path, err)
}
}()

_, err = io.Copy(w, r)
if err != nil {
log.Error(ctx, "Error zipping file", "file", mf.Path, err)
log.Error(ctx, "Error zipping file", "file", path, err)
return err
}

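addFileToZip above creates each archive entry with `zip.Store`, so already-compressed audio is copied into the zip without being recompressed. A small, stand-alone sketch of that standard-library pattern; the file paths, entry name, and modification time here are made up for illustration:

```go
package main

import (
	"archive/zip"
	"io"
	"log"
	"os"
	"time"
)

func main() {
	out, err := os.Create("album.zip") // hypothetical output file
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	z := zip.NewWriter(out)
	defer z.Close()

	// Create the entry without compression (zip.Store), mirroring the hunk above.
	w, err := z.CreateHeader(&zip.FileHeader{
		Name:     "Album/Disc 01/01 - Track.mp3", // hypothetical entry name
		Modified: time.Now(),
		Method:   zip.Store,
	})
	if err != nil {
		log.Fatal(err)
	}

	src, err := os.Open("/music/album/01 - Track.mp3") // hypothetical source path
	if err != nil {
		log.Fatal(err)
	}
	defer src.Close()

	// Stream the file straight into the archive entry.
	if _, err := io.Copy(w, src); err != nil {
		log.Fatal(err)
	}
}
```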
@@ -25,8 +25,8 @@ var _ = Describe("Archiver", func() {

BeforeEach(func() {
ms = &mockMediaStreamer{}
ds = &mockDataStore{}
sh = &mockShare{}
ds = &mockDataStore{}
arch = core.NewArchiver(ms, ds, sh)
})

@@ -134,7 +134,7 @@ var _ = Describe("Archiver", func() {
}

plRepo := &mockPlaylistRepository{}
plRepo.On("GetWithTracks", "1", true).Return(pls, nil)
plRepo.On("GetWithTracks", "1", true, false).Return(pls, nil)
ds.On("Playlist", mock.Anything).Return(plRepo)
ms.On("DoStream", mock.Anything, mock.Anything, "mp3", 128, 0).Return(io.NopCloser(strings.NewReader("test")), nil).Times(2)

@@ -167,6 +167,19 @@ func (m *mockDataStore) Playlist(ctx context.Context) model.PlaylistRepository {
|
||||
return args.Get(0).(model.PlaylistRepository)
|
||||
}
|
||||
|
||||
func (m *mockDataStore) Library(context.Context) model.LibraryRepository {
|
||||
return &mockLibraryRepository{}
|
||||
}
|
||||
|
||||
type mockLibraryRepository struct {
|
||||
mock.Mock
|
||||
model.LibraryRepository
|
||||
}
|
||||
|
||||
func (m *mockLibraryRepository) GetPath(id int) (string, error) {
|
||||
return "/music", nil
|
||||
}
|
||||
|
||||
type mockMediaFileRepository struct {
|
||||
mock.Mock
|
||||
model.MediaFileRepository
|
||||
@@ -182,8 +195,8 @@ type mockPlaylistRepository struct {
|
||||
model.PlaylistRepository
|
||||
}
|
||||
|
||||
func (m *mockPlaylistRepository) GetWithTracks(id string, includeTracks bool) (*model.Playlist, error) {
|
||||
args := m.Called(id, includeTracks)
|
||||
func (m *mockPlaylistRepository) GetWithTracks(id string, refreshSmartPlaylists, includeMissing bool) (*model.Playlist, error) {
|
||||
args := m.Called(id, refreshSmartPlaylists, includeMissing)
|
||||
return args.Get(0).(*model.Playlist), args.Error(1)
|
||||
}
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ import (
"time"

"github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/core/external"
"github.com/navidrome/navidrome/core/ffmpeg"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
@@ -24,15 +24,15 @@ type Artwork interface {
GetOrPlaceholder(ctx context.Context, id string, size int, square bool) (io.ReadCloser, time.Time, error)
}

func NewArtwork(ds model.DataStore, cache cache.FileCache, ffmpeg ffmpeg.FFmpeg, em core.ExternalMetadata) Artwork {
return &artwork{ds: ds, cache: cache, ffmpeg: ffmpeg, em: em}
func NewArtwork(ds model.DataStore, cache cache.FileCache, ffmpeg ffmpeg.FFmpeg, provider external.Provider) Artwork {
return &artwork{ds: ds, cache: cache, ffmpeg: ffmpeg, provider: provider}
}

type artwork struct {
ds model.DataStore
cache cache.FileCache
ffmpeg ffmpeg.FFmpeg
em core.ExternalMetadata
ds model.DataStore
cache cache.FileCache
ffmpeg ffmpeg.FFmpeg
provider external.Provider
}

type artworkReader interface {
@@ -115,9 +115,9 @@ func (a *artwork) getArtworkReader(ctx context.Context, artID model.ArtworkID, s
} else {
switch artID.Kind {
case model.KindArtistArtwork:
artReader, err = newArtistReader(ctx, a, artID, a.em)
artReader, err = newArtistReader(ctx, a, artID, a.provider)
case model.KindAlbumArtwork:
artReader, err = newAlbumArtworkReader(ctx, a, artID, a.em)
artReader, err = newAlbumArtworkReader(ctx, a, artID, a.provider)
case model.KindMediaFileArtwork:
artReader, err = newMediafileArtworkReader(ctx, a, artID)
case model.KindPlaylistArtwork:

@@ -4,15 +4,10 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"image"
|
||||
"image/jpeg"
|
||||
"image/png"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
@@ -20,7 +15,8 @@ import (
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Artwork", func() {
|
||||
// TODO Fix tests
|
||||
var _ = XDescribe("Artwork", func() {
|
||||
var aw *artwork
|
||||
var ds model.DataStore
|
||||
var ffmpeg *tests.MockFFmpeg
|
||||
@@ -37,17 +33,17 @@ var _ = Describe("Artwork", func() {
|
||||
ds = &tests.MockDataStore{MockedTranscoding: &tests.MockTranscodingRepo{}}
|
||||
alOnlyEmbed = model.Album{ID: "222", Name: "Only embed", EmbedArtPath: "tests/fixtures/artist/an-album/test.mp3"}
|
||||
alEmbedNotFound = model.Album{ID: "333", Name: "Embed not found", EmbedArtPath: "tests/fixtures/NON_EXISTENT.mp3"}
|
||||
alOnlyExternal = model.Album{ID: "444", Name: "Only external", ImageFiles: "tests/fixtures/artist/an-album/front.png"}
|
||||
alExternalNotFound = model.Album{ID: "555", Name: "External not found", ImageFiles: "tests/fixtures/NON_EXISTENT.png"}
|
||||
//alOnlyExternal = model.Album{ID: "444", Name: "Only external", ImageFiles: "tests/fixtures/artist/an-album/front.png"}
|
||||
//alExternalNotFound = model.Album{ID: "555", Name: "External not found", ImageFiles: "tests/fixtures/NON_EXISTENT.png"}
|
||||
arMultipleCovers = model.Artist{ID: "777", Name: "All options"}
|
||||
alMultipleCovers = model.Album{
|
||||
ID: "666",
|
||||
Name: "All options",
|
||||
EmbedArtPath: "tests/fixtures/artist/an-album/test.mp3",
|
||||
Paths: "tests/fixtures/artist/an-album",
|
||||
ImageFiles: "tests/fixtures/artist/an-album/cover.jpg" + consts.Zwsp +
|
||||
"tests/fixtures/artist/an-album/front.png" + consts.Zwsp +
|
||||
"tests/fixtures/artist/an-album/artist.png",
|
||||
//Paths: []string{"tests/fixtures/artist/an-album"},
|
||||
//ImageFiles: "tests/fixtures/artist/an-album/cover.jpg" + consts.Zwsp +
|
||||
// "tests/fixtures/artist/an-album/front.png" + consts.Zwsp +
|
||||
// "tests/fixtures/artist/an-album/artist.png",
|
||||
AlbumArtistID: "777",
|
||||
}
|
||||
mfWithEmbed = model.MediaFile{ID: "22", Path: "tests/fixtures/test.mp3", HasCoverArt: true, AlbumID: "222"}
|
||||
@@ -245,11 +241,11 @@ var _ = Describe("Artwork", func() {
|
||||
DescribeTable("resize",
|
||||
func(format string, landscape bool, size int) {
|
||||
coverFileName := "cover." + format
|
||||
dirName := createImage(format, landscape, size)
|
||||
//dirName := createImage(format, landscape, size)
|
||||
alCover = model.Album{
|
||||
ID: "444",
|
||||
Name: "Only external",
|
||||
ImageFiles: filepath.Join(dirName, coverFileName),
|
||||
ID: "444",
|
||||
Name: "Only external",
|
||||
//ImageFiles: filepath.Join(dirName, coverFileName),
|
||||
}
|
||||
ds.Album(ctx).(*tests.MockAlbumRepo).SetData(model.Albums{
|
||||
alCover,
|
||||
@@ -274,24 +270,24 @@ var _ = Describe("Artwork", func() {
|
||||
})
|
||||
})
|
||||
|
||||
func createImage(format string, landscape bool, size int) string {
|
||||
var img image.Image
|
||||
|
||||
if landscape {
|
||||
img = image.NewRGBA(image.Rect(0, 0, size, size/2))
|
||||
} else {
|
||||
img = image.NewRGBA(image.Rect(0, 0, size/2, size))
|
||||
}
|
||||
|
||||
tmpDir := GinkgoT().TempDir()
|
||||
f, _ := os.Create(filepath.Join(tmpDir, "cover."+format))
|
||||
defer f.Close()
|
||||
switch format {
|
||||
case "png":
|
||||
_ = png.Encode(f, img)
|
||||
case "jpg":
|
||||
_ = jpeg.Encode(f, img, &jpeg.Options{Quality: 75})
|
||||
}
|
||||
|
||||
return tmpDir
|
||||
}
|
||||
//func createImage(format string, landscape bool, size int) string {
|
||||
// var img image.Image
|
||||
//
|
||||
// if landscape {
|
||||
// img = image.NewRGBA(image.Rect(0, 0, size, size/2))
|
||||
// } else {
|
||||
// img = image.NewRGBA(image.Rect(0, 0, size/2, size))
|
||||
// }
|
||||
//
|
||||
// tmpDir := GinkgoT().TempDir()
|
||||
// f, _ := os.Create(filepath.Join(tmpDir, "cover."+format))
|
||||
// defer f.Close()
|
||||
// switch format {
|
||||
// case "png":
|
||||
// _ = png.Encode(f, img)
|
||||
// case "jpg":
|
||||
// _ = jpeg.Encode(f, img, &jpeg.Options{Quality: 75})
|
||||
// }
|
||||
//
|
||||
// return tmpDir
|
||||
//}
|
||||
|
||||
@@ -22,6 +22,9 @@ type CacheWarmer interface {
PreCache(artID model.ArtworkID)
}

// NewCacheWarmer creates a new CacheWarmer instance. The CacheWarmer will pre-cache Artwork images in the background
// to speed up the response time when the image is requested by the UI. The cache is pre-populated with the original
// image size, as well as the size defined in the UICoverArtSize constant.
func NewCacheWarmer(artwork Artwork, cache cache.FileCache) CacheWarmer {
// If image cache is disabled, return a NOOP implementation
if conf.Server.ImageCacheSize == "0" || !conf.Server.EnableArtworkPrecache {
@@ -49,15 +52,7 @@ type cacheWarmer struct {
wakeSignal chan struct{}
}

var ignoredIds = map[string]struct{}{
consts.VariousArtistsID: {},
consts.UnknownArtistID: {},
}

func (a *cacheWarmer) PreCache(artID model.ArtworkID) {
if _, shouldIgnore := ignoredIds[artID.ID]; shouldIgnore {
return
}
a.mutex.Lock()
defer a.mutex.Unlock()
a.buffer[artID] = struct{}{}
@@ -104,14 +99,8 @@ func (a *cacheWarmer) run(ctx context.Context) {
}

func (a *cacheWarmer) waitSignal(ctx context.Context, timeout time.Duration) {
var to <-chan time.Time
if !a.cache.Available(ctx) {
tmr := time.NewTimer(timeout)
defer tmr.Stop()
to = tmr.C
}
select {
case <-to:
case <-time.After(timeout):
case <-a.wakeSignal:
case <-ctx.Done():
}
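The simplified waitSignal above blocks until it is woken up, the timeout elapses, or the context is cancelled, whichever happens first. A hedged, stand-alone sketch of that select pattern:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// waitSignal returns when the wake channel fires, the timeout elapses,
// or the context is cancelled — whichever happens first.
func waitSignal(ctx context.Context, wake <-chan struct{}, timeout time.Duration) string {
	select {
	case <-wake:
		return "woken"
	case <-time.After(timeout):
		return "timeout"
	case <-ctx.Done():
		return "cancelled"
	}
}

func main() {
	wake := make(chan struct{}, 1)
	go func() { wake <- struct{}{} }()
	// The wake signal normally wins the race against the one-second timeout.
	fmt.Println(waitSignal(context.Background(), wake, time.Second))
}
```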
@@ -130,7 +119,7 @@ func (a *cacheWarmer) doCacheImage(ctx context.Context, id model.ArtworkID) erro
|
||||
ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
r, _, err := a.artwork.Get(ctx, id, consts.UICoverArtSize, false)
|
||||
r, _, err := a.artwork.Get(ctx, id, consts.UICoverArtSize, true)
|
||||
if err != nil {
|
||||
return fmt.Errorf("caching id='%s': %w", id, err)
|
||||
}
|
||||
@@ -142,6 +131,10 @@ func (a *cacheWarmer) doCacheImage(ctx context.Context, id model.ArtworkID) erro
|
||||
return nil
|
||||
}
|
||||
|
||||
func NoopCacheWarmer() CacheWarmer {
|
||||
return &noopCacheWarmer{}
|
||||
}
|
||||
|
||||
type noopCacheWarmer struct{}
|
||||
|
||||
func (a *noopCacheWarmer) PreCache(model.ArtworkID) {}
|
||||
|
||||
@@ -5,34 +5,52 @@ import (
|
||||
"crypto/md5"
|
||||
"fmt"
|
||||
"io"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Masterminds/squirrel"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
)
|
||||
|
||||
type albumArtworkReader struct {
|
||||
cacheKey
|
||||
a *artwork
|
||||
em core.ExternalMetadata
|
||||
album model.Album
|
||||
a *artwork
|
||||
provider external.Provider
|
||||
album model.Album
|
||||
updatedAt *time.Time
|
||||
imgFiles []string
|
||||
rootFolder string
|
||||
}
|
||||
|
||||
func newAlbumArtworkReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, em core.ExternalMetadata) (*albumArtworkReader, error) {
|
||||
func newAlbumArtworkReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, provider external.Provider) (*albumArtworkReader, error) {
|
||||
al, err := artwork.ds.Album(ctx).Get(artID.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_, imgFiles, imagesUpdateAt, err := loadAlbumFoldersPaths(ctx, artwork.ds, *al)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a := &albumArtworkReader{
|
||||
a: artwork,
|
||||
em: em,
|
||||
album: *al,
|
||||
a: artwork,
|
||||
provider: provider,
|
||||
album: *al,
|
||||
updatedAt: imagesUpdateAt,
|
||||
imgFiles: imgFiles,
|
||||
rootFolder: core.AbsolutePath(ctx, artwork.ds, al.LibraryID, ""),
|
||||
}
|
||||
a.cacheKey.artID = artID
|
||||
a.cacheKey.lastUpdate = al.UpdatedAt
|
||||
if a.updatedAt != nil && a.updatedAt.After(al.UpdatedAt) {
|
||||
a.cacheKey.lastUpdate = *a.updatedAt
|
||||
} else {
|
||||
a.cacheKey.lastUpdate = al.UpdatedAt
|
||||
}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
@@ -63,12 +81,43 @@ func (a *albumArtworkReader) fromCoverArtPriority(ctx context.Context, ffmpeg ff
pattern = strings.TrimSpace(pattern)
switch {
case pattern == "embedded":
ff = append(ff, fromTag(a.album.EmbedArtPath), fromFFmpegTag(ctx, ffmpeg, a.album.EmbedArtPath))
embedArtPath := filepath.Join(a.rootFolder, a.album.EmbedArtPath)
ff = append(ff, fromTag(ctx, embedArtPath), fromFFmpegTag(ctx, ffmpeg, embedArtPath))
case pattern == "external":
ff = append(ff, fromAlbumExternalSource(ctx, a.album, a.em))
case a.album.ImageFiles != "":
ff = append(ff, fromExternalFile(ctx, a.album.ImageFiles, pattern))
ff = append(ff, fromAlbumExternalSource(ctx, a.album, a.provider))
case len(a.imgFiles) > 0:
ff = append(ff, fromExternalFile(ctx, a.imgFiles, pattern))
}
}
return ff
}

func loadAlbumFoldersPaths(ctx context.Context, ds model.DataStore, albums ...model.Album) ([]string, []string, *time.Time, error) {
var folderIDs []string
for _, album := range albums {
folderIDs = append(folderIDs, album.FolderIDs...)
}
folders, err := ds.Folder(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"folder.id": folderIDs, "missing": false}})
if err != nil {
return nil, nil, nil, err
}
var paths []string
var imgFiles []string
var updatedAt time.Time
for _, f := range folders {
path := f.AbsolutePath()
paths = append(paths, path)
if f.ImagesUpdatedAt.After(updatedAt) {
updatedAt = f.ImagesUpdatedAt
}
for _, img := range f.ImageFiles {
imgFiles = append(imgFiles, filepath.Join(path, img))
}
}

// Sort image files to ensure consistent selection of cover art
// This prioritizes files from lower-numbered disc folders by sorting the paths
slices.Sort(imgFiles)

return paths, imgFiles, &updatedAt, nil
}
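loadAlbumFoldersPaths sorts the collected image paths so the same cover is picked on every scan. Note that the sort is plain lexicographic, so `Disc10` sorts before `Disc2`, which is exactly what the new reader_album_test.go below asserts. A small sketch of that ordering:

```go
package main

import (
	"fmt"
	"slices"
)

func main() {
	imgFiles := []string{
		"Artist/Album/Disc2/cover.jpg",
		"Artist/Album/Disc1/cover.jpg",
		"Artist/Album/Disc10/cover.jpg",
		"Artist/Album/Disc1/back.jpg",
	}
	// Lexicographic sort: Disc1 < Disc10 < Disc2.
	slices.Sort(imgFiles)
	for _, f := range imgFiles {
		fmt.Println(f)
	}
}
```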
core/artwork/reader_album_test.go (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
package artwork
|
||||
|
||||
import (
|
||||
"context"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Album Artwork Reader", func() {
|
||||
Describe("loadAlbumFoldersPaths", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
ds *fakeDataStore
|
||||
repo *fakeFolderRepo
|
||||
album model.Album
|
||||
now time.Time
|
||||
expectedAt time.Time
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = context.Background()
|
||||
now = time.Now().Truncate(time.Second)
|
||||
expectedAt = now.Add(5 * time.Minute)
|
||||
|
||||
// Set up the test folders with image files
|
||||
repo = &fakeFolderRepo{
|
||||
result: []model.Folder{
|
||||
{
|
||||
Path: "Artist/Album/Disc1",
|
||||
ImagesUpdatedAt: expectedAt,
|
||||
ImageFiles: []string{"cover.jpg", "back.jpg"},
|
||||
},
|
||||
{
|
||||
Path: "Artist/Album/Disc2",
|
||||
ImagesUpdatedAt: now,
|
||||
ImageFiles: []string{"cover.jpg"},
|
||||
},
|
||||
{
|
||||
Path: "Artist/Album/Disc10",
|
||||
ImagesUpdatedAt: now,
|
||||
ImageFiles: []string{"cover.jpg"},
|
||||
},
|
||||
},
|
||||
err: nil,
|
||||
}
|
||||
ds = &fakeDataStore{
|
||||
folderRepo: repo,
|
||||
}
|
||||
album = model.Album{
|
||||
ID: "album1",
|
||||
Name: "Album",
|
||||
FolderIDs: []string{"folder1", "folder2", "folder3"},
|
||||
}
|
||||
})
|
||||
|
||||
It("returns sorted image files", func() {
|
||||
_, imgFiles, imagesUpdatedAt, err := loadAlbumFoldersPaths(ctx, ds, album)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(*imagesUpdatedAt).To(Equal(expectedAt))
|
||||
|
||||
// Check that image files are sorted alphabetically
|
||||
Expect(imgFiles).To(HaveLen(4))
|
||||
|
||||
// The files should be sorted by full path
|
||||
Expect(imgFiles[0]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/back.jpg")))
|
||||
Expect(imgFiles[1]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/cover.jpg")))
|
||||
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/Disc10/cover.jpg")))
|
||||
Expect(imgFiles[3]).To(Equal(filepath.FromSlash("Artist/Album/Disc2/cover.jpg")))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -13,8 +13,8 @@ import (
|
||||
|
||||
"github.com/Masterminds/squirrel"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/str"
|
||||
@@ -23,42 +23,49 @@ import (
|
||||
type artistReader struct {
|
||||
cacheKey
|
||||
a *artwork
|
||||
em core.ExternalMetadata
|
||||
provider external.Provider
|
||||
artist model.Artist
|
||||
artistFolder string
|
||||
files string
|
||||
imgFiles []string
|
||||
}
|
||||
|
||||
func newArtistReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, em core.ExternalMetadata) (*artistReader, error) {
|
||||
func newArtistReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, provider external.Provider) (*artistReader, error) {
|
||||
ar, err := artwork.ds.Artist(ctx).Get(artID.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
als, err := artwork.ds.Album(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"album_artist_id": artID.ID}})
|
||||
// Only consider albums where the artist is the sole album artist.
|
||||
als, err := artwork.ds.Album(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.And{
|
||||
squirrel.Eq{"album_artist_id": artID.ID},
|
||||
squirrel.Eq{"json_array_length(participants, '$.albumartist')": 1},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
albumPaths, imgFiles, imagesUpdatedAt, err := loadAlbumFoldersPaths(ctx, artwork.ds, als...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
artistFolder, artistFolderLastUpdate, err := loadArtistFolder(ctx, artwork.ds, als, albumPaths)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a := &artistReader{
|
||||
a: artwork,
|
||||
em: em,
|
||||
artist: *ar,
|
||||
a: artwork,
|
||||
provider: provider,
|
||||
artist: *ar,
|
||||
artistFolder: artistFolder,
|
||||
imgFiles: imgFiles,
|
||||
}
|
||||
// TODO Find a way to factor in the ExternalUpdateInfoAt in the cache key. Problem is that it can
|
||||
// change _after_ retrieving from external sources, making the key invalid
|
||||
//a.cacheKey.lastUpdate = ar.ExternalInfoUpdatedAt
|
||||
var files []string
|
||||
var paths []string
|
||||
for _, al := range als {
|
||||
files = append(files, al.ImageFiles)
|
||||
paths = append(paths, splitList(al.Paths)...)
|
||||
if a.cacheKey.lastUpdate.Before(al.UpdatedAt) {
|
||||
a.cacheKey.lastUpdate = al.UpdatedAt
|
||||
}
|
||||
}
|
||||
a.files = strings.Join(files, consts.Zwsp)
|
||||
a.artistFolder = str.LongestCommonPrefix(paths)
|
||||
if !strings.HasSuffix(a.artistFolder, string(filepath.Separator)) {
|
||||
a.artistFolder, _ = filepath.Split(a.artistFolder)
|
||||
|
||||
a.cacheKey.lastUpdate = *imagesUpdatedAt
|
||||
if artistFolderLastUpdate.After(a.cacheKey.lastUpdate) {
|
||||
a.cacheKey.lastUpdate = artistFolderLastUpdate
|
||||
}
|
||||
a.cacheKey.artID = artID
|
||||
return a, nil
|
||||
@@ -89,9 +96,9 @@ func (a *artistReader) fromArtistArtPriority(ctx context.Context, priority strin
|
||||
pattern = strings.TrimSpace(pattern)
|
||||
switch {
|
||||
case pattern == "external":
|
||||
ff = append(ff, fromArtistExternalSource(ctx, a.artist, a.em))
|
||||
ff = append(ff, fromArtistExternalSource(ctx, a.artist, a.provider))
|
||||
case strings.HasPrefix(pattern, "album/"):
|
||||
ff = append(ff, fromExternalFile(ctx, a.files, strings.TrimPrefix(pattern, "album/")))
|
||||
ff = append(ff, fromExternalFile(ctx, a.imgFiles, strings.TrimPrefix(pattern, "album/")))
|
||||
default:
|
||||
ff = append(ff, fromArtistFolder(ctx, a.artistFolder, pattern))
|
||||
}
|
||||
@@ -125,3 +132,33 @@ func fromArtistFolder(ctx context.Context, artistFolder string, pattern string)
return nil, "", nil
}
}

func loadArtistFolder(ctx context.Context, ds model.DataStore, albums model.Albums, paths []string) (string, time.Time, error) {
if len(albums) == 0 {
return "", time.Time{}, nil
}
libID := albums[0].LibraryID // Just need one of the albums, as they should all be in the same Library

folderPath := str.LongestCommonPrefix(paths)
if !strings.HasSuffix(folderPath, string(filepath.Separator)) {
folderPath, _ = filepath.Split(folderPath)
}
folderPath = filepath.Dir(folderPath)

// Manipulate the path to get the folder ID
// TODO: This is a bit hacky, but it's the easiest way to get the folder ID, ATM
libPath := core.AbsolutePath(ctx, ds, libID, "")
folderID := model.FolderID(model.Library{ID: libID, Path: libPath}, folderPath)

log.Trace(ctx, "Calculating artist folder details", "folderPath", folderPath, "folderID", folderID,
"libPath", libPath, "libID", libID, "albumPaths", paths)

// Get the last update time for the folder
folders, err := ds.Folder(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"folder.id": folderID, "missing": false}})
if err != nil || len(folders) == 0 {
log.Warn(ctx, "Could not find folder for artist", "folderPath", folderPath, "id", folderID,
"libPath", libPath, "libID", libID, err)
return "", time.Time{}, err
}
return folderPath, folders[0].ImagesUpdatedAt, nil
}
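loadArtistFolder above derives the artist folder as the longest common prefix of the album paths, trimmed back to a directory boundary. A hedged sketch of that idea; the `longestCommonPrefix` helper below is a simplified stand-in for the repo's `str.LongestCommonPrefix`:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// longestCommonPrefix is a character-wise stand-in for str.LongestCommonPrefix.
func longestCommonPrefix(paths []string) string {
	if len(paths) == 0 {
		return ""
	}
	prefix := paths[0]
	for _, p := range paths[1:] {
		for !strings.HasPrefix(p, prefix) {
			prefix = prefix[:len(prefix)-1]
			if prefix == "" {
				return ""
			}
		}
	}
	return prefix
}

func main() {
	paths := []string{
		filepath.FromSlash("/music/library/artist/one"),
		filepath.FromSlash("/music/library/artist/two"),
	}
	folder := longestCommonPrefix(paths)
	// If the prefix ends mid-segment, cut it back to the containing directory.
	if !strings.HasSuffix(folder, string(filepath.Separator)) {
		folder, _ = filepath.Split(folder)
	}
	folder = filepath.Dir(folder)
	fmt.Println(folder) // /music/library/artist (with Unix-style separators)
}
```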
core/artwork/reader_artist_test.go (new file, 141 lines)
@@ -0,0 +1,141 @@
|
||||
package artwork
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("artistReader", func() {
|
||||
var _ = Describe("loadArtistFolder", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
fds *fakeDataStore
|
||||
repo *fakeFolderRepo
|
||||
albums model.Albums
|
||||
paths []string
|
||||
now time.Time
|
||||
expectedUpdTime time.Time
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = context.Background()
|
||||
DeferCleanup(stubCoreAbsolutePath())
|
||||
|
||||
now = time.Now().Truncate(time.Second)
|
||||
expectedUpdTime = now.Add(5 * time.Minute)
|
||||
repo = &fakeFolderRepo{
|
||||
result: []model.Folder{
|
||||
{
|
||||
ImagesUpdatedAt: expectedUpdTime,
|
||||
},
|
||||
},
|
||||
err: nil,
|
||||
}
|
||||
fds = &fakeDataStore{
|
||||
folderRepo: repo,
|
||||
}
|
||||
albums = model.Albums{
|
||||
{LibraryID: 1, ID: "album1", Name: "Album 1"},
|
||||
}
|
||||
})
|
||||
|
||||
When("no albums provided", func() {
|
||||
It("returns empty and zero time", func() {
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, model.Albums{}, []string{"/dummy/path"})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(BeEmpty())
|
||||
Expect(upd).To(BeZero())
|
||||
})
|
||||
})
|
||||
|
||||
When("artist has only one album", func() {
|
||||
It("returns the parent folder", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/artist/album1"),
|
||||
}
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(Equal("/music/artist"))
|
||||
Expect(upd).To(Equal(expectedUpdTime))
|
||||
})
|
||||
})
|
||||
|
||||
When("the artist have multiple albums", func() {
|
||||
It("returns the common prefix for the albums paths", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/library/artist/one"),
|
||||
filepath.FromSlash("/music/library/artist/two"),
|
||||
}
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(Equal(filepath.FromSlash("/music/library/artist")))
|
||||
Expect(upd).To(Equal(expectedUpdTime))
|
||||
})
|
||||
})
|
||||
|
||||
When("the album paths contain same prefix", func() {
|
||||
It("returns the common prefix", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/artist/album1"),
|
||||
filepath.FromSlash("/music/artist/album2"),
|
||||
}
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(Equal("/music/artist"))
|
||||
Expect(upd).To(Equal(expectedUpdTime))
|
||||
})
|
||||
})
|
||||
|
||||
When("ds.Folder().GetAll returns an error", func() {
|
||||
It("returns an error", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/artist/album1"),
|
||||
filepath.FromSlash("/music/artist/album2"),
|
||||
}
|
||||
repo.err = errors.New("fake error")
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).To(MatchError(ContainSubstring("fake error")))
|
||||
// Folder and time are empty on error.
|
||||
Expect(folder).To(BeEmpty())
|
||||
Expect(upd).To(BeZero())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
type fakeFolderRepo struct {
|
||||
model.FolderRepository
|
||||
result []model.Folder
|
||||
err error
|
||||
}
|
||||
|
||||
func (f *fakeFolderRepo) GetAll(...model.QueryOptions) ([]model.Folder, error) {
|
||||
return f.result, f.err
|
||||
}
|
||||
|
||||
type fakeDataStore struct {
|
||||
model.DataStore
|
||||
folderRepo *fakeFolderRepo
|
||||
}
|
||||
|
||||
func (fds *fakeDataStore) Folder(_ context.Context) model.FolderRepository {
|
||||
return fds.folderRepo
|
||||
}
|
||||
|
||||
func stubCoreAbsolutePath() func() {
|
||||
// Override core.AbsolutePath to return a fixed string during tests.
|
||||
original := core.AbsolutePath
|
||||
core.AbsolutePath = func(_ context.Context, ds model.DataStore, libID int, p string) string {
|
||||
return filepath.FromSlash("/music")
|
||||
}
|
||||
return func() {
|
||||
core.AbsolutePath = original
|
||||
}
|
||||
}
|
||||
@@ -54,9 +54,10 @@ func (a *mediafileArtworkReader) LastUpdated() time.Time {
|
||||
func (a *mediafileArtworkReader) Reader(ctx context.Context) (io.ReadCloser, string, error) {
|
||||
var ff []sourceFunc
|
||||
if a.mediafile.CoverArtID().Kind == model.KindMediaFileArtwork {
|
||||
path := a.mediafile.AbsolutePath()
|
||||
ff = []sourceFunc{
|
||||
fromTag(a.mediafile.Path),
|
||||
fromFFmpegTag(ctx, a.a.ffmpeg, a.mediafile.Path),
|
||||
fromTag(ctx, path),
|
||||
fromFFmpegTag(ctx, a.a.ffmpeg, path),
|
||||
}
|
||||
}
|
||||
ff = append(ff, fromAlbum(ctx, a.a, a.mediafile.AlbumCoverArtID()))
|
||||
|
||||
@@ -61,7 +61,7 @@ func (a *playlistArtworkReader) fromGeneratedTiledCover(ctx context.Context) sou
|
||||
}
|
||||
}
|
||||
|
||||
func toArtworkIDs(albumIDs []string) []model.ArtworkID {
|
||||
func toAlbumArtworkIDs(albumIDs []string) []model.ArtworkID {
|
||||
return slice.Map(albumIDs, func(id string) model.ArtworkID {
|
||||
al := model.Album{ID: id}
|
||||
return al.CoverArtID()
|
||||
@@ -75,24 +75,21 @@ func (a *playlistArtworkReader) loadTiles(ctx context.Context) ([]image.Image, e
|
||||
log.Error(ctx, "Error getting album IDs for playlist", "id", a.pl.ID, "name", a.pl.Name, err)
|
||||
return nil, err
|
||||
}
|
||||
ids := toArtworkIDs(albumIds)
|
||||
ids := toAlbumArtworkIDs(albumIds)
|
||||
|
||||
var tiles []image.Image
|
||||
for len(tiles) < 4 {
|
||||
if len(ids) == 0 {
|
||||
for _, id := range ids {
|
||||
r, _, err := fromAlbum(ctx, a.a, id)()
|
||||
if err == nil {
|
||||
tile, err := a.createTile(ctx, r)
|
||||
if err == nil {
|
||||
tiles = append(tiles, tile)
|
||||
}
|
||||
_ = r.Close()
|
||||
}
|
||||
if len(tiles) == 4 {
|
||||
break
|
||||
}
|
||||
id := ids[len(ids)-1]
|
||||
ids = ids[0 : len(ids)-1]
|
||||
r, _, err := fromAlbum(ctx, a.a, id)()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
tile, err := a.createTile(ctx, r)
|
||||
if err == nil {
|
||||
tiles = append(tiles, tile)
|
||||
}
|
||||
_ = r.Close()
|
||||
}
|
||||
switch len(tiles) {
|
||||
case 0:
|
||||
|
||||
@@ -59,25 +59,21 @@ func (a *resizedArtworkReader) Reader(ctx context.Context) (io.ReadCloser, strin
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
// Keep a copy of the original data. In case we can't resize it, send it as is
|
||||
buf := new(bytes.Buffer)
|
||||
r := io.TeeReader(orig, buf)
|
||||
defer orig.Close()
|
||||
|
||||
resized, origSize, err := resizeImage(r, a.size, a.square)
|
||||
resized, origSize, err := resizeImage(orig, a.size, a.square)
|
||||
if resized == nil {
|
||||
log.Trace(ctx, "Image smaller than requested size", "artID", a.artID, "original", origSize, "resized", a.size)
|
||||
log.Trace(ctx, "Image smaller than requested size", "artID", a.artID, "original", origSize, "resized", a.size, "square", a.square)
|
||||
} else {
|
||||
log.Trace(ctx, "Resizing artwork", "artID", a.artID, "original", origSize, "resized", a.size)
|
||||
log.Trace(ctx, "Resizing artwork", "artID", a.artID, "original", origSize, "resized", a.size, "square", a.square)
|
||||
}
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Could not resize image. Will return image as is", "artID", a.artID, "size", a.size, err)
|
||||
log.Warn(ctx, "Could not resize image. Will return image as is", "artID", a.artID, "size", a.size, "square", a.square, err)
|
||||
}
|
||||
if err != nil || resized == nil {
|
||||
// Force finish reading any remaining data
|
||||
_, _ = io.Copy(io.Discard, r)
|
||||
return io.NopCloser(buf), "", nil //nolint:nilerr
|
||||
// if we couldn't resize the image, return the original
|
||||
orig, _, err = a.a.Get(ctx, a.artID, 0, false)
|
||||
return orig, "", err
|
||||
}
|
||||
return io.NopCloser(resized), fmt.Sprintf("%s@%d", a.artID, a.size), nil
|
||||
}
|
||||
|
||||
@@ -10,13 +10,14 @@ import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/dhowden/tag"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
@@ -52,13 +53,9 @@ func (f sourceFunc) String() string {
|
||||
return name
|
||||
}
|
||||
|
||||
func splitList(s string) []string {
|
||||
return strings.Split(s, consts.Zwsp)
|
||||
}
|
||||
|
||||
func fromExternalFile(ctx context.Context, files string, pattern string) sourceFunc {
|
||||
func fromExternalFile(ctx context.Context, files []string, pattern string) sourceFunc {
|
||||
return func() (io.ReadCloser, string, error) {
|
||||
for _, file := range splitList(files) {
|
||||
for _, file := range files {
|
||||
_, name := filepath.Split(file)
|
||||
match, err := filepath.Match(pattern, strings.ToLower(name))
|
||||
if err != nil {
|
||||
@@ -79,7 +76,14 @@ func fromExternalFile(ctx context.Context, files string, pattern string) sourceF
}
}

func fromTag(path string) sourceFunc {
// These regexes are used to match the picture type in the file, in the order they are listed.
var picTypeRegexes = []*regexp.Regexp{
regexp.MustCompile(`(?i).*cover.*front.*|.*front.*cover.*`),
regexp.MustCompile(`(?i).*front.*`),
regexp.MustCompile(`(?i).*cover.*`),
}

func fromTag(ctx context.Context, path string) sourceFunc {
return func() (io.ReadCloser, string, error) {
if path == "" {
return nil, "", nil
@@ -95,10 +99,31 @@ func fromTag(path string) sourceFunc {
return nil, "", err
}

picture := m.Picture()
if picture == nil {
types := m.PictureTypes()
if len(types) == 0 {
return nil, "", fmt.Errorf("no embedded image found in %s", path)
}

var picture *tag.Picture
for _, regex := range picTypeRegexes {
for _, t := range types {
if regex.MatchString(t) {
log.Trace(ctx, "Found embedded image", "type", t, "path", path)
picture = m.Pictures(t)
break
}
}
if picture != nil {
break
}
}
if picture == nil {
log.Trace(ctx, "Could not find a front image. Getting the first one", "type", types[0], "path", path)
picture = m.Picture()
}
if picture == nil {
return nil, "", fmt.Errorf("could not load embedded image from %s", path)
}
return io.NopCloser(bytes.NewReader(picture.Data)), path, nil
}
}
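The new fromTag logic prefers front-cover picture types before falling back to the first embedded image. A stand-alone sketch of that regex-priority selection, working over a plain list of type names; the tag-library calls themselves (m.PictureTypes, m.Pictures) are left out:

```go
package main

import (
	"fmt"
	"regexp"
)

// Ordered from most to least specific, mirroring picTypeRegexes above.
var picTypePriority = []*regexp.Regexp{
	regexp.MustCompile(`(?i).*cover.*front.*|.*front.*cover.*`),
	regexp.MustCompile(`(?i).*front.*`),
	regexp.MustCompile(`(?i).*cover.*`),
}

// pickPictureType returns the best-matching type, or the first one as a fallback.
func pickPictureType(types []string) string {
	if len(types) == 0 {
		return ""
	}
	for _, re := range picTypePriority {
		for _, t := range types {
			if re.MatchString(t) {
				return t
			}
		}
	}
	return types[0]
}

func main() {
	types := []string{"Back Cover", "Artist", "Cover (front)"}
	fmt.Println(pickPictureType(types)) // Cover (front)
}
```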
@@ -112,13 +137,7 @@ func fromFFmpegTag(ctx context.Context, ffmpeg ffmpeg.FFmpeg, path string) sourc
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
defer r.Close()
|
||||
buf := new(bytes.Buffer)
|
||||
_, err = io.Copy(buf, r)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
return io.NopCloser(buf), path, nil
|
||||
return r, path, nil
|
||||
}
|
||||
}
|
||||
|
||||
@@ -138,9 +157,9 @@ func fromAlbumPlaceholder() sourceFunc {
|
||||
return r, consts.PlaceholderAlbumArt, nil
|
||||
}
|
||||
}
|
||||
func fromArtistExternalSource(ctx context.Context, ar model.Artist, em core.ExternalMetadata) sourceFunc {
|
||||
func fromArtistExternalSource(ctx context.Context, ar model.Artist, provider external.Provider) sourceFunc {
|
||||
return func() (io.ReadCloser, string, error) {
|
||||
imageUrl, err := em.ArtistImage(ctx, ar.ID)
|
||||
imageUrl, err := provider.ArtistImage(ctx, ar.ID)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
@@ -149,9 +168,9 @@ func fromArtistExternalSource(ctx context.Context, ar model.Artist, em core.Exte
|
||||
}
|
||||
}
|
||||
|
||||
func fromAlbumExternalSource(ctx context.Context, al model.Album, em core.ExternalMetadata) sourceFunc {
|
||||
func fromAlbumExternalSource(ctx context.Context, al model.Album, provider external.Provider) sourceFunc {
|
||||
return func() (io.ReadCloser, string, error) {
|
||||
imageUrl, err := em.AlbumImage(ctx, al.ID)
|
||||
imageUrl, err := provider.AlbumImage(ctx, al.ID)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
@@ -1,36 +1,47 @@
|
||||
package auth
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/go-chi/jwtauth/v5"
|
||||
"github.com/google/uuid"
|
||||
"github.com/lestrrat-go/jwx/v2/jwt"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/id"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
)
|
||||
|
||||
var (
|
||||
once sync.Once
|
||||
Secret []byte
|
||||
TokenAuth *jwtauth.JWTAuth
|
||||
)
|
||||
|
||||
// Init creates a JWTAuth object from the secret stored in the DB.
// If the secret is not found, it will create a new one and store it in the DB.
func Init(ds model.DataStore) {
once.Do(func() {
ctx := context.TODO()
log.Info("Setting Session Timeout", "value", conf.Server.SessionTimeout)
secret, err := ds.Property(context.TODO()).Get(consts.JWTSecretKey)

secret, err := ds.Property(ctx).Get(consts.JWTSecretKey)
if err != nil || secret == "" {
log.Error("No JWT secret found in DB. Setting a temp one, but please report this error", err)
secret = uuid.NewString()
log.Info(ctx, "Creating new JWT secret, used for encrypting UI sessions")
secret = createNewSecret(ctx, ds)
} else {
if secret, err = utils.Decrypt(ctx, getEncKey(), secret); err != nil {
log.Error(ctx, "Could not decrypt JWT secret, creating a new one", err)
secret = createNewSecret(ctx, ds)
}
}
Secret = []byte(secret)
TokenAuth = jwtauth.New("HS256", Secret, nil)

TokenAuth = jwtauth.New("HS256", []byte(secret), nil)
})
}
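Once Init has set up TokenAuth, the rest of the server can mint and verify session tokens through go-chi/jwtauth. A minimal usage sketch, assuming the v5 Encode/VerifyToken helpers; the secret and claims here are made up for illustration:

```go
package main

import (
	"fmt"

	"github.com/go-chi/jwtauth/v5"
)

func main() {
	// Stand-in for the secret that auth.Init loads (or creates) from the DB.
	tokenAuth := jwtauth.New("HS256", []byte("example-secret"), nil)

	// Mint a token with a couple of illustrative claims.
	_, tokenString, err := tokenAuth.Encode(map[string]any{"sub": "admin", "uid": "1"})
	if err != nil {
		panic(err)
	}

	// Verify the token string and read a claim back.
	token, err := jwtauth.VerifyToken(tokenAuth, tokenString)
	if err != nil {
		panic(err)
	}
	fmt.Println(token.Subject()) // admin
}
```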
@@ -112,3 +123,25 @@ func WithAdminUser(ctx context.Context, ds model.DataStore) context.Context {
|
||||
ctx = request.WithUsername(ctx, u.UserName)
|
||||
return request.WithUser(ctx, *u)
|
||||
}
|
||||
|
||||
func createNewSecret(ctx context.Context, ds model.DataStore) string {
|
||||
secret := id.NewRandom()
|
||||
encSecret, err := utils.Encrypt(ctx, getEncKey(), secret)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Could not encrypt JWT secret", err)
|
||||
return secret
|
||||
}
|
||||
if err := ds.Property(ctx).Put(consts.JWTSecretKey, encSecret); err != nil {
|
||||
log.Error(ctx, "Could not save JWT secret in DB", err)
|
||||
}
|
||||
return secret
|
||||
}
|
||||
|
||||
func getEncKey() []byte {
|
||||
key := cmp.Or(
|
||||
conf.Server.PasswordEncryptionKey,
|
||||
consts.DefaultEncryptionKey,
|
||||
)
|
||||
sum := sha256.Sum256([]byte(key))
|
||||
return sum[:]
|
||||
}
|
||||
|
||||
@@ -4,12 +4,12 @@ import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/go-chi/jwtauth/v5"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core/auth"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
@@ -32,8 +32,10 @@ var _ = BeforeSuite(func() {
|
||||
var _ = Describe("Auth", func() {
|
||||
|
||||
BeforeEach(func() {
|
||||
auth.Secret = []byte(testJWTSecret)
|
||||
auth.TokenAuth = jwtauth.New("HS256", auth.Secret, nil)
|
||||
ds := &tests.MockDataStore{
|
||||
MockedProperty: &tests.MockedPropertyRepo{},
|
||||
}
|
||||
auth.Init(ds)
|
||||
})
|
||||
|
||||
Describe("Validate", func() {
|
||||
|
||||
@@ -2,7 +2,9 @@ package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
)
|
||||
|
||||
@@ -13,3 +15,13 @@ func userName(ctx context.Context) string {
return user.UserName
}
}

// BFR We should only access files through the `storage.Storage` interface. This will require changing how
// TagLib and ffmpeg access files
var AbsolutePath = func(ctx context.Context, ds model.DataStore, libId int, path string) string {
libPath, err := ds.Library(ctx).GetPath(libId)
if err != nil {
return path
}
return filepath.Join(libPath, path)
}
core/common_test.go (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
)
|
||||
|
||||
var _ = Describe("common.go", func() {
|
||||
Describe("userName", func() {
|
||||
It("returns the username from context", func() {
|
||||
ctx := request.WithUser(context.Background(), model.User{UserName: "testuser"})
|
||||
Expect(userName(ctx)).To(Equal("testuser"))
|
||||
})
|
||||
|
||||
It("returns 'UNKNOWN' if no user in context", func() {
|
||||
ctx := context.Background()
|
||||
Expect(userName(ctx)).To(Equal("UNKNOWN"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("AbsolutePath", func() {
|
||||
var (
|
||||
ds *tests.MockDataStore
|
||||
libId int
|
||||
path string
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ds = &tests.MockDataStore{}
|
||||
libId = 1
|
||||
path = "music/file.mp3"
|
||||
mockLib := &tests.MockLibraryRepo{}
|
||||
mockLib.SetData(model.Libraries{{ID: libId, Path: "/library/root"}})
|
||||
ds.MockedLibrary = mockLib
|
||||
})
|
||||
|
||||
It("returns the absolute path when library exists", func() {
|
||||
ctx := context.Background()
|
||||
abs := AbsolutePath(ctx, ds, libId, path)
|
||||
Expect(abs).To(Equal("/library/root/music/file.mp3"))
|
||||
})
|
||||
|
||||
It("returns the original path if library not found", func() {
|
||||
ctx := context.Background()
|
||||
abs := AbsolutePath(ctx, ds, 999, path)
|
||||
Expect(abs).To(Equal(path))
|
||||
})
|
||||
})
|
||||
})
|
||||
core/external/extdata_helper_test.go (new file, vendored, 270 lines)
@@ -0,0 +1,270 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
// --- Shared Mock Implementations ---
|
||||
|
||||
// mockArtistRepo mocks model.ArtistRepository
|
||||
type mockArtistRepo struct {
|
||||
mock.Mock
|
||||
model.ArtistRepository
|
||||
}
|
||||
|
||||
func newMockArtistRepo() *mockArtistRepo {
|
||||
return &mockArtistRepo{}
|
||||
}
|
||||
|
||||
// SetData sets up basic Get expectations.
|
||||
func (m *mockArtistRepo) SetData(artists model.Artists) {
|
||||
for _, a := range artists {
|
||||
artistCopy := a
|
||||
m.On("Get", artistCopy.ID).Return(&artistCopy, nil)
|
||||
}
|
||||
}
|
||||
|
||||
// Get implements model.ArtistRepository.
|
||||
func (m *mockArtistRepo) Get(id string) (*model.Artist, error) {
|
||||
args := m.Called(id)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*model.Artist), args.Error(1)
|
||||
}
|
||||
|
||||
// GetAll implements model.ArtistRepository.
|
||||
func (m *mockArtistRepo) GetAll(options ...model.QueryOptions) (model.Artists, error) {
|
||||
argsSlice := make([]interface{}, len(options))
|
||||
for i, v := range options {
|
||||
argsSlice[i] = v
|
||||
}
|
||||
args := m.Called(argsSlice...)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(model.Artists), args.Error(1)
|
||||
}
|
||||
|
||||
// SetError is a helper to set up a generic error for GetAll.
|
||||
func (m *mockArtistRepo) SetError(hasError bool) {
|
||||
if hasError {
|
||||
m.On("GetAll", mock.Anything).Return(nil, errors.New("mock repo error"))
|
||||
}
|
||||
}
|
||||
|
||||
// FindByName is a helper to set up a GetAll expectation for finding by name.
|
||||
func (m *mockArtistRepo) FindByName(name string, artist model.Artist) {
|
||||
m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Filters != nil
|
||||
})).Return(model.Artists{artist}, nil).Once()
|
||||
}
|
||||
|
||||
// mockMediaFileRepo mocks model.MediaFileRepository
|
||||
type mockMediaFileRepo struct {
|
||||
mock.Mock
|
||||
model.MediaFileRepository
|
||||
}
|
||||
|
||||
func newMockMediaFileRepo() *mockMediaFileRepo {
|
||||
return &mockMediaFileRepo{}
|
||||
}
|
||||
|
||||
// SetData sets up basic Get expectations.
|
||||
func (m *mockMediaFileRepo) SetData(mediaFiles model.MediaFiles) {
|
||||
for _, mf := range mediaFiles {
|
||||
mfCopy := mf
|
||||
m.On("Get", mfCopy.ID).Return(&mfCopy, nil)
|
||||
}
|
||||
}
|
||||
|
||||
// Get implements model.MediaFileRepository.
|
||||
func (m *mockMediaFileRepo) Get(id string) (*model.MediaFile, error) {
|
||||
args := m.Called(id)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*model.MediaFile), args.Error(1)
|
||||
}
|
||||
|
||||
// GetAll implements model.MediaFileRepository.
|
||||
func (m *mockMediaFileRepo) GetAll(options ...model.QueryOptions) (model.MediaFiles, error) {
|
||||
argsSlice := make([]interface{}, len(options))
|
||||
for i, v := range options {
|
||||
argsSlice[i] = v
|
||||
}
|
||||
args := m.Called(argsSlice...)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(model.MediaFiles), args.Error(1)
|
||||
}
|
||||
|
||||
// SetError is a helper to set up a generic error for GetAll.
|
||||
func (m *mockMediaFileRepo) SetError(hasError bool) {
|
||||
if hasError {
|
||||
m.On("GetAll", mock.Anything).Return(nil, errors.New("mock repo error"))
|
||||
}
|
||||
}
|
||||
|
||||
// FindByMBID is a helper to set up a GetAll expectation for finding by MBID.
|
||||
func (m *mockMediaFileRepo) FindByMBID(mbid string, mediaFile model.MediaFile) {
|
||||
m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Filters != nil
|
||||
})).Return(model.MediaFiles{mediaFile}, nil).Once()
|
||||
}
|
||||
|
||||
// FindByArtistAndTitle is a helper to set up a GetAll expectation for finding by artist/title.
|
||||
func (m *mockMediaFileRepo) FindByArtistAndTitle(artistID string, title string, mediaFile model.MediaFile) {
|
||||
m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Filters != nil
|
||||
})).Return(model.MediaFiles{mediaFile}, nil).Once()
|
||||
}
|
||||
|
||||
// mockAlbumRepo mocks model.AlbumRepository
|
||||
type mockAlbumRepo struct {
|
||||
mock.Mock
|
||||
model.AlbumRepository
|
||||
}
|
||||
|
||||
func newMockAlbumRepo() *mockAlbumRepo {
|
||||
return &mockAlbumRepo{}
|
||||
}
|
||||
|
||||
// Get implements model.AlbumRepository.
|
||||
func (m *mockAlbumRepo) Get(id string) (*model.Album, error) {
|
||||
args := m.Called(id)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*model.Album), args.Error(1)
|
||||
}
|
||||
|
||||
// GetAll implements model.AlbumRepository.
|
||||
func (m *mockAlbumRepo) GetAll(options ...model.QueryOptions) (model.Albums, error) {
|
||||
argsSlice := make([]interface{}, len(options))
|
||||
for i, v := range options {
|
||||
argsSlice[i] = v
|
||||
}
|
||||
args := m.Called(argsSlice...)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(model.Albums), args.Error(1)
|
||||
}
|
||||
|
||||
// mockSimilarArtistAgent mocks agents implementing ArtistTopSongsRetriever and ArtistSimilarRetriever
|
||||
type mockSimilarArtistAgent struct {
|
||||
mock.Mock
|
||||
agents.Interface // Embed to satisfy methods not explicitly mocked
|
||||
}
|
||||
|
||||
func (m *mockSimilarArtistAgent) AgentName() string {
|
||||
return "mockSimilar"
|
||||
}
|
||||
|
||||
func (m *mockSimilarArtistAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
|
||||
args := m.Called(ctx, id, artistName, mbid, count)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.Song), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockSimilarArtistAgent) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
|
||||
args := m.Called(ctx, id, name, mbid, limit)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.Artist), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
// mockAgents mocks the main Agents interface used by Provider
|
||||
type mockAgents struct {
|
||||
mock.Mock // Embed testify mock
|
||||
topSongsAgent agents.ArtistTopSongsRetriever
|
||||
similarAgent agents.ArtistSimilarRetriever
|
||||
imageAgent agents.ArtistImageRetriever
|
||||
albumInfoAgent agents.AlbumInfoRetriever
|
||||
bioAgent agents.ArtistBiographyRetriever
|
||||
mbidAgent agents.ArtistMBIDRetriever
|
||||
urlAgent agents.ArtistURLRetriever
|
||||
agents.Interface
|
||||
}
|
||||
|
||||
func (m *mockAgents) AgentName() string {
|
||||
return "mockCombined"
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
|
||||
if m.similarAgent != nil {
|
||||
return m.similarAgent.GetSimilarArtists(ctx, id, name, mbid, limit)
|
||||
}
|
||||
args := m.Called(ctx, id, name, mbid, limit)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.Artist), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
|
||||
if m.topSongsAgent != nil {
|
||||
return m.topSongsAgent.GetArtistTopSongs(ctx, id, artistName, mbid, count)
|
||||
}
|
||||
args := m.Called(ctx, id, artistName, mbid, count)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.Song), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*agents.AlbumInfo, error) {
|
||||
if m.albumInfoAgent != nil {
|
||||
return m.albumInfoAgent.GetAlbumInfo(ctx, name, artist, mbid)
|
||||
}
|
||||
args := m.Called(ctx, name, artist, mbid)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).(*agents.AlbumInfo), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
|
||||
if m.mbidAgent != nil {
|
||||
return m.mbidAgent.GetArtistMBID(ctx, id, name)
|
||||
}
|
||||
args := m.Called(ctx, id, name)
|
||||
return args.String(0), args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistURL(ctx context.Context, id, name, mbid string) (string, error) {
|
||||
if m.urlAgent != nil {
|
||||
return m.urlAgent.GetArtistURL(ctx, id, name, mbid)
|
||||
}
|
||||
args := m.Called(ctx, id, name, mbid)
|
||||
return args.String(0), args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error) {
|
||||
if m.bioAgent != nil {
|
||||
return m.bioAgent.GetArtistBiography(ctx, id, name, mbid)
|
||||
}
|
||||
args := m.Called(ctx, id, name, mbid)
|
||||
return args.String(0), args.Error(1)
|
||||
}
|
||||
|
||||
func (m *mockAgents) GetArtistImages(ctx context.Context, id, name, mbid string) ([]agents.ExternalImage, error) {
|
||||
if m.imageAgent != nil {
|
||||
return m.imageAgent.GetArtistImages(ctx, id, name, mbid)
|
||||
}
|
||||
args := m.Called(ctx, id, name, mbid)
|
||||
if args.Get(0) != nil {
|
||||
return args.Get(0).([]agents.ExternalImage), args.Error(1)
|
||||
}
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
core/external/extdata_suite_test.go (new file, vendored, 17 lines)
@@ -0,0 +1,17 @@
|
||||
package external
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
func TestExternal(t *testing.T) {
|
||||
tests.Init(t, false)
|
||||
log.SetLevel(log.LevelFatal)
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "External Suite")
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package core
|
||||
package external
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -19,19 +19,19 @@ import (
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
. "github.com/navidrome/navidrome/utils/gg"
|
||||
"github.com/navidrome/navidrome/utils/random"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
"github.com/navidrome/navidrome/utils/str"
|
||||
"golang.org/x/sync/errgroup"
|
||||
)
|
||||
|
||||
const (
|
||||
unavailableArtistID = "-1"
|
||||
maxSimilarArtists = 100
|
||||
refreshDelay = 5 * time.Second
|
||||
refreshTimeout = 15 * time.Second
|
||||
refreshQueueLength = 2000
|
||||
maxSimilarArtists = 100
|
||||
refreshDelay = 5 * time.Second
|
||||
refreshTimeout = 15 * time.Second
|
||||
refreshQueueLength = 2000
|
||||
)
|
||||
|
||||
type ExternalMetadata interface {
|
||||
type Provider interface {
|
||||
UpdateAlbumInfo(ctx context.Context, id string) (*model.Album, error)
|
||||
UpdateArtistInfo(ctx context.Context, id string, count int, includeNotPresent bool) (*model.Artist, error)
|
||||
SimilarSongs(ctx context.Context, id string, count int) (model.MediaFiles, error)
|
||||
@@ -40,9 +40,9 @@ type ExternalMetadata interface {
|
||||
AlbumImage(ctx context.Context, id string) (*url.URL, error)
|
||||
}
|
||||
|
||||
type externalMetadata struct {
|
||||
type provider struct {
|
||||
ds model.DataStore
|
||||
ag *agents.Agents
|
||||
ag Agents
|
||||
artistQueue refreshQueue[auxArtist]
|
||||
albumQueue refreshQueue[auxAlbum]
|
||||
}
|
||||
@@ -57,18 +57,28 @@ type auxArtist struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
func NewExternalMetadata(ds model.DataStore, agents *agents.Agents) ExternalMetadata {
|
||||
e := &externalMetadata{ds: ds, ag: agents}
|
||||
type Agents interface {
|
||||
agents.AlbumInfoRetriever
|
||||
agents.ArtistBiographyRetriever
|
||||
agents.ArtistMBIDRetriever
|
||||
agents.ArtistImageRetriever
|
||||
agents.ArtistSimilarRetriever
|
||||
agents.ArtistTopSongsRetriever
|
||||
agents.ArtistURLRetriever
|
||||
}
|
||||
|
||||
func NewProvider(ds model.DataStore, agents Agents) Provider {
|
||||
e := &provider{ds: ds, ag: agents}
|
||||
e.artistQueue = newRefreshQueue(context.TODO(), e.populateArtistInfo)
|
||||
e.albumQueue = newRefreshQueue(context.TODO(), e.populateAlbumInfo)
|
||||
return e
|
||||
}
|
||||
|
||||
func (e *externalMetadata) getAlbum(ctx context.Context, id string) (*auxAlbum, error) {
|
||||
func (e *provider) getAlbum(ctx context.Context, id string) (auxAlbum, error) {
|
||||
var entity interface{}
|
||||
entity, err := model.GetEntityByID(ctx, e.ds, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return auxAlbum{}, err
|
||||
}
|
||||
|
||||
var album auxAlbum
|
||||
@@ -79,12 +89,13 @@ func (e *externalMetadata) getAlbum(ctx context.Context, id string) (*auxAlbum,
|
||||
case *model.MediaFile:
|
||||
return e.getAlbum(ctx, v.AlbumID)
|
||||
default:
|
||||
return nil, model.ErrNotFound
|
||||
return auxAlbum{}, model.ErrNotFound
|
||||
}
|
||||
return &album, nil
|
||||
|
||||
return album, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album, error) {
|
||||
func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album, error) {
|
||||
album, err := e.getAlbum(ctx, id)
|
||||
if err != nil {
|
||||
log.Info(ctx, "Not found", "id", id)
|
||||
@@ -94,7 +105,7 @@ func (e *externalMetadata) UpdateAlbumInfo(ctx context.Context, id string) (*mod
|
||||
updatedAt := V(album.ExternalInfoUpdatedAt)
|
||||
if updatedAt.IsZero() {
|
||||
log.Debug(ctx, "AlbumInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", album.Name)
|
||||
err = e.populateAlbumInfo(ctx, album)
|
||||
album, err = e.populateAlbumInfo(ctx, album)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -103,22 +114,22 @@ func (e *externalMetadata) UpdateAlbumInfo(ctx context.Context, id string) (*mod
|
||||
// If info is expired, trigger a populateAlbumInfo in the background
|
||||
if time.Since(updatedAt) > conf.Server.DevAlbumInfoTimeToLive {
|
||||
log.Debug("Found expired cached AlbumInfo, refreshing in the background", "updatedAt", album.ExternalInfoUpdatedAt, "name", album.Name)
|
||||
e.albumQueue.enqueue(*album)
|
||||
e.albumQueue.enqueue(&album)
|
||||
}
|
||||
|
||||
return &album.Album, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) populateAlbumInfo(ctx context.Context, album *auxAlbum) error {
|
||||
func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAlbum, error) {
|
||||
start := time.Now()
|
||||
info, err := e.ag.GetAlbumInfo(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
return nil
|
||||
return album, nil
|
||||
}
|
||||
if err != nil {
|
||||
log.Error("Error refreshing AlbumInfo", "id", album.ID, "name", album.Name, "artist", album.AlbumArtist,
|
||||
"elapsed", time.Since(start), err)
|
||||
return err
|
||||
return album, err
|
||||
}
|
||||
|
||||
album.ExternalInfoUpdatedAt = P(time.Now())
|
||||
@@ -144,7 +155,7 @@ func (e *externalMetadata) populateAlbumInfo(ctx context.Context, album *auxAlbu
|
||||
}
|
||||
}
|
||||
|
||||
err = e.ds.Album(ctx).Put(&album.Album)
|
||||
err = e.ds.Album(ctx).UpdateExternalInfo(&album.Album)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error trying to update album external information", "id", album.ID, "name", album.Name,
|
||||
"elapsed", time.Since(start), err)
|
||||
@@ -152,14 +163,14 @@ func (e *externalMetadata) populateAlbumInfo(ctx context.Context, album *auxAlbu
|
||||
log.Trace(ctx, "AlbumInfo collected", "album", album, "elapsed", time.Since(start))
|
||||
}
|
||||
|
||||
return nil
|
||||
return album, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) getArtist(ctx context.Context, id string) (*auxArtist, error) {
|
||||
func (e *provider) getArtist(ctx context.Context, id string) (auxArtist, error) {
|
||||
var entity interface{}
|
||||
entity, err := model.GetEntityByID(ctx, e.ds, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return auxArtist{}, err
|
||||
}
|
||||
|
||||
var artist auxArtist
|
||||
@@ -172,46 +183,46 @@ func (e *externalMetadata) getArtist(ctx context.Context, id string) (*auxArtist
|
||||
case *model.Album:
|
||||
return e.getArtist(ctx, v.AlbumArtistID)
|
||||
default:
|
||||
return nil, model.ErrNotFound
|
||||
return auxArtist{}, model.ErrNotFound
|
||||
}
|
||||
return &artist, nil
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) UpdateArtistInfo(ctx context.Context, id string, similarCount int, includeNotPresent bool) (*model.Artist, error) {
|
||||
func (e *provider) UpdateArtistInfo(ctx context.Context, id string, similarCount int, includeNotPresent bool) (*model.Artist, error) {
|
||||
artist, err := e.refreshArtistInfo(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = e.loadSimilar(ctx, artist, similarCount, includeNotPresent)
|
||||
err = e.loadSimilar(ctx, &artist, similarCount, includeNotPresent)
|
||||
return &artist.Artist, err
|
||||
}
|
||||
|
||||
func (e *externalMetadata) refreshArtistInfo(ctx context.Context, id string) (*auxArtist, error) {
|
||||
func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist, error) {
|
||||
artist, err := e.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return auxArtist{}, err
|
||||
}
|
||||
|
||||
// If we don't have any info, retrieves it now
|
||||
updatedAt := V(artist.ExternalInfoUpdatedAt)
|
||||
if updatedAt.IsZero() {
|
||||
log.Debug(ctx, "ArtistInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", artist.Name)
|
||||
err := e.populateArtistInfo(ctx, artist)
|
||||
artist, err = e.populateArtistInfo(ctx, artist)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return auxArtist{}, err
|
||||
}
|
||||
}
|
||||
|
||||
// If info is expired, trigger a populateArtistInfo in the background
|
||||
if time.Since(updatedAt) > conf.Server.DevArtistInfoTimeToLive {
|
||||
log.Debug("Found expired cached ArtistInfo, refreshing in the background", "updatedAt", updatedAt, "name", artist.Name)
|
||||
e.artistQueue.enqueue(*artist)
|
||||
e.artistQueue.enqueue(&artist)
|
||||
}
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) populateArtistInfo(ctx context.Context, artist *auxArtist) error {
|
||||
func (e *provider) populateArtistInfo(ctx context.Context, artist auxArtist) (auxArtist, error) {
|
||||
start := time.Now()
|
||||
// Get MBID first, if it is not yet available
|
||||
if artist.MbzArtistID == "" {
|
||||
@@ -224,35 +235,35 @@ func (e *externalMetadata) populateArtistInfo(ctx context.Context, artist *auxAr
|
||||
// Call all registered agents and collect information
|
||||
g := errgroup.Group{}
|
||||
g.SetLimit(2)
|
||||
g.Go(func() error { e.callGetImage(ctx, e.ag, artist); return nil })
|
||||
g.Go(func() error { e.callGetBiography(ctx, e.ag, artist); return nil })
|
||||
g.Go(func() error { e.callGetURL(ctx, e.ag, artist); return nil })
|
||||
g.Go(func() error { e.callGetSimilar(ctx, e.ag, artist, maxSimilarArtists, true); return nil })
|
||||
g.Go(func() error { e.callGetImage(ctx, e.ag, &artist); return nil })
|
||||
g.Go(func() error { e.callGetBiography(ctx, e.ag, &artist); return nil })
|
||||
g.Go(func() error { e.callGetURL(ctx, e.ag, &artist); return nil })
|
||||
g.Go(func() error { e.callGetSimilar(ctx, e.ag, &artist, maxSimilarArtists, true); return nil })
|
||||
_ = g.Wait()
|
||||
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "ArtistInfo update canceled", "elapsed", "id", artist.ID, "name", artist.Name, time.Since(start), ctx.Err())
|
||||
return ctx.Err()
|
||||
return artist, ctx.Err()
|
||||
}
|
||||
|
||||
artist.ExternalInfoUpdatedAt = P(time.Now())
|
||||
err := e.ds.Artist(ctx).Put(&artist.Artist)
|
||||
err := e.ds.Artist(ctx).UpdateExternalInfo(&artist.Artist)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error trying to update artist external information", "id", artist.ID, "name", artist.Name,
|
||||
"elapsed", time.Since(start), err)
|
||||
} else {
|
||||
log.Trace(ctx, "ArtistInfo collected", "artist", artist, "elapsed", time.Since(start))
|
||||
}
|
||||
return nil
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) SimilarSongs(ctx context.Context, id string, count int) (model.MediaFiles, error) {
|
||||
func (e *provider) SimilarSongs(ctx context.Context, id string, count int) (model.MediaFiles, error) {
|
||||
artist, err := e.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
e.callGetSimilar(ctx, e.ag, artist, 15, false)
|
||||
e.callGetSimilar(ctx, e.ag, &artist, 15, false)
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "SimilarSongs call canceled", ctx.Err())
|
||||
return nil, ctx.Err()
|
||||
@@ -304,13 +315,13 @@ func (e *externalMetadata) SimilarSongs(ctx context.Context, id string, count in
|
||||
return similarSongs, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) ArtistImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
func (e *provider) ArtistImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
artist, err := e.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
e.callGetImage(ctx, e.ag, artist)
|
||||
e.callGetImage(ctx, e.ag, &artist)
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "ArtistImage call canceled", ctx.Err())
|
||||
return nil, ctx.Err()
|
||||
@@ -318,24 +329,35 @@ func (e *externalMetadata) ArtistImage(ctx context.Context, id string) (*url.URL
|
||||
|
||||
imageUrl := artist.ArtistImageUrl()
|
||||
if imageUrl == "" {
|
||||
return nil, agents.ErrNotFound
|
||||
return nil, model.ErrNotFound
|
||||
}
|
||||
return url.Parse(imageUrl)
|
||||
}
|
||||
|
||||
func (e *externalMetadata) AlbumImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
func (e *provider) AlbumImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
album, err := e.getAlbum(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info, err := e.ag.GetAlbumInfo(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
if err != nil {
|
||||
switch {
|
||||
case errors.Is(err, agents.ErrNotFound):
|
||||
log.Trace(ctx, "Album not found in agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
|
||||
return nil, model.ErrNotFound
|
||||
case errors.Is(err, context.Canceled):
|
||||
log.Debug(ctx, "GetAlbumInfo call canceled", err)
|
||||
default:
|
||||
log.Warn(ctx, "Error getting album info from agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist, err)
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "AlbumImage call canceled", ctx.Err())
|
||||
return nil, ctx.Err()
|
||||
|
||||
if info == nil {
|
||||
log.Warn(ctx, "Agent returned nil info without error", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
|
||||
return nil, model.ErrNotFound
|
||||
}
|
||||
|
||||
// Return the biggest image
|
||||
@@ -346,26 +368,37 @@ func (e *externalMetadata) AlbumImage(ctx context.Context, id string) (*url.URL,
|
||||
}
|
||||
}
|
||||
if img.URL == "" {
|
||||
return nil, agents.ErrNotFound
|
||||
return nil, model.ErrNotFound
|
||||
}
|
||||
return url.Parse(img.URL)
|
||||
}
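// Sketch only (assumed, not copied from the source) of the size comparison elided by the
// hunk above: keep the agent image with the largest Size, then fall through to the
// img.URL == "" check shown in the diff.
var img agents.ExternalImage
for _, candidate := range info.Images {
	if candidate.Size > img.Size {
		img = candidate
	}
}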
|
||||
|
||||
func (e *externalMetadata) TopSongs(ctx context.Context, artistName string, count int) (model.MediaFiles, error) {
|
||||
func (e *provider) TopSongs(ctx context.Context, artistName string, count int) (model.MediaFiles, error) {
|
||||
artist, err := e.findArtistByName(ctx, artistName)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Artist not found", "name", artistName, err)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return e.getMatchingTopSongs(ctx, e.ag, artist, count)
|
||||
songs, err := e.getMatchingTopSongs(ctx, e.ag, artist, count)
|
||||
if err != nil {
|
||||
switch {
|
||||
case errors.Is(err, agents.ErrNotFound):
|
||||
log.Trace(ctx, "TopSongs not found", "name", artistName)
|
||||
return nil, model.ErrNotFound
|
||||
case errors.Is(err, context.Canceled):
|
||||
log.Debug(ctx, "TopSongs call canceled", err)
|
||||
default:
|
||||
log.Warn(ctx, "Error getting top songs from agent", "artist", artistName, err)
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
return songs, nil
|
||||
}
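// AlbumImage and TopSongs above map agent errors the same way. A hypothetical helper
// (name and placement assumed, not present in the diff) capturing the pattern:
func mapAgentError(err error) error {
	switch {
	case errors.Is(err, agents.ErrNotFound):
		return model.ErrNotFound // hide the agent-specific sentinel from callers
	case errors.Is(err, context.Canceled):
		return err // propagate cancellation untouched
	default:
		return err
	}
}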
|
||||
|
||||
func (e *externalMetadata) getMatchingTopSongs(ctx context.Context, agent agents.ArtistTopSongsRetriever, artist *auxArtist, count int) (model.MediaFiles, error) {
|
||||
func (e *provider) getMatchingTopSongs(ctx context.Context, agent agents.ArtistTopSongsRetriever, artist *auxArtist, count int) (model.MediaFiles, error) {
|
||||
songs, err := agent.GetArtistTopSongs(ctx, artist.ID, artist.Name, artist.MbzArtistID, count)
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
return nil, nil
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -386,13 +419,17 @@ func (e *externalMetadata) getMatchingTopSongs(ctx context.Context, agent agents
|
||||
} else {
|
||||
log.Debug(ctx, "Found matching top songs", "name", artist.Name, "numSongs", len(mfs))
|
||||
}
|
||||
|
||||
return mfs, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) findMatchingTrack(ctx context.Context, mbid string, artistID, title string) (*model.MediaFile, error) {
|
||||
func (e *provider) findMatchingTrack(ctx context.Context, mbid string, artistID, title string) (*model.MediaFile, error) {
|
||||
if mbid != "" {
|
||||
mfs, err := e.ds.MediaFile(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Eq{"mbz_recording_id": mbid},
|
||||
Filters: squirrel.And{
|
||||
squirrel.Eq{"mbz_recording_id": mbid},
|
||||
squirrel.Eq{"missing": false},
|
||||
},
|
||||
})
|
||||
if err == nil && len(mfs) > 0 {
|
||||
return &mfs[0], nil
|
||||
@@ -406,6 +443,7 @@ func (e *externalMetadata) findMatchingTrack(ctx context.Context, mbid string, a
|
||||
squirrel.Eq{"album_artist_id": artistID},
|
||||
},
|
||||
squirrel.Like{"order_title": str.SanitizeFieldForSorting(title)},
|
||||
squirrel.Eq{"missing": false},
|
||||
},
|
||||
Sort: "starred desc, rating desc, year asc, compilation asc ",
|
||||
Max: 1,
|
||||
@@ -416,7 +454,7 @@ func (e *externalMetadata) findMatchingTrack(ctx context.Context, mbid string, a
|
||||
return &mfs[0], nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) callGetURL(ctx context.Context, agent agents.ArtistURLRetriever, artist *auxArtist) {
|
||||
func (e *provider) callGetURL(ctx context.Context, agent agents.ArtistURLRetriever, artist *auxArtist) {
|
||||
artisURL, err := agent.GetArtistURL(ctx, artist.ID, artist.Name, artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
@@ -424,7 +462,7 @@ func (e *externalMetadata) callGetURL(ctx context.Context, agent agents.ArtistUR
|
||||
artist.ExternalUrl = artisURL
|
||||
}
|
||||
|
||||
func (e *externalMetadata) callGetBiography(ctx context.Context, agent agents.ArtistBiographyRetriever, artist *auxArtist) {
|
||||
func (e *provider) callGetBiography(ctx context.Context, agent agents.ArtistBiographyRetriever, artist *auxArtist) {
|
||||
bio, err := agent.GetArtistBiography(ctx, artist.ID, str.Clear(artist.Name), artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
@@ -434,7 +472,7 @@ func (e *externalMetadata) callGetBiography(ctx context.Context, agent agents.Ar
|
||||
artist.Biography = strings.ReplaceAll(bio, "<a ", "<a target='_blank' ")
|
||||
}
|
||||
|
||||
func (e *externalMetadata) callGetImage(ctx context.Context, agent agents.ArtistImageRetriever, artist *auxArtist) {
|
||||
func (e *provider) callGetImage(ctx context.Context, agent agents.ArtistImageRetriever, artist *auxArtist) {
|
||||
images, err := agent.GetArtistImages(ctx, artist.ID, artist.Name, artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
@@ -452,7 +490,7 @@ func (e *externalMetadata) callGetImage(ctx context.Context, agent agents.Artist
|
||||
}
|
||||
}
|
||||
|
||||
func (e *externalMetadata) callGetSimilar(ctx context.Context, agent agents.ArtistSimilarRetriever, artist *auxArtist,
|
||||
func (e *provider) callGetSimilar(ctx context.Context, agent agents.ArtistSimilarRetriever, artist *auxArtist,
|
||||
limit int, includeNotPresent bool) {
|
||||
similar, err := agent.GetSimilarArtists(ctx, artist.ID, artist.Name, artist.MbzArtistID, limit)
|
||||
if len(similar) == 0 || err != nil {
|
||||
@@ -467,24 +505,43 @@ func (e *externalMetadata) callGetSimilar(ctx context.Context, agent agents.Arti
|
||||
artist.SimilarArtists = sa
|
||||
}
|
||||
|
||||
func (e *externalMetadata) mapSimilarArtists(ctx context.Context, similar []agents.Artist, includeNotPresent bool) (model.Artists, error) {
func (e *provider) mapSimilarArtists(ctx context.Context, similar []agents.Artist, includeNotPresent bool) (model.Artists, error) {
var result model.Artists
var notPresent []string

// First select artists that are present.
artistNames := slice.Map(similar, func(artist agents.Artist) string { return artist.Name })

// Query all artists at once
clauses := slice.Map(artistNames, func(name string) squirrel.Sqlizer {
return squirrel.Like{"artist.name": name}
})
artists, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
Filters: squirrel.Or(clauses),
})
if err != nil {
return nil, err
}

// Create a map for quick lookup
artistMap := make(map[string]model.Artist)
for _, artist := range artists {
artistMap[artist.Name] = artist
}

// Process the similar artists
for _, s := range similar {
sa, err := e.findArtistByName(ctx, s.Name)
if err != nil {
if artist, found := artistMap[s.Name]; found {
result = append(result, artist)
} else {
notPresent = append(notPresent, s.Name)
continue
}
result = append(result, sa.Artist)
}

// Then fill up with non-present artists
if includeNotPresent {
for _, s := range notPresent {
sa := model.Artist{ID: unavailableArtistID, Name: s}
// Let the ID empty to indicate that the artist is not present in the DB
sa := model.Artist{Name: s}
result = append(result, sa)
}
}
@@ -492,7 +549,7 @@ func (e *externalMetadata) mapSimilarArtists(ctx context.Context, similar []agen
return result, nil
}

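// Illustrative only (not part of the diff): with two similar-artist names, the batched
// filter built above produces a single query instead of one lookup per name.
filter := squirrel.Or{
	squirrel.Like{"artist.name": "Artist A"},
	squirrel.Like{"artist.name": "Artist B"},
}
sql, args, _ := filter.ToSql()
// sql:  (artist.name LIKE ? OR artist.name LIKE ?)
// args: ["Artist A", "Artist B"]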
func (e *externalMetadata) findArtistByName(ctx context.Context, artistName string) (*auxArtist, error) {
|
||||
func (e *provider) findArtistByName(ctx context.Context, artistName string) (*auxArtist, error) {
|
||||
artists, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Like{"artist.name": artistName},
|
||||
Max: 1,
|
||||
@@ -510,10 +567,10 @@ func (e *externalMetadata) findArtistByName(ctx context.Context, artistName stri
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *externalMetadata) loadSimilar(ctx context.Context, artist *auxArtist, count int, includeNotPresent bool) error {
|
||||
func (e *provider) loadSimilar(ctx context.Context, artist *auxArtist, count int, includeNotPresent bool) error {
|
||||
var ids []string
|
||||
for _, sa := range artist.SimilarArtists {
|
||||
if sa.ID == unavailableArtistID {
|
||||
if sa.ID == "" {
|
||||
continue
|
||||
}
|
||||
ids = append(ids, sa.ID)
|
||||
@@ -544,7 +601,7 @@ func (e *externalMetadata) loadSimilar(ctx context.Context, artist *auxArtist, c
|
||||
continue
|
||||
}
|
||||
la = sa
|
||||
la.ID = unavailableArtistID
|
||||
la.ID = ""
|
||||
}
|
||||
loaded = append(loaded, la)
|
||||
}
|
||||
@@ -552,28 +609,31 @@ func (e *externalMetadata) loadSimilar(ctx context.Context, artist *auxArtist, c
|
||||
return nil
|
||||
}
|
||||
|
||||
type refreshQueue[T any] chan<- T
type refreshQueue[T any] chan<- *T

func newRefreshQueue[T any](ctx context.Context, processFn func(context.Context, *T) error) refreshQueue[T] {
queue := make(chan T, refreshQueueLength)
func newRefreshQueue[T any](ctx context.Context, processFn func(context.Context, T) (T, error)) refreshQueue[T] {
queue := make(chan *T, refreshQueueLength)
go func() {
for {
time.Sleep(refreshDelay)
ctx, cancel := context.WithTimeout(ctx, refreshTimeout)
select {
case item := <-queue:
_ = processFn(ctx, &item)
cancel()
case <-ctx.Done():
cancel()
break
return
case <-time.After(refreshDelay):
ctx, cancel := context.WithTimeout(ctx, refreshTimeout)
select {
case item := <-queue:
_, _ = processFn(ctx, *item)
cancel()
case <-ctx.Done():
cancel()
}
}
}
}()
return queue
}

func (q *refreshQueue[T]) enqueue(item T) {
func (q *refreshQueue[T]) enqueue(item *T) {
select {
case *q <- item:
default: // It is ok to miss a refresh request
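// Illustrative usage of the reworked queue (assumed to sit alongside the unexported
// helpers above): items are enqueued as pointers so the channel does not copy the whole
// aux struct, while processFn still receives and returns a value.
queue := newRefreshQueue(context.Background(), func(ctx context.Context, a auxAlbum) (auxAlbum, error) {
	// refresh external info for a and return the updated copy
	return a, nil
})
queue.enqueue(&auxAlbum{})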
core/external/provider_albumimage_test.go (new file, 303 lines, vendored)
@@ -0,0 +1,303 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/url"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
. "github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
var _ = Describe("Provider - AlbumImage", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var provider Provider
|
||||
var mockArtistRepo *mockArtistRepo
|
||||
var mockAlbumRepo *mockAlbumRepo
|
||||
var mockMediaFileRepo *mockMediaFileRepo
|
||||
var mockAlbumAgent *mockAlbumInfoAgent
|
||||
var agentsCombined *mockAgents
|
||||
var ctx context.Context
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = GinkgoT().Context()
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.Agents = "mockAlbum" // Configure mock agent
|
||||
|
||||
mockArtistRepo = newMockArtistRepo()
|
||||
mockAlbumRepo = newMockAlbumRepo()
|
||||
mockMediaFileRepo = newMockMediaFileRepo()
|
||||
|
||||
ds = &tests.MockDataStore{
|
||||
MockedArtist: mockArtistRepo,
|
||||
MockedAlbum: mockAlbumRepo,
|
||||
MockedMediaFile: mockMediaFileRepo,
|
||||
}
|
||||
|
||||
mockAlbumAgent = newMockAlbumInfoAgent()
|
||||
|
||||
agentsCombined = &mockAgents{
|
||||
albumInfoAgent: mockAlbumAgent,
|
||||
}
|
||||
|
||||
provider = NewProvider(ds, agentsCombined)
|
||||
|
||||
// Default mocks
|
||||
// Mocks for GetEntityByID sequence (initial failed lookups)
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockArtistRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
|
||||
// Default mock for non-existent entities - Use Maybe() for flexibility
|
||||
mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
})
|
||||
|
||||
It("returns the largest image URL when successful", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
|
||||
Return(&agents.AlbumInfo{
|
||||
Images: []agents.ExternalImage{
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
},
|
||||
}, nil).Once()
|
||||
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1") // From GetEntityByID
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockArtistRepo.AssertNotCalled(GinkgoT(), "Get", "artist-1") // Artist lookup no longer happens in getAlbum
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "") // Expect empty artist name
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the album is not found in the DB", func() {
|
||||
// Arrange: Explicitly expect the full GetEntityByID sequence for "not-found"
|
||||
mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "not-found")
|
||||
|
||||
Expect(err).To(MatchError("data not found"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockAlbumAgent.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("returns the agent error if the agent fails", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
|
||||
agentErr := errors.New("agent failure")
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").Return(nil, agentErr).Once() // Expect empty artist
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).To(MatchError("agent failure"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockArtistRepo.AssertNotCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "") // Expect empty artist
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the agent returns ErrNotFound", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").Return(nil, agents.ErrNotFound).Once() // Expect empty artist
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).To(MatchError("data not found"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "") // Expect empty artist
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the agent returns no images", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
|
||||
Return(&agents.AlbumInfo{Images: []agents.ExternalImage{}}, nil).Once() // Expect empty artist
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).To(MatchError("data not found"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "") // Expect empty artist
|
||||
})
|
||||
|
||||
It("returns context error if context is canceled", func() {
|
||||
// Arrange
|
||||
cctx, cancelCtx := context.WithCancel(ctx)
|
||||
// Mock the necessary DB calls *before* canceling the context
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
// Expect the agent call even if context is cancelled, returning the context error
|
||||
mockAlbumAgent.On("GetAlbumInfo", cctx, "Album One", "", "").Return(nil, context.Canceled).Once()
|
||||
// Cancel the context *before* calling the function under test
|
||||
cancelCtx()
|
||||
|
||||
imgURL, err := provider.AlbumImage(cctx, "album-1")
|
||||
|
||||
Expect(err).To(MatchError("context canceled"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
// Agent should now be called, verify this expectation
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", cctx, "Album One", "", "")
|
||||
})
|
||||
|
||||
It("derives album ID from MediaFile ID", func() {
|
||||
// Arrange: Mock full GetEntityByID for "mf-1" and recursive "album-1"
|
||||
mockArtistRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "mf-1").Return(&model.MediaFile{ID: "mf-1", Title: "Track One", ArtistID: "artist-1", AlbumID: "album-1"}, nil).Once()
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
|
||||
Return(&agents.AlbumInfo{
|
||||
Images: []agents.ExternalImage{
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
},
|
||||
}, nil).Once()
|
||||
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
imgURL, err := provider.AlbumImage(ctx, "mf-1")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockArtistRepo.AssertNotCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "")
|
||||
})
|
||||
|
||||
It("handles different image orders from agent", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
|
||||
Return(&agents.AlbumInfo{
|
||||
Images: []agents.ExternalImage{
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
},
|
||||
}, nil).Once()
|
||||
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL)) // Should still pick the largest
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "")
|
||||
})
|
||||
|
||||
It("handles agent returning only one image", func() {
|
||||
// Arrange
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
|
||||
// Explicitly mock agent call for this test
|
||||
mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
|
||||
Return(&agents.AlbumInfo{
|
||||
Images: []agents.ExternalImage{
|
||||
{URL: "http://example.com/single.jpg", Size: 700},
|
||||
},
|
||||
}, nil).Once()
|
||||
|
||||
expectedURL, _ := url.Parse("http://example.com/single.jpg")
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-1")
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if deriving album ID fails", func() {
|
||||
// Arrange: Mock full GetEntityByID for "mf-no-album" and recursive "not-found"
|
||||
mockArtistRepo.On("Get", "mf-no-album").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-no-album").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "mf-no-album").Return(&model.MediaFile{ID: "mf-no-album", Title: "Track No Album", ArtistID: "artist-1", AlbumID: "not-found"}, nil).Once()
|
||||
mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
|
||||
|
||||
imgURL, err := provider.AlbumImage(ctx, "mf-no-album")
|
||||
|
||||
Expect(err).To(MatchError("data not found"))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-no-album")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-no-album")
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-no-album")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockAlbumAgent.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
})
|
||||
|
||||
// mockAlbumInfoAgent implementation
|
||||
type mockAlbumInfoAgent struct {
|
||||
mock.Mock
|
||||
agents.AlbumInfoRetriever // Embed interface
|
||||
}
|
||||
|
||||
func newMockAlbumInfoAgent() *mockAlbumInfoAgent {
|
||||
m := new(mockAlbumInfoAgent)
|
||||
m.On("AgentName").Return("mockAlbum").Maybe()
|
||||
return m
|
||||
}
|
||||
|
||||
func (m *mockAlbumInfoAgent) AgentName() string {
|
||||
args := m.Called()
|
||||
return args.String(0)
|
||||
}
|
||||
|
||||
func (m *mockAlbumInfoAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*agents.AlbumInfo, error) {
|
||||
args := m.Called(ctx, name, artist, mbid)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*agents.AlbumInfo), args.Error(1)
|
||||
}
|
||||
|
||||
// Ensure mockAgent implements the interface
|
||||
var _ agents.AlbumInfoRetriever = (*mockAlbumInfoAgent)(nil)
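// The mockAgents composite wired in the tests above is defined in a shared helper file
// that is not part of this excerpt. A hypothetical delegation for the album-info path,
// consistent with how the albumInfoAgent field is used here, could look like this (the
// remaining Agents methods would follow the same pattern):
func (a *mockAgents) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*agents.AlbumInfo, error) {
	if a.albumInfoAgent == nil {
		return nil, agents.ErrNotFound
	}
	return a.albumInfoAgent.GetAlbumInfo(ctx, name, artist, mbid)
}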
core/external/provider_artistimage_test.go (new file, 301 lines, vendored)
@@ -0,0 +1,301 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/url"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
. "github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
var _ = Describe("Provider - ArtistImage", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var provider Provider
|
||||
var mockArtistRepo *mockArtistRepo
|
||||
var mockAlbumRepo *mockAlbumRepo
|
||||
var mockMediaFileRepo *mockMediaFileRepo
|
||||
var mockImageAgent *mockArtistImageAgent
|
||||
var agentsCombined *mockAgents
|
||||
var ctx context.Context
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.Agents = "mockImage" // Configure only the mock agent
|
||||
ctx = GinkgoT().Context()
|
||||
|
||||
mockArtistRepo = newMockArtistRepo()
|
||||
mockAlbumRepo = newMockAlbumRepo()
|
||||
mockMediaFileRepo = newMockMediaFileRepo()
|
||||
|
||||
ds = &tests.MockDataStore{
|
||||
MockedArtist: mockArtistRepo,
|
||||
MockedAlbum: mockAlbumRepo,
|
||||
MockedMediaFile: mockMediaFileRepo,
|
||||
}
|
||||
|
||||
mockImageAgent = newMockArtistImageAgent()
|
||||
|
||||
// Use the mockAgents from helper, setting the specific agent
|
||||
agentsCombined = &mockAgents{
|
||||
imageAgent: mockImageAgent,
|
||||
}
|
||||
|
||||
provider = NewProvider(ds, agentsCombined)
|
||||
|
||||
// Default mocks for successful Get calls
|
||||
mockArtistRepo.On("Get", "artist-1").Return(&model.Artist{ID: "artist-1", Name: "Artist One"}, nil).Maybe()
|
||||
mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Maybe()
|
||||
mockMediaFileRepo.On("Get", "mf-1").Return(&model.MediaFile{ID: "mf-1", Title: "Track One", ArtistID: "artist-1"}, nil).Maybe()
|
||||
// Default mock for non-existent entities
|
||||
mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
|
||||
// Default successful image agent response
|
||||
mockImageAgent.On("GetArtistImages", mock.Anything, "artist-1", "Artist One", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
}, nil).Maybe()
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
mockArtistRepo.AssertExpectations(GinkgoT())
|
||||
mockAlbumRepo.AssertExpectations(GinkgoT())
|
||||
mockMediaFileRepo.AssertExpectations(GinkgoT())
|
||||
mockImageAgent.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns the largest image URL when successful", func() {
|
||||
// Arrange
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the artist is not found in the DB", func() {
|
||||
// Arrange
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "not-found")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockImageAgent.AssertNotCalled(GinkgoT(), "GetArtistImages", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("returns the agent error if the agent fails", func() {
|
||||
// Arrange
|
||||
agentErr := errors.New("agent failure")
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").Return(nil, agentErr).Once()
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound)) // Corrected Expectation: The provider maps agent errors (other than canceled) to ErrNotFound if no image was found/populated
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the agent returns ErrNotFound", func() {
|
||||
// Arrange
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").Return(nil, agents.ErrNotFound).Once()
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if the agent returns no images", func() {
|
||||
// Arrange
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").Return([]agents.ExternalImage{}, nil).Once()
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound)) // Implementation maps empty result to ErrNotFound
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns context error if context is canceled before agent call", func() {
|
||||
// Arrange
|
||||
cctx, cancelCtx := context.WithCancel(context.Background())
|
||||
mockArtistRepo.Mock = mock.Mock{} // Reset default expectation for artist repo as well
|
||||
mockArtistRepo.On("Get", "artist-1").Return(&model.Artist{ID: "artist-1", Name: "Artist One"}, nil).Run(func(args mock.Arguments) {
|
||||
cancelCtx() // Cancel context *during* the DB call simulation
|
||||
}).Once()
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(cctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(context.Canceled))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
})
|
||||
|
||||
It("derives artist ID from MediaFile ID", func() {
|
||||
// Arrange: Add mocks for the initial GetEntityByID lookups
|
||||
mockArtistRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
|
||||
// Default mocks for MediaFileRepo.Get("mf-1") and ArtistRepo.Get("artist-1") handle the rest
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "mf-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-1") // GetEntityByID sequence
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-1") // GetEntityByID sequence
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1") // Should be called after getting MF
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("derives artist ID from Album ID", func() {
|
||||
// Arrange: Add mock for the initial GetEntityByID lookup
|
||||
mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
|
||||
// Default mocks for AlbumRepo.Get("album-1") and ArtistRepo.Get("artist-1") handle the rest
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "album-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1") // GetEntityByID sequence
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1") // Should be called after getting Album
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("returns ErrNotFound if derived artist is not found", func() {
|
||||
// Arrange
|
||||
// Add mocks for the initial GetEntityByID lookups
|
||||
mockArtistRepo.On("Get", "mf-bad-artist").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "mf-bad-artist").Return(nil, model.ErrNotFound).Once()
|
||||
mockMediaFileRepo.On("Get", "mf-bad-artist").Return(&model.MediaFile{ID: "mf-bad-artist", ArtistID: "not-found"}, nil).Once()
|
||||
// Add expectation for the recursive GetEntityByID call for the MediaFileRepo
|
||||
mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
|
||||
// The default mocks for ArtistRepo/AlbumRepo handle the final "not-found" lookups
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "mf-bad-artist")
|
||||
|
||||
// Assert
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(imgURL).To(BeNil())
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-bad-artist") // GetEntityByID sequence
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-bad-artist") // GetEntityByID sequence
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-bad-artist")
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockImageAgent.AssertNotCalled(GinkgoT(), "GetArtistImages", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("handles different image orders from agent", func() {
|
||||
// Arrange
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/small.jpg", Size: 200},
|
||||
{URL: "http://example.com/large.jpg", Size: 1000},
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
}, nil).Once()
|
||||
expectedURL, _ := url.Parse("http://example.com/large.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL)) // Still picks the largest
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
|
||||
It("handles agent returning only one image", func() {
|
||||
// Arrange
|
||||
mockImageAgent.Mock = mock.Mock{} // Reset default expectation
|
||||
mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/medium.jpg", Size: 500},
|
||||
}, nil).Once()
|
||||
expectedURL, _ := url.Parse("http://example.com/medium.jpg")
|
||||
|
||||
// Act
|
||||
imgURL, err := provider.ArtistImage(ctx, "artist-1")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
|
||||
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
|
||||
})
|
||||
})
|
||||
|
||||
// mockArtistImageAgent implementation using testify/mock
|
||||
// This remains local as it's specific to testing the ArtistImage functionality
|
||||
type mockArtistImageAgent struct {
|
||||
mock.Mock
|
||||
agents.ArtistImageRetriever // Embed interface
|
||||
}
|
||||
|
||||
// Constructor for the mock agent
|
||||
func newMockArtistImageAgent() *mockArtistImageAgent {
|
||||
mock := new(mockArtistImageAgent)
|
||||
// Set default AgentName if needed, although usually called via mockAgents
|
||||
mock.On("AgentName").Return("mockImage").Maybe()
|
||||
return mock
|
||||
}
|
||||
|
||||
func (m *mockArtistImageAgent) AgentName() string {
|
||||
args := m.Called()
|
||||
return args.String(0)
|
||||
}
|
||||
|
||||
func (m *mockArtistImageAgent) GetArtistImages(ctx context.Context, id, artistName, mbid string) ([]agents.ExternalImage, error) {
|
||||
args := m.Called(ctx, id, artistName, mbid)
|
||||
// Need careful type assertion for potentially nil slice
|
||||
var res []agents.ExternalImage
|
||||
if args.Get(0) != nil {
|
||||
res = args.Get(0).([]agents.ExternalImage)
|
||||
}
|
||||
return res, args.Error(1)
|
||||
}
|
||||
|
||||
// Ensure mockAgent implements the interface
|
||||
var _ agents.ArtistImageRetriever = (*mockArtistImageAgent)(nil)
core/external/provider_similarsongs_test.go (new file, 198 lines, vendored)
@@ -0,0 +1,198 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
. "github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
var _ = Describe("Provider - SimilarSongs", func() {
|
||||
var ds model.DataStore
|
||||
var provider Provider
|
||||
var mockAgent *mockSimilarArtistAgent
|
||||
var mockTopAgent agents.ArtistTopSongsRetriever
|
||||
var mockSimilarAgent agents.ArtistSimilarRetriever
|
||||
var agentsCombined Agents
|
||||
var artistRepo *mockArtistRepo
|
||||
var mediaFileRepo *mockMediaFileRepo
|
||||
var ctx context.Context
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = GinkgoT().Context()
|
||||
|
||||
artistRepo = newMockArtistRepo()
|
||||
mediaFileRepo = newMockMediaFileRepo()
|
||||
|
||||
ds = &tests.MockDataStore{
|
||||
MockedArtist: artistRepo,
|
||||
MockedMediaFile: mediaFileRepo,
|
||||
}
|
||||
|
||||
mockAgent = &mockSimilarArtistAgent{}
|
||||
mockTopAgent = mockAgent
|
||||
mockSimilarAgent = mockAgent
|
||||
|
||||
agentsCombined = &mockAgents{
|
||||
topSongsAgent: mockTopAgent,
|
||||
similarAgent: mockSimilarAgent,
|
||||
}
|
||||
|
||||
provider = NewProvider(ds, agentsCombined)
|
||||
})
|
||||
|
||||
It("returns similar songs from main artist and similar artists", func() {
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
|
||||
similarArtist := model.Artist{ID: "artist-3", Name: "Similar Artist"}
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1"}
|
||||
song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1"}
|
||||
song3 := model.MediaFile{ID: "song-3", Title: "Song Three", ArtistID: "artist-3"}
|
||||
|
||||
artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
|
||||
artistRepo.On("Get", "artist-3").Return(&similarArtist, nil).Maybe()
|
||||
|
||||
artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Max == 1 && opt.Filters != nil
|
||||
})).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
similarAgentsResp := []agents.Artist{
|
||||
{Name: "Similar Artist", MBID: "similar-mbid"},
|
||||
}
|
||||
mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
|
||||
Return(similarAgentsResp, nil).Once()
|
||||
|
||||
artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Max == 0 && opt.Filters != nil
|
||||
})).Return(model.Artists{similarArtist}, nil).Once()
|
||||
|
||||
mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
|
||||
Return([]agents.Song{
|
||||
{Name: "Song One", MBID: "mbid-1"},
|
||||
{Name: "Song Two", MBID: "mbid-2"},
|
||||
}, nil).Once()
|
||||
|
||||
mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-3", "Similar Artist", "", mock.Anything).
|
||||
Return([]agents.Song{
|
||||
{Name: "Song Three", MBID: "mbid-3"},
|
||||
}, nil).Once()
|
||||
|
||||
mediaFileRepo.FindByMBID("mbid-1", song1)
|
||||
mediaFileRepo.FindByMBID("mbid-2", song2)
|
||||
mediaFileRepo.FindByMBID("mbid-3", song3)
|
||||
|
||||
songs, err := provider.SimilarSongs(ctx, "artist-1", 3)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(3))
|
||||
for _, song := range songs {
|
||||
Expect(song.ID).To(BeElementOf("song-1", "song-2", "song-3"))
|
||||
}
|
||||
})
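// FindByMBID is a convenience helper on the mock repository (defined in the shared test
// helper file, not shown here). A hypothetical shape consistent with its use above would
// register a one-shot GetAll expectation returning the given track:
func (m *mockMediaFileRepo) FindByMBID(mbid string, mf model.MediaFile) {
	m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
		return opt.Filters != nil // the real helper presumably matches mbz_recording_id == mbid
	})).Return(model.MediaFiles{mf}, nil).Once()
}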
|
||||
|
||||
It("returns ErrNotFound when artist is not found", func() {
|
||||
artistRepo.On("Get", "artist-unknown-artist").Return(nil, model.ErrNotFound)
|
||||
mediaFileRepo.On("Get", "artist-unknown-artist").Return(nil, model.ErrNotFound)
|
||||
|
||||
artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Max == 1 && opt.Filters != nil
|
||||
})).Return(model.Artists{}, nil).Maybe()
|
||||
|
||||
songs, err := provider.SimilarSongs(ctx, "artist-unknown-artist", 5)
|
||||
|
||||
Expect(err).To(Equal(model.ErrNotFound))
|
||||
Expect(songs).To(BeNil())
|
||||
})
|
||||
|
||||
It("returns songs from main artist when GetSimilarArtists returns error", func() {
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1"}
|
||||
|
||||
artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
|
||||
artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Max == 1 && opt.Filters != nil
|
||||
})).Return(model.Artists{artist1}, nil).Maybe()
|
||||
|
||||
mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
|
||||
Return(nil, errors.New("error getting similar artists")).Once()
|
||||
|
||||
artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Max == 0 && opt.Filters != nil
|
||||
})).Return(model.Artists{}, nil).Once()
|
||||
|
||||
mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
|
||||
Return([]agents.Song{
|
||||
{Name: "Song One", MBID: "mbid-1"},
|
||||
}, nil).Once()
|
||||
|
||||
mediaFileRepo.FindByMBID("mbid-1", song1)
|
||||
|
||||
songs, err := provider.SimilarSongs(ctx, "artist-1", 5)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(1))
|
||||
Expect(songs[0].ID).To(Equal("song-1"))
|
||||
})
|
||||
|
||||
It("returns empty list when GetArtistTopSongs returns error", func() {
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
|
||||
|
||||
artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
|
||||
artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Max == 1 && opt.Filters != nil
|
||||
})).Return(model.Artists{artist1}, nil).Maybe()
|
||||
|
||||
mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
|
||||
Return([]agents.Artist{}, nil).Once()
|
||||
|
||||
artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Max == 0 && opt.Filters != nil
|
||||
})).Return(model.Artists{}, nil).Once()
|
||||
|
||||
mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
|
||||
Return(nil, errors.New("error getting top songs")).Once()
|
||||
|
||||
songs, err := provider.SimilarSongs(ctx, "artist-1", 5)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("respects count parameter", func() {
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1"}
|
||||
song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1"}
|
||||
|
||||
artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
|
||||
artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Max == 1 && opt.Filters != nil
|
||||
})).Return(model.Artists{artist1}, nil).Maybe()
|
||||
|
||||
mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
|
||||
Return([]agents.Artist{}, nil).Once()
|
||||
|
||||
artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Max == 0 && opt.Filters != nil
|
||||
})).Return(model.Artists{}, nil).Once()
|
||||
|
||||
mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
|
||||
Return([]agents.Song{
|
||||
{Name: "Song One", MBID: "mbid-1"},
|
||||
{Name: "Song Two", MBID: "mbid-2"},
|
||||
}, nil).Once()
|
||||
|
||||
mediaFileRepo.FindByMBID("mbid-1", song1)
|
||||
mediaFileRepo.FindByMBID("mbid-2", song2)
|
||||
|
||||
songs, err := provider.SimilarSongs(ctx, "artist-1", 1)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(1))
|
||||
Expect(songs[0].ID).To(BeElementOf("song-1", "song-2"))
|
||||
})
|
||||
})
core/external/provider_topsongs_test.go (new file, 193 lines, vendored)
@@ -0,0 +1,193 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
_ "github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
_ "github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
_ "github.com/navidrome/navidrome/core/agents/spotify"
|
||||
. "github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
var _ = Describe("Provider - TopSongs", func() {
|
||||
var (
|
||||
p Provider
|
||||
artistRepo *mockArtistRepo // From provider_helper_test.go
|
||||
mediaFileRepo *mockMediaFileRepo // From provider_helper_test.go
|
||||
ag *mockAgents // Consolidated mock from export_test.go
|
||||
ctx context.Context
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = GinkgoT().Context()
|
||||
|
||||
artistRepo = newMockArtistRepo() // Use helper mock
|
||||
mediaFileRepo = newMockMediaFileRepo() // Use helper mock
|
||||
|
||||
// Configure tests.MockDataStore to use the testify/mock-based repos
|
||||
ds := &tests.MockDataStore{
|
||||
MockedArtist: artistRepo,
|
||||
MockedMediaFile: mediaFileRepo,
|
||||
}
|
||||
|
||||
ag = new(mockAgents)
|
||||
|
||||
p = NewProvider(ds, ag)
|
||||
})
|
||||
|
||||
BeforeEach(func() {
|
||||
// Setup expectations in individual tests
|
||||
})
|
||||
|
||||
It("returns top songs for a known artist", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent response
|
||||
agentSongs := []agents.Song{
|
||||
{Name: "Song One", MBID: "mbid-song-1"},
|
||||
{Name: "Song Two", MBID: "mbid-song-2"},
|
||||
}
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 2).Return(agentSongs, nil).Once()
|
||||
|
||||
// Mock finding matching tracks
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
|
||||
song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1", MbzRecordingID: "mbid-song-2"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song2}, nil).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 2)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(2))
|
||||
Expect(songs[0].ID).To(Equal("song-1"))
|
||||
Expect(songs[1].ID).To(Equal("song-2"))
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
mediaFileRepo.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns nil for an unknown artist", func() {
|
||||
// Mock artist not found
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{}, nil).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Unknown Artist", 5)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred()) // TopSongs returns nil error if artist not found
|
||||
Expect(songs).To(BeNil())
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistTopSongs", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("returns error when the agent returns an error", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent error
|
||||
agentErr := errors.New("agent error")
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 5).Return(nil, agentErr).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 5)
|
||||
|
||||
Expect(err).To(MatchError(agentErr))
|
||||
Expect(songs).To(BeNil())
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns ErrNotFound when the agent returns ErrNotFound", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent ErrNotFound
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 5).Return(nil, agents.ErrNotFound).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 5)
|
||||
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(songs).To(BeNil())
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns fewer songs if count is less than available top songs", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent response (only need 1 for the test)
|
||||
agentSongs := []agents.Song{{Name: "Song One", MBID: "mbid-song-1"}}
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 1).Return(agentSongs, nil).Once()
|
||||
|
||||
// Mock finding matching track
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 1)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(1))
|
||||
Expect(songs[0].ID).To(Equal("song-1"))
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
mediaFileRepo.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns fewer songs if fewer matching tracks are found", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Mock agent response
|
||||
agentSongs := []agents.Song{
|
||||
{Name: "Song One", MBID: "mbid-song-1"},
|
||||
{Name: "Song Two", MBID: "mbid-song-2"},
|
||||
}
|
||||
ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 2).Return(agentSongs, nil).Once()
|
||||
|
||||
// Mock finding matching tracks (only find song 1)
|
||||
song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{}, nil).Once() // For mbid-song-2 (fails)
|
||||
mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{}, nil).Once() // For title fallback (fails)
|
||||
|
||||
songs, err := p.TopSongs(ctx, "Artist One", 2)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(songs).To(HaveLen(1))
|
||||
Expect(songs[0].ID).To(Equal("song-1"))
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
mediaFileRepo.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns error when context is canceled during agent call", func() {
|
||||
// Mock finding the artist
|
||||
artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
|
||||
artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()
|
||||
|
||||
// Setup context that will be canceled
|
||||
canceledCtx, cancel := context.WithCancel(ctx)
|
||||
|
||||
// Mock agent call to return context canceled error
|
||||
ag.On("GetArtistTopSongs", canceledCtx, "artist-1", "Artist One", "mbid-artist-1", 5).Return(nil, context.Canceled).Once()
|
||||
|
||||
cancel() // Cancel the context before calling
|
||||
songs, err := p.TopSongs(canceledCtx, "Artist One", 5)
|
||||
|
||||
Expect(err).To(MatchError(context.Canceled))
|
||||
Expect(songs).To(BeNil())
|
||||
artistRepo.AssertExpectations(GinkgoT())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
})
|
||||
170  core/external/provider_updatealbuminfo_test.go  vendored  Normal file
@@ -0,0 +1,170 @@
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
"github.com/navidrome/navidrome/utils/gg"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetLevel(log.LevelDebug)
|
||||
}
|
||||
|
||||
var _ = Describe("Provider - UpdateAlbumInfo", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
p external.Provider
|
||||
ds *tests.MockDataStore
|
||||
ag *mockAgents
|
||||
mockAlbumRepo *tests.MockAlbumRepo
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = GinkgoT().Context()
|
||||
ds = new(tests.MockDataStore)
|
||||
ag = new(mockAgents)
|
||||
p = external.NewProvider(ds, ag)
|
||||
mockAlbumRepo = ds.Album(ctx).(*tests.MockAlbumRepo)
|
||||
conf.Server.DevAlbumInfoTimeToLive = 1 * time.Hour
|
||||
})
|
||||
|
||||
It("returns error when album is not found", func() {
|
||||
album, err := p.UpdateAlbumInfo(ctx, "al-not-found")
|
||||
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(album).To(BeNil())
|
||||
ag.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("populates info when album exists but has no external info", func() {
|
||||
originalAlbum := &model.Album{
|
||||
ID: "al-existing",
|
||||
Name: "Test Album",
|
||||
AlbumArtist: "Test Artist",
|
||||
MbzAlbumID: "mbid-album",
|
||||
}
|
||||
mockAlbumRepo.SetData(model.Albums{*originalAlbum})
|
||||
|
||||
expectedInfo := &agents.AlbumInfo{
|
||||
URL: "http://example.com/album",
|
||||
Description: "Album Description",
|
||||
Images: []agents.ExternalImage{
|
||||
{URL: "http://example.com/large.jpg", Size: 300},
|
||||
{URL: "http://example.com/medium.jpg", Size: 200},
|
||||
{URL: "http://example.com/small.jpg", Size: 100},
|
||||
},
|
||||
}
|
||||
ag.On("GetAlbumInfo", ctx, "Test Album", "Test Artist", "mbid-album").Return(expectedInfo, nil)
|
||||
|
||||
updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-existing")
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedAlbum).NotTo(BeNil())
|
||||
Expect(updatedAlbum.ID).To(Equal("al-existing"))
|
||||
Expect(updatedAlbum.ExternalUrl).To(Equal("http://example.com/album"))
|
||||
Expect(updatedAlbum.Description).To(Equal("Album Description"))
|
||||
Expect(updatedAlbum.LargeImageUrl).To(Equal("http://example.com/large.jpg"))
|
||||
Expect(updatedAlbum.MediumImageUrl).To(Equal("http://example.com/medium.jpg"))
|
||||
Expect(updatedAlbum.SmallImageUrl).To(Equal("http://example.com/small.jpg"))
|
||||
Expect(updatedAlbum.ExternalInfoUpdatedAt).NotTo(BeNil())
|
||||
Expect(*updatedAlbum.ExternalInfoUpdatedAt).To(BeTemporally("~", time.Now(), time.Second))
|
||||
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns cached info when album exists and info is not expired", func() {
|
||||
now := time.Now()
|
||||
originalAlbum := &model.Album{
|
||||
ID: "al-cached",
|
||||
Name: "Cached Album",
|
||||
AlbumArtist: "Cached Artist",
|
||||
ExternalUrl: "http://cached.com/album",
|
||||
Description: "Cached Desc",
|
||||
LargeImageUrl: "http://cached.com/large.jpg",
|
||||
ExternalInfoUpdatedAt: gg.P(now.Add(-conf.Server.DevAlbumInfoTimeToLive / 2)),
|
||||
}
|
||||
mockAlbumRepo.SetData(model.Albums{*originalAlbum})
|
||||
|
||||
updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-cached")
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedAlbum).NotTo(BeNil())
|
||||
Expect(*updatedAlbum).To(Equal(*originalAlbum))
|
||||
|
||||
ag.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("returns cached info and triggers background refresh when info is expired", func() {
|
||||
now := time.Now()
|
||||
expiredTime := now.Add(-conf.Server.DevAlbumInfoTimeToLive * 2)
|
||||
originalAlbum := &model.Album{
|
||||
ID: "al-expired",
|
||||
Name: "Expired Album",
|
||||
AlbumArtist: "Expired Artist",
|
||||
ExternalUrl: "http://expired.com/album",
|
||||
Description: "Expired Desc",
|
||||
LargeImageUrl: "http://expired.com/large.jpg",
|
||||
ExternalInfoUpdatedAt: gg.P(expiredTime),
|
||||
}
|
||||
mockAlbumRepo.SetData(model.Albums{*originalAlbum})
|
||||
|
||||
updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-expired")
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedAlbum).NotTo(BeNil())
|
||||
Expect(*updatedAlbum).To(Equal(*originalAlbum))
|
||||
|
||||
ag.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
It("returns error when agent fails to get album info", func() {
|
||||
originalAlbum := &model.Album{
|
||||
ID: "al-agent-error",
|
||||
Name: "Agent Error Album",
|
||||
AlbumArtist: "Agent Error Artist",
|
||||
MbzAlbumID: "mbid-agent-error",
|
||||
}
|
||||
mockAlbumRepo.SetData(model.Albums{*originalAlbum})
|
||||
|
||||
expectedErr := errors.New("agent communication failed")
|
||||
ag.On("GetAlbumInfo", ctx, "Agent Error Album", "Agent Error Artist", "mbid-agent-error").Return(nil, expectedErr)
|
||||
|
||||
updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-agent-error")
|
||||
|
||||
Expect(err).To(MatchError(expectedErr))
|
||||
Expect(updatedAlbum).To(BeNil())
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns original album when agent returns ErrNotFound", func() {
|
||||
originalAlbum := &model.Album{
|
||||
ID: "al-agent-notfound",
|
||||
Name: "Agent NotFound Album",
|
||||
AlbumArtist: "Agent NotFound Artist",
|
||||
MbzAlbumID: "mbid-agent-notfound",
|
||||
}
|
||||
mockAlbumRepo.SetData(model.Albums{*originalAlbum})
|
||||
|
||||
ag.On("GetAlbumInfo", ctx, "Agent NotFound Album", "Agent NotFound Artist", "mbid-agent-notfound").Return(nil, agents.ErrNotFound)
|
||||
|
||||
updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-agent-notfound")
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedAlbum).NotTo(BeNil())
|
||||
Expect(*updatedAlbum).To(Equal(*originalAlbum))
|
||||
Expect(updatedAlbum.ExternalInfoUpdatedAt).To(BeNil())
|
||||
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
})
|
||||
229  core/external/provider_updateartistinfo_test.go  vendored  Normal file
@@ -0,0 +1,229 @@
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
"github.com/navidrome/navidrome/utils/gg"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetLevel(log.LevelDebug)
|
||||
}
|
||||
|
||||
var _ = Describe("Provider - UpdateArtistInfo", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
p external.Provider
|
||||
ds *tests.MockDataStore
|
||||
ag *mockAgents
|
||||
mockArtistRepo *tests.MockArtistRepo
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.DevArtistInfoTimeToLive = 1 * time.Hour
|
||||
ctx = GinkgoT().Context()
|
||||
ds = new(tests.MockDataStore)
|
||||
ag = new(mockAgents)
|
||||
p = external.NewProvider(ds, ag)
|
||||
mockArtistRepo = ds.Artist(ctx).(*tests.MockArtistRepo)
|
||||
})
|
||||
|
||||
It("returns error when artist is not found", func() {
|
||||
artist, err := p.UpdateArtistInfo(ctx, "ar-not-found", 10, false)
|
||||
|
||||
Expect(err).To(MatchError(model.ErrNotFound))
|
||||
Expect(artist).To(BeNil())
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistMBID")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistImages")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistBiography")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistURL")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetSimilarArtists")
|
||||
})
|
||||
|
||||
It("populates info when artist exists but has no external info", func() {
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-existing",
|
||||
Name: "Test Artist",
|
||||
}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist})
|
||||
|
||||
expectedMBID := "mbid-artist-123"
|
||||
expectedBio := "Artist Bio"
|
||||
expectedURL := "http://artist.url"
|
||||
expectedImages := []agents.ExternalImage{
|
||||
{URL: "http://large.jpg", Size: 300},
|
||||
{URL: "http://medium.jpg", Size: 200},
|
||||
{URL: "http://small.jpg", Size: 100},
|
||||
}
|
||||
rawSimilar := []agents.Artist{
|
||||
{Name: "Similar Artist 1", MBID: "mbid-similar-1"},
|
||||
{Name: "Similar Artist 2", MBID: "mbid-similar-2"},
|
||||
{Name: "Similar Artist 3", MBID: "mbid-similar-3"},
|
||||
}
|
||||
similarInDS := model.Artist{ID: "ar-similar-2", Name: "Similar Artist 2"}
|
||||
|
||||
ag.On("GetArtistMBID", ctx, "ar-existing", "Test Artist").Return(expectedMBID, nil).Once()
|
||||
ag.On("GetArtistImages", ctx, "ar-existing", "Test Artist", expectedMBID).Return(expectedImages, nil).Once()
|
||||
ag.On("GetArtistBiography", ctx, "ar-existing", "Test Artist", expectedMBID).Return(expectedBio, nil).Once()
|
||||
ag.On("GetArtistURL", ctx, "ar-existing", "Test Artist", expectedMBID).Return(expectedURL, nil).Once()
|
||||
ag.On("GetSimilarArtists", ctx, "ar-existing", "Test Artist", expectedMBID, 100).Return(rawSimilar, nil).Once()
|
||||
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-existing", 10, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
Expect(updatedArtist.ID).To(Equal("ar-existing"))
|
||||
Expect(updatedArtist.MbzArtistID).To(Equal(expectedMBID))
|
||||
Expect(updatedArtist.Biography).To(Equal("Artist Bio"))
|
||||
Expect(updatedArtist.ExternalUrl).To(Equal(expectedURL))
|
||||
Expect(updatedArtist.LargeImageUrl).To(Equal("http://large.jpg"))
|
||||
Expect(updatedArtist.MediumImageUrl).To(Equal("http://medium.jpg"))
|
||||
Expect(updatedArtist.SmallImageUrl).To(Equal("http://small.jpg"))
|
||||
Expect(updatedArtist.ExternalInfoUpdatedAt).NotTo(BeNil())
|
||||
Expect(*updatedArtist.ExternalInfoUpdatedAt).To(BeTemporally("~", time.Now(), time.Second))
|
||||
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal("ar-similar-2"))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal("Similar Artist 2"))
|
||||
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("returns cached info when artist exists and info is not expired", func() {
|
||||
now := time.Now()
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-cached",
|
||||
Name: "Cached Artist",
|
||||
MbzArtistID: "mbid-cached",
|
||||
ExternalUrl: "http://cached.url",
|
||||
Biography: "Cached Bio",
|
||||
LargeImageUrl: "http://cached_large.jpg",
|
||||
ExternalInfoUpdatedAt: gg.P(now.Add(-conf.Server.DevArtistInfoTimeToLive / 2)),
|
||||
SimilarArtists: model.Artists{
|
||||
{ID: "ar-similar-present", Name: "Similar Present"},
|
||||
{ID: "ar-similar-absent", Name: "Similar Absent"},
|
||||
},
|
||||
}
|
||||
similarInDS := model.Artist{ID: "ar-similar-present", Name: "Similar Present Updated"}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-cached", 5, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
Expect(updatedArtist.ID).To(Equal(originalArtist.ID))
|
||||
Expect(updatedArtist.Name).To(Equal(originalArtist.Name))
|
||||
Expect(updatedArtist.MbzArtistID).To(Equal(originalArtist.MbzArtistID))
|
||||
Expect(updatedArtist.ExternalUrl).To(Equal(originalArtist.ExternalUrl))
|
||||
Expect(updatedArtist.Biography).To(Equal(originalArtist.Biography))
|
||||
Expect(updatedArtist.LargeImageUrl).To(Equal(originalArtist.LargeImageUrl))
|
||||
Expect(updatedArtist.ExternalInfoUpdatedAt).To(Equal(originalArtist.ExternalInfoUpdatedAt))
|
||||
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal(similarInDS.ID))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal(similarInDS.Name))
|
||||
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistMBID")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistImages")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistBiography")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistURL")
|
||||
})
|
||||
|
||||
It("returns cached info and triggers background refresh when info is expired", func() {
|
||||
now := time.Now()
|
||||
expiredTime := now.Add(-conf.Server.DevArtistInfoTimeToLive * 2)
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-expired",
|
||||
Name: "Expired Artist",
|
||||
ExternalInfoUpdatedAt: gg.P(expiredTime),
|
||||
SimilarArtists: model.Artists{
|
||||
{ID: "ar-exp-similar", Name: "Expired Similar"},
|
||||
},
|
||||
}
|
||||
similarInDS := model.Artist{ID: "ar-exp-similar", Name: "Expired Similar Updated"}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-expired", 5, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
Expect(updatedArtist.ID).To(Equal(originalArtist.ID))
|
||||
Expect(updatedArtist.Name).To(Equal(originalArtist.Name))
|
||||
Expect(updatedArtist.ExternalInfoUpdatedAt).To(Equal(originalArtist.ExternalInfoUpdatedAt))
|
||||
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal(similarInDS.ID))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal(similarInDS.Name))
|
||||
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistMBID")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistImages")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistBiography")
|
||||
ag.AssertNotCalled(GinkgoT(), "GetArtistURL")
|
||||
})
|
||||
|
||||
It("includes non-present similar artists when includeNotPresent is true", func() {
|
||||
now := time.Now()
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-similar-test",
|
||||
Name: "Similar Test Artist",
|
||||
ExternalInfoUpdatedAt: gg.P(now.Add(-conf.Server.DevArtistInfoTimeToLive / 2)),
|
||||
SimilarArtists: model.Artists{
|
||||
{ID: "ar-sim-present", Name: "Similar Present"},
|
||||
{ID: "", Name: "Similar Absent Raw"},
|
||||
{ID: "ar-sim-absent-lookup", Name: "Similar Absent Lookup"},
|
||||
},
|
||||
}
|
||||
similarInDS := model.Artist{ID: "ar-sim-present", Name: "Similar Present Updated"}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-similar-test", 5, true)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(3))
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal(similarInDS.ID))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal(similarInDS.Name))
|
||||
Expect(updatedArtist.SimilarArtists[1].ID).To(BeEmpty())
|
||||
Expect(updatedArtist.SimilarArtists[1].Name).To(Equal("Similar Absent Raw"))
|
||||
Expect(updatedArtist.SimilarArtists[2].ID).To(BeEmpty())
|
||||
Expect(updatedArtist.SimilarArtists[2].Name).To(Equal("Similar Absent Lookup"))
|
||||
})
|
||||
|
||||
It("updates ArtistInfo even if an optional agent call fails", func() {
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-agent-fail",
|
||||
Name: "Agent Fail Artist",
|
||||
}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist})
|
||||
|
||||
expectedErr := errors.New("agent MBID failed")
|
||||
ag.On("GetArtistMBID", ctx, "ar-agent-fail", "Agent Fail Artist").Return("", expectedErr).Once()
|
||||
ag.On("GetArtistImages", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything).Return(nil, nil).Maybe()
|
||||
ag.On("GetArtistBiography", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetArtistURL", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetSimilarArtists", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything, 100).Return(nil, nil).Maybe()
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-agent-fail", 10, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist).NotTo(BeNil())
|
||||
Expect(updatedArtist.ID).To(Equal("ar-agent-fail"))
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
})
|
||||
@@ -18,8 +18,6 @@ import (
type FFmpeg interface {
Transcode(ctx context.Context, command, path string, maxBitRate, offset int) (io.ReadCloser, error)
ExtractImage(ctx context.Context, path string) (io.ReadCloser, error)
ConvertToWAV(ctx context.Context, path string) (io.ReadCloser, error)
ConvertToFLAC(ctx context.Context, path string) (io.ReadCloser, error)
Probe(ctx context.Context, files []string) (string, error)
CmdPath() (string, error)
IsAvailable() bool
@@ -31,10 +29,8 @@ func New() FFmpeg {
}

const (
extractImageCmd = "ffmpeg -i %s -an -vcodec copy -f image2pipe -"
extractImageCmd = "ffmpeg -i %s -map 0:v -map -0:V -vcodec copy -f image2pipe -"
probeCmd = "ffmpeg %s -f ffmetadata"
createWavCmd = "ffmpeg -i %s -c:a pcm_s16le -f wav -"
createFLACCmd = "ffmpeg -i %s -f flac -"
)

type ffmpeg struct{}
@@ -43,6 +39,10 @@ func (e *ffmpeg) Transcode(ctx context.Context, command, path string, maxBitRate
if _, err := ffmpegCmd(); err != nil {
return nil, err
}
// First make sure the file exists
if err := fileExists(path); err != nil {
return nil, err
}
args := createFFmpegCommand(command, path, maxBitRate, offset)
return e.start(ctx, args)
}
@@ -51,18 +51,23 @@ func (e *ffmpeg) ExtractImage(ctx context.Context, path string) (io.ReadCloser,
if _, err := ffmpegCmd(); err != nil {
return nil, err
}
// First make sure the file exists
if err := fileExists(path); err != nil {
return nil, err
}
args := createFFmpegCommand(extractImageCmd, path, 0, 0)
return e.start(ctx, args)
}

func (e *ffmpeg) ConvertToWAV(ctx context.Context, path string) (io.ReadCloser, error) {
args := createFFmpegCommand(createWavCmd, path, 0, 0)
return e.start(ctx, args)
}

func (e *ffmpeg) ConvertToFLAC(ctx context.Context, path string) (io.ReadCloser, error) {
args := createFFmpegCommand(createFLACCmd, path, 0, 0)
return e.start(ctx, args)
func fileExists(path string) error {
s, err := os.Stat(path)
if err != nil {
return err
}
if s.IsDir() {
return fmt.Errorf("'%s' is a directory", path)
}
return nil
}

func (e *ffmpeg) Probe(ctx context.Context, files []string) (string, error) {
@@ -153,31 +158,26 @@ func (j *ffCmd) wait() {

// Path will always be an absolute path
func createFFmpegCommand(cmd, path string, maxBitRate, offset int) []string {
split := strings.Split(fixCmd(cmd), " ")
var parts []string

for _, s := range split {
var args []string
for _, s := range fixCmd(cmd) {
if strings.Contains(s, "%s") {
s = strings.ReplaceAll(s, "%s", path)
parts = append(parts, s)
args = append(args, s)
if offset > 0 && !strings.Contains(cmd, "%t") {
parts = append(parts, "-ss", strconv.Itoa(offset))
args = append(args, "-ss", strconv.Itoa(offset))
}
} else {
s = strings.ReplaceAll(s, "%t", strconv.Itoa(offset))
s = strings.ReplaceAll(s, "%b", strconv.Itoa(maxBitRate))
parts = append(parts, s)
args = append(args, s)
}
}

return parts
return args
}

func createProbeCommand(cmd string, inputs []string) []string {
split := strings.Split(fixCmd(cmd), " ")
var args []string

for _, s := range split {
for _, s := range fixCmd(cmd) {
if s == "%s" {
for _, inp := range inputs {
args = append(args, "-i", inp)
@@ -189,18 +189,15 @@ func createProbeCommand(cmd string, inputs []string) []string {
return args
}

func fixCmd(cmd string) string {
split := strings.Split(cmd, " ")
var result []string
func fixCmd(cmd string) []string {
split := strings.Fields(cmd)
cmdPath, _ := ffmpegCmd()
for _, s := range split {
for i, s := range split {
if s == "ffmpeg" || s == "ffmpeg.exe" {
result = append(result, cmdPath)
} else {
result = append(result, s)
split[i] = cmdPath
}
}
return strings.Join(result, " ")
return split
}

func ffmpegCmd() (string, error) {
@@ -223,6 +220,7 @@ func ffmpegCmd() (string, error) {
return ffmpegPath, ffmpegErr
}

// These variables are accessible here for tests. Do not use them directly in production code. Use ffmpegCmd() instead.
var (
ffOnce sync.Once
ffmpegPath string

@@ -27,6 +27,10 @@ var _ = Describe("ffmpeg", func() {
args := createFFmpegCommand("ffmpeg -i %s -b:a %bk mp3 -", "/music library/file.mp3", 123, 0)
Expect(args).To(Equal([]string{"ffmpeg", "-i", "/music library/file.mp3", "-b:a", "123k", "mp3", "-"}))
})
It("handles extra spaces in the command string", func() {
args := createFFmpegCommand("ffmpeg -i %s -b:a %bk mp3 -", "/music library/file.mp3", 123, 0)
Expect(args).To(Equal([]string{"ffmpeg", "-i", "/music library/file.mp3", "-b:a", "123k", "mp3", "-"}))
})
Context("when command has time offset param", func() {
It("creates a valid command line with offset", func() {
args := createFFmpegCommand("ffmpeg -i %s -b:a %bk -ss %t mp3 -", "/music library/file.mp3", 123, 456)
@@ -48,4 +52,17 @@ var _ = Describe("ffmpeg", func() {
Expect(args).To(Equal([]string{"ffmpeg", "-i", "/music library/one.mp3", "-i", "/music library/two.mp3", "-f", "ffmetadata"}))
})
})

When("ffmpegPath is set", func() {
It("returns the correct ffmpeg path", func() {
ffmpegPath = "/usr/bin/ffmpeg"
args := createProbeCommand(probeCmd, []string{"one.mp3"})
Expect(args).To(Equal([]string{"/usr/bin/ffmpeg", "-i", "one.mp3", "-f", "ffmetadata"}))
})
It("returns the correct ffmpeg path with spaces", func() {
ffmpegPath = "/usr/bin/with spaces/ffmpeg.exe"
args := createProbeCommand(probeCmd, []string{"one.mp3"})
Expect(args).To(Equal([]string{"/usr/bin/with spaces/ffmpeg.exe", "-i", "one.mp3", "-f", "ffmetadata"}))
})
})
})
51  core/inspect.go  Normal file
@@ -0,0 +1,51 @@
package core

import (
"path/filepath"

"github.com/navidrome/navidrome/core/storage"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/metadata"
. "github.com/navidrome/navidrome/utils/gg"
)

type InspectOutput struct {
File string `json:"file"`
RawTags model.RawTags `json:"rawTags"`
MappedTags *model.MediaFile `json:"mappedTags,omitempty"`
}

func Inspect(filePath string, libraryId int, folderId string) (*InspectOutput, error) {
path, file := filepath.Split(filePath)

s, err := storage.For(path)
if err != nil {
return nil, err
}

fs, err := s.FS()
if err != nil {
return nil, err
}

tags, err := fs.ReadTags(file)
if err != nil {
return nil, err
}

tag, ok := tags[file]
if !ok {
log.Error("Could not get tags for path", "path", filePath)
return nil, model.ErrNotFound
}

md := metadata.New(path, tag)
result := &InspectOutput{
File: filePath,
RawTags: tags[file].Tags,
MappedTags: P(md.ToMediaFile(libraryId, folderId)),
}

return result, nil
}
37  core/lyrics/lyrics.go  Normal file
@@ -0,0 +1,37 @@
package lyrics

import (
"context"
"strings"

"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
)

func GetLyrics(ctx context.Context, mf *model.MediaFile) (model.LyricList, error) {
var lyricsList model.LyricList
var err error

for pattern := range strings.SplitSeq(strings.ToLower(conf.Server.LyricsPriority), ",") {
pattern = strings.TrimSpace(pattern)
switch {
case pattern == "embedded":
lyricsList, err = fromEmbedded(ctx, mf)
case strings.HasPrefix(pattern, "."):
lyricsList, err = fromExternalFile(ctx, mf, pattern)
default:
log.Error(ctx, "Invalid lyric pattern", "pattern", pattern)
}

if err != nil {
log.Error(ctx, "error parsing lyrics", "source", pattern, err)
}

if len(lyricsList) > 0 {
return lyricsList, nil
}
}

return nil, nil
}
17  core/lyrics/lyrics_suite_test.go  Normal file
@@ -0,0 +1,17 @@
package lyrics_test

import (
"testing"

"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)

func TestLyrics(t *testing.T) {
tests.Init(t, false)
log.SetLevel(log.LevelFatal)
RegisterFailHandler(Fail)
RunSpecs(t, "Lyrics Suite")
}
124  core/lyrics/lyrics_test.go  Normal file
@@ -0,0 +1,124 @@
package lyrics_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"os"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/lyrics"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
"github.com/navidrome/navidrome/utils/gg"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("sources", func() {
|
||||
var mf model.MediaFile
|
||||
var ctx context.Context
|
||||
|
||||
const badLyrics = "This is a set of lyrics\nThat is not good"
|
||||
unsynced, _ := model.ToLyrics("xxx", badLyrics)
|
||||
embeddedLyrics := model.LyricList{*unsynced}
|
||||
|
||||
syncedLyrics := model.LyricList{
|
||||
model.Lyrics{
|
||||
DisplayArtist: "Rick Astley",
|
||||
DisplayTitle: "That one song",
|
||||
Lang: "eng",
|
||||
Line: []model.Line{
|
||||
{
|
||||
Start: gg.P(int64(18800)),
|
||||
Value: "We're no strangers to love",
|
||||
},
|
||||
{
|
||||
Start: gg.P(int64(22801)),
|
||||
Value: "You know the rules and so do I",
|
||||
},
|
||||
},
|
||||
Offset: gg.P(int64(-100)),
|
||||
Synced: true,
|
||||
},
|
||||
}
|
||||
|
||||
unsyncedLyrics := model.LyricList{
|
||||
model.Lyrics{
|
||||
Lang: "xxx",
|
||||
Line: []model.Line{
|
||||
{
|
||||
Value: "We're no strangers to love",
|
||||
},
|
||||
{
|
||||
Value: "You know the rules and so do I",
|
||||
},
|
||||
},
|
||||
Synced: false,
|
||||
},
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
|
||||
lyricsJson, _ := json.Marshal(embeddedLyrics)
|
||||
|
||||
mf = model.MediaFile{
|
||||
Lyrics: string(lyricsJson),
|
||||
Path: "tests/fixtures/test.mp3",
|
||||
}
|
||||
ctx = context.Background()
|
||||
})
|
||||
|
||||
DescribeTable("Lyrics Priority", func(priority string, expected model.LyricList) {
|
||||
conf.Server.LyricsPriority = priority
|
||||
list, err := lyrics.GetLyrics(ctx, &mf)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(list).To(Equal(expected))
|
||||
},
|
||||
Entry("embedded > lrc > txt", "embedded,.lrc,.txt", embeddedLyrics),
|
||||
Entry("lrc > embedded > txt", ".lrc,embedded,.txt", syncedLyrics),
|
||||
Entry("txt > lrc > embedded", ".txt,.lrc,embedded", unsyncedLyrics))
|
||||
|
||||
Context("Errors", func() {
|
||||
var RegularUserContext = XContext
|
||||
var isRegularUser = os.Getuid() != 0
|
||||
if isRegularUser {
|
||||
RegularUserContext = Context
|
||||
}
|
||||
|
||||
RegularUserContext("run without root permissions", func() {
|
||||
var accessForbiddenFile string
|
||||
|
||||
BeforeEach(func() {
|
||||
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
|
||||
|
||||
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
mf.Path = accessForbiddenFile
|
||||
|
||||
DeferCleanup(func() {
|
||||
Expect(f.Close()).To(Succeed())
|
||||
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
|
||||
})
|
||||
})
|
||||
|
||||
It("should fallback to embedded if an error happens when parsing file", func() {
|
||||
conf.Server.LyricsPriority = ".mp3,embedded"
|
||||
|
||||
list, err := lyrics.GetLyrics(ctx, &mf)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(list).To(Equal(embeddedLyrics))
|
||||
})
|
||||
|
||||
It("should return nothing if error happens when trying to parse file", func() {
|
||||
conf.Server.LyricsPriority = ".mp3"
|
||||
|
||||
list, err := lyrics.GetLyrics(ctx, &mf)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(list).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
51  core/lyrics/sources.go  Normal file
@@ -0,0 +1,51 @@
package lyrics

import (
"context"
"errors"
"os"
"path"

"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
)

func fromEmbedded(ctx context.Context, mf *model.MediaFile) (model.LyricList, error) {
if mf.Lyrics != "" {
log.Trace(ctx, "embedded lyrics found in file", "title", mf.Title)
return mf.StructuredLyrics()
}

log.Trace(ctx, "no embedded lyrics for file", "path", mf.Title)

return nil, nil
}

func fromExternalFile(ctx context.Context, mf *model.MediaFile, suffix string) (model.LyricList, error) {
basePath := mf.AbsolutePath()
ext := path.Ext(basePath)

externalLyric := basePath[0:len(basePath)-len(ext)] + suffix

contents, err := os.ReadFile(externalLyric)

if errors.Is(err, os.ErrNotExist) {
log.Trace(ctx, "no lyrics found at path", "path", externalLyric)
return nil, nil
} else if err != nil {
return nil, err
}

lyrics, err := model.ToLyrics("xxx", string(contents))
if err != nil {
log.Error(ctx, "error parsing lyric external file", "path", externalLyric, err)
return nil, err
} else if lyrics == nil {
log.Trace(ctx, "empty lyrics from external file", "path", externalLyric)
return nil, nil
}

log.Trace(ctx, "retrieved lyrics from external file", "path", externalLyric)

return model.LyricList{*lyrics}, nil
}
112  core/lyrics/sources_test.go  Normal file
@@ -0,0 +1,112 @@
package lyrics
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/gg"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("sources", func() {
|
||||
ctx := context.Background()
|
||||
|
||||
Describe("fromEmbedded", func() {
|
||||
It("should return nothing for a media file with no lyrics", func() {
|
||||
mf := model.MediaFile{}
|
||||
lyrics, err := fromEmbedded(ctx, &mf)
|
||||
|
||||
Expect(err).To(BeNil())
|
||||
Expect(lyrics).To(HaveLen(0))
|
||||
})
|
||||
|
||||
It("should return lyrics for a media file with well-formatted lyrics", func() {
|
||||
const syncedLyrics = "[00:18.80]We're no strangers to love\n[00:22.801]You know the rules and so do I"
|
||||
const unsyncedLyrics = "We're no strangers to love\nYou know the rules and so do I"
|
||||
|
||||
synced, _ := model.ToLyrics("eng", syncedLyrics)
|
||||
unsynced, _ := model.ToLyrics("xxx", unsyncedLyrics)
|
||||
|
||||
expectedList := model.LyricList{*synced, *unsynced}
|
||||
lyricsJson, err := json.Marshal(expectedList)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
mf := model.MediaFile{
|
||||
Lyrics: string(lyricsJson),
|
||||
}
|
||||
|
||||
lyrics, err := fromEmbedded(ctx, &mf)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(lyrics).ToNot(BeNil())
|
||||
Expect(lyrics).To(Equal(expectedList))
|
||||
})
|
||||
|
||||
It("should return an error if somehow the JSON is bad", func() {
|
||||
mf := model.MediaFile{Lyrics: "["}
|
||||
lyrics, err := fromEmbedded(ctx, &mf)
|
||||
|
||||
Expect(lyrics).To(HaveLen(0))
|
||||
Expect(err).ToNot(BeNil())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("fromExternalFile", func() {
|
||||
It("should return nil for lyrics that don't exist", func() {
|
||||
mf := model.MediaFile{Path: "tests/fixtures/01 Invisible (RED) Edit Version.mp3"}
|
||||
lyrics, err := fromExternalFile(ctx, &mf, ".lrc")
|
||||
|
||||
Expect(err).To(BeNil())
|
||||
Expect(lyrics).To(HaveLen(0))
|
||||
})
|
||||
|
||||
It("should return synchronized lyrics from a file", func() {
|
||||
mf := model.MediaFile{Path: "tests/fixtures/test.mp3"}
|
||||
lyrics, err := fromExternalFile(ctx, &mf, ".lrc")
|
||||
|
||||
Expect(err).To(BeNil())
|
||||
Expect(lyrics).To(Equal(model.LyricList{
|
||||
model.Lyrics{
|
||||
DisplayArtist: "Rick Astley",
|
||||
DisplayTitle: "That one song",
|
||||
Lang: "eng",
|
||||
Line: []model.Line{
|
||||
{
|
||||
Start: gg.P(int64(18800)),
|
||||
Value: "We're no strangers to love",
|
||||
},
|
||||
{
|
||||
Start: gg.P(int64(22801)),
|
||||
Value: "You know the rules and so do I",
|
||||
},
|
||||
},
|
||||
Offset: gg.P(int64(-100)),
|
||||
Synced: true,
|
||||
},
|
||||
}))
|
||||
})
|
||||
|
||||
It("should return unsynchronized lyrics from a file", func() {
|
||||
mf := model.MediaFile{Path: "tests/fixtures/test.mp3"}
|
||||
lyrics, err := fromExternalFile(ctx, &mf, ".txt")
|
||||
|
||||
Expect(err).To(BeNil())
|
||||
Expect(lyrics).To(Equal(model.LyricList{
|
||||
model.Lyrics{
|
||||
Lang: "xxx",
|
||||
Line: []model.Line{
|
||||
{
|
||||
Value: "We're no strangers to love",
|
||||
},
|
||||
{
|
||||
Value: "You know the rules and so do I",
|
||||
},
|
||||
},
|
||||
Synced: false,
|
||||
},
|
||||
}))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,7 +1,6 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
@@ -37,11 +36,12 @@ type mediaStreamer struct {
|
||||
}
|
||||
|
||||
type streamJob struct {
|
||||
ms *mediaStreamer
|
||||
mf *model.MediaFile
|
||||
format string
|
||||
bitRate int
|
||||
offset int
|
||||
ms *mediaStreamer
|
||||
mf *model.MediaFile
|
||||
filePath string
|
||||
format string
|
||||
bitRate int
|
||||
offset int
|
||||
}
|
||||
|
||||
func (j *streamJob) Key() string {
|
||||
@@ -69,13 +69,14 @@ func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqF
|
||||
|
||||
format, bitRate = selectTranscodingOptions(ctx, ms.ds, mf, reqFormat, reqBitRate)
|
||||
s := &Stream{ctx: ctx, mf: mf, format: format, bitRate: bitRate}
|
||||
filePath := mf.AbsolutePath()
|
||||
|
||||
if format == "raw" {
|
||||
log.Debug(ctx, "Streaming RAW file", "id", mf.ID, "path", mf.Path,
|
||||
log.Debug(ctx, "Streaming RAW file", "id", mf.ID, "path", filePath,
|
||||
"requestBitrate", reqBitRate, "requestFormat", reqFormat, "requestOffset", reqOffset,
|
||||
"originalBitrate", mf.BitRate, "originalFormat", mf.Suffix,
|
||||
"selectedBitrate", bitRate, "selectedFormat", format)
|
||||
f, err := os.Open(mf.Path)
|
||||
f, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -86,11 +87,12 @@ func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqF
|
||||
}
|
||||
|
||||
job := &streamJob{
|
||||
ms: ms,
|
||||
mf: mf,
|
||||
format: format,
|
||||
bitRate: bitRate,
|
||||
offset: reqOffset,
|
||||
ms: ms,
|
||||
mf: mf,
|
||||
filePath: filePath,
|
||||
format: format,
|
||||
bitRate: bitRate,
|
||||
offset: reqOffset,
|
||||
}
|
||||
r, err := ms.cache.Get(ctx, job)
|
||||
if err != nil {
|
||||
@@ -102,7 +104,7 @@ func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqF
|
||||
s.ReadCloser = r
|
||||
s.Seeker = r.Seeker
|
||||
|
||||
log.Debug(ctx, "Streaming TRANSCODED file", "id", mf.ID, "path", mf.Path,
|
||||
log.Debug(ctx, "Streaming TRANSCODED file", "id", mf.ID, "path", filePath,
|
||||
"requestBitrate", reqBitRate, "requestFormat", reqFormat, "requestOffset", reqOffset,
|
||||
"originalBitrate", mf.BitRate, "originalFormat", mf.Suffix,
|
||||
"selectedBitrate", bitRate, "selectedFormat", format, "cached", cached, "seekable", s.Seekable())
|
||||
@@ -128,64 +130,56 @@ func (s *Stream) EstimatedContentLength() int {
|
||||
return int(s.mf.Duration * float32(s.bitRate) / 8 * 1024)
|
||||
}
|
||||
|
||||
// selectTranscodingOptions selects the appropriate transcoding options based on the requested format and bitrate.
|
||||
// If the requested format is "raw" or matches the media file's suffix and the requested bitrate is 0, it returns the
|
||||
// original format and bitrate.
|
||||
// Otherwise, it determines the format and bitrate using determineFormatAndBitRate and findTranscoding functions.
|
||||
//
|
||||
// NOTE: It is easier to follow the tests in core/media_streamer_internal_test.go to understand the different scenarios.
|
||||
func selectTranscodingOptions(ctx context.Context, ds model.DataStore, mf *model.MediaFile, reqFormat string, reqBitRate int) (string, int) {
|
||||
if reqFormat == "raw" || reqFormat == mf.Suffix && reqBitRate == 0 {
|
||||
return "raw", mf.BitRate
|
||||
// TODO This function deserves some love (refactoring)
|
||||
func selectTranscodingOptions(ctx context.Context, ds model.DataStore, mf *model.MediaFile, reqFormat string, reqBitRate int) (format string, bitRate int) {
|
||||
format = "raw"
|
||||
if reqFormat == "raw" {
|
||||
return format, 0
|
||||
}
|
||||
|
||||
format, bitRate := determineFormatAndBitRate(ctx, mf.BitRate, reqFormat, reqBitRate)
|
||||
if format == "" && bitRate == 0 {
|
||||
return "raw", 0
|
||||
if reqFormat == mf.Suffix && reqBitRate == 0 {
|
||||
bitRate = mf.BitRate
|
||||
return format, bitRate
|
||||
}
|
||||
|
||||
return findTranscoding(ctx, ds, mf, format, bitRate)
|
||||
}
|
||||
|
||||
// determineFormatAndBitRate determines the format and bitrate for transcoding based on the requested format and bitrate.
|
||||
// If the requested format is not empty, it returns the requested format and bitrate.
|
||||
// Otherwise, it checks for default transcoding settings from the context or server configuration.
|
||||
func determineFormatAndBitRate(ctx context.Context, srcBitRate int, reqFormat string, reqBitRate int) (string, int) {
|
||||
trc, hasDefault := request.TranscodingFrom(ctx)
|
||||
var cFormat string
|
||||
var cBitRate int
|
||||
if reqFormat != "" {
|
||||
return reqFormat, reqBitRate
|
||||
}
|
||||
|
||||
format, bitRate := "", 0
|
||||
if trc, hasDefault := request.TranscodingFrom(ctx); hasDefault {
|
||||
format = trc.TargetFormat
|
||||
bitRate = trc.DefaultBitRate
|
||||
|
||||
if p, ok := request.PlayerFrom(ctx); ok && p.MaxBitRate > 0 && p.MaxBitRate < bitRate {
|
||||
bitRate = p.MaxBitRate
|
||||
cFormat = reqFormat
|
||||
} else {
|
||||
if hasDefault {
|
||||
cFormat = trc.TargetFormat
|
||||
cBitRate = trc.DefaultBitRate
|
||||
if p, ok := request.PlayerFrom(ctx); ok {
|
||||
cBitRate = p.MaxBitRate
|
||||
}
|
||||
} else if reqBitRate > 0 && reqBitRate < mf.BitRate && conf.Server.DefaultDownsamplingFormat != "" {
|
||||
// If no format is specified and no transcoding associated to the player, but a bitrate is specified,
|
||||
// and there is no transcoding set for the player, we use the default downsampling format.
|
||||
// But only if the requested bitRate is lower than the original bitRate.
|
||||
log.Debug("Default Downsampling", "Using default downsampling format", conf.Server.DefaultDownsamplingFormat)
|
||||
cFormat = conf.Server.DefaultDownsamplingFormat
|
||||
}
|
||||
} else if reqBitRate > 0 && reqBitRate < srcBitRate && conf.Server.DefaultDownsamplingFormat != "" {
|
||||
// If no format is specified and no transcoding associated to the player, but a bitrate is specified,
|
||||
// and there is no transcoding set for the player, we use the default downsampling format.
|
||||
// But only if the requested bitRate is lower than the original bitRate.
|
||||
log.Debug(ctx, "Using default downsampling format", "format", conf.Server.DefaultDownsamplingFormat)
|
||||
format = conf.Server.DefaultDownsamplingFormat
|
||||
}
|
||||
|
||||
return format, cmp.Or(reqBitRate, bitRate)
|
||||
}
|
||||
|
||||
// findTranscoding finds the appropriate transcoding settings for the given format and bitrate.
|
||||
// If the format matches the media file's suffix and the bitrate is greater than or equal to the original bitrate,
|
||||
// it returns the original format and bitrate.
|
||||
// Otherwise, it returns the target format and bitrate from the
|
||||
// transcoding settings.
|
||||
func findTranscoding(ctx context.Context, ds model.DataStore, mf *model.MediaFile, format string, bitRate int) (string, int) {
|
||||
t, err := ds.Transcoding(ctx).FindByFormat(format)
|
||||
if err != nil || t == nil || format == mf.Suffix && bitRate >= mf.BitRate {
|
||||
return "raw", 0
|
||||
if reqBitRate > 0 {
|
||||
cBitRate = reqBitRate
|
||||
}
|
||||
|
||||
return t.TargetFormat, cmp.Or(bitRate, t.DefaultBitRate)
|
||||
if cBitRate == 0 && cFormat == "" {
|
||||
return format, bitRate
|
||||
}
|
||||
t, err := ds.Transcoding(ctx).FindByFormat(cFormat)
|
||||
if err == nil {
|
||||
format = t.TargetFormat
|
||||
if cBitRate != 0 {
|
||||
bitRate = cBitRate
|
||||
} else {
|
||||
bitRate = t.DefaultBitRate
|
||||
}
|
||||
}
|
||||
if format == mf.Suffix && bitRate >= mf.BitRate {
|
||||
format = "raw"
|
||||
bitRate = 0
|
||||
}
|
||||
return format, bitRate
|
||||
}
|
||||
|
||||
var (
|
||||
@@ -210,7 +204,7 @@ func NewTranscodingCache() TranscodingCache {
|
||||
log.Error(ctx, "Error loading transcoding command", "format", job.format, err)
|
||||
return nil, os.ErrInvalid
|
||||
}
|
||||
out, err := job.ms.transcoder.Transcode(ctx, t.Command, job.mf.Path, job.bitRate, job.offset)
|
||||
out, err := job.ms.transcoder.Transcode(ctx, t.Command, job.filePath, job.bitRate, job.offset)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error starting transcoder", "id", job.mf.ID, err)
|
||||
return nil, os.ErrInvalid
|
||||
|
||||
@@ -122,10 +122,11 @@ var _ = Describe("MediaStreamer", func() {
|
||||
Expect(bitRate).To(Equal(0))
|
||||
})
|
||||
})
|
||||
|
||||
Context("player has maxBitRate configured", func() {
|
||||
BeforeEach(func() {
|
||||
t := model.Transcoding{ID: "oga1", TargetFormat: "oga", DefaultBitRate: 96}
|
||||
p := model.Player{ID: "player1", TranscodingId: t.ID, MaxBitRate: 80}
|
||||
p := model.Player{ID: "player1", TranscodingId: t.ID, MaxBitRate: 192}
|
||||
ctx = request.WithTranscoding(ctx, t)
|
||||
ctx = request.WithPlayer(ctx, p)
|
||||
})
|
||||
@@ -140,7 +141,7 @@ var _ = Describe("MediaStreamer", func() {
|
||||
mf.BitRate = 1000
|
||||
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 0)
|
||||
Expect(format).To(Equal("oga"))
|
||||
Expect(bitRate).To(Equal(80))
|
||||
Expect(bitRate).To(Equal(192))
|
||||
})
|
||||
It("returns requested format", func() {
|
||||
mf.Suffix = "flac"
|
||||
@@ -152,9 +153,9 @@ var _ = Describe("MediaStreamer", func() {
|
||||
It("returns requested bitrate", func() {
|
||||
mf.Suffix = "flac"
|
||||
mf.BitRate = 1000
|
||||
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 80)
|
||||
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 160)
|
||||
Expect(format).To(Equal("oga"))
|
||||
Expect(bitRate).To(Equal(80))
|
||||
Expect(bitRate).To(Equal(160))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
123  core/metrics.go (deleted)
@@ -1,123 +0,0 @@
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"sync"
|
||||
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
)
|
||||
|
||||
func WriteInitialMetrics() {
|
||||
getPrometheusMetrics().versionInfo.With(prometheus.Labels{"version": consts.Version}).Set(1)
|
||||
}
|
||||
|
||||
func WriteAfterScanMetrics(ctx context.Context, dataStore model.DataStore, success bool) {
|
||||
processSqlAggregateMetrics(ctx, dataStore, getPrometheusMetrics().dbTotal)
|
||||
|
||||
scanLabels := prometheus.Labels{"success": strconv.FormatBool(success)}
|
||||
getPrometheusMetrics().lastMediaScan.With(scanLabels).SetToCurrentTime()
|
||||
getPrometheusMetrics().mediaScansCounter.With(scanLabels).Inc()
|
||||
}
|
||||
|
||||
// Prometheus' metrics requires initialization. But not more than once
|
||||
var (
|
||||
prometheusMetricsInstance *prometheusMetrics
|
||||
prometheusOnce sync.Once
|
||||
)
|
||||
|
||||
type prometheusMetrics struct {
|
||||
dbTotal *prometheus.GaugeVec
|
||||
versionInfo *prometheus.GaugeVec
|
||||
lastMediaScan *prometheus.GaugeVec
|
||||
mediaScansCounter *prometheus.CounterVec
|
||||
}
|
||||
|
||||
func getPrometheusMetrics() *prometheusMetrics {
|
||||
prometheusOnce.Do(func() {
|
||||
var err error
|
||||
prometheusMetricsInstance, err = newPrometheusMetrics()
|
||||
if err != nil {
|
||||
log.Fatal("Unable to create Prometheus metrics instance.", err)
|
||||
}
|
||||
})
|
||||
return prometheusMetricsInstance
|
||||
}
|
||||
|
||||
func newPrometheusMetrics() (*prometheusMetrics, error) {
|
||||
res := &prometheusMetrics{
|
||||
dbTotal: prometheus.NewGaugeVec(
|
||||
prometheus.GaugeOpts{
|
||||
Name: "db_model_totals",
|
||||
Help: "Total number of DB items per model",
|
||||
},
|
||||
[]string{"model"},
|
||||
),
|
||||
versionInfo: prometheus.NewGaugeVec(
|
||||
prometheus.GaugeOpts{
|
||||
Name: "navidrome_info",
|
||||
Help: "Information about Navidrome version",
|
||||
},
|
||||
[]string{"version"},
|
||||
),
|
||||
lastMediaScan: prometheus.NewGaugeVec(
|
||||
prometheus.GaugeOpts{
|
||||
Name: "media_scan_last",
|
||||
Help: "Last media scan timestamp by success",
|
||||
},
|
||||
[]string{"success"},
|
||||
),
|
||||
mediaScansCounter: prometheus.NewCounterVec(
|
||||
prometheus.CounterOpts{
|
||||
Name: "media_scans",
|
||||
Help: "Total success media scans by success",
|
||||
},
|
||||
[]string{"success"},
|
||||
),
|
||||
}
|
||||
|
||||
err := prometheus.DefaultRegisterer.Register(res.dbTotal)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to register db_model_totals metrics: %w", err)
|
||||
}
|
||||
err = prometheus.DefaultRegisterer.Register(res.versionInfo)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to register navidrome_info metrics: %w", err)
|
||||
}
|
||||
err = prometheus.DefaultRegisterer.Register(res.lastMediaScan)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to register media_scan_last metrics: %w", err)
|
||||
}
|
||||
err = prometheus.DefaultRegisterer.Register(res.mediaScansCounter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to register media_scans metrics: %w", err)
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func processSqlAggregateMetrics(ctx context.Context, dataStore model.DataStore, targetGauge *prometheus.GaugeVec) {
|
||||
albumsCount, err := dataStore.Album(ctx).CountAll()
|
||||
if err != nil {
|
||||
log.Warn("album CountAll error", err)
|
||||
return
|
||||
}
|
||||
targetGauge.With(prometheus.Labels{"model": "album"}).Set(float64(albumsCount))
|
||||
|
||||
songsCount, err := dataStore.MediaFile(ctx).CountAll()
|
||||
if err != nil {
|
||||
log.Warn("media CountAll error", err)
|
||||
return
|
||||
}
|
||||
targetGauge.With(prometheus.Labels{"model": "media"}).Set(float64(songsCount))
|
||||
|
||||
usersCount, err := dataStore.User(ctx).CountAll()
|
||||
if err != nil {
|
||||
log.Warn("user CountAll error", err)
|
||||
return
|
||||
}
|
||||
targetGauge.With(prometheus.Labels{"model": "user"}).Set(float64(usersCount))
|
||||
}
|
||||
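For context on the removed file: metrics registered on prometheus.DefaultRegisterer, as above, are usually exposed over HTTP with the promhttp handler. The sketch below shows that pattern in isolation; the /metrics path and :9090 port are placeholders chosen for illustration, not Navidrome's actual configuration.

package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	// A gauge vec equivalent to the db_model_totals metric defined above.
	dbTotal := prometheus.NewGaugeVec(
		prometheus.GaugeOpts{Name: "db_model_totals", Help: "Total number of DB items per model"},
		[]string{"model"},
	)
	prometheus.MustRegister(dbTotal) // panics on duplicate registration, unlike Register above
	dbTotal.With(prometheus.Labels{"model": "album"}).Set(42)

	// Serve everything registered on the default registry.
	http.Handle("/metrics", promhttp.Handler()) // example path, not Navidrome's endpoint
	_ = http.ListenAndServe(":9090", nil)       // example port
}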
core/metrics/insights.go (new file, 265 lines)
@@ -0,0 +1,265 @@
package metrics

import (
	"bytes"
	"context"
	"encoding/json"
	"math"
	"net/http"
	"path/filepath"
	"runtime"
	"runtime/debug"
	"sync"
	"sync/atomic"
	"time"

	"github.com/Masterminds/squirrel"
	"github.com/google/uuid"
	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/consts"
	"github.com/navidrome/navidrome/core/auth"
	"github.com/navidrome/navidrome/core/metrics/insights"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/utils/singleton"
)

type Insights interface {
	Run(ctx context.Context)
	LastRun(ctx context.Context) (timestamp time.Time, success bool)
}

var (
	insightsID string
)

type insightsCollector struct {
	ds         model.DataStore
	lastRun    atomic.Int64
	lastStatus atomic.Bool
}

func GetInstance(ds model.DataStore) Insights {
	return singleton.GetInstance(func() *insightsCollector {
		id, err := ds.Property(context.TODO()).Get(consts.InsightsIDKey)
		if err != nil {
			log.Trace("Could not get Insights ID from DB. Creating one", err)
			id = uuid.NewString()
			err = ds.Property(context.TODO()).Put(consts.InsightsIDKey, id)
			if err != nil {
				log.Trace("Could not save Insights ID to DB", err)
			}
		}
		insightsID = id
		return &insightsCollector{ds: ds}
	})
}

func (c *insightsCollector) Run(ctx context.Context) {
	ctx = auth.WithAdminUser(ctx, c.ds)
	for {
		c.sendInsights(ctx)
		select {
		case <-time.After(consts.InsightsUpdateInterval):
			continue
		case <-ctx.Done():
			return
		}
	}
}

func (c *insightsCollector) LastRun(context.Context) (timestamp time.Time, success bool) {
	t := c.lastRun.Load()
	return time.UnixMilli(t), c.lastStatus.Load()
}

func (c *insightsCollector) sendInsights(ctx context.Context) {
	count, err := c.ds.User(ctx).CountAll(model.QueryOptions{})
	if err != nil {
		log.Trace(ctx, "Could not check user count", err)
		return
	}
	if count == 0 {
		log.Trace(ctx, "No users found, skipping Insights data collection")
		return
	}
	hc := &http.Client{
		Timeout: consts.DefaultHttpClientTimeOut,
	}
	data := c.collect(ctx)
	if data == nil {
		return
	}
	body := bytes.NewReader(data)
	req, err := http.NewRequestWithContext(ctx, "POST", consts.InsightsEndpoint, body)
	if err != nil {
		log.Trace(ctx, "Could not create Insights request", err)
		return
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := hc.Do(req)
	if err != nil {
		log.Trace(ctx, "Could not send Insights data", err)
		return
	}
	log.Info(ctx, "Sent Insights data (for details see http://navidrome.org/docs/getting-started/insights", "data",
		string(data), "server", consts.InsightsEndpoint, "status", resp.Status)
	c.lastRun.Store(time.Now().UnixMilli())
	c.lastStatus.Store(resp.StatusCode < 300)
	resp.Body.Close()
}

func buildInfo() (map[string]string, string) {
	bInfo := map[string]string{}
	var version string
	if info, ok := debug.ReadBuildInfo(); ok {
		for _, setting := range info.Settings {
			if setting.Value == "" {
				continue
			}
			bInfo[setting.Key] = setting.Value
		}
		version = info.GoVersion
	}
	return bInfo, version
}

func getFSInfo(path string) *insights.FSInfo {
	var info insights.FSInfo

	// Normalize the path
	absPath, err := filepath.Abs(path)
	if err != nil {
		return nil
	}
	absPath = filepath.Clean(absPath)

	fsType, err := getFilesystemType(absPath)
	if err != nil {
		return nil
	}
	info.Type = fsType
	return &info
}

var staticData = sync.OnceValue(func() insights.Data {
	// Basic info
	data := insights.Data{
		InsightsID: insightsID,
		Version:    consts.Version,
	}

	// Build info
	data.Build.Settings, data.Build.GoVersion = buildInfo()
	data.OS.Containerized = consts.InContainer

	// OS info
	data.OS.Type = runtime.GOOS
	data.OS.Arch = runtime.GOARCH
	data.OS.NumCPU = runtime.NumCPU()
	data.OS.Version, data.OS.Distro = getOSVersion()

	// FS info
	data.FS.Music = getFSInfo(conf.Server.MusicFolder)
	data.FS.Data = getFSInfo(conf.Server.DataFolder)
	if conf.Server.CacheFolder != "" {
		data.FS.Cache = getFSInfo(conf.Server.CacheFolder)
	}
	if conf.Server.Backup.Path != "" {
		data.FS.Backup = getFSInfo(conf.Server.Backup.Path)
	}

	// Config info
	data.Config.LogLevel = conf.Server.LogLevel
	data.Config.LogFileConfigured = conf.Server.LogFile != ""
	data.Config.TLSConfigured = conf.Server.TLSCert != "" && conf.Server.TLSKey != ""
	data.Config.DefaultBackgroundURLSet = conf.Server.UILoginBackgroundURL == consts.DefaultUILoginBackgroundURL
	data.Config.EnableArtworkPrecache = conf.Server.EnableArtworkPrecache
	data.Config.EnableCoverAnimation = conf.Server.EnableCoverAnimation
	data.Config.EnableDownloads = conf.Server.EnableDownloads
	data.Config.EnableSharing = conf.Server.EnableSharing
	data.Config.EnableStarRating = conf.Server.EnableStarRating
	data.Config.EnableLastFM = conf.Server.LastFM.Enabled
	data.Config.EnableListenBrainz = conf.Server.ListenBrainz.Enabled
	data.Config.EnableMediaFileCoverArt = conf.Server.EnableMediaFileCoverArt
	data.Config.EnableSpotify = conf.Server.Spotify.ID != ""
	data.Config.EnableJukebox = conf.Server.Jukebox.Enabled
	data.Config.EnablePrometheus = conf.Server.Prometheus.Enabled
	data.Config.TranscodingCacheSize = conf.Server.TranscodingCacheSize
	data.Config.ImageCacheSize = conf.Server.ImageCacheSize
	data.Config.SessionTimeout = uint64(math.Trunc(conf.Server.SessionTimeout.Seconds()))
	data.Config.SearchFullString = conf.Server.SearchFullString
	data.Config.RecentlyAddedByModTime = conf.Server.RecentlyAddedByModTime
	data.Config.PreferSortTags = conf.Server.PreferSortTags
	data.Config.BackupSchedule = conf.Server.Backup.Schedule
	data.Config.BackupCount = conf.Server.Backup.Count
	data.Config.DevActivityPanel = conf.Server.DevActivityPanel
	data.Config.ScannerEnabled = conf.Server.Scanner.Enabled
	data.Config.ScanSchedule = conf.Server.Scanner.Schedule
	data.Config.ScanWatcherWait = uint64(math.Trunc(conf.Server.Scanner.WatcherWait.Seconds()))
	data.Config.ScanOnStartup = conf.Server.Scanner.ScanOnStartup

	return data
})

func (c *insightsCollector) collect(ctx context.Context) []byte {
	data := staticData()
	data.Uptime = time.Since(consts.ServerStart).Milliseconds() / 1000

	// Library info
	var err error
	data.Library.Tracks, err = c.ds.MediaFile(ctx).CountAll()
	if err != nil {
		log.Trace(ctx, "Error reading tracks count", err)
	}
	data.Library.Albums, err = c.ds.Album(ctx).CountAll()
	if err != nil {
		log.Trace(ctx, "Error reading albums count", err)
	}
	data.Library.Artists, err = c.ds.Artist(ctx).CountAll()
	if err != nil {
		log.Trace(ctx, "Error reading artists count", err)
	}
	data.Library.Playlists, err = c.ds.Playlist(ctx).CountAll()
	if err != nil {
		log.Trace(ctx, "Error reading playlists count", err)
	}
	data.Library.Shares, err = c.ds.Share(ctx).CountAll()
	if err != nil {
		log.Trace(ctx, "Error reading shares count", err)
	}
	data.Library.Radios, err = c.ds.Radio(ctx).Count()
	if err != nil {
		log.Trace(ctx, "Error reading radios count", err)
	}
	data.Library.ActiveUsers, err = c.ds.User(ctx).CountAll(model.QueryOptions{
		Filters: squirrel.Gt{"last_access_at": time.Now().Add(-7 * 24 * time.Hour)},
	})
	if err != nil {
		log.Trace(ctx, "Error reading active users count", err)
	}
	if conf.Server.DevEnablePlayerInsights {
		data.Library.ActivePlayers, err = c.ds.Player(ctx).CountByClient(model.QueryOptions{
			Filters: squirrel.Gt{"last_seen": time.Now().Add(-7 * 24 * time.Hour)},
		})
		if err != nil {
			log.Trace(ctx, "Error reading active players count", err)
		}
	}

	// Memory info
	var m runtime.MemStats
	runtime.ReadMemStats(&m)
	data.Mem.Alloc = m.Alloc
	data.Mem.TotalAlloc = m.TotalAlloc
	data.Mem.Sys = m.Sys
	data.Mem.NumGC = m.NumGC

	// Marshal to JSON
	resp, err := json.Marshal(data)
	if err != nil {
		log.Trace(ctx, "Could not marshal Insights data", err)
		return nil
	}
	return resp
}
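A minimal sketch of how the new collector could be wired at startup, assuming ds is an already-constructed model.DataStore; the package and helper name below are hypothetical and not part of this change:

package example // hypothetical package, for illustration only

import (
	"context"
	"log"

	"github.com/navidrome/navidrome/core/metrics"
	"github.com/navidrome/navidrome/model"
)

// startInsights shows one possible wiring of the collector above.
func startInsights(ctx context.Context, ds model.DataStore) {
	collector := metrics.GetInstance(ds) // singleton; loads or creates the Insights ID on first call
	go collector.Run(ctx)                // Run blocks, looping until ctx is cancelled, so it goes in a goroutine

	ts, ok := collector.LastRun(ctx)
	log.Printf("last insights run: %v (success=%v)", ts, ok)
}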
core/metrics/insights/data.go (new file, 76 lines)
@@ -0,0 +1,76 @@
package insights

type Data struct {
	InsightsID string `json:"id"`
	Version    string `json:"version"`
	Uptime     int64  `json:"uptime"`
	Build      struct {
		// build settings used by the Go compiler
		Settings  map[string]string `json:"settings"`
		GoVersion string            `json:"goVersion"`
	} `json:"build"`
	OS struct {
		Type          string `json:"type"`
		Distro        string `json:"distro,omitempty"`
		Version       string `json:"version,omitempty"`
		Containerized bool   `json:"containerized"`
		Arch          string `json:"arch"`
		NumCPU        int    `json:"numCPU"`
	} `json:"os"`
	Mem struct {
		Alloc      uint64 `json:"alloc"`
		TotalAlloc uint64 `json:"totalAlloc"`
		Sys        uint64 `json:"sys"`
		NumGC      uint32 `json:"numGC"`
	} `json:"mem"`
	FS struct {
		Music  *FSInfo `json:"music,omitempty"`
		Data   *FSInfo `json:"data,omitempty"`
		Cache  *FSInfo `json:"cache,omitempty"`
		Backup *FSInfo `json:"backup,omitempty"`
	} `json:"fs"`
	Library struct {
		Tracks        int64            `json:"tracks"`
		Albums        int64            `json:"albums"`
		Artists       int64            `json:"artists"`
		Playlists     int64            `json:"playlists"`
		Shares        int64            `json:"shares"`
		Radios        int64            `json:"radios"`
		ActiveUsers   int64            `json:"activeUsers"`
		ActivePlayers map[string]int64 `json:"activePlayers,omitempty"`
	} `json:"library"`
	Config struct {
		LogLevel                string `json:"logLevel,omitempty"`
		LogFileConfigured       bool   `json:"logFileConfigured,omitempty"`
		TLSConfigured           bool   `json:"tlsConfigured,omitempty"`
		ScannerEnabled          bool   `json:"scannerEnabled,omitempty"`
		ScanSchedule            string `json:"scanSchedule,omitempty"`
		ScanWatcherWait         uint64 `json:"scanWatcherWait,omitempty"`
		ScanOnStartup           bool   `json:"scanOnStartup,omitempty"`
		TranscodingCacheSize    string `json:"transcodingCacheSize,omitempty"`
		ImageCacheSize          string `json:"imageCacheSize,omitempty"`
		EnableArtworkPrecache   bool   `json:"enableArtworkPrecache,omitempty"`
		EnableDownloads         bool   `json:"enableDownloads,omitempty"`
		EnableSharing           bool   `json:"enableSharing,omitempty"`
		EnableStarRating        bool   `json:"enableStarRating,omitempty"`
		EnableLastFM            bool   `json:"enableLastFM,omitempty"`
		EnableListenBrainz      bool   `json:"enableListenBrainz,omitempty"`
		EnableMediaFileCoverArt bool   `json:"enableMediaFileCoverArt,omitempty"`
		EnableSpotify           bool   `json:"enableSpotify,omitempty"`
		EnableJukebox           bool   `json:"enableJukebox,omitempty"`
		EnablePrometheus        bool   `json:"enablePrometheus,omitempty"`
		EnableCoverAnimation    bool   `json:"enableCoverAnimation,omitempty"`
		SessionTimeout          uint64 `json:"sessionTimeout,omitempty"`
		SearchFullString        bool   `json:"searchFullString,omitempty"`
		RecentlyAddedByModTime  bool   `json:"recentlyAddedByModTime,omitempty"`
		PreferSortTags          bool   `json:"preferSortTags,omitempty"`
		BackupSchedule          string `json:"backupSchedule,omitempty"`
		BackupCount             int    `json:"backupCount,omitempty"`
		DevActivityPanel        bool   `json:"devActivityPanel,omitempty"`
		DefaultBackgroundURLSet bool   `json:"defaultBackgroundURL,omitempty"`
	} `json:"config"`
}

type FSInfo struct {
	Type string `json:"type,omitempty"`
}
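Because most Config fields carry omitempty, the report stays compact when features are disabled. A small sketch of filling a few fields and marshaling the payload; every value below is made up for illustration:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/navidrome/navidrome/core/metrics/insights"
)

func main() {
	var d insights.Data
	d.InsightsID = "00000000-0000-0000-0000-000000000000" // placeholder ID
	d.Version = "X.Y.Z"                                   // placeholder version
	d.OS.Type = "linux"
	d.OS.Arch = "amd64"
	d.Library.Tracks = 1234

	out, _ := json.MarshalIndent(d, "", "  ")
	fmt.Println(string(out)) // zero-valued omitempty fields (most of Config) are omitted from the JSON
}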
core/metrics/insights_darwin.go (new file, 37 lines)
@@ -0,0 +1,37 @@
package metrics

import (
	"os/exec"
	"strings"
	"syscall"
)

func getOSVersion() (string, string) {
	cmd := exec.Command("sw_vers", "-productVersion")

	output, err := cmd.Output()
	if err != nil {
		return "", ""
	}

	return strings.TrimSpace(string(output)), ""
}

func getFilesystemType(path string) (string, error) {
	var stat syscall.Statfs_t
	err := syscall.Statfs(path, &stat)
	if err != nil {
		return "", err
	}

	// Convert the filesystem type name from [16]int8 to string
	fsType := make([]byte, 0, 16)
	for _, c := range stat.Fstypename {
		if c == 0 {
			break
		}
		fsType = append(fsType, byte(c))
	}

	return string(fsType), nil
}
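On Darwin, Statfs_t.Fstypename is a fixed-size, NUL-terminated [16]int8 array, which is why the loop above copies it byte by byte instead of casting. A standalone sketch of the same conversion, with a made-up input value:

package main

import "fmt"

// fstypeToString mirrors the conversion in getFilesystemType above.
func fstypeToString(name [16]int8) string {
	b := make([]byte, 0, len(name))
	for _, c := range name {
		if c == 0 { // stop at the NUL terminator
			break
		}
		b = append(b, byte(c))
	}
	return string(b)
}

func main() {
	// "apfs" followed by NUL padding, roughly what Statfs reports for an APFS volume.
	fmt.Println(fstypeToString([16]int8{'a', 'p', 'f', 's'})) // prints: apfs
}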
core/metrics/insights_default.go (new file, 9 lines)
@@ -0,0 +1,9 @@
//go:build !linux && !windows && !darwin

package metrics

import "errors"

func getOSVersion() (string, string) { return "", "" }

func getFilesystemType(_ string) (string, error) { return "", errors.New("not implemented") }
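The //go:build !linux && !windows && !darwin constraint makes this file the fallback: on those three platforms a platform-specific file (such as insights_darwin.go above) is compiled instead, and everywhere else these stubs keep the package building, so getFSInfo simply returns nil and the fs entries drop out of the report. A generic sketch of the same selection pattern, using hypothetical file and package names:

// osname_darwin.go: selected by its _darwin filename suffix (hypothetical example)
package osname

func current() string { return "darwin" }

// osname_other.go: the fallback, selected by a negated build tag (hypothetical example)
//go:build !darwin

package osname

func current() string { return "other" }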
Some files were not shown because too many files have changed in this diff.