Mirror of https://github.com/navidrome/navidrome.git (synced 2026-01-18 03:38:03 -05:00)

Compare commits: go-taglib ... chore/devc

4 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 342b9eb2f2 |  |
|  | cb38d2a031 |  |
|  | 382b80ccb4 |  |
|  | 41cc4610af |  |
@@ -23,7 +23,5 @@ RUN DOWNLOAD_ARCH="linux-${TARGETARCH}" \
&& rmdir /usr/include/taglib \
&& rm /tmp/cross-taglib.tar.gz /usr/provenance.json

ENV CGO_CFLAGS_ALLOW="--define-prefix"

# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
108  .github/workflows/pipeline.yml  (vendored)
@@ -15,7 +15,6 @@ concurrency:

env:
CROSS_TAGLIB_VERSION: "2.1.1-1"
CGO_CFLAGS_ALLOW: "--define-prefix"
IS_RELEASE: ${{ startsWith(github.ref, 'refs/tags/') && 'true' || 'false' }}

jobs:
@@ -89,16 +88,6 @@ jobs:
exit 1
fi

- name: Run go generate
run: go generate ./...
- name: Verify no changes from go generate
run: |
git status --porcelain
if [ -n "$(git status --porcelain)" ]; then
echo 'Generated code is out of date. Run "make gen" and commit the changes'
exit 1
fi

go:
name: Test Go code
runs-on: ubuntu-latest
@@ -119,13 +108,6 @@ jobs:
pkg-config --define-prefix --cflags --libs taglib # for debugging
go test -shuffle=on -tags netgo -race ./... -v

- name: Test ndpgen
run: |
cd plugins/cmd/ndpgen
go test -shuffle=on -v
go build -o ndpgen .
./ndpgen --help

js:
name: Test JS code
runs-on: ubuntu-latest
@@ -235,7 +217,7 @@ jobs:
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}

- name: Upload Binaries
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: navidrome-${{ env.PLATFORM }}
path: ./output
@@ -266,7 +248,7 @@ jobs:
touch "/tmp/digests/${digest#sha256:}"

- name: Upload digest
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
with:
name: digests-${{ env.PLATFORM }}
@@ -274,11 +256,8 @@ jobs:
if-no-files-found: error
retention-days: 1

push-manifest-ghcr:
name: Push to GHCR
permissions:
contents: read
packages: write
push-manifest:
name: Push Docker manifest
runs-on: ubuntu-latest
needs: [build, check-push-enabled]
if: needs.check-push-enabled.outputs.is_enabled == 'true'
@@ -288,41 +267,7 @@ jobs:
- uses: actions/checkout@v6

- name: Download digests
uses: actions/download-artifact@v7
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true

- name: Prepare Docker Buildx
uses: ./.github/actions/prepare-docker
id: docker
with:
github_token: ${{ secrets.GITHUB_TOKEN }}

- name: Create manifest list and push to ghcr.io
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io"))) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)

- name: Inspect image in ghcr.io
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}

push-manifest-dockerhub:
name: Push to Docker Hub
runs-on: ubuntu-latest
permissions:
contents: read
needs: [build, check-push-enabled]
if: needs.check-push-enabled.outputs.is_enabled == 'true' && vars.DOCKER_HUB_REPO != ''
continue-on-error: true
steps:
- uses: actions/checkout@v6

- name: Download digests
uses: actions/download-artifact@v7
uses: actions/download-artifact@v6
with:
path: /tmp/digests
pattern: digests-*
@@ -337,27 +282,28 @@ jobs:
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}

- name: Create manifest list and push to ghcr.io
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)

- name: Create manifest list and push to Docker Hub
uses: nick-fields/retry@v3
with:
timeout_minutes: 5
max_attempts: 3
retry_wait_seconds: 30
command: |
cd /tmp/digests
docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io") | not)) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf 'ghcr.io/${{ github.repository }}@sha256:%s ' *)
working-directory: /tmp/digests
if: vars.DOCKER_HUB_REPO != ''
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ vars.DOCKER_HUB_REPO }}@sha256:%s ' *)

- name: Inspect image in ghcr.io
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}

- name: Inspect image in Docker Hub
if: vars.DOCKER_HUB_REPO != ''
run: |
docker buildx imagetools inspect ${{ vars.DOCKER_HUB_REPO }}:${{ steps.docker.outputs.version }}

cleanup-digests:
name: Cleanup digest artifacts
runs-on: ubuntu-latest
needs: [push-manifest-ghcr, push-manifest-dockerhub]
if: always() && needs.push-manifest-ghcr.result == 'success'
steps:
- name: Delete unnecessary digest artifacts
env:
GH_TOKEN: ${{ github.token }}
@@ -374,7 +320,7 @@ jobs:
steps:
- uses: actions/checkout@v6

- uses: actions/download-artifact@v7
- uses: actions/download-artifact@v6
with:
path: ./binaries
pattern: navidrome-windows*
@@ -393,7 +339,7 @@ jobs:
du -h binaries/msi/*.msi

- name: Upload MSI files
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: navidrome-windows-installers
path: binaries/msi/*.msi
@@ -411,7 +357,7 @@ jobs:
fetch-depth: 0
fetch-tags: true

- uses: actions/download-artifact@v7
- uses: actions/download-artifact@v6
with:
path: ./binaries
pattern: navidrome-*
@@ -437,7 +383,7 @@ jobs:
rm ./dist/*.tar.gz ./dist/*.zip

- name: Upload all-packages artifact
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: packages
path: dist/navidrome_0*
@@ -460,13 +406,13 @@ jobs:
item: ${{ fromJson(needs.release.outputs.package_list) }}
steps:
- name: Download all-packages artifact
uses: actions/download-artifact@v7
uses: actions/download-artifact@v6
with:
name: packages
path: ./dist

- name: Upload all-packages artifact
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: navidrome_linux_${{ matrix.item }}
path: dist/navidrome_0*_linux_${{ matrix.item }}
2  .github/workflows/stale.yml  (vendored)
@@ -12,7 +12,7 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v6
- uses: dessant/lock-threads@v5
with:
process-only: 'issues, prs'
issue-inactive-days: 120
2  .github/workflows/update-translations.yml  (vendored)
@@ -24,7 +24,7 @@ jobs:
git status --porcelain
git diff
- name: Create Pull Request
uses: peter-evans/create-pull-request@v8
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.PAT }}
author: "navidrome-bot <navidrome-bot@navidrome.org>"
7  .gitignore  (vendored)
@@ -17,7 +17,6 @@ master.zip
testDB
cache/*
*.swp
coverage.out
dist
music
*.db*
@@ -26,14 +25,10 @@ docker-compose.yml
!contrib/docker-compose.yml
binaries
navidrome-*
/ndpgen
AGENTS.md
.github/prompts
.github/instructions
.github/git-commit-instructions.md
*.exe
*.test
*.wasm
*.ndp
openspec/
go.work*
*.wasm
11  Dockerfile
@@ -2,10 +2,10 @@ FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcros

########################################################################################################################
### Build xx (original image: tonistiigi/xx)
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS xx-build
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS xx-build

# v1.9.0
ENV XX_VERSION=a5592eab7a57895e8d385394ff12241bc65ecd50
# v1.5.0
ENV XX_VERSION=b4e4c451c778822e6742bfc9d9a91d7c7d885c8a

RUN apk add -U --no-cache git
RUN git clone https://github.com/tonistiigi/xx && \
@@ -26,7 +26,7 @@ COPY --from=xx-build /out/ /usr/bin/

########################################################################################################################
### Get TagLib
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS taglib-build
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS taglib-build
ARG TARGETPLATFORM
ARG CROSS_TAGLIB_VERSION=2.1.1-1
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/
@@ -94,7 +94,6 @@ RUN --mount=type=bind,source=. \
# Setup CGO cross-compilation environment
xx-go --wrap
export CGO_ENABLED=1
export CGO_CFLAGS_ALLOW="--define-prefix"
export PKG_CONFIG_PATH=/taglib/lib/pkgconfig
cat $(go env GOENV)

@@ -123,7 +122,7 @@ COPY --from=build /out /

########################################################################################################################
### Build Final Image
FROM public.ecr.aws/docker/library/alpine:3.20 AS final
FROM public.ecr.aws/docker/library/alpine:3.19 AS final
LABEL maintainer="deluan@navidrome.org"
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"
45  Makefile
@@ -1,10 +1,6 @@
GO_VERSION=$(shell grep "^go " go.mod | cut -f 2 -d ' ')
NODE_VERSION=$(shell cat .nvmrc)

# Set global environment variables, required for most targets
export CGO_CFLAGS_ALLOW=--define-prefix
export ND_ENABLEINSIGHTSCOLLECTOR=false

ifneq ("$(wildcard .git/HEAD)","")
GIT_SHA=$(shell git rev-parse --short HEAD)
GIT_TAG=$(shell git describe --tags `git rev-list --tags --max-count=1`)-SNAPSHOT
@@ -20,7 +16,7 @@ DOCKER_TAG ?= deluan/navidrome:develop

# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
CROSS_TAGLIB_VERSION ?= 2.1.1-1
GOLANGCI_LINT_VERSION ?= v2.8.0
GOLANGCI_LINT_VERSION ?= v2.6.2

UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")

@@ -30,11 +26,11 @@ setup: check_env download-deps install-golangci-lint setup-git ##@1_Run_First In
.PHONY: setup

dev: check_env ##@Development Start Navidrome in development mode, with hot-reload for both frontend and backend
npx foreman -j Procfile.dev -p 4533 start
ND_ENABLEINSIGHTSCOLLECTOR="false" npx foreman -j Procfile.dev -p 4533 start
.PHONY: dev

server: check_go_env buildjs ##@Development Start the backend in development mode
go tool reflex -d none -c reflex.conf
@ND_ENABLEINSIGHTSCOLLECTOR="false" go tool reflex -d none -c reflex.conf
.PHONY: server

stop: ##@Development Stop development servers (UI and backend)
@@ -54,11 +50,7 @@ test: ##@Development Run Go tests. Use PKG variable to specify packages to test,
go test -tags netgo $(PKG)
.PHONY: test

test-ndpgen: ##@Development Run tests for ndpgen plugin
cd plugins/cmd/ndpgen && go test ./......
.PHONY: test-ndpgen

testall: test test-ndpgen test-i18n test-js ##@Development Run Go and JS tests
testall: test-race test-i18n test-js ##@Development Run Go and JS tests
.PHONY: testall

test-race: ##@Development Run Go tests with race detector
@@ -93,7 +85,7 @@ install-golangci-lint: ##@Development Install golangci-lint if not present
.PHONY: install-golangci-lint

lint: install-golangci-lint ##@Development Lint Go code
PATH=$$PATH:./bin golangci-lint run --timeout 5m
PATH=$$PATH:./bin golangci-lint run -v --timeout 5m
.PHONY: lint

lintall: lint ##@Development Lint Go and JS code
@@ -111,15 +103,6 @@ wire: check_go_env ##@Development Update Dependency Injection
go tool wire gen -tags=netgo ./...
.PHONY: wire

gen: check_go_env ##@Development Run go generate for code generation
go generate ./...
cd plugins/cmd/ndpgen && go run . -host-wrappers -input=../../host -package=host
cd plugins/cmd/ndpgen && go run . -input=../../host -output=../../pdk -go -python -rust
cd plugins/cmd/ndpgen && go run . -capability-only -input=../../capabilities -output=../../pdk -go -rust
cd plugins/cmd/ndpgen && go run . -schemas -input=../../capabilities
go mod tidy -C plugins/pdk/go
.PHONY: gen

snapshots: ##@Development Update (GoLang) Snapshot tests
UPDATE_SNAPSHOTS=true go tool ginkgo ./server/subsonic/responses/...
.PHONY: snapshots
@@ -283,6 +266,24 @@ deprecated:
@echo "WARNING: This target is deprecated and will be removed in future releases. Use 'make build' instead."
.PHONY: deprecated

# Generate Go code from plugins/api/api.proto
plugin-gen: check_go_env ##@Development Generate Go code from plugins protobuf files
go generate ./plugins/...
.PHONY: plugin-gen

plugin-examples: check_go_env ##@Development Build all example plugins
$(MAKE) -C plugins/examples clean all
.PHONY: plugin-examples

plugin-clean: check_go_env ##@Development Clean all plugins
$(MAKE) -C plugins/examples clean
$(MAKE) -C plugins/testdata clean
.PHONY: plugin-clean

plugin-tests: check_go_env ##@Development Build all test plugins
$(MAKE) -C plugins/testdata clean all
.PHONY: plugin-tests

.DEFAULT_GOAL := help

HELP_FUN = \
@@ -1,274 +0,0 @@
|
||||
package gotaglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/djherbis/times"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
"github.com/navidrome/navidrome/utils/gg"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type testFileInfo struct {
|
||||
fs.FileInfo
|
||||
}
|
||||
|
||||
func (t testFileInfo) BirthTime() time.Time {
|
||||
if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
|
||||
return ts.BirthTime()
|
||||
}
|
||||
return t.FileInfo.ModTime()
|
||||
}
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
toP := func(name, sortName, mbid string) model.Participant {
|
||||
return model.Participant{
|
||||
Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
|
||||
}
|
||||
}
|
||||
|
||||
roles := []struct {
|
||||
model.Role
|
||||
model.ParticipantList
|
||||
}{
|
||||
{model.RoleComposer, model.ParticipantList{
|
||||
toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
|
||||
toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
|
||||
}},
|
||||
{model.RoleLyricist, model.ParticipantList{
|
||||
toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
|
||||
toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
|
||||
}},
|
||||
{model.RoleArranger, model.ParticipantList{
|
||||
toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
|
||||
toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
|
||||
}},
|
||||
{model.RoleConductor, model.ParticipantList{
|
||||
toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
|
||||
toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
|
||||
}},
|
||||
{model.RoleDirector, model.ParticipantList{
|
||||
toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
|
||||
toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
|
||||
}},
|
||||
{model.RoleEngineer, model.ParticipantList{
|
||||
toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
|
||||
toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
|
||||
}},
|
||||
{model.RoleProducer, model.ParticipantList{
|
||||
toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
|
||||
toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
|
||||
}},
|
||||
{model.RoleRemixer, model.ParticipantList{
|
||||
toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
|
||||
toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
|
||||
}},
|
||||
{model.RoleDJMixer, model.ParticipantList{
|
||||
toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
|
||||
toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
|
||||
}},
|
||||
{model.RoleMixer, model.ParticipantList{
|
||||
toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
|
||||
toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
|
||||
}},
|
||||
}
|
||||
|
||||
var e *extractor
|
||||
|
||||
parseTestFile := func(path string) *model.MediaFile {
|
||||
mds, err := e.Parse(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
info, ok := mds[path]
|
||||
Expect(ok).To(BeTrue())
|
||||
|
||||
fileInfo, err := os.Stat(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
info.FileInfo = testFileInfo{FileInfo: fileInfo}
|
||||
|
||||
metadata := metadata.New(path, info)
|
||||
mf := metadata.ToMediaFile(1, "folderID")
|
||||
return &mf
|
||||
}
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("ReplayGain", func() {
|
||||
DescribeTable("test replaygain end-to-end", func(file string, trackGain, trackPeak, albumGain, albumPeak *float64) {
|
||||
mf := parseTestFile("tests/fixtures/" + file)
|
||||
|
||||
Expect(mf.RGTrackGain).To(Equal(trackGain))
|
||||
Expect(mf.RGTrackPeak).To(Equal(trackPeak))
|
||||
Expect(mf.RGAlbumGain).To(Equal(albumGain))
|
||||
Expect(mf.RGAlbumPeak).To(Equal(albumPeak))
|
||||
},
|
||||
Entry("mp3 with no replaygain", "no_replaygain.mp3", nil, nil, nil, nil),
|
||||
Entry("mp3 with no zero replaygain", "zero_replaygain.mp3", gg.P(0.0), gg.P(1.0), gg.P(0.0), gg.P(1.0)),
|
||||
)
|
||||
})
|
||||
|
||||
Describe("lyrics", func() {
|
||||
makeLyrics := func(code, secondLine string) model.Lyrics {
|
||||
return model.Lyrics{
|
||||
DisplayArtist: "",
|
||||
DisplayTitle: "",
|
||||
Lang: code,
|
||||
Line: []model.Line{
|
||||
{Start: gg.P(int64(0)), Value: "This is"},
|
||||
{Start: gg.P(int64(2500)), Value: secondLine},
|
||||
},
|
||||
Offset: nil,
|
||||
Synced: true,
|
||||
}
|
||||
}
|
||||
|
||||
It("should fetch both synced and unsynced lyrics in mixed flac", func() {
|
||||
mf := parseTestFile("tests/fixtures/mixed-lyrics.flac")
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(lyrics).To(HaveLen(2))
|
||||
|
||||
Expect(lyrics[0].Synced).To(BeTrue())
|
||||
Expect(lyrics[1].Synced).To(BeFalse())
|
||||
})
|
||||
|
||||
It("should handle mp3 with uslt and sylt", func() {
|
||||
mf := parseTestFile("tests/fixtures/test.mp3")
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(lyrics).To(HaveLen(4))
|
||||
|
||||
engSylt := makeLyrics("eng", "English SYLT")
|
||||
engUslt := makeLyrics("eng", "English")
|
||||
unsSylt := makeLyrics("xxx", "unspecified SYLT")
|
||||
unsUslt := makeLyrics("xxx", "unspecified")
|
||||
|
||||
Expect(lyrics).To(ConsistOf(engSylt, engUslt, unsSylt, unsUslt))
|
||||
})
|
||||
|
||||
DescribeTable("format-specific lyrics", func(file string, isId3 bool) {
|
||||
mf := parseTestFile("tests/fixtures/" + file)
|
||||
|
||||
lyrics, err := mf.StructuredLyrics()
|
||||
Expect(err).To(Not(HaveOccurred()))
|
||||
Expect(lyrics).To(HaveLen(2))
|
||||
|
||||
unspec := makeLyrics("xxx", "unspecified")
|
||||
eng := makeLyrics("xxx", "English")
|
||||
|
||||
if isId3 {
|
||||
eng.Lang = "eng"
|
||||
}
|
||||
|
||||
Expect(lyrics).To(Or(
|
||||
Equal(model.LyricList{unspec, eng}),
|
||||
Equal(model.LyricList{eng, unspec})))
|
||||
},
|
||||
Entry("flac", "test.flac", false),
|
||||
Entry("m4a", "test.m4a", false),
|
||||
Entry("ogg", "test.ogg", false),
|
||||
Entry("wma", "test.wma", false),
|
||||
Entry("wv", "test.wv", false),
|
||||
Entry("wav", "test.wav", true),
|
||||
Entry("aiff", "test.aiff", true),
|
||||
)
|
||||
})
|
||||
|
||||
Describe("Participants", func() {
|
||||
DescribeTable("test tags consistent across formats", func(format string) {
|
||||
mf := parseTestFile("tests/fixtures/test." + format)
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
|
||||
actual := mf.Participants[role]
|
||||
Expect(actual).To(HaveLen(len(artists)))
|
||||
|
||||
for i := range artists {
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[i]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
|
||||
Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
|
||||
}
|
||||
}
|
||||
|
||||
if format != "m4a" {
|
||||
performers := mf.Participants[model.RolePerformer]
|
||||
Expect(performers).To(HaveLen(8))
|
||||
|
||||
rules := map[string][]string{
|
||||
"pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
|
||||
"pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
|
||||
"pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
|
||||
"pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
|
||||
"pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
|
||||
}
|
||||
|
||||
for name, rule := range rules {
|
||||
mbid := rule[0]
|
||||
for i := 1; i < len(rule); i++ {
|
||||
found := false
|
||||
|
||||
for _, mapped := range performers {
|
||||
if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
Expect(found).To(BeTrue(), "Could not find matching artist")
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Entry("FLAC format", "flac"),
|
||||
Entry("M4a format", "m4a"),
|
||||
Entry("OGG format", "ogg"),
|
||||
Entry("WV format", "wv"),
|
||||
|
||||
Entry("MP3 format", "mp3"),
|
||||
Entry("WAV format", "wav"),
|
||||
Entry("AIFF format", "aiff"),
|
||||
)
|
||||
|
||||
It("should parse wma", func() {
|
||||
mf := parseTestFile("tests/fixtures/test.wma")
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
actual := mf.Participants[role]
|
||||
|
||||
// WMA has no Arranger role
|
||||
if role == model.RoleArranger {
|
||||
Expect(actual).To(HaveLen(0))
|
||||
continue
|
||||
}
|
||||
|
||||
Expect(actual).To(HaveLen(len(artists)), role.String())
|
||||
|
||||
// For some bizarre reason, the order is inverted. We also don't get
|
||||
// sort names or MBIDs
|
||||
for i := range artists {
|
||||
idx := len(artists) - 1 - i
|
||||
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[idx]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,255 +0,0 @@
|
||||
// Package gotaglib provides an alternative metadata extractor using go-taglib,
|
||||
// a pure Go (WASM-based) implementation of TagLib.
|
||||
//
|
||||
// This extractor aims for parity with the CGO-based taglib extractor. It uses
|
||||
// TagLib's PropertyMap interface for standard tags. The File handle API provides
|
||||
// efficient access to format-specific tags (ID3v2 frames, MP4 atoms, ASF attributes)
|
||||
// through a single file open operation.
|
||||
package gotaglib
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/core/storage/local"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
"go.senan.xyz/taglib"
|
||||
)
|
||||
|
||||
type extractor struct {
|
||||
baseDir string
|
||||
}
|
||||
|
||||
func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
|
||||
results := make(map[string]metadata.Info)
|
||||
for _, path := range files {
|
||||
props, err := e.extractMetadata(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
results[path] = *props
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (e extractor) Version() string {
|
||||
return "go-taglib (TagLib 2.1.1 WASM)"
|
||||
}
|
||||
|
||||
func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
|
||||
fullPath := filepath.Join(e.baseDir, filePath)
|
||||
|
||||
// Open the file once and read all data
|
||||
f, err := taglib.OpenReadOnly(fullPath)
|
||||
if err != nil {
|
||||
// Check if file doesn't exist
|
||||
if _, statErr := os.Stat(fullPath); os.IsNotExist(statErr) {
|
||||
return nil, fs.ErrNotExist
|
||||
}
|
||||
// Check if permission denied
|
||||
if errors.Is(err, taglib.ErrInvalidFile) {
|
||||
// Try to open the file to check for permission errors
|
||||
if osFile, openErr := os.Open(fullPath); openErr != nil {
|
||||
if os.IsPermission(openErr) {
|
||||
return nil, os.ErrPermission
|
||||
}
|
||||
} else {
|
||||
osFile.Close()
|
||||
}
|
||||
}
|
||||
log.Warn("gotaglib extractor: Error reading metadata from file. Skipping", "filePath", fullPath, err)
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
// Get all tags and properties in one go
|
||||
allTags := f.AllTags()
|
||||
props := f.Properties()
|
||||
|
||||
// Map properties to AudioProperties
|
||||
ap := metadata.AudioProperties{
|
||||
Duration: props.Length.Round(time.Millisecond * 10),
|
||||
BitRate: int(props.Bitrate),
|
||||
Channels: int(props.Channels),
|
||||
SampleRate: int(props.SampleRate),
|
||||
BitDepth: int(props.BitsPerSample),
|
||||
}
|
||||
|
||||
// Convert normalized tags to lowercase keys (go-taglib returns UPPERCASE keys)
|
||||
normalizedTags := make(map[string][]string)
|
||||
for key, values := range allTags.Tags {
|
||||
lowerKey := strings.ToLower(key)
|
||||
normalizedTags[lowerKey] = values
|
||||
}
|
||||
|
||||
// Process format-specific raw tags
|
||||
processRawTags(allTags, normalizedTags)
|
||||
|
||||
// Parse track/disc totals from "N/Total" format
|
||||
parseTuple(normalizedTags, "track")
|
||||
parseTuple(normalizedTags, "disc")
|
||||
|
||||
// Adjust some ID3 tags
|
||||
parseLyrics(normalizedTags)
|
||||
parseTIPL(normalizedTags)
|
||||
delete(normalizedTags, "tmcl") // TMCL is already parsed by TagLib
|
||||
|
||||
// Determine if file has embedded picture
|
||||
hasPicture := len(props.Images) > 0
|
||||
|
||||
return &metadata.Info{
|
||||
Tags: normalizedTags,
|
||||
AudioProperties: ap,
|
||||
HasPicture: hasPicture,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// parseTuple parses track/disc numbers in "N/Total" format and separates them.
|
||||
// For example, tracknumber="2/10" becomes tracknumber="2" and tracktotal="10".
|
||||
func parseTuple(tags map[string][]string, prop string) {
|
||||
tagName := prop + "number"
|
||||
tagTotal := prop + "total"
|
||||
if value, ok := tags[tagName]; ok && len(value) > 0 {
|
||||
parts := strings.Split(value[0], "/")
|
||||
tags[tagName] = []string{parts[0]}
|
||||
if len(parts) == 2 {
|
||||
tags[tagTotal] = []string{parts[1]}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parseLyrics ensures lyrics tags have a language code.
|
||||
// If lyrics exist without a language code, they are moved to "lyrics:xxx".
|
||||
func parseLyrics(tags map[string][]string) {
|
||||
lyrics := tags["lyrics"]
|
||||
if len(lyrics) > 0 {
|
||||
tags["lyrics:xxx"] = lyrics
|
||||
delete(tags, "lyrics")
|
||||
}
|
||||
}
|
||||
|
||||
// processRawTags processes format-specific raw tags based on the detected file format.
|
||||
// This handles ID3v2 frames (MP3/WAV/AIFF), MP4 atoms, and ASF attributes.
|
||||
func processRawTags(allTags taglib.AllTags, normalizedTags map[string][]string) {
|
||||
switch allTags.Format {
|
||||
case taglib.FormatMPEG, taglib.FormatWAV, taglib.FormatAIFF:
|
||||
parseID3v2Frames(allTags.Raw, normalizedTags)
|
||||
case taglib.FormatMP4:
|
||||
parseMP4Atoms(allTags.Raw, normalizedTags)
|
||||
case taglib.FormatASF:
|
||||
parseASFAttributes(allTags.Raw, normalizedTags)
|
||||
}
|
||||
}
|
||||
|
||||
// parseID3v2Frames processes ID3v2 raw frames to extract USLT/SYLT with language codes.
|
||||
// This extracts language-specific lyrics that the standard Tags() doesn't provide.
|
||||
func parseID3v2Frames(rawFrames map[string][]string, tags map[string][]string) {
|
||||
// Process frames that have language-specific data
|
||||
for key, values := range rawFrames {
|
||||
lowerKey := strings.ToLower(key)
|
||||
|
||||
// Handle USLT:xxx and SYLT:xxx (lyrics with language codes)
|
||||
if strings.HasPrefix(lowerKey, "uslt:") || strings.HasPrefix(lowerKey, "sylt:") {
|
||||
parts := strings.SplitN(lowerKey, ":", 2)
|
||||
if len(parts) == 2 && parts[1] != "" {
|
||||
lang := parts[1]
|
||||
lyricsKey := "lyrics:" + lang
|
||||
tags[lyricsKey] = append(tags[lyricsKey], values...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we found any language-specific lyrics from ID3v2 frames, remove the generic lyrics
|
||||
for key := range tags {
|
||||
if strings.HasPrefix(key, "lyrics:") && key != "lyrics" {
|
||||
delete(tags, "lyrics")
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const iTunesKeyPrefix = "----:com.apple.iTunes:"
|
||||
|
||||
// parseMP4Atoms processes MP4 raw atoms to get iTunes-specific tags.
|
||||
func parseMP4Atoms(rawAtoms map[string][]string, tags map[string][]string) {
|
||||
// Process all atoms and add them to tags
|
||||
for key, values := range rawAtoms {
|
||||
// Strip iTunes prefix and convert to lowercase
|
||||
normalizedKey := strings.TrimPrefix(key, iTunesKeyPrefix)
|
||||
normalizedKey = strings.ToLower(normalizedKey)
|
||||
|
||||
// Only add if the tag doesn't already exist (avoid duplication with PropertyMap)
|
||||
if _, exists := tags[normalizedKey]; !exists {
|
||||
tags[normalizedKey] = values
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parseASFAttributes processes ASF raw attributes to get WMA-specific tags.
|
||||
func parseASFAttributes(rawAttrs map[string][]string, tags map[string][]string) {
|
||||
// Process all attributes and add them to tags
|
||||
for key, values := range rawAttrs {
|
||||
normalizedKey := strings.ToLower(key)
|
||||
|
||||
// Only add if the tag doesn't already exist (avoid duplication with PropertyMap)
|
||||
if _, exists := tags[normalizedKey]; !exists {
|
||||
tags[normalizedKey] = values
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// These are the only roles we support, based on Picard's tag map:
|
||||
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
var tiplMapping = map[string]string{
|
||||
"arranger": "arranger",
|
||||
"engineer": "engineer",
|
||||
"producer": "producer",
|
||||
"mix": "mixer",
|
||||
"DJ-mix": "djmixer",
|
||||
}
|
||||
|
||||
// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
|
||||
//
|
||||
// "arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
|
||||
//
|
||||
// and breaks it down into a map of roles and names, e.g.:
|
||||
//
|
||||
// {"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
|
||||
func parseTIPL(tags map[string][]string) {
|
||||
tipl := tags["tipl"]
|
||||
if len(tipl) == 0 {
|
||||
return
|
||||
}
|
||||
addRole := func(currentRole string, currentValue []string) {
|
||||
if currentRole != "" && len(currentValue) > 0 {
|
||||
role := tiplMapping[currentRole]
|
||||
tags[role] = append(tags[role], strings.Join(currentValue, " "))
|
||||
}
|
||||
}
|
||||
var currentRole string
|
||||
var currentValue []string
|
||||
for _, part := range strings.Split(tipl[0], " ") {
|
||||
if _, ok := tiplMapping[part]; ok {
|
||||
addRole(currentRole, currentValue)
|
||||
currentRole = part
|
||||
currentValue = nil
|
||||
continue
|
||||
}
|
||||
currentValue = append(currentValue, part)
|
||||
}
|
||||
addRole(currentRole, currentValue)
|
||||
delete(tags, "tipl")
|
||||
}
|
||||
|
||||
var _ local.Extractor = (*extractor)(nil)
|
||||
|
||||
func init() {
|
||||
local.RegisterExtractor("gotaglib", func(_ fs.FS, baseDir string) local.Extractor {
|
||||
return &extractor{baseDir}
|
||||
})
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
package gotaglib
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
func TestGoTagLib(t *testing.T) {
|
||||
tests.Init(t, true)
|
||||
log.SetLevel(log.LevelFatal)
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "GoTagLib Suite")
|
||||
}
|
||||
@@ -1,295 +0,0 @@
|
||||
package gotaglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("Parse", func() {
|
||||
It("correctly parses metadata from all files in folder", func() {
|
||||
mds, err := e.Parse(
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
|
||||
// Test MP3
|
||||
m := mds["tests/fixtures/test.mp3"]
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
|
||||
Expect(m.AudioProperties.BitRate).To(Equal(192))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(44100))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("tcmp", []string{"1"})),
|
||||
)
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
|
||||
Expect(m.Tags).ToNot(HaveKey("lyrics"))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}), HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
})))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}), HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
})))
|
||||
|
||||
// Test OGG
|
||||
m = mds["tests/fixtures/test.ogg"]
|
||||
Expect(err).To(BeNil())
|
||||
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))
|
||||
|
||||
// TagLib 1.12 returns 18, previous versions return 39.
|
||||
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
|
||||
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
|
||||
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
})
|
||||
|
||||
DescribeTable("Format-Specific tests",
|
||||
func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool, image bool) {
|
||||
file = "tests/fixtures/" + file
|
||||
mds, err := e.Parse(file)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(1))
|
||||
|
||||
m := mds[file]
|
||||
|
||||
Expect(m.HasPicture).To(Equal(image))
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(channels))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
|
||||
Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_gain", []string{albumGain}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("tracknumber", []string{"3"}),
|
||||
HaveKeyWithValue("tracknumber", []string{"3/10"}),
|
||||
))
|
||||
if !strings.HasSuffix(file, "test.wma") {
|
||||
// TODO Not sure why this is not working for WMA
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
}
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("discnumber", []string{"1"}),
|
||||
HaveKeyWithValue("discnumber", []string{"1/2"}),
|
||||
))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
|
||||
// WMA does not have a "compilation" tag, but "wm/iscompilation"
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("wm/iscompilation", []string{"1"})),
|
||||
)
|
||||
|
||||
if id3Lyrics {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}))
|
||||
} else {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
}
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
},
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
|
||||
Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false, true),
|
||||
|
||||
Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
|
||||
Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
|
||||
// Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
|
||||
Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
|
||||
Entry("correctly parses wv (wavpak) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
|
||||
Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true, true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
|
||||
Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true, true),
|
||||
)
|
||||
|
||||
// Skip these tests when running as root
|
||||
Context("Access Forbidden", func() {
|
||||
var accessForbiddenFile string
|
||||
var RegularUserContext = XContext
|
||||
var isRegularUser = os.Getuid() != 0
|
||||
if isRegularUser {
|
||||
RegularUserContext = Context
|
||||
}
|
||||
|
||||
// Only run permission tests if we are not root
|
||||
RegularUserContext("when run without root privileges", func() {
|
||||
BeforeEach(func() {
|
||||
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
|
||||
|
||||
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
DeferCleanup(func() {
|
||||
Expect(f.Close()).To(Succeed())
|
||||
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
|
||||
})
|
||||
})
|
||||
|
||||
It("correctly handle unreadable file due to insufficient read permission", func() {
|
||||
_, err := e.extractMetadata(accessForbiddenFile)
|
||||
Expect(err).To(MatchError(os.ErrPermission))
|
||||
})
|
||||
|
||||
It("skips the file if it cannot be read", func() {
|
||||
files := []string{
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
accessForbiddenFile,
|
||||
}
|
||||
mds, err := e.Parse(files...)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
Expect(mds).ToNot(HaveKey(accessForbiddenFile))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
Describe("Error Checking", func() {
|
||||
It("returns a generic ErrPath if file does not exist", func() {
|
||||
testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
|
||||
_, err := e.extractMetadata(testFilePath)
|
||||
Expect(err).To(MatchError(fs.ErrNotExist))
|
||||
})
|
||||
It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
|
||||
// File has an empty TDAT frame
|
||||
md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("parseTIPL", func() {
|
||||
var tags map[string][]string
|
||||
|
||||
BeforeEach(func() {
|
||||
tags = make(map[string][]string)
|
||||
})
|
||||
|
||||
Context("when the TIPL string is populated", func() {
|
||||
It("correctly parses roles and names", func() {
|
||||
tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
|
||||
Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
|
||||
})
|
||||
|
||||
It("handles multiple names for a single role", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
|
||||
It("discards roles without names", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).ToNot(HaveKey("producer"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL string is empty", func() {
|
||||
It("does nothing", func() {
|
||||
tags["tipl"] = []string{""}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL is not present", func() {
|
||||
It("does nothing", func() {
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
@@ -151,7 +151,11 @@ var _ = Describe("Extractor", func() {
unsSylt := makeLyrics("xxx", "unspecified SYLT")
unsUslt := makeLyrics("xxx", "unspecified")

Expect(lyrics).To(ConsistOf(engSylt, engUslt, unsSylt, unsUslt))
// Why is the order inconsistent between runs? Nobody knows
Expect(lyrics).To(Or(
Equal(model.LyricList{engSylt, engUslt, unsSylt, unsUslt}),
Equal(model.LyricList{unsSylt, unsUslt, engSylt, engUslt}),
))
})

DescribeTable("format-specific lyrics", func(file string, isId3 bool) {

@@ -80,11 +80,12 @@ var _ = Describe("Extractor", func() {
Expect(err).To(BeNil())
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))

// TagLib 1.12 returns 18, previous versions return 39.
// TabLib 1.12 returns 18, previous versions return 39.
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
Expect(m.HasPicture).To(BeTrue())
})

@@ -105,7 +106,7 @@ var _ = Describe("Extractor", func() {

Expect(m.Tags).To(Or(
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_gain", []string{albumGain}),
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{albumGain}),
))

Expect(m.Tags).To(Or(
35  cmd/pls.go
@@ -10,8 +10,11 @@ import (
"strconv"

"github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/core/auth"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/persistence"
"github.com/spf13/cobra"
)

@@ -49,7 +52,7 @@
Short: "Export playlists",
Long: "Export Navidrome playlists to M3U files",
Run: func(cmd *cobra.Command, args []string) {
runExporter(cmd.Context())
runExporter()
},
}

@@ -57,13 +60,15 @@
Use: "list",
Short: "List playlists",
Run: func(cmd *cobra.Command, args []string) {
runList(cmd.Context())
runList()
},
}
)

func runExporter(ctx context.Context) {
ds, ctx := getAdminContext(ctx)
func runExporter() {
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx := auth.WithAdminUser(context.Background(), ds)
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
if err != nil && !errors.Is(err, model.ErrNotFound) {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
@@ -95,19 +100,31 @@ func runExporter(ctx context.Context) {
}
}

func runList(ctx context.Context) {
func runList() {
if outputFormat != "csv" && outputFormat != "json" {
log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
}

ds, ctx := getAdminContext(ctx)
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx := auth.WithAdminUser(context.Background(), ds)

options := model.QueryOptions{Sort: "owner_name"}

if userID != "" {
user, err := getUser(ctx, userID, ds)
if err != nil {
log.Fatal(ctx, "Error retrieving user", "username or id", userID)
user, err := ds.User(ctx).FindByUsername(userID)

if err != nil && !errors.Is(err, model.ErrNotFound) {
log.Fatal("Error retrieving user by name", "name", userID, err)
}

if errors.Is(err, model.ErrNotFound) {
user, err = ds.User(ctx).Get(userID)
if err != nil {
log.Fatal("Error retrieving user by id", "id", userID, err)
}
}

options.Filters = squirrel.Eq{"owner_id": user.ID}
}
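Note: the new code in this hunk calls getAdminContext and getUser, helpers introduced elsewhere in this change set and not shown in this excerpt. A minimal sketch of what they might look like, reconstructed only from the removed lines above (names, signatures, and error handling are assumptions, not the actual implementation):

// Hypothetical reconstruction based on the inline setup removed above.
// The real helpers added by this change may differ.
func getAdminContext(ctx context.Context) (model.DataStore, context.Context) {
	ds := persistence.New(db.Db())
	return ds, auth.WithAdminUser(ctx, ds)
}

// getUser mirrors the removed lookup: try the value as a username first,
// then fall back to treating it as a user ID.
func getUser(ctx context.Context, userID string, ds model.DataStore) (*model.User, error) {
	user, err := ds.User(ctx).FindByUsername(userID)
	if err == nil {
		return user, nil
	}
	if !errors.Is(err, model.ErrNotFound) {
		return nil, err
	}
	return ds.User(ctx).Get(userID)
}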
716  cmd/plugin.go  (new file)
@@ -0,0 +1,716 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/plugins"
|
||||
"github.com/navidrome/navidrome/plugins/schema"
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
pluginPackageExtension = ".ndp"
|
||||
pluginDirPermissions = 0700
|
||||
pluginFilePermissions = 0600
|
||||
)
|
||||
|
||||
func init() {
|
||||
pluginCmd := &cobra.Command{
|
||||
Use: "plugin",
|
||||
Short: "Manage Navidrome plugins",
|
||||
Long: "Commands for managing Navidrome plugins",
|
||||
}
|
||||
|
||||
listCmd := &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List installed plugins",
|
||||
Long: "List all installed plugins with their metadata",
|
||||
Run: pluginList,
|
||||
}
|
||||
|
||||
infoCmd := &cobra.Command{
|
||||
Use: "info [pluginPackage|pluginName]",
|
||||
Short: "Show details of a plugin",
|
||||
Long: "Show detailed information about a plugin package (.ndp file) or an installed plugin",
|
||||
Args: cobra.ExactArgs(1),
|
||||
Run: pluginInfo,
|
||||
}
|
||||
|
||||
installCmd := &cobra.Command{
|
||||
		Use: "install [pluginPackage]",
		Short: "Install a plugin from a .ndp file",
		Long: "Install a Navidrome Plugin Package (.ndp) file",
		Args: cobra.ExactArgs(1),
		Run: pluginInstall,
	}

	removeCmd := &cobra.Command{
		Use: "remove [pluginName]",
		Short: "Remove an installed plugin",
		Long: "Remove a plugin by name",
		Args: cobra.ExactArgs(1),
		Run: pluginRemove,
	}

	updateCmd := &cobra.Command{
		Use: "update [pluginPackage]",
		Short: "Update an existing plugin",
		Long: "Update an installed plugin with a new version from a .ndp file",
		Args: cobra.ExactArgs(1),
		Run: pluginUpdate,
	}

	refreshCmd := &cobra.Command{
		Use: "refresh [pluginName]",
		Short: "Reload a plugin without restarting Navidrome",
		Long: "Reload and recompile a plugin without needing to restart Navidrome",
		Args: cobra.ExactArgs(1),
		Run: pluginRefresh,
	}

	devCmd := &cobra.Command{
		Use: "dev [folder_path]",
		Short: "Create symlink to development folder",
		Long: "Create a symlink from a plugin development folder to the plugins directory for easier development",
		Args: cobra.ExactArgs(1),
		Run: pluginDev,
	}

	pluginCmd.AddCommand(listCmd, infoCmd, installCmd, removeCmd, updateCmd, refreshCmd, devCmd)
	rootCmd.AddCommand(pluginCmd)
}

// Validation helpers

func validatePluginPackageFile(path string) error {
	if !utils.FileExists(path) {
		return fmt.Errorf("plugin package not found: %s", path)
	}
	if filepath.Ext(path) != pluginPackageExtension {
		return fmt.Errorf("not a valid plugin package: %s (expected %s extension)", path, pluginPackageExtension)
	}
	return nil
}

func validatePluginDirectory(pluginsDir, pluginName string) (string, error) {
	pluginDir := filepath.Join(pluginsDir, pluginName)
	if !utils.FileExists(pluginDir) {
		return "", fmt.Errorf("plugin not found: %s (path: %s)", pluginName, pluginDir)
	}
	return pluginDir, nil
}

func resolvePluginPath(pluginDir string) (resolvedPath string, isSymlink bool, err error) {
	// Check if it's a directory or a symlink
	lstat, err := os.Lstat(pluginDir)
	if err != nil {
		return "", false, fmt.Errorf("failed to stat plugin: %w", err)
	}

	isSymlink = lstat.Mode()&os.ModeSymlink != 0

	if isSymlink {
		// Resolve the symlink target
		targetDir, err := os.Readlink(pluginDir)
		if err != nil {
			return "", true, fmt.Errorf("failed to resolve symlink: %w", err)
		}

		// If target is a relative path, make it absolute
		if !filepath.IsAbs(targetDir) {
			targetDir = filepath.Join(filepath.Dir(pluginDir), targetDir)
		}

		// Verify the target exists and is a directory
		targetInfo, err := os.Stat(targetDir)
		if err != nil {
			return "", true, fmt.Errorf("failed to access symlink target %s: %w", targetDir, err)
		}

		if !targetInfo.IsDir() {
			return "", true, fmt.Errorf("symlink target is not a directory: %s", targetDir)
		}

		return targetDir, true, nil
	} else if !lstat.IsDir() {
		return "", false, fmt.Errorf("not a valid plugin directory: %s", pluginDir)
	}

	return pluginDir, false, nil
}

// Package handling helpers

func loadAndValidatePackage(ndpPath string) (*plugins.PluginPackage, error) {
	if err := validatePluginPackageFile(ndpPath); err != nil {
		return nil, err
	}

	pkg, err := plugins.LoadPackage(ndpPath)
	if err != nil {
		return nil, fmt.Errorf("failed to load plugin package: %w", err)
	}

	return pkg, nil
}

func extractAndSetupPlugin(ndpPath, targetDir string) error {
	if err := plugins.ExtractPackage(ndpPath, targetDir); err != nil {
		return fmt.Errorf("failed to extract plugin package: %w", err)
	}

	ensurePluginDirPermissions(targetDir)
	return nil
}

// Display helpers

func displayPluginTableRow(w *tabwriter.Writer, discovery plugins.PluginDiscoveryEntry) {
	if discovery.Error != nil {
		// Handle global errors (like directory read failure)
		if discovery.ID == "" {
			log.Error("Failed to read plugins directory", "folder", conf.Server.Plugins.Folder, discovery.Error)
			return
		}
		// Handle individual plugin errors - show them in the table
		fmt.Fprintf(w, "%s\tERROR\tERROR\tERROR\tERROR\t%v\n", discovery.ID, discovery.Error)
		return
	}

	// Mark symlinks with an indicator
	nameDisplay := discovery.Manifest.Name
	if discovery.IsSymlink {
		nameDisplay = nameDisplay + " (dev)"
	}

	// Convert capabilities to strings
	capabilities := slice.Map(discovery.Manifest.Capabilities, func(cap schema.PluginManifestCapabilitiesElem) string {
		return string(cap)
	})

	fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%s\n",
		discovery.ID,
		nameDisplay,
		cmp.Or(discovery.Manifest.Author, "-"),
		cmp.Or(discovery.Manifest.Version, "-"),
		strings.Join(capabilities, ", "),
		cmp.Or(discovery.Manifest.Description, "-"))
}

func displayTypedPermissions(permissions schema.PluginManifestPermissions, indent string) {
	if permissions.Http != nil {
		fmt.Printf("%shttp:\n", indent)
		fmt.Printf("%s Reason: %s\n", indent, permissions.Http.Reason)
		fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Http.AllowLocalNetwork)
		fmt.Printf("%s Allowed URLs:\n", indent)
		for urlPattern, methodEnums := range permissions.Http.AllowedUrls {
			methods := make([]string, len(methodEnums))
			for i, methodEnum := range methodEnums {
				methods[i] = string(methodEnum)
			}
			fmt.Printf("%s %s: [%s]\n", indent, urlPattern, strings.Join(methods, ", "))
		}
		fmt.Println()
	}

	if permissions.Config != nil {
		fmt.Printf("%sconfig:\n", indent)
		fmt.Printf("%s Reason: %s\n", indent, permissions.Config.Reason)
		fmt.Println()
	}

	if permissions.Scheduler != nil {
		fmt.Printf("%sscheduler:\n", indent)
		fmt.Printf("%s Reason: %s\n", indent, permissions.Scheduler.Reason)
		fmt.Println()
	}

	if permissions.Websocket != nil {
		fmt.Printf("%swebsocket:\n", indent)
		fmt.Printf("%s Reason: %s\n", indent, permissions.Websocket.Reason)
		fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Websocket.AllowLocalNetwork)
		fmt.Printf("%s Allowed URLs: [%s]\n", indent, strings.Join(permissions.Websocket.AllowedUrls, ", "))
		fmt.Println()
	}

	if permissions.Cache != nil {
		fmt.Printf("%scache:\n", indent)
		fmt.Printf("%s Reason: %s\n", indent, permissions.Cache.Reason)
		fmt.Println()
	}

	if permissions.Artwork != nil {
		fmt.Printf("%sartwork:\n", indent)
		fmt.Printf("%s Reason: %s\n", indent, permissions.Artwork.Reason)
		fmt.Println()
	}

	if permissions.Subsonicapi != nil {
		allowedUsers := "All Users"
		if len(permissions.Subsonicapi.AllowedUsernames) > 0 {
			allowedUsers = strings.Join(permissions.Subsonicapi.AllowedUsernames, ", ")
		}
		fmt.Printf("%ssubsonicapi:\n", indent)
		fmt.Printf("%s Reason: %s\n", indent, permissions.Subsonicapi.Reason)
		fmt.Printf("%s Allow Admins: %t\n", indent, permissions.Subsonicapi.AllowAdmins)
		fmt.Printf("%s Allowed Usernames: [%s]\n", indent, allowedUsers)
		fmt.Println()
	}
}

func displayPluginDetails(manifest *schema.PluginManifest, fileInfo *pluginFileInfo, permInfo *pluginPermissionInfo) {
	fmt.Println("\nPlugin Information:")
	fmt.Printf(" Name: %s\n", manifest.Name)
	fmt.Printf(" Author: %s\n", manifest.Author)
	fmt.Printf(" Version: %s\n", manifest.Version)
	fmt.Printf(" Description: %s\n", manifest.Description)

	fmt.Print(" Capabilities: ")
	capabilities := make([]string, len(manifest.Capabilities))
	for i, cap := range manifest.Capabilities {
		capabilities[i] = string(cap)
	}
	fmt.Print(strings.Join(capabilities, ", "))
	fmt.Println()

	// Display manifest permissions using the typed permissions
	fmt.Println(" Required Permissions:")
	displayTypedPermissions(manifest.Permissions, " ")

	// Print file information if available
	if fileInfo != nil {
		fmt.Println("Package Information:")
		fmt.Printf(" File: %s\n", fileInfo.path)
		fmt.Printf(" Size: %d bytes (%.2f KB)\n", fileInfo.size, float64(fileInfo.size)/1024)
		fmt.Printf(" SHA-256: %s\n", fileInfo.hash)
		fmt.Printf(" Modified: %s\n", fileInfo.modTime.Format(time.RFC3339))
	}

	// Print file permissions information if available
	if permInfo != nil {
		fmt.Println("File Permissions:")
		fmt.Printf(" Plugin Directory: %s (%s)\n", permInfo.dirPath, permInfo.dirMode)
		if permInfo.isSymlink {
			fmt.Printf(" Symlink Target: %s (%s)\n", permInfo.targetPath, permInfo.targetMode)
		}
		fmt.Printf(" Manifest File: %s\n", permInfo.manifestMode)
		if permInfo.wasmMode != "" {
			fmt.Printf(" WASM File: %s\n", permInfo.wasmMode)
		}
	}
}

type pluginFileInfo struct {
	path string
	size int64
	hash string
	modTime time.Time
}

type pluginPermissionInfo struct {
	dirPath string
	dirMode string
	isSymlink bool
	targetPath string
	targetMode string
	manifestMode string
	wasmMode string
}

func getFileInfo(path string) *pluginFileInfo {
	fileInfo, err := os.Stat(path)
	if err != nil {
		log.Error("Failed to get file information", err)
		return nil
	}

	return &pluginFileInfo{
		path: path,
		size: fileInfo.Size(),
		hash: calculateSHA256(path),
		modTime: fileInfo.ModTime(),
	}
}

func getPermissionInfo(pluginDir string) *pluginPermissionInfo {
	// Get plugin directory permissions
	dirInfo, err := os.Lstat(pluginDir)
	if err != nil {
		log.Error("Failed to get plugin directory permissions", err)
		return nil
	}

	permInfo := &pluginPermissionInfo{
		dirPath: pluginDir,
		dirMode: dirInfo.Mode().String(),
	}

	// Check if it's a symlink
	if dirInfo.Mode()&os.ModeSymlink != 0 {
		permInfo.isSymlink = true

		// Get target path and permissions
		targetPath, err := os.Readlink(pluginDir)
		if err == nil {
			if !filepath.IsAbs(targetPath) {
				targetPath = filepath.Join(filepath.Dir(pluginDir), targetPath)
			}
			permInfo.targetPath = targetPath

			if targetInfo, err := os.Stat(targetPath); err == nil {
				permInfo.targetMode = targetInfo.Mode().String()
			}
		}
	}

	// Get manifest file permissions
	manifestPath := filepath.Join(pluginDir, "manifest.json")
	if manifestInfo, err := os.Stat(manifestPath); err == nil {
		permInfo.manifestMode = manifestInfo.Mode().String()
	}

	// Get WASM file permissions (look for .wasm files)
	entries, err := os.ReadDir(pluginDir)
	if err == nil {
		for _, entry := range entries {
			if filepath.Ext(entry.Name()) == ".wasm" {
				wasmPath := filepath.Join(pluginDir, entry.Name())
				if wasmInfo, err := os.Stat(wasmPath); err == nil {
					permInfo.wasmMode = wasmInfo.Mode().String()
					break // Just show the first WASM file found
				}
			}
		}
	}

	return permInfo
}

// Command implementations

func pluginList(cmd *cobra.Command, args []string) {
	discoveries := plugins.DiscoverPlugins(conf.Server.Plugins.Folder)

	w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
	fmt.Fprintln(w, "ID\tNAME\tAUTHOR\tVERSION\tCAPABILITIES\tDESCRIPTION")

	for _, discovery := range discoveries {
		displayPluginTableRow(w, discovery)
	}
	w.Flush()
}

func pluginInfo(cmd *cobra.Command, args []string) {
	path := args[0]
	pluginsDir := conf.Server.Plugins.Folder

	var manifest *schema.PluginManifest
	var fileInfo *pluginFileInfo
	var permInfo *pluginPermissionInfo

	if filepath.Ext(path) == pluginPackageExtension {
		// It's a package file
		pkg, err := loadAndValidatePackage(path)
		if err != nil {
			log.Fatal("Failed to load plugin package", err)
		}
		manifest = pkg.Manifest
		fileInfo = getFileInfo(path)
		// No permission info for package files
	} else {
		// It's a plugin name
		pluginDir, err := validatePluginDirectory(pluginsDir, path)
		if err != nil {
			log.Fatal("Plugin validation failed", err)
		}

		manifest, err = plugins.LoadManifest(pluginDir)
		if err != nil {
			log.Fatal("Failed to load plugin manifest", err)
		}

		// Get permission info for installed plugins
		permInfo = getPermissionInfo(pluginDir)
	}

	displayPluginDetails(manifest, fileInfo, permInfo)
}

func pluginInstall(cmd *cobra.Command, args []string) {
	ndpPath := args[0]
	pluginsDir := conf.Server.Plugins.Folder

	pkg, err := loadAndValidatePackage(ndpPath)
	if err != nil {
		log.Fatal("Package validation failed", err)
	}

	// Create target directory based on plugin name
	targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)

	// Check if plugin already exists
	if utils.FileExists(targetDir) {
		log.Fatal("Plugin already installed", "name", pkg.Manifest.Name, "path", targetDir,
			"use", "navidrome plugin update")
	}

	if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
		log.Fatal("Plugin installation failed", err)
	}

	fmt.Printf("Plugin '%s' v%s installed successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
}

func pluginRemove(cmd *cobra.Command, args []string) {
	pluginName := args[0]
	pluginsDir := conf.Server.Plugins.Folder

	pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
	if err != nil {
		log.Fatal("Plugin validation failed", err)
	}

	_, isSymlink, err := resolvePluginPath(pluginDir)
	if err != nil {
		log.Fatal("Failed to resolve plugin path", err)
	}

	if isSymlink {
		// For symlinked plugins (dev mode), just remove the symlink
		if err := os.Remove(pluginDir); err != nil {
			log.Fatal("Failed to remove plugin symlink", "name", pluginName, err)
		}
		fmt.Printf("Development plugin symlink '%s' removed successfully (target directory preserved)\n", pluginName)
	} else {
		// For regular plugins, remove the entire directory
		if err := os.RemoveAll(pluginDir); err != nil {
			log.Fatal("Failed to remove plugin directory", "name", pluginName, err)
		}
		fmt.Printf("Plugin '%s' removed successfully\n", pluginName)
	}
}

func pluginUpdate(cmd *cobra.Command, args []string) {
	ndpPath := args[0]
	pluginsDir := conf.Server.Plugins.Folder

	pkg, err := loadAndValidatePackage(ndpPath)
	if err != nil {
		log.Fatal("Package validation failed", err)
	}

	// Check if plugin exists
	targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)
	if !utils.FileExists(targetDir) {
		log.Fatal("Plugin not found", "name", pkg.Manifest.Name, "path", targetDir,
			"use", "navidrome plugin install")
	}

	// Create a backup of the existing plugin
	backupDir := targetDir + ".bak." + time.Now().Format("20060102150405")
	if err := os.Rename(targetDir, backupDir); err != nil {
		log.Fatal("Failed to backup existing plugin", err)
	}

	// Extract the new package
	if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
		// Restore backup if extraction failed
		os.RemoveAll(targetDir)
		_ = os.Rename(backupDir, targetDir) // Ignore error as we're already in a fatal path
		log.Fatal("Plugin update failed", err)
	}

	// Remove the backup
	os.RemoveAll(backupDir)

	fmt.Printf("Plugin '%s' updated to v%s successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
}

func pluginRefresh(cmd *cobra.Command, args []string) {
	pluginName := args[0]
	pluginsDir := conf.Server.Plugins.Folder

	pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
	if err != nil {
		log.Fatal("Plugin validation failed", err)
	}

	resolvedPath, isSymlink, err := resolvePluginPath(pluginDir)
	if err != nil {
		log.Fatal("Failed to resolve plugin path", err)
	}

	if isSymlink {
		log.Debug("Processing symlinked plugin", "name", pluginName, "link", pluginDir, "target", resolvedPath)
	}

	fmt.Printf("Refreshing plugin '%s'...\n", pluginName)

	// Get the plugin manager and refresh
	mgr := GetPluginManager(cmd.Context())
	log.Debug("Scanning plugins directory", "path", pluginsDir)
	mgr.ScanPlugins()

	log.Info("Waiting for plugin compilation to complete", "name", pluginName)

	// Wait for compilation to complete
	if err := mgr.EnsureCompiled(pluginName); err != nil {
		log.Fatal("Failed to compile refreshed plugin", "name", pluginName, err)
	}

	log.Info("Plugin compilation completed successfully", "name", pluginName)
	fmt.Printf("Plugin '%s' refreshed successfully\n", pluginName)
}

func pluginDev(cmd *cobra.Command, args []string) {
	sourcePath, err := filepath.Abs(args[0])
	if err != nil {
		log.Fatal("Invalid path", "path", args[0], err)
	}
	pluginsDir := conf.Server.Plugins.Folder

	// Validate source directory and manifest
	if err := validateDevSource(sourcePath); err != nil {
		log.Fatal("Source validation failed", err)
	}

	// Load manifest to get plugin name
	manifest, err := plugins.LoadManifest(sourcePath)
	if err != nil {
		log.Fatal("Failed to load plugin manifest", "path", filepath.Join(sourcePath, "manifest.json"), err)
	}

	pluginName := cmp.Or(manifest.Name, filepath.Base(sourcePath))
	targetPath := filepath.Join(pluginsDir, pluginName)

	// Handle existing target
	if err := handleExistingTarget(targetPath, sourcePath); err != nil {
		log.Fatal("Failed to handle existing target", err)
	}

	// Create target directory if needed
	if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
		log.Fatal("Failed to create plugins directory", "path", filepath.Dir(targetPath), err)
	}

	// Create the symlink
	if err := os.Symlink(sourcePath, targetPath); err != nil {
		log.Fatal("Failed to create symlink", "source", sourcePath, "target", targetPath, err)
	}

	fmt.Printf("Development symlink created: '%s' -> '%s'\n", targetPath, sourcePath)
	fmt.Println("Plugin can be refreshed with: navidrome plugin refresh", pluginName)
}

// Utility functions

func validateDevSource(sourcePath string) error {
	sourceInfo, err := os.Stat(sourcePath)
	if err != nil {
		return fmt.Errorf("source folder not found: %s (%w)", sourcePath, err)
	}
	if !sourceInfo.IsDir() {
		return fmt.Errorf("source path is not a directory: %s", sourcePath)
	}

	manifestPath := filepath.Join(sourcePath, "manifest.json")
	if !utils.FileExists(manifestPath) {
		return fmt.Errorf("source folder missing manifest.json: %s", sourcePath)
	}

	return nil
}

func handleExistingTarget(targetPath, sourcePath string) error {
	if !utils.FileExists(targetPath) {
		return nil // Nothing to handle
	}

	// Check if it's already a symlink to our source
	existingLink, err := os.Readlink(targetPath)
	if err == nil && existingLink == sourcePath {
		fmt.Printf("Symlink already exists and points to the correct source\n")
		return fmt.Errorf("symlink already exists") // This will cause early return in caller
	}

	// Handle case where target exists but is not a symlink to our source
	fmt.Printf("Target path '%s' already exists.\n", targetPath)
	fmt.Print("Do you want to replace it? (y/N): ")
	var response string
	_, err = fmt.Scanln(&response)
	if err != nil || strings.ToLower(response) != "y" {
		if err != nil {
			log.Debug("Error reading input, assuming 'no'", err)
		}
		return fmt.Errorf("operation canceled")
	}

	// Remove existing target
	if err := os.RemoveAll(targetPath); err != nil {
		return fmt.Errorf("failed to remove existing target %s: %w", targetPath, err)
	}

	return nil
}

func ensurePluginDirPermissions(dir string) {
	if err := os.Chmod(dir, pluginDirPermissions); err != nil {
		log.Error("Failed to set plugin directory permissions", "dir", dir, err)
	}

	// Apply permissions to all files in the directory
	entries, err := os.ReadDir(dir)
	if err != nil {
		log.Error("Failed to read plugin directory", "dir", dir, err)
		return
	}

	for _, entry := range entries {
		path := filepath.Join(dir, entry.Name())
		info, err := os.Stat(path)
		if err != nil {
			log.Error("Failed to stat file", "path", path, err)
			continue
		}

		mode := os.FileMode(pluginFilePermissions) // Files
		if info.IsDir() {
			mode = os.FileMode(pluginDirPermissions) // Directories
			ensurePluginDirPermissions(path) // Recursive
		}

		if err := os.Chmod(path, mode); err != nil {
			log.Error("Failed to set file permissions", "path", path, err)
		}
	}
}

func calculateSHA256(filePath string) string {
	file, err := os.Open(filePath)
	if err != nil {
		log.Error("Failed to open file for hashing", err)
		return "N/A"
	}
	defer file.Close()

	hasher := sha256.New()
	if _, err := io.Copy(hasher, file); err != nil {
		log.Error("Failed to calculate hash", err)
		return "N/A"
	}

	return hex.EncodeToString(hasher.Sum(nil))
}

193
cmd/plugin_test.go
Normal file
@@ -0,0 +1,193 @@
package cmd

import (
	"io"
	"os"
	"path/filepath"
	"strings"

	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/conf/configtest"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
	"github.com/spf13/cobra"
)

var _ = Describe("Plugin CLI Commands", func() {
	var tempDir string
	var cmd *cobra.Command
	var stdOut *os.File
	var origStdout *os.File
	var outReader *os.File

	// Helper to create a test plugin with the given name and details
	createTestPlugin := func(name, author, version string, capabilities []string) string {
		pluginDir := filepath.Join(tempDir, name)
		Expect(os.MkdirAll(pluginDir, 0755)).To(Succeed())

		// Create a properly formatted capabilities JSON array
		capabilitiesJSON := `"` + strings.Join(capabilities, `", "`) + `"`

		manifest := `{
			"name": "` + name + `",
			"author": "` + author + `",
			"version": "` + version + `",
			"description": "Plugin for testing",
			"website": "https://test.navidrome.org/` + name + `",
			"capabilities": [` + capabilitiesJSON + `],
			"permissions": {}
		}`

		Expect(os.WriteFile(filepath.Join(pluginDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())

		// Create a dummy WASM file
		wasmContent := []byte("dummy wasm content for testing")
		Expect(os.WriteFile(filepath.Join(pluginDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())

		return pluginDir
	}

	// Helper to execute a command and return captured output
	captureOutput := func(reader io.Reader) string {
		stdOut.Close()
		outputBytes, err := io.ReadAll(reader)
		Expect(err).NotTo(HaveOccurred())
		return string(outputBytes)
	}

	BeforeEach(func() {
		DeferCleanup(configtest.SetupConfig())
		tempDir = GinkgoT().TempDir()

		// Setup config
		conf.Server.Plugins.Enabled = true
		conf.Server.Plugins.Folder = tempDir

		// Create a command for testing
		cmd = &cobra.Command{Use: "test"}

		// Setup stdout capture
		origStdout = os.Stdout
		var err error
		outReader, stdOut, err = os.Pipe()
		Expect(err).NotTo(HaveOccurred())
		os.Stdout = stdOut

		DeferCleanup(func() {
			os.Stdout = origStdout
		})
	})

	AfterEach(func() {
		os.Stdout = origStdout
		if stdOut != nil {
			stdOut.Close()
		}
		if outReader != nil {
			outReader.Close()
		}
	})

	Describe("Plugin list command", func() {
		It("should list installed plugins", func() {
			// Create test plugins
			createTestPlugin("plugin1", "Test Author", "1.0.0", []string{"MetadataAgent"})
			createTestPlugin("plugin2", "Another Author", "2.1.0", []string{"Scrobbler"})

			// Execute command
			pluginList(cmd, []string{})

			// Verify output
			output := captureOutput(outReader)

			Expect(output).To(ContainSubstring("plugin1"))
			Expect(output).To(ContainSubstring("Test Author"))
			Expect(output).To(ContainSubstring("1.0.0"))
			Expect(output).To(ContainSubstring("MetadataAgent"))

			Expect(output).To(ContainSubstring("plugin2"))
			Expect(output).To(ContainSubstring("Another Author"))
			Expect(output).To(ContainSubstring("2.1.0"))
			Expect(output).To(ContainSubstring("Scrobbler"))
		})
	})

	Describe("Plugin info command", func() {
		It("should display information about an installed plugin", func() {
			// Create test plugin with multiple capabilities
			createTestPlugin("test-plugin", "Test Author", "1.0.0",
				[]string{"MetadataAgent", "Scrobbler"})

			// Execute command
			pluginInfo(cmd, []string{"test-plugin"})

			// Verify output
			output := captureOutput(outReader)

			Expect(output).To(ContainSubstring("Name: test-plugin"))
			Expect(output).To(ContainSubstring("Author: Test Author"))
			Expect(output).To(ContainSubstring("Version: 1.0.0"))
			Expect(output).To(ContainSubstring("Description: Plugin for testing"))
			Expect(output).To(ContainSubstring("Capabilities: MetadataAgent, Scrobbler"))
		})
	})

	Describe("Plugin remove command", func() {
		It("should remove a regular plugin directory", func() {
			// Create test plugin
			pluginDir := createTestPlugin("regular-plugin", "Test Author", "1.0.0",
				[]string{"MetadataAgent"})

			// Execute command
			pluginRemove(cmd, []string{"regular-plugin"})

			// Verify output
			output := captureOutput(outReader)
			Expect(output).To(ContainSubstring("Plugin 'regular-plugin' removed successfully"))

			// Verify directory is actually removed
			_, err := os.Stat(pluginDir)
			Expect(os.IsNotExist(err)).To(BeTrue())
		})

		It("should remove only the symlink for a development plugin", func() {
			// Create a real source directory
			sourceDir := filepath.Join(GinkgoT().TempDir(), "dev-plugin-source")
			Expect(os.MkdirAll(sourceDir, 0755)).To(Succeed())

			manifest := `{
				"name": "dev-plugin",
				"author": "Dev Author",
				"version": "0.1.0",
				"description": "Development plugin for testing",
				"website": "https://test.navidrome.org/dev-plugin",
				"capabilities": ["Scrobbler"],
				"permissions": {}
			}`
			Expect(os.WriteFile(filepath.Join(sourceDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())

			// Create a dummy WASM file
			wasmContent := []byte("dummy wasm content for testing")
			Expect(os.WriteFile(filepath.Join(sourceDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())

			// Create a symlink in the plugins directory
			symlinkPath := filepath.Join(tempDir, "dev-plugin")
			Expect(os.Symlink(sourceDir, symlinkPath)).To(Succeed())

			// Execute command
			pluginRemove(cmd, []string{"dev-plugin"})

			// Verify output
			output := captureOutput(outReader)
			Expect(output).To(ContainSubstring("Development plugin symlink 'dev-plugin' removed successfully"))
			Expect(output).To(ContainSubstring("target directory preserved"))

			// Verify the symlink is removed but source directory exists
			_, err := os.Lstat(symlinkPath)
			Expect(os.IsNotExist(err)).To(BeTrue())

			_, err = os.Stat(sourceDir)
			Expect(err).NotTo(HaveOccurred())
		})
	})
})
20
cmd/root.go
@@ -9,6 +9,7 @@ import (
	"time"

	"github.com/go-chi/chi/v5/middleware"
	_ "github.com/navidrome/navidrome/adapters/taglib"
	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/consts"
	"github.com/navidrome/navidrome/db"
@@ -21,14 +22,6 @@ import (
	"github.com/spf13/cobra"
	"github.com/spf13/viper"
	"golang.org/x/sync/errgroup"

	// Import adapters to register them
	_ "github.com/navidrome/navidrome/adapters/deezer"
	_ "github.com/navidrome/navidrome/adapters/gotaglib"
	_ "github.com/navidrome/navidrome/adapters/lastfm"
	_ "github.com/navidrome/navidrome/adapters/listenbrainz"
	_ "github.com/navidrome/navidrome/adapters/spotify"
	_ "github.com/navidrome/navidrome/adapters/taglib"
)

var (
@@ -337,13 +330,16 @@ func startPlaybackServer(ctx context.Context) func() error {
// startPluginManager starts the plugin manager, if configured.
func startPluginManager(ctx context.Context) func() error {
	return func() error {
		manager := GetPluginManager(ctx)
		if !conf.Server.Plugins.Enabled {
			log.Debug("Plugin system is DISABLED")
			log.Debug("Plugins are DISABLED")
			return nil
		}
		log.Info(ctx, "Starting plugin manager")
		return manager.Start(ctx)
		// Get the manager instance and scan for plugins
		manager := GetPluginManager(ctx)
		manager.ScanPlugins()

		return nil
	}
}

@@ -378,7 +374,6 @@ func init() {
	rootCmd.Flags().Duration("scaninterval", viper.GetDuration("scaninterval"), "how frequently to scan for changes in your music library")
	rootCmd.Flags().String("uiloginbackgroundurl", viper.GetString("uiloginbackgroundurl"), "URL to a backaground image used in the Login page")
	rootCmd.Flags().Bool("enabletranscodingconfig", viper.GetBool("enabletranscodingconfig"), "enables transcoding configuration in the UI")
	rootCmd.Flags().Bool("enabletranscodingcancellation", viper.GetBool("enabletranscodingcancellation"), "enables transcoding context cancellation")
	rootCmd.Flags().String("transcodingcachesize", viper.GetString("transcodingcachesize"), "size of transcoding cache")
	rootCmd.Flags().String("imagecachesize", viper.GetString("imagecachesize"), "size of image (art work) cache. set to 0 to disable cache")
	rootCmd.Flags().String("albumplaycountmode", viper.GetString("albumplaycountmode"), "how to compute playcount for albums. absolute (default) or normalized")
@@ -402,7 +397,6 @@ func init() {
	_ = viper.BindPFlag("prometheus.metricspath", rootCmd.Flags().Lookup("prometheus.metricspath"))

	_ = viper.BindPFlag("enabletranscodingconfig", rootCmd.Flags().Lookup("enabletranscodingconfig"))
	_ = viper.BindPFlag("enabletranscodingcancellation", rootCmd.Flags().Lookup("enabletranscodingcancellation"))
	_ = viper.BindPFlag("transcodingcachesize", rootCmd.Flags().Lookup("transcodingcachesize"))
	_ = viper.BindPFlag("imagecachesize", rootCmd.Flags().Lookup("imagecachesize"))
}
46
cmd/scan.go
@@ -1,12 +1,9 @@
package cmd

import (
	"bufio"
	"context"
	"encoding/gob"
	"fmt"
	"os"
	"strings"

	"github.com/navidrome/navidrome/core"
	"github.com/navidrome/navidrome/db"
@@ -22,14 +19,12 @@ var (
	fullScan bool
	subprocess bool
	targets []string
	targetFile string
)

func init() {
	scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
	scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
	scanCmd.Flags().StringArrayVarP(&targets, "target", "t", []string{}, "list of libraryID:folderPath pairs, can be repeated (e.g., \"-t 1:Music/Rock -t 1:Music/Jazz -t 2:Classical\")")
	scanCmd.Flags().StringVar(&targetFile, "target-file", "", "path to file containing targets (one libraryID:folderPath per line)")
	rootCmd.AddCommand(scanCmd)
}

@@ -76,17 +71,10 @@ func runScanner(ctx context.Context) {
	ds := persistence.New(sqlDB)
	pls := core.NewPlaylists(ds)

	// Parse targets from command line or file
	// Parse targets if provided
	var scanTargets []model.ScanTarget
	var err error

	if targetFile != "" {
		scanTargets, err = readTargetsFromFile(targetFile)
		if err != nil {
			log.Fatal(ctx, "Failed to read targets from file", err)
		}
		log.Info(ctx, "Scanning specific folders from file", "numTargets", len(scanTargets))
	} else if len(targets) > 0 {
	if len(targets) > 0 {
		var err error
		scanTargets, err = model.ParseTargets(targets)
		if err != nil {
			log.Fatal(ctx, "Failed to parse targets", err)
@@ -106,31 +94,3 @@ func runScanner(ctx context.Context) {
		trackScanInteractively(ctx, progress)
	}
}

// readTargetsFromFile reads scan targets from a file, one per line.
// Each line should be in the format "libraryID:folderPath".
// Empty lines and lines starting with # are ignored.
func readTargetsFromFile(filePath string) ([]model.ScanTarget, error) {
	file, err := os.Open(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to open target file: %w", err)
	}
	defer file.Close()

	var targetStrings []string
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		// Skip empty lines and comments
		if line == "" {
			continue
		}
		targetStrings = append(targetStrings, line)
	}

	if err := scanner.Err(); err != nil {
		return nil, fmt.Errorf("failed to read target file: %w", err)
	}

	return model.ParseTargets(targetStrings)
}
@@ -1,89 +0,0 @@
package cmd

import (
	"os"
	"path/filepath"

	"github.com/navidrome/navidrome/model"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

var _ = Describe("readTargetsFromFile", func() {
	var tempDir string

	BeforeEach(func() {
		var err error
		tempDir, err = os.MkdirTemp("", "navidrome-test-")
		Expect(err).ToNot(HaveOccurred())
	})

	AfterEach(func() {
		os.RemoveAll(tempDir)
	})

	It("reads valid targets from file", func() {
		filePath := filepath.Join(tempDir, "targets.txt")
		content := "1:Music/Rock\n2:Music/Jazz\n3:Classical\n"
		err := os.WriteFile(filePath, []byte(content), 0600)
		Expect(err).ToNot(HaveOccurred())

		targets, err := readTargetsFromFile(filePath)
		Expect(err).ToNot(HaveOccurred())
		Expect(targets).To(HaveLen(3))
		Expect(targets[0]).To(Equal(model.ScanTarget{LibraryID: 1, FolderPath: "Music/Rock"}))
		Expect(targets[1]).To(Equal(model.ScanTarget{LibraryID: 2, FolderPath: "Music/Jazz"}))
		Expect(targets[2]).To(Equal(model.ScanTarget{LibraryID: 3, FolderPath: "Classical"}))
	})

	It("skips empty lines", func() {
		filePath := filepath.Join(tempDir, "targets.txt")
		content := "1:Music/Rock\n\n2:Music/Jazz\n\n"
		err := os.WriteFile(filePath, []byte(content), 0600)
		Expect(err).ToNot(HaveOccurred())

		targets, err := readTargetsFromFile(filePath)
		Expect(err).ToNot(HaveOccurred())
		Expect(targets).To(HaveLen(2))
	})

	It("trims whitespace", func() {
		filePath := filepath.Join(tempDir, "targets.txt")
		content := " 1:Music/Rock \n\t2:Music/Jazz\t\n"
		err := os.WriteFile(filePath, []byte(content), 0600)
		Expect(err).ToNot(HaveOccurred())

		targets, err := readTargetsFromFile(filePath)
		Expect(err).ToNot(HaveOccurred())
		Expect(targets).To(HaveLen(2))
		Expect(targets[0].FolderPath).To(Equal("Music/Rock"))
		Expect(targets[1].FolderPath).To(Equal("Music/Jazz"))
	})

	It("returns error for non-existent file", func() {
		_, err := readTargetsFromFile("/nonexistent/file.txt")
		Expect(err).To(HaveOccurred())
		Expect(err.Error()).To(ContainSubstring("failed to open target file"))
	})

	It("returns error for invalid target format", func() {
		filePath := filepath.Join(tempDir, "targets.txt")
		content := "invalid-format\n"
		err := os.WriteFile(filePath, []byte(content), 0600)
		Expect(err).ToNot(HaveOccurred())

		_, err = readTargetsFromFile(filePath)
		Expect(err).To(HaveOccurred())
	})

	It("handles mixed valid and empty lines", func() {
		filePath := filepath.Join(tempDir, "targets.txt")
		content := "\n1:Music/Rock\n\n\n2:Music/Jazz\n\n"
		err := os.WriteFile(filePath, []byte(content), 0600)
		Expect(err).ToNot(HaveOccurred())

		targets, err := readTargetsFromFile(filePath)
		Expect(err).ToNot(HaveOccurred())
		Expect(targets).To(HaveLen(2))
	})
})
477
cmd/user.go
@@ -1,477 +0,0 @@
package cmd

import (
	"context"
	"encoding/csv"
	"encoding/json"
	"errors"
	"fmt"
	"os"
	"strconv"
	"strings"
	"syscall"
	"time"

	"github.com/Masterminds/squirrel"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/spf13/cobra"
	"golang.org/x/term"
)

var (
	email string
	libraryIds []int
	name string

	removeEmail bool
	removeName bool
	setAdmin bool
	setPassword bool
	setRegularUser bool
)

func init() {
	rootCmd.AddCommand(userRoot)

	userCreateCommand.Flags().StringVarP(&userID, "username", "u", "", "username")

	userCreateCommand.Flags().StringVarP(&email, "email", "e", "", "New user email")
	userCreateCommand.Flags().IntSliceVarP(&libraryIds, "library-ids", "i", []int{}, "Comma-separated list of library IDs. Set the user's accessible libraries. If empty, the user can access all libraries. This is incompatible with admin, as admin can always access all libraries")

	userCreateCommand.Flags().BoolVarP(&setAdmin, "admin", "a", false, "If set, make the user an admin. This user will have access to every library")
	userCreateCommand.Flags().StringVar(&name, "name", "", "New user's name (this is separate from username used to log in)")

	_ = userCreateCommand.MarkFlagRequired("username")

	userRoot.AddCommand(userCreateCommand)

	userDeleteCommand.Flags().StringVarP(&userID, "user", "u", "", "username or id")
	_ = userDeleteCommand.MarkFlagRequired("user")
	userRoot.AddCommand(userDeleteCommand)

	userEditCommand.Flags().StringVarP(&userID, "user", "u", "", "username or id")

	userEditCommand.Flags().BoolVar(&setAdmin, "set-admin", false, "If set, make the user an admin")
	userEditCommand.Flags().BoolVar(&setRegularUser, "set-regular", false, "If set, make the user a non-admin")
	userEditCommand.MarkFlagsMutuallyExclusive("set-admin", "set-regular")

	userEditCommand.Flags().BoolVar(&removeEmail, "remove-email", false, "If set, clear the user's email")
	userEditCommand.Flags().StringVarP(&email, "email", "e", "", "New user email")
	userEditCommand.MarkFlagsMutuallyExclusive("email", "remove-email")

	userEditCommand.Flags().BoolVar(&removeName, "remove-name", false, "If set, clear the user's name")
	userEditCommand.Flags().StringVar(&name, "name", "", "New user name (this is separate from username used to log in)")
	userEditCommand.MarkFlagsMutuallyExclusive("name", "remove-name")

	userEditCommand.Flags().BoolVar(&setPassword, "set-password", false, "If set, the user's new password will be prompted on the CLI")

	userEditCommand.Flags().IntSliceVarP(&libraryIds, "library-ids", "i", []int{}, "Comma-separated list of library IDs. Set the user's accessible libraries by id")

	_ = userEditCommand.MarkFlagRequired("user")
	userRoot.AddCommand(userEditCommand)

	userListCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
	userRoot.AddCommand(userListCommand)
}

var (
	userRoot = &cobra.Command{
		Use: "user",
		Short: "Administer users",
		Long: "Create, delete, list, or update users",
	}

	userCreateCommand = &cobra.Command{
		Use: "create",
		Aliases: []string{"c"},
		Short: "Create a new user",
		Run: func(cmd *cobra.Command, args []string) {
			runCreateUser(cmd.Context())
		},
	}

	userDeleteCommand = &cobra.Command{
		Use: "delete",
		Aliases: []string{"d"},
		Short: "Deletes an existing user",
		Run: func(cmd *cobra.Command, args []string) {
			runDeleteUser(cmd.Context())
		},
	}

	userEditCommand = &cobra.Command{
		Use: "edit",
		Aliases: []string{"e"},
		Short: "Edit a user",
		Long: "Edit the password, admin status, and/or library access",
		Run: func(cmd *cobra.Command, args []string) {
			runUserEdit(cmd.Context())
		},
	}

	userListCommand = &cobra.Command{
		Use: "list",
		Short: "List users",
		Run: func(cmd *cobra.Command, args []string) {
			runUserList(cmd.Context())
		},
	}
)

func promptPassword() string {
	for {
		fmt.Print("Enter new password (press enter with no password to cancel): ")
		// This cast is necessary for some platforms
		password, err := term.ReadPassword(int(syscall.Stdin)) //nolint:unconvert

		if err != nil {
			log.Fatal("Error getting password", err)
		}

		fmt.Print("\nConfirm new password (press enter with no password to cancel): ")
		confirmation, err := term.ReadPassword(int(syscall.Stdin)) //nolint:unconvert

		if err != nil {
			log.Fatal("Error getting password confirmation", err)
		}

		// clear the line.
		fmt.Println()

		pass := string(password)
		confirm := string(confirmation)

		if pass == "" {
			return ""
		}

		if pass == confirm {
			return pass
		}

		fmt.Println("Password and password confirmation do not match")
	}
}

func libraryError(libraries model.Libraries) error {
	ids := make([]int, len(libraries))
	for idx, library := range libraries {
		ids[idx] = library.ID
	}
	return fmt.Errorf("not all available libraries found. Requested ids: %v, Found libraries: %v", libraryIds, ids)
}

func runCreateUser(ctx context.Context) {
	password := promptPassword()
	if password == "" {
		log.Fatal("Empty password provided, user creation cancelled")
	}

	user := model.User{
		UserName: userID,
		Email: email,
		Name: name,
		IsAdmin: setAdmin,
		NewPassword: password,
	}

	if user.Name == "" {
		user.Name = userID
	}

	ds, ctx := getAdminContext(ctx)

	err := ds.WithTx(func(tx model.DataStore) error {
		existingUser, err := tx.User(ctx).FindByUsername(userID)
		if existingUser != nil {
			return fmt.Errorf("existing user '%s'", userID)
		}

		if err != nil && !errors.Is(err, model.ErrNotFound) {
			return fmt.Errorf("failed to check existing username: %w", err)
		}

		if len(libraryIds) > 0 && !setAdmin {
			user.Libraries, err = tx.Library(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"id": libraryIds}})
			if err != nil {
				return err
			}

			if len(user.Libraries) != len(libraryIds) {
				return libraryError(user.Libraries)
			}
		} else {
			user.Libraries, err = tx.Library(ctx).GetAll()
			if err != nil {
				return err
			}
		}

		err = tx.User(ctx).Put(&user)
		if err != nil {
			return err
		}

		updatedIds := make([]int, len(user.Libraries))
		for idx, lib := range user.Libraries {
			updatedIds[idx] = lib.ID
		}

		err = tx.User(ctx).SetUserLibraries(user.ID, updatedIds)
		return err
	})

	if err != nil {
		log.Fatal(ctx, err)
	}

	log.Info(ctx, "Successfully created user", "id", user.ID, "username", user.UserName)
}

func runDeleteUser(ctx context.Context) {
	ds, ctx := getAdminContext(ctx)

	var err error
	var user *model.User

	err = ds.WithTx(func(tx model.DataStore) error {
		count, err := tx.User(ctx).CountAll()
		if err != nil {
			return err
		}

		if count == 1 {
			return errors.New("refusing to delete the last user")
		}

		user, err = getUser(ctx, userID, tx)
		if err != nil {
			return err
		}

		return tx.User(ctx).Delete(user.ID)
	})

	if err != nil {
		log.Fatal(ctx, "Failed to delete user", err)
	}

	log.Info(ctx, "Deleted user", "username", user.UserName)
}

func runUserEdit(ctx context.Context) {
	ds, ctx := getAdminContext(ctx)

	var err error
	var user *model.User
	changes := []string{}

	err = ds.WithTx(func(tx model.DataStore) error {
		var newLibraries model.Libraries

		user, err = getUser(ctx, userID, tx)
		if err != nil {
			return err
		}

		if len(libraryIds) > 0 && !setAdmin {
			libraries, err := tx.Library(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"id": libraryIds}})

			if err != nil {
				return err
			}

			if len(libraries) != len(libraryIds) {
				return libraryError(libraries)
			}

			newLibraries = libraries
			changes = append(changes, "updated library ids")
		}

		if setAdmin && !user.IsAdmin {
			libraries, err := tx.Library(ctx).GetAll()
			if err != nil {
				return err
			}

			user.IsAdmin = true
			user.Libraries = libraries
			changes = append(changes, "set admin")

			newLibraries = libraries
		}

		if setRegularUser && user.IsAdmin {
			user.IsAdmin = false
			changes = append(changes, "set regular user")
		}

		if setPassword {
			password := promptPassword()

			if password != "" {
				user.NewPassword = password
				changes = append(changes, "updated password")
			}
		}

		if email != "" && email != user.Email {
			user.Email = email
			changes = append(changes, "updated email")
		} else if removeEmail && user.Email != "" {
			user.Email = ""
			changes = append(changes, "removed email")
		}

		if name != "" && name != user.Name {
			user.Name = name
			changes = append(changes, "updated name")
		} else if removeName && user.Name != "" {
			user.Name = ""
			changes = append(changes, "removed name")
		}

		if len(changes) == 0 {
			return nil
		}

		err := tx.User(ctx).Put(user)
		if err != nil {
			return err
		}

		if len(newLibraries) > 0 {
			updatedIds := make([]int, len(newLibraries))
			for idx, lib := range newLibraries {
				updatedIds[idx] = lib.ID
			}

			err := tx.User(ctx).SetUserLibraries(user.ID, updatedIds)
			if err != nil {
				return err
			}
		}

		return nil
	})

	if err != nil {
		log.Fatal(ctx, "Failed to update user", err)
	}

	if len(changes) == 0 {
		log.Info(ctx, "No changes for user", "user", user.UserName)
	} else {
		log.Info(ctx, "Updated user", "user", user.UserName, "changes", strings.Join(changes, ", "))
	}
}

type displayLibrary struct {
	ID int `json:"id"`
	Path string `json:"path"`
}

type displayUser struct {
	Id string `json:"id"`
	Username string `json:"username"`
	Name string `json:"name"`
	Email string `json:"email"`
	Admin bool `json:"admin"`
	CreatedAt time.Time `json:"createdAt"`
	UpdatedAt time.Time `json:"updatedAt"`
	LastAccess *time.Time `json:"lastAccess"`
	LastLogin *time.Time `json:"lastLogin"`
	Libraries []displayLibrary `json:"libraries"`
}

func runUserList(ctx context.Context) {
	if outputFormat != "csv" && outputFormat != "json" {
		log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
	}

	ds, ctx := getAdminContext(ctx)

	users, err := ds.User(ctx).ReadAll()
	if err != nil {
		log.Fatal(ctx, "Failed to retrieve users", err)
	}

	userList := users.(model.Users)

	if outputFormat == "csv" {
		w := csv.NewWriter(os.Stdout)
		_ = w.Write([]string{
			"user id",
			"username",
			"user's name",
			"user email",
			"admin",
			"created at",
			"updated at",
			"last access",
			"last login",
			"libraries",
		})
		for _, user := range userList {
			paths := make([]string, len(user.Libraries))

			for idx, library := range user.Libraries {
				paths[idx] = fmt.Sprintf("%d:%s", library.ID, library.Path)
			}

			var lastAccess, lastLogin string

			if user.LastAccessAt != nil {
				lastAccess = user.LastAccessAt.Format(time.RFC3339Nano)
			} else {
				lastAccess = "never"
			}

			if user.LastLoginAt != nil {
				lastLogin = user.LastLoginAt.Format(time.RFC3339Nano)
			} else {
				lastLogin = "never"
			}

			_ = w.Write([]string{
				user.ID,
				user.UserName,
				user.Name,
				user.Email,
				strconv.FormatBool(user.IsAdmin),
				user.CreatedAt.Format(time.RFC3339Nano),
				user.UpdatedAt.Format(time.RFC3339Nano),
				lastAccess,
				lastLogin,
				fmt.Sprintf("'%s'", strings.Join(paths, "|")),
			})
		}
		w.Flush()
	} else {
		users := make([]displayUser, len(userList))
		for idx, user := range userList {
			paths := make([]displayLibrary, len(user.Libraries))

			for idx, library := range user.Libraries {
				paths[idx].ID = library.ID
				paths[idx].Path = library.Path
			}

			users[idx].Id = user.ID
			users[idx].Username = user.UserName
			users[idx].Name = user.Name
			users[idx].Email = user.Email
			users[idx].Admin = user.IsAdmin
			users[idx].CreatedAt = user.CreatedAt
			users[idx].UpdatedAt = user.UpdatedAt
			users[idx].LastAccess = user.LastAccessAt
			users[idx].LastLogin = user.LastLoginAt
			users[idx].Libraries = paths
		}

		j, _ := json.Marshal(users)
		fmt.Printf("%s\n", j)
	}
}
42
cmd/utils.go
@@ -1,42 +0,0 @@
package cmd

import (
	"context"
	"errors"
	"fmt"

	"github.com/navidrome/navidrome/core/auth"
	"github.com/navidrome/navidrome/db"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/model/request"
	"github.com/navidrome/navidrome/persistence"
)

func getAdminContext(ctx context.Context) (model.DataStore, context.Context) {
	sqlDB := db.Db()
	ds := persistence.New(sqlDB)
	ctx = auth.WithAdminUser(ctx, ds)
	u, _ := request.UserFrom(ctx)
	if !u.IsAdmin {
		log.Fatal(ctx, "There must be at least one admin user to run this command.")
	}
	return ds, ctx
}

func getUser(ctx context.Context, id string, ds model.DataStore) (*model.User, error) {
	user, err := ds.User(ctx).FindByUsername(id)

	if err != nil && !errors.Is(err, model.ErrNotFound) {
		return nil, fmt.Errorf("finding user by name: %w", err)
	}

	if errors.Is(err, model.ErrNotFound) {
		user, err = ds.User(ctx).Get(id)
		if err != nil {
			return nil, fmt.Errorf("finding user by id: %w", err)
		}
	}

	return user, nil
}
@@ -9,10 +9,10 @@ package cmd
import (
	"context"
	"github.com/google/wire"
	"github.com/navidrome/navidrome/adapters/lastfm"
	"github.com/navidrome/navidrome/adapters/listenbrainz"
	"github.com/navidrome/navidrome/core"
	"github.com/navidrome/navidrome/core/agents"
	"github.com/navidrome/navidrome/core/agents/lastfm"
	"github.com/navidrome/navidrome/core/agents/listenbrainz"
	"github.com/navidrome/navidrome/core/artwork"
	"github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/core/ffmpeg"
@@ -32,11 +32,6 @@ import (
)

import (
	_ "github.com/navidrome/navidrome/adapters/deezer"
	_ "github.com/navidrome/navidrome/adapters/gotaglib"
	_ "github.com/navidrome/navidrome/adapters/lastfm"
	_ "github.com/navidrome/navidrome/adapters/listenbrainz"
	_ "github.com/navidrome/navidrome/adapters/spotify"
	_ "github.com/navidrome/navidrome/adapters/taglib"
)

@@ -52,7 +47,9 @@ func CreateServer() *server.Server {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	broker := events.GetBroker()
	insights := metrics.GetInstance(dataStore)
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	insights := metrics.GetInstance(dataStore, manager)
	serverServer := server.New(dataStore, broker, insights)
	return serverServer
}
@@ -62,22 +59,21 @@ func CreateNativeAPIRouter(ctx context.Context) *nativeapi.Router {
	dataStore := persistence.New(sqlDB)
	share := core.NewShare(dataStore)
	playlists := core.NewPlaylists(dataStore)
	insights := metrics.GetInstance(dataStore)
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	insights := metrics.GetInstance(dataStore, manager)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	broker := events.GetBroker()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, broker, metricsMetrics)
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
	cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
	broker := events.GetBroker()
	modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
	watcher := scanner.GetWatcher(dataStore, modelScanner)
	library := core.NewLibrary(dataStore, modelScanner, watcher, broker, manager)
	user := core.NewUser(dataStore, manager)
	library := core.NewLibrary(dataStore, modelScanner, watcher, broker)
	maintenance := core.NewMaintenance(dataStore)
	router := nativeapi.New(dataStore, share, playlists, insights, library, user, maintenance, manager)
	router := nativeapi.New(dataStore, share, playlists, insights, library, maintenance)
	return router
}

@@ -86,9 +82,8 @@ func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
	dataStore := persistence.New(sqlDB)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	broker := events.GetBroker()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, broker, metricsMetrics)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
@@ -98,6 +93,7 @@ func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
	archiver := core.NewArchiver(mediaStreamer, dataStore, share)
	players := core.NewPlayers(dataStore)
	cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
	broker := events.GetBroker()
	playlists := core.NewPlaylists(dataStore)
	modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
	playTracker := scrobbler.GetPlayTracker(dataStore, broker, manager)
@@ -111,9 +107,8 @@ func CreatePublicRouter() *public.Router {
	dataStore := persistence.New(sqlDB)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	broker := events.GetBroker()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, broker, metricsMetrics)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
@@ -142,7 +137,9 @@ func CreateListenBrainzRouter() *listenbrainz.Router {
func CreateInsights() metrics.Insights {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	insights := metrics.GetInstance(dataStore)
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	insights := metrics.GetInstance(dataStore, manager)
	return insights
}

@@ -158,13 +155,13 @@ func CreateScanner(ctx context.Context) model.Scanner {
	dataStore := persistence.New(sqlDB)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	broker := events.GetBroker()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, broker, metricsMetrics)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
	cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
	broker := events.GetBroker()
	playlists := core.NewPlaylists(dataStore)
	modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
	return modelScanner
@@ -175,13 +172,13 @@ func CreateScanWatcher(ctx context.Context) scanner.Watcher {
	dataStore := persistence.New(sqlDB)
	fileCache := artwork.GetImageCache()
	fFmpeg := ffmpeg.New()
	broker := events.GetBroker()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, broker, metricsMetrics)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	agentsAgents := agents.GetAgents(dataStore, manager)
	provider := external.NewProvider(dataStore, agentsAgents)
	artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
	cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
	broker := events.GetBroker()
	playlists := core.NewPlaylists(dataStore)
	modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
	watcher := scanner.GetWatcher(dataStore, modelScanner)
@@ -195,20 +192,19 @@ func GetPlaybackServer() playback.PlaybackServer {
	return playbackServer
}

func getPluginManager() *plugins.Manager {
func getPluginManager() plugins.Manager {
	sqlDB := db.Db()
	dataStore := persistence.New(sqlDB)
	broker := events.GetBroker()
	metricsMetrics := metrics.GetPrometheusInstance(dataStore)
	manager := plugins.GetManager(dataStore, broker, metricsMetrics)
	manager := plugins.GetManager(dataStore, metricsMetrics)
	return manager
}

// wire_injectors.go:

var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.GetWatcher, metrics.GetPrometheusInstance, db.Db, plugins.GetManager, wire.Bind(new(agents.PluginLoader), new(*plugins.Manager)), wire.Bind(new(scrobbler.PluginLoader), new(*plugins.Manager)), wire.Bind(new(nativeapi.PluginManager), new(*plugins.Manager)), wire.Bind(new(core.PluginUnloader), new(*plugins.Manager)), wire.Bind(new(plugins.PluginMetricsRecorder), new(metrics.Metrics)), wire.Bind(new(core.Watcher), new(scanner.Watcher)))
|
||||
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.GetWatcher, plugins.GetManager, metrics.GetPrometheusInstance, db.Db, wire.Bind(new(agents.PluginLoader), new(plugins.Manager)), wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)), wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)), wire.Bind(new(core.Watcher), new(scanner.Watcher)))
|
||||
|
||||
func GetPluginManager(ctx context.Context) *plugins.Manager {
|
||||
func GetPluginManager(ctx context.Context) plugins.Manager {
|
||||
manager := getPluginManager()
|
||||
manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
|
||||
return manager
|
||||
|
||||
@@ -6,10 +6,10 @@ import (
|
||||
"context"
|
||||
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/adapters/lastfm"
|
||||
"github.com/navidrome/navidrome/adapters/listenbrainz"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
"github.com/navidrome/navidrome/core/artwork"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
@@ -39,14 +39,12 @@ var allProviders = wire.NewSet(
|
||||
events.GetBroker,
|
||||
scanner.New,
|
||||
scanner.GetWatcher,
|
||||
plugins.GetManager,
|
||||
metrics.GetPrometheusInstance,
|
||||
db.Db,
|
||||
plugins.GetManager,
|
||||
wire.Bind(new(agents.PluginLoader), new(*plugins.Manager)),
|
||||
wire.Bind(new(scrobbler.PluginLoader), new(*plugins.Manager)),
|
||||
wire.Bind(new(nativeapi.PluginManager), new(*plugins.Manager)),
|
||||
wire.Bind(new(core.PluginUnloader), new(*plugins.Manager)),
|
||||
wire.Bind(new(plugins.PluginMetricsRecorder), new(metrics.Metrics)),
|
||||
wire.Bind(new(agents.PluginLoader), new(plugins.Manager)),
|
||||
wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)),
|
||||
wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)),
|
||||
wire.Bind(new(core.Watcher), new(scanner.Watcher)),
|
||||
)
|
||||
|
||||
@@ -122,13 +120,13 @@ func GetPlaybackServer() playback.PlaybackServer {
|
||||
))
|
||||
}
|
||||
|
||||
func getPluginManager() *plugins.Manager {
|
||||
func getPluginManager() plugins.Manager {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func GetPluginManager(ctx context.Context) *plugins.Manager {
|
||||
func GetPluginManager(ctx context.Context) plugins.Manager {
|
||||
manager := getPluginManager()
|
||||
manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
|
||||
return manager
|
||||
|
||||
@@ -41,7 +41,6 @@ type configOptions struct {
|
||||
UIWelcomeMessage string
|
||||
MaxSidebarPlaylists int
|
||||
EnableTranscodingConfig bool
|
||||
EnableTranscodingCancellation bool
|
||||
EnableDownloads bool
|
||||
EnableExternalServices bool
|
||||
EnableInsightsCollector bool
|
||||
@@ -87,9 +86,11 @@ type configOptions struct {
|
||||
AuthRequestLimit int
|
||||
AuthWindowLength time.Duration
|
||||
PasswordEncryptionKey string
|
||||
ExtAuth extAuthOptions
|
||||
ReverseProxyUserHeader string
|
||||
ReverseProxyWhitelist string
|
||||
Plugins pluginsOptions
|
||||
HTTPHeaders httpHeaderOptions `json:",omitzero"`
|
||||
PluginConfig map[string]map[string]string
|
||||
HTTPSecurityHeaders secureOptions `json:",omitzero"`
|
||||
Prometheus prometheusOptions `json:",omitzero"`
|
||||
Scanner scannerOptions `json:",omitzero"`
|
||||
Jukebox jukeboxOptions `json:",omitzero"`
|
||||
@@ -101,38 +102,36 @@ type configOptions struct {
|
||||
Spotify spotifyOptions `json:",omitzero"`
|
||||
Deezer deezerOptions `json:",omitzero"`
|
||||
ListenBrainz listenBrainzOptions `json:",omitzero"`
|
||||
EnableScrobbleHistory bool
|
||||
Tags map[string]TagConf `json:",omitempty"`
|
||||
Tags map[string]TagConf `json:",omitempty"`
|
||||
Agents string
|
||||
|
||||
// DevFlags. These are used to enable/disable debugging and incomplete features
|
||||
DevLogLevels map[string]string `json:",omitempty"`
|
||||
DevLogSourceLine bool
|
||||
DevEnableProfiler bool
|
||||
DevAutoCreateAdminPassword string
|
||||
DevAutoLoginUsername string
|
||||
DevActivityPanel bool
|
||||
DevActivityPanelUpdateRate time.Duration
|
||||
DevSidebarPlaylists bool
|
||||
DevShowArtistPage bool
|
||||
DevUIShowConfig bool
|
||||
DevNewEventStream bool
|
||||
DevOffsetOptimize int
|
||||
DevArtworkMaxRequests int
|
||||
DevArtworkThrottleBacklogLimit int
|
||||
DevArtworkThrottleBacklogTimeout time.Duration
|
||||
DevArtistInfoTimeToLive time.Duration
|
||||
DevAlbumInfoTimeToLive time.Duration
|
||||
DevExternalScanner bool
|
||||
DevScannerThreads uint
|
||||
DevSelectiveWatcher bool
|
||||
DevInsightsInitialDelay time.Duration
|
||||
DevEnablePlayerInsights bool
|
||||
DevEnablePluginsInsights bool
|
||||
DevPluginCompilationTimeout time.Duration
|
||||
DevExternalArtistFetchMultiplier float64
|
||||
DevOptimizeDB bool
|
||||
DevPreserveUnicodeInExternalCalls bool
|
||||
DevLogLevels map[string]string `json:",omitempty"`
|
||||
DevLogSourceLine bool
|
||||
DevEnableProfiler bool
|
||||
DevAutoCreateAdminPassword string
|
||||
DevAutoLoginUsername string
|
||||
DevActivityPanel bool
|
||||
DevActivityPanelUpdateRate time.Duration
|
||||
DevSidebarPlaylists bool
|
||||
DevShowArtistPage bool
|
||||
DevUIShowConfig bool
|
||||
DevNewEventStream bool
|
||||
DevOffsetOptimize int
|
||||
DevArtworkMaxRequests int
|
||||
DevArtworkThrottleBacklogLimit int
|
||||
DevArtworkThrottleBacklogTimeout time.Duration
|
||||
DevArtistInfoTimeToLive time.Duration
|
||||
DevAlbumInfoTimeToLive time.Duration
|
||||
DevExternalScanner bool
|
||||
DevScannerThreads uint
|
||||
DevSelectiveWatcher bool
|
||||
DevInsightsInitialDelay time.Duration
|
||||
DevEnablePlayerInsights bool
|
||||
DevEnablePluginsInsights bool
|
||||
DevPluginCompilationTimeout time.Duration
|
||||
DevExternalArtistFetchMultiplier float64
|
||||
DevOptimizeDB bool
|
||||
}
|
||||
|
||||
type scannerOptions struct {
|
||||
@@ -153,7 +152,6 @@ type subsonicOptions struct {
|
||||
ArtistParticipations bool
|
||||
DefaultReportRealPath bool
|
||||
LegacyClients string
|
||||
MinimalClients string
|
||||
}
|
||||
|
||||
type TagConf struct {
|
||||
@@ -188,8 +186,8 @@ type listenBrainzOptions struct {
|
||||
BaseURL string
|
||||
}
|
||||
|
||||
type httpHeaderOptions struct {
|
||||
FrameOptions string
|
||||
type secureOptions struct {
|
||||
CustomFrameOptionsValue string
|
||||
}
|
||||
|
||||
type prometheusOptions struct {
|
||||
@@ -226,16 +224,9 @@ type inspectOptions struct {
|
||||
}
|
||||
|
||||
type pluginsOptions struct {
|
||||
Enabled bool
|
||||
Folder string
|
||||
CacheSize string
|
||||
AutoReload bool
|
||||
LogLevel string
|
||||
}
|
||||
|
||||
type extAuthOptions struct {
|
||||
TrustedSources string
|
||||
UserHeader string
|
||||
Enabled bool
|
||||
Folder string
|
||||
CacheSize string
|
||||
}
|
||||
|
||||
var (
|
||||
@@ -256,11 +247,6 @@ func LoadFromFile(confFile string) {
|
||||
func Load(noConfigDump bool) {
|
||||
parseIniFileConfiguration()
|
||||
|
||||
// Map deprecated options to their new names for backwards compatibility
|
||||
mapDeprecatedOption("ReverseProxyWhitelist", "ExtAuth.TrustedSources")
|
||||
mapDeprecatedOption("ReverseProxyUserHeader", "ExtAuth.UserHeader")
|
||||
mapDeprecatedOption("HTTPSecurityHeaders.CustomFrameOptionsValue", "HTTPHeaders.FrameOptions")
|
||||
|
||||
err := viper.Unmarshal(&Server)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
|
||||
@@ -344,18 +330,9 @@ func Load(noConfigDump bool) {
|
||||
Server.BaseScheme = u.Scheme
|
||||
}
|
||||
|
||||
// Log configuration source
|
||||
if Server.ConfigFile != "" {
|
||||
log.Info("Loaded configuration", "file", Server.ConfigFile)
|
||||
} else if hasNDEnvVars() {
|
||||
log.Info("No configuration file found. Loaded configuration only from environment variables")
|
||||
} else {
|
||||
log.Warn("No configuration file found. Using default values. To specify a config file, use the --configfile flag or set the ND_CONFIGFILE environment variable.")
|
||||
}
|
||||
|
||||
// Print current configuration if log level is Debug
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) && !noConfigDump {
|
||||
prettyConf := pretty.Sprintf("Configuration: %# v", Server)
|
||||
prettyConf := pretty.Sprintf("Loaded configuration from '%s': %# v", Server.ConfigFile, Server)
|
||||
if Server.EnableLogRedacting {
|
||||
prettyConf = log.Redact(prettyConf)
|
||||
}
|
||||
@@ -366,12 +343,13 @@ func Load(noConfigDump bool) {
|
||||
disableExternalServices()
|
||||
}
|
||||
|
||||
logDeprecatedOptions("Scanner.GenreSeparators", "")
|
||||
logDeprecatedOptions("Scanner.GroupAlbumReleases", "")
|
||||
logDeprecatedOptions("DevEnableBufferedScrobble", "") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
|
||||
logDeprecatedOptions("ReverseProxyWhitelist", "ExtAuth.TrustedSources")
|
||||
logDeprecatedOptions("ReverseProxyUserHeader", "ExtAuth.UserHeader")
|
||||
logDeprecatedOptions("HTTPSecurityHeaders.CustomFrameOptionsValue", "HTTPHeaders.FrameOptions")
|
||||
if Server.Scanner.Extractor != consts.DefaultScannerExtractor {
|
||||
log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
|
||||
Server.Scanner.Extractor = consts.DefaultScannerExtractor
|
||||
}
|
||||
logDeprecatedOptions("Scanner.GenreSeparators")
|
||||
logDeprecatedOptions("Scanner.GroupAlbumReleases")
|
||||
logDeprecatedOptions("DevEnableBufferedScrobble") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
|
||||
|
||||
// Call init hooks
|
||||
for _, hook := range hooks {
|
||||
@@ -379,29 +357,15 @@ func Load(noConfigDump bool) {
|
||||
}
|
||||
}
|
||||
|
||||
func logDeprecatedOptions(oldName, newName string) {
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(oldName, ".", "_"))
newEnvVar := "ND_" + strings.ToUpper(strings.ReplaceAll(newName, ".", "_"))
logWarning := func(oldName, newName string) {
if newName != "" {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release. Please use the new '%s'", oldName, newName))
} else {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", oldName))
func logDeprecatedOptions(options ...string) {
for _, option := range options {
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
if os.Getenv(envVar) != "" {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
}
if viper.InConfig(option) {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
}
}
if os.Getenv(envVar) != "" {
logWarning(envVar, newEnvVar)
}
if viper.InConfig(oldName) {
logWarning(oldName, newName)
}
}
|
||||
// mapDeprecatedOption is used to provide backwards compatibility for deprecated options. It should be called after
|
||||
// the config has been read by viper, but before unmarshalling it into the Config struct.
|
||||
func mapDeprecatedOption(legacyName, newName string) {
|
||||
if viper.IsSet(legacyName) {
|
||||
viper.Set(newName, viper.Get(legacyName))
|
||||
}
|
||||
}
|
||||
|
||||
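For reference, the deprecated-option shim above can be exercised in isolation. A minimal sketch, assuming a viper-backed config; the keys below are only examples and this is not the exact Navidrome code path:

package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// Simulate a config that still sets the legacy key.
	viper.Set("ReverseProxyUserHeader", "X-Auth-User")

	// mapDeprecatedOption-style shim: copy the legacy value onto the new key
	// before unmarshalling, so both names resolve to the same value.
	if viper.IsSet("ReverseProxyUserHeader") {
		viper.Set("ExtAuth.UserHeader", viper.Get("ReverseProxyUserHeader"))
	}

	fmt.Println(viper.GetString("extauth.userheader")) // prints "X-Auth-User"
}

Viper treats keys case-insensitively, which is why the lowercase dotted lookup at the end matches the mixed-case key set above.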
@@ -511,16 +475,6 @@ func AddHook(hook func()) {
|
||||
hooks = append(hooks, hook)
|
||||
}
|
||||
|
||||
// hasNDEnvVars checks if any ND_ prefixed environment variables are set (excluding ND_CONFIGFILE)
|
||||
func hasNDEnvVars() bool {
|
||||
for _, env := range os.Environ() {
|
||||
if strings.HasPrefix(env, "ND_") && !strings.HasPrefix(env, "ND_CONFIGFILE=") {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func setViperDefaults() {
|
||||
viper.SetDefault("musicfolder", filepath.Join(".", "music"))
|
||||
viper.SetDefault("cachefolder", "")
|
||||
@@ -538,7 +492,6 @@ func setViperDefaults() {
|
||||
viper.SetDefault("uiwelcomemessage", "")
|
||||
viper.SetDefault("maxsidebarplaylists", consts.DefaultMaxSidebarPlaylists)
|
||||
viper.SetDefault("enabletranscodingconfig", false)
|
||||
viper.SetDefault("enabletranscodingcancellation", false)
|
||||
viper.SetDefault("transcodingcachesize", "100MB")
|
||||
viper.SetDefault("imagecachesize", "100MB")
|
||||
viper.SetDefault("albumplaycountmode", consts.AlbumPlayCountModeAbsolute)
|
||||
@@ -583,8 +536,8 @@ func setViperDefaults() {
|
||||
viper.SetDefault("authrequestlimit", 5)
|
||||
viper.SetDefault("authwindowlength", 20*time.Second)
|
||||
viper.SetDefault("passwordencryptionkey", "")
|
||||
viper.SetDefault("extauth.userheader", "Remote-User")
|
||||
viper.SetDefault("extauth.trustedsources", "")
|
||||
viper.SetDefault("reverseproxyuserheader", "Remote-User")
|
||||
viper.SetDefault("reverseproxywhitelist", "")
|
||||
viper.SetDefault("prometheus.enabled", false)
|
||||
viper.SetDefault("prometheus.metricspath", consts.PrometheusDefaultPath)
|
||||
viper.SetDefault("prometheus.password", "")
|
||||
@@ -605,7 +558,7 @@ func setViperDefaults() {
|
||||
viper.SetDefault("subsonic.appendsubtitle", true)
|
||||
viper.SetDefault("subsonic.artistparticipations", false)
|
||||
viper.SetDefault("subsonic.defaultreportrealpath", false)
|
||||
viper.SetDefault("subsonic.legacyclients", "DSub,SubMusic")
|
||||
viper.SetDefault("subsonic.legacyclients", "DSub")
|
||||
viper.SetDefault("agents", "lastfm,spotify,deezer")
|
||||
viper.SetDefault("lastfm.enabled", true)
|
||||
viper.SetDefault("lastfm.language", "en")
|
||||
@@ -618,8 +571,7 @@ func setViperDefaults() {
|
||||
viper.SetDefault("deezer.language", "en")
|
||||
viper.SetDefault("listenbrainz.enabled", true)
|
||||
viper.SetDefault("listenbrainz.baseurl", "https://api.listenbrainz.org/1/")
|
||||
viper.SetDefault("enablescrobblehistory", true)
|
||||
viper.SetDefault("httpheaders.frameoptions", "DENY")
|
||||
viper.SetDefault("httpsecurityheaders.customframeoptionsvalue", "DENY")
|
||||
viper.SetDefault("backup.path", "")
|
||||
viper.SetDefault("backup.schedule", "")
|
||||
viper.SetDefault("backup.count", 0)
|
||||
@@ -631,8 +583,7 @@ func setViperDefaults() {
|
||||
viper.SetDefault("inspect.backlogtimeout", consts.RequestThrottleBacklogTimeout)
|
||||
viper.SetDefault("plugins.folder", "")
|
||||
viper.SetDefault("plugins.enabled", false)
|
||||
viper.SetDefault("plugins.cachesize", "200MB")
|
||||
viper.SetDefault("plugins.autoreload", false)
|
||||
viper.SetDefault("plugins.cachesize", "100MB")
|
||||
|
||||
// DevFlags. These are used to enable/disable debugging and incomplete features
|
||||
viper.SetDefault("devlogsourceline", false)
|
||||
@@ -660,7 +611,6 @@ func setViperDefaults() {
|
||||
viper.SetDefault("devplugincompilationtimeout", time.Minute)
|
||||
viper.SetDefault("devexternalartistfetchmultiplier", 1.5)
|
||||
viper.SetDefault("devoptimizedb", true)
|
||||
viper.SetDefault("devpreserveunicodeinexternalcalls", false)
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
||||
@@ -41,9 +41,6 @@ var _ = Describe("Configuration", func() {
|
||||
Expect(conf.Server.Tags["custom"].Aliases).To(Equal([]string{format, "test"}))
|
||||
Expect(conf.Server.Tags["artist"].Split).To(Equal([]string{";"}))
|
||||
|
||||
// Check deprecated option mapping
|
||||
Expect(conf.Server.ExtAuth.UserHeader).To(Equal("X-Auth-User"))
|
||||
|
||||
// The config file used should be the one we created
|
||||
Expect(conf.Server.ConfigFile).To(Equal(filename))
|
||||
},
|
||||
|
||||
conf/testdata/cfg.ini (1 change, vendored)
@@ -1,7 +1,6 @@
|
||||
[default]
|
||||
MusicFolder = /ini/music
|
||||
UIWelcomeMessage = 'Welcome ini' ; Just a comment to test the LoadOptions
|
||||
ReverseProxyUserHeader = 'X-Auth-User'
|
||||
|
||||
[Tags]
|
||||
Custom.Aliases = ini,test
|
||||
|
||||
conf/testdata/cfg.json (1 change, vendored)
@@ -1,7 +1,6 @@
|
||||
{
|
||||
"musicFolder": "/json/music",
|
||||
"uiWelcomeMessage": "Welcome json",
|
||||
"reverseProxyUserHeader": "X-Auth-User",
|
||||
"Tags": {
|
||||
"artist": {
|
||||
"split": ";"
|
||||
|
||||
conf/testdata/cfg.toml (1 change, vendored)
@@ -1,6 +1,5 @@
|
||||
musicFolder = "/toml/music"
|
||||
uiWelcomeMessage = "Welcome toml"
|
||||
ReverseProxyUserHeader = "X-Auth-User"
|
||||
|
||||
Tags.artist.Split = ';'
|
||||
|
||||
|
||||
conf/testdata/cfg.yaml (1 change, vendored)
@@ -1,6 +1,5 @@
|
||||
musicFolder: "/yaml/music"
|
||||
uiWelcomeMessage: "Welcome yaml"
|
||||
reverseProxyUserHeader: "X-Auth-User"
|
||||
Tags:
|
||||
artist:
|
||||
split: [";"]
|
||||
|
||||
@@ -150,8 +150,6 @@ var (
|
||||
}
|
||||
)
|
||||
|
||||
var HTTPUserAgent = "Navidrome" + "/" + Version
|
||||
|
||||
var (
|
||||
VariousArtists = "Various Artists"
|
||||
// TODO This will be dynamic when using disambiguation
|
||||
|
||||
@@ -64,7 +64,6 @@ func (a *Agents) getEnabledAgentNames() []enabledAgent {
|
||||
if a.pluginLoader != nil {
|
||||
availablePlugins = a.pluginLoader.PluginNames("MetadataAgent")
|
||||
}
|
||||
log.Trace("Available MetadataAgent plugins", "plugins", availablePlugins)
|
||||
|
||||
configuredAgents := strings.Split(conf.Server.Agents, ",")
|
||||
|
||||
@@ -355,9 +354,6 @@ func (a *Agents) GetAlbumImages(ctx context.Context, name, artist, mbid string)
|
||||
continue
|
||||
}
|
||||
images, err := retriever.GetAlbumImages(ctx, name, artist, mbid)
|
||||
if err != nil {
|
||||
log.Trace(ctx, "Agent GetAlbumImages failed", "agent", ag.AgentName(), "album", name, "artist", artist, "mbid", mbid, err)
|
||||
}
|
||||
if len(images) > 0 && err == nil {
|
||||
log.Debug(ctx, "Got Album Images", "agent", ag.AgentName(), "album", name, "artist", artist,
|
||||
"mbid", mbid, "elapsed", time.Since(start))
|
||||
|
||||
@@ -43,7 +43,6 @@ func newClient(hc httpDoer, language string) *client {
|
||||
func (c *client) searchArtists(ctx context.Context, name string, limit int) ([]Artist, error) {
|
||||
params := url.Values{}
|
||||
params.Add("q", name)
|
||||
params.Add("order", "RANKING")
|
||||
params.Add("limit", strconv.Itoa(limit))
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", apiBaseURL+"/search/artist", nil)
|
||||
if err != nil {
|
||||
@@ -3,7 +3,6 @@ package deezer
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
@@ -83,20 +82,10 @@ func (s *deezerAgent) searchArtist(ctx context.Context, name string) (*Artist, e
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Trace(ctx, "Artists found", "count", len(artists), "searched_name", name)
|
||||
for i := range artists {
|
||||
log.Trace(ctx, fmt.Sprintf("Artists found #%d", i), "name", artists[i].Name, "id", artists[i].ID, "link", artists[i].Link)
|
||||
if i > 2 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If the first one has the same name, that's the one
|
||||
if !strings.EqualFold(artists[0].Name, name) {
|
||||
log.Trace(ctx, "Top artist do not match", "searched_name", name, "found_name", artists[0].Name)
|
||||
return nil, agents.ErrNotFound
|
||||
}
|
||||
log.Trace(ctx, "Found artist", "name", artists[0].Name, "id", artists[0].ID, "link", artists[0].Link)
|
||||
return &artists[0], err
|
||||
}
|
||||
|
||||
@@ -22,7 +22,6 @@ type AlbumInfo struct {
|
||||
}
|
||||
|
||||
type Artist struct {
|
||||
ID string
|
||||
Name string
|
||||
MBID string
|
||||
}
|
||||
@@ -33,7 +32,6 @@ type ExternalImage struct {
|
||||
}
|
||||
|
||||
type Song struct {
|
||||
ID string
|
||||
Name string
|
||||
MBID string
|
||||
}
|
||||
|
||||
@@ -290,11 +290,11 @@ func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName str
return t.Track, nil
}

func (l *lastfmAgent) getArtistForScrobble(track *model.MediaFile, role model.Role, displayName string) string {
if conf.Server.LastFM.ScrobbleFirstArtistOnly && len(track.Participants[role]) > 0 {
return track.Participants[role][0].Name
func (l *lastfmAgent) getArtistForScrobble(track *model.MediaFile) string {
if conf.Server.LastFM.ScrobbleFirstArtistOnly && len(track.Participants[model.RoleArtist]) > 0 {
return track.Participants[model.RoleArtist][0].Name
}
return displayName
return track.Artist
}
|
||||
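A minimal sketch of the first-artist selection implemented above, with simplified stand-in types (this is not the actual lastfm agent code; firstOnly mirrors conf.Server.LastFM.ScrobbleFirstArtistOnly):

type participant struct{ Name string }

func artistForScrobble(participants []participant, fallback string, firstOnly bool) string {
	// When the option is enabled and the track has credited participants,
	// only the first one is reported to the scrobbler.
	if firstOnly && len(participants) > 0 {
		return participants[0].Name
	}
	// Otherwise the display name stored on the track is used as-is.
	return fallback
}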
func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
|
||||
@@ -304,13 +304,13 @@ func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *mode
|
||||
}
|
||||
|
||||
err = l.client.updateNowPlaying(ctx, sk, ScrobbleInfo{
|
||||
artist: l.getArtistForScrobble(track, model.RoleArtist, track.Artist),
|
||||
artist: l.getArtistForScrobble(track),
|
||||
track: track.Title,
|
||||
album: track.Album,
|
||||
trackNumber: track.TrackNumber,
|
||||
mbid: track.MbzRecordingID,
|
||||
duration: int(track.Duration),
|
||||
albumArtist: l.getArtistForScrobble(track, model.RoleAlbumArtist, track.AlbumArtist),
|
||||
albumArtist: track.AlbumArtist,
|
||||
})
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Last.fm client.updateNowPlaying returned error", "track", track.Title, err)
|
||||
@@ -330,13 +330,13 @@ func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.S
|
||||
return nil
|
||||
}
|
||||
err = l.client.scrobble(ctx, sk, ScrobbleInfo{
|
||||
artist: l.getArtistForScrobble(&s.MediaFile, model.RoleArtist, s.Artist),
|
||||
artist: l.getArtistForScrobble(&s.MediaFile),
|
||||
track: s.Title,
|
||||
album: s.Album,
|
||||
trackNumber: s.TrackNumber,
|
||||
mbid: s.MbzRecordingID,
|
||||
duration: int(s.Duration),
|
||||
albumArtist: l.getArtistForScrobble(&s.MediaFile, model.RoleAlbumArtist, s.AlbumArtist),
|
||||
albumArtist: s.AlbumArtist,
|
||||
timestamp: s.TimeStamp,
|
||||
})
|
||||
if err == nil {
|
||||
@@ -201,10 +201,6 @@ var _ = Describe("lastfmAgent", func() {
|
||||
{Artist: model.Artist{ID: "ar-1", Name: "First Artist"}},
|
||||
{Artist: model.Artist{ID: "ar-2", Name: "Second Artist"}},
|
||||
},
|
||||
model.RoleAlbumArtist: []model.Participant{
|
||||
{Artist: model.Artist{ID: "ar-1", Name: "First Album Artist"}},
|
||||
{Artist: model.Artist{ID: "ar-2", Name: "Second Album Artist"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
})
|
||||
@@ -233,23 +229,6 @@ var _ = Describe("lastfmAgent", func() {
|
||||
err := agent.NowPlaying(ctx, "user-2", track, 0)
|
||||
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
|
||||
})
|
||||
|
||||
When("ScrobbleFirstArtistOnly is true", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.LastFM.ScrobbleFirstArtistOnly = true
|
||||
})
|
||||
|
||||
It("uses only the first artist", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
|
||||
|
||||
err := agent.NowPlaying(ctx, "user-1", track, 0)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
sentParams := httpClient.SavedRequest.URL.Query()
|
||||
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
||||
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("scrobble", func() {
|
||||
@@ -288,7 +267,6 @@ var _ = Describe("lastfmAgent", func() {
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
sentParams := httpClient.SavedRequest.URL.Query()
|
||||
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
|
||||
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -182,7 +182,6 @@ func fromAlbumExternalSource(ctx context.Context, al model.Album, provider exter
|
||||
func fromURL(ctx context.Context, imageUrl *url.URL) (io.ReadCloser, string, error) {
|
||||
hc := http.Client{Timeout: 5 * time.Second}
|
||||
req, _ := http.NewRequestWithContext(ctx, http.MethodGet, imageUrl.String(), nil)
|
||||
req.Header.Set("User-Agent", consts.HTTPUserAgent)
|
||||
resp, err := hc.Do(req)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
|
||||
core/external/provider.go (288 changes, vendored)
@@ -12,6 +12,10 @@ import (
|
||||
"github.com/Masterminds/squirrel"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
_ "github.com/navidrome/navidrome/core/agents/deezer"
|
||||
_ "github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
_ "github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
_ "github.com/navidrome/navidrome/core/agents/spotify"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
@@ -47,28 +51,12 @@ type provider struct {
|
||||
|
||||
type auxAlbum struct {
|
||||
model.Album
|
||||
}
|
||||
|
||||
// Name returns the appropriate album name for external API calls
|
||||
// based on the DevPreserveUnicodeInExternalCalls configuration option
|
||||
func (a *auxAlbum) Name() string {
|
||||
if conf.Server.DevPreserveUnicodeInExternalCalls {
|
||||
return a.Album.Name
|
||||
}
|
||||
return str.Clear(a.Album.Name)
|
||||
Name string
|
||||
}
|
||||
|
||||
type auxArtist struct {
|
||||
model.Artist
|
||||
}
|
||||
|
||||
// Name returns the appropriate artist name for external API calls
|
||||
// based on the DevPreserveUnicodeInExternalCalls configuration option
|
||||
func (a *auxArtist) Name() string {
|
||||
if conf.Server.DevPreserveUnicodeInExternalCalls {
|
||||
return a.Artist.Name
|
||||
}
|
||||
return str.Clear(a.Artist.Name)
|
||||
Name string
|
||||
}
|
||||
|
||||
type Agents interface {
|
||||
@@ -100,6 +88,7 @@ func (e *provider) getAlbum(ctx context.Context, id string) (auxAlbum, error) {
|
||||
switch v := entity.(type) {
|
||||
case *model.Album:
|
||||
album.Album = *v
|
||||
album.Name = str.Clear(v.Name)
|
||||
case *model.MediaFile:
|
||||
return e.getAlbum(ctx, v.AlbumID)
|
||||
default:
|
||||
@@ -117,9 +106,8 @@ func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album
|
||||
}
|
||||
|
||||
updatedAt := V(album.ExternalInfoUpdatedAt)
|
||||
albumName := album.Name()
|
||||
if updatedAt.IsZero() {
|
||||
log.Debug(ctx, "AlbumInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", albumName)
|
||||
log.Debug(ctx, "AlbumInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", album.Name)
|
||||
album, err = e.populateAlbumInfo(ctx, album)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -128,7 +116,7 @@ func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album
|
||||
|
||||
// If info is expired, trigger a populateAlbumInfo in the background
|
||||
if time.Since(updatedAt) > conf.Server.DevAlbumInfoTimeToLive {
|
||||
log.Debug("Found expired cached AlbumInfo, refreshing in the background", "updatedAt", album.ExternalInfoUpdatedAt, "name", albumName)
|
||||
log.Debug("Found expired cached AlbumInfo, refreshing in the background", "updatedAt", album.ExternalInfoUpdatedAt, "name", album.Name)
|
||||
e.albumQueue.enqueue(&album)
|
||||
}
|
||||
|
||||
@@ -137,13 +125,12 @@ func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album
|
||||
|
||||
func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAlbum, error) {
|
||||
start := time.Now()
|
||||
albumName := album.Name()
|
||||
info, err := e.ag.GetAlbumInfo(ctx, albumName, album.AlbumArtist, album.MbzAlbumID)
|
||||
info, err := e.ag.GetAlbumInfo(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
return album, nil
|
||||
}
|
||||
if err != nil {
|
||||
log.Error("Error refreshing AlbumInfo", "id", album.ID, "name", albumName, "artist", album.AlbumArtist,
|
||||
log.Error("Error refreshing AlbumInfo", "id", album.ID, "name", album.Name, "artist", album.AlbumArtist,
|
||||
"elapsed", time.Since(start), err)
|
||||
return album, err
|
||||
}
|
||||
@@ -155,7 +142,7 @@ func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAl
|
||||
album.Description = info.Description
|
||||
}
|
||||
|
||||
images, err := e.ag.GetAlbumImages(ctx, albumName, album.AlbumArtist, album.MbzAlbumID)
|
||||
images, err := e.ag.GetAlbumImages(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if err == nil && len(images) > 0 {
|
||||
sort.Slice(images, func(i, j int) bool {
|
||||
return images[i].Size > images[j].Size
|
||||
@@ -174,7 +161,7 @@ func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAl
|
||||
|
||||
err = e.ds.Album(ctx).UpdateExternalInfo(&album.Album)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error trying to update album external information", "id", album.ID, "name", albumName,
|
||||
log.Error(ctx, "Error trying to update album external information", "id", album.ID, "name", album.Name,
|
||||
"elapsed", time.Since(start), err)
|
||||
} else {
|
||||
log.Trace(ctx, "AlbumInfo collected", "album", album, "elapsed", time.Since(start))
|
||||
@@ -194,6 +181,7 @@ func (e *provider) getArtist(ctx context.Context, id string) (auxArtist, error)
|
||||
switch v := entity.(type) {
|
||||
case *model.Artist:
|
||||
artist.Artist = *v
|
||||
artist.Name = str.Clear(v.Name)
|
||||
case *model.MediaFile:
|
||||
return e.getArtist(ctx, v.ArtistID)
|
||||
case *model.Album:
|
||||
@@ -222,9 +210,8 @@ func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist,
|
||||
|
||||
// If we don't have any info, retrieves it now
|
||||
updatedAt := V(artist.ExternalInfoUpdatedAt)
|
||||
artistName := artist.Name()
|
||||
if updatedAt.IsZero() {
|
||||
log.Debug(ctx, "ArtistInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", artistName)
|
||||
log.Debug(ctx, "ArtistInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", artist.Name)
|
||||
artist, err = e.populateArtistInfo(ctx, artist)
|
||||
if err != nil {
|
||||
return auxArtist{}, err
|
||||
@@ -233,7 +220,7 @@ func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist,
|
||||
|
||||
// If info is expired, trigger a populateArtistInfo in the background
|
||||
if time.Since(updatedAt) > conf.Server.DevArtistInfoTimeToLive {
|
||||
log.Debug("Found expired cached ArtistInfo, refreshing in the background", "updatedAt", updatedAt, "name", artistName)
|
||||
log.Debug("Found expired cached ArtistInfo, refreshing in the background", "updatedAt", updatedAt, "name", artist.Name)
|
||||
e.artistQueue.enqueue(&artist)
|
||||
}
|
||||
return artist, nil
|
||||
@@ -242,9 +229,8 @@ func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist,
|
||||
func (e *provider) populateArtistInfo(ctx context.Context, artist auxArtist) (auxArtist, error) {
|
||||
start := time.Now()
|
||||
// Get MBID first, if it is not yet available
|
||||
artistName := artist.Name()
|
||||
if artist.MbzArtistID == "" {
|
||||
mbid, err := e.ag.GetArtistMBID(ctx, artist.ID, artistName)
|
||||
mbid, err := e.ag.GetArtistMBID(ctx, artist.ID, artist.Name)
|
||||
if mbid != "" && err == nil {
|
||||
artist.MbzArtistID = mbid
|
||||
}
|
||||
@@ -260,14 +246,14 @@ func (e *provider) populateArtistInfo(ctx context.Context, artist auxArtist) (au
|
||||
_ = g.Wait()
|
||||
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "ArtistInfo update canceled", "id", artist.ID, "name", artistName, "elapsed", time.Since(start), ctx.Err())
|
||||
log.Warn(ctx, "ArtistInfo update canceled", "elapsed", "id", artist.ID, "name", artist.Name, time.Since(start), ctx.Err())
|
||||
return artist, ctx.Err()
|
||||
}
|
||||
|
||||
artist.ExternalInfoUpdatedAt = P(time.Now())
|
||||
err := e.ds.Artist(ctx).UpdateExternalInfo(&artist.Artist)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error trying to update artist external information", "id", artist.ID, "name", artistName,
|
||||
log.Error(ctx, "Error trying to update artist external information", "id", artist.ID, "name", artist.Name,
|
||||
"elapsed", time.Since(start), err)
|
||||
} else {
|
||||
log.Trace(ctx, "ArtistInfo collected", "artist", artist, "elapsed", time.Since(start))
|
||||
@@ -295,7 +281,7 @@ func (e *provider) ArtistRadio(ctx context.Context, id string, count int) (model
|
||||
}
|
||||
|
||||
topCount := max(count, 20)
|
||||
topSongs, err := e.getMatchingTopSongs(ctx, e.ag, &auxArtist{Artist: a}, topCount)
|
||||
topSongs, err := e.getMatchingTopSongs(ctx, e.ag, &auxArtist{Name: a.Name, Artist: a}, topCount)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Error getting artist's top songs", "artist", a.Name, err)
|
||||
return nil
|
||||
@@ -358,23 +344,22 @@ func (e *provider) AlbumImage(ctx context.Context, id string) (*url.URL, error)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
albumName := album.Name()
|
||||
images, err := e.ag.GetAlbumImages(ctx, albumName, album.AlbumArtist, album.MbzAlbumID)
|
||||
images, err := e.ag.GetAlbumImages(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if err != nil {
|
||||
switch {
|
||||
case errors.Is(err, agents.ErrNotFound):
|
||||
log.Trace(ctx, "Album not found in agent", "albumID", id, "name", albumName, "artist", album.AlbumArtist)
|
||||
log.Trace(ctx, "Album not found in agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
|
||||
return nil, model.ErrNotFound
|
||||
case errors.Is(err, context.Canceled):
|
||||
log.Debug(ctx, "GetAlbumImages call canceled", err)
|
||||
default:
|
||||
log.Warn(ctx, "Error getting album images from agent", "albumID", id, "name", albumName, "artist", album.AlbumArtist, err)
|
||||
log.Warn(ctx, "Error getting album images from agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist, err)
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(images) == 0 {
|
||||
log.Warn(ctx, "Agent returned no images without error", "albumID", id, "name", albumName, "artist", album.AlbumArtist)
|
||||
log.Warn(ctx, "Agent returned no images without error", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
|
||||
return nil, model.ErrNotFound
|
||||
}
|
||||
|
||||
@@ -416,32 +401,27 @@ func (e *provider) TopSongs(ctx context.Context, artistName string, count int) (
|
||||
}
|
||||
|
||||
func (e *provider) getMatchingTopSongs(ctx context.Context, agent agents.ArtistTopSongsRetriever, artist *auxArtist, count int) (model.MediaFiles, error) {
|
||||
artistName := artist.Name()
|
||||
songs, err := agent.GetArtistTopSongs(ctx, artist.ID, artistName, artist.MbzArtistID, count)
|
||||
songs, err := agent.GetArtistTopSongs(ctx, artist.ID, artist.Name, artist.MbzArtistID, count)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get top songs for artist %s: %w", artistName, err)
|
||||
return nil, fmt.Errorf("failed to get top songs for artist %s: %w", artist.Name, err)
|
||||
}
|
||||
|
||||
idMatches, err := e.loadTracksByID(ctx, songs)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load tracks by ID: %w", err)
|
||||
}
|
||||
mbidMatches, err := e.loadTracksByMBID(ctx, songs)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load tracks by MBID: %w", err)
|
||||
}
|
||||
titleMatches, err := e.loadTracksByTitle(ctx, songs, artist, idMatches, mbidMatches)
|
||||
titleMatches, err := e.loadTracksByTitle(ctx, songs, artist, mbidMatches)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load tracks by title: %w", err)
|
||||
}
|
||||
|
||||
log.Trace(ctx, "Top Songs loaded", "name", artistName, "numSongs", len(songs), "numIDMatches", len(idMatches), "numMBIDMatches", len(mbidMatches), "numTitleMatches", len(titleMatches))
|
||||
mfs := e.selectTopSongs(songs, idMatches, mbidMatches, titleMatches, count)
|
||||
log.Trace(ctx, "Top Songs loaded", "name", artist.Name, "numSongs", len(songs), "numMBIDMatches", len(mbidMatches), "numTitleMatches", len(titleMatches))
|
||||
mfs := e.selectTopSongs(songs, mbidMatches, titleMatches, count)
|
||||
|
||||
if len(mfs) == 0 {
|
||||
log.Debug(ctx, "No matching top songs found", "name", artistName)
|
||||
log.Debug(ctx, "No matching top songs found", "name", artist.Name)
|
||||
} else {
|
||||
log.Debug(ctx, "Found matching top songs", "name", artistName, "numSongs", len(mfs))
|
||||
log.Debug(ctx, "Found matching top songs", "name", artist.Name, "numSongs", len(mfs))
|
||||
}
|
||||
|
||||
return mfs, nil
|
||||
@@ -477,41 +457,9 @@ func (e *provider) loadTracksByMBID(ctx context.Context, songs []agents.Song) (m
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func (e *provider) loadTracksByID(ctx context.Context, songs []agents.Song) (map[string]model.MediaFile, error) {
|
||||
var ids []string
|
||||
for _, s := range songs {
|
||||
if s.ID != "" {
|
||||
ids = append(ids, s.ID)
|
||||
}
|
||||
}
|
||||
matches := map[string]model.MediaFile{}
|
||||
if len(ids) == 0 {
|
||||
return matches, nil
|
||||
}
|
||||
res, err := e.ds.MediaFile(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.And{
|
||||
squirrel.Eq{"media_file.id": ids},
|
||||
squirrel.Eq{"missing": false},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return matches, err
|
||||
}
|
||||
for _, mf := range res {
|
||||
if _, ok := matches[mf.ID]; !ok {
|
||||
matches[mf.ID] = mf
|
||||
}
|
||||
}
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func (e *provider) loadTracksByTitle(ctx context.Context, songs []agents.Song, artist *auxArtist, idMatches, mbidMatches map[string]model.MediaFile) (map[string]model.MediaFile, error) {
|
||||
func (e *provider) loadTracksByTitle(ctx context.Context, songs []agents.Song, artist *auxArtist, mbidMatches map[string]model.MediaFile) (map[string]model.MediaFile, error) {
|
||||
titleMap := map[string]string{}
|
||||
for _, s := range songs {
|
||||
// Skip if already matched by ID or MBID
|
||||
if s.ID != "" && idMatches[s.ID].ID != "" {
|
||||
continue
|
||||
}
|
||||
if s.MBID != "" && mbidMatches[s.MBID].ID != "" {
|
||||
continue
|
||||
}
|
||||
@@ -550,27 +498,18 @@ func (e *provider) loadTracksByTitle(ctx context.Context, songs []agents.Song, a
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func (e *provider) selectTopSongs(songs []agents.Song, byID, byMBID, byTitle map[string]model.MediaFile, count int) model.MediaFiles {
func (e *provider) selectTopSongs(songs []agents.Song, byMBID, byTitle map[string]model.MediaFile, count int) model.MediaFiles {
var mfs model.MediaFiles
for _, t := range songs {
if len(mfs) == count {
break
}
// Try ID match first
if t.ID != "" {
if mf, ok := byID[t.ID]; ok {
mfs = append(mfs, mf)
continue
}
}
// Try MBID match second
if t.MBID != "" {
if mf, ok := byMBID[t.MBID]; ok {
mfs = append(mfs, mf)
continue
}
}
// Fall back to title match
if mf, ok := byTitle[str.SanitizeFieldForSorting(t.Name)]; ok {
mfs = append(mfs, mf)
}
|
||||
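A minimal sketch of the match-priority idea in the loop above: an MBID hit wins, otherwise fall back to a sanitized-title match. The string maps are simplified stand-ins for the MediaFile lookups used in the real code:

func pickMatch(mbid, titleKey string, byMBID, byTitle map[string]string) (string, bool) {
	if mbid != "" {
		if id, ok := byMBID[mbid]; ok {
			return id, true // MBID match takes priority
		}
	}
	if id, ok := byTitle[titleKey]; ok {
		return id, true // fall back to the title match
	}
	return "", false // no local track corresponds to this top song
}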
@@ -579,7 +518,7 @@ func (e *provider) selectTopSongs(songs []agents.Song, byID, byMBID, byTitle map
|
||||
}
|
||||
|
||||
func (e *provider) callGetURL(ctx context.Context, agent agents.ArtistURLRetriever, artist *auxArtist) {
|
||||
artisURL, err := agent.GetArtistURL(ctx, artist.ID, artist.Name(), artist.MbzArtistID)
|
||||
artisURL, err := agent.GetArtistURL(ctx, artist.ID, artist.Name, artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
@@ -587,7 +526,7 @@ func (e *provider) callGetURL(ctx context.Context, agent agents.ArtistURLRetriev
|
||||
}
|
||||
|
||||
func (e *provider) callGetBiography(ctx context.Context, agent agents.ArtistBiographyRetriever, artist *auxArtist) {
|
||||
bio, err := agent.GetArtistBiography(ctx, artist.ID, artist.Name(), artist.MbzArtistID)
|
||||
bio, err := agent.GetArtistBiography(ctx, artist.ID, str.Clear(artist.Name), artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
@@ -597,7 +536,7 @@ func (e *provider) callGetBiography(ctx context.Context, agent agents.ArtistBiog
|
||||
}
|
||||
|
||||
func (e *provider) callGetImage(ctx context.Context, agent agents.ArtistImageRetriever, artist *auxArtist) {
|
||||
images, err := agent.GetArtistImages(ctx, artist.ID, artist.Name(), artist.MbzArtistID)
|
||||
images, err := agent.GetArtistImages(ctx, artist.ID, artist.Name, artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
@@ -616,14 +555,13 @@ func (e *provider) callGetImage(ctx context.Context, agent agents.ArtistImageRet
|
||||
|
||||
func (e *provider) callGetSimilar(ctx context.Context, agent agents.ArtistSimilarRetriever, artist *auxArtist,
|
||||
limit int, includeNotPresent bool) {
|
||||
artistName := artist.Name()
|
||||
similar, err := agent.GetSimilarArtists(ctx, artist.ID, artistName, artist.MbzArtistID, limit)
|
||||
similar, err := agent.GetSimilarArtists(ctx, artist.ID, artist.Name, artist.MbzArtistID, limit)
|
||||
if len(similar) == 0 || err != nil {
|
||||
return
|
||||
}
|
||||
start := time.Now()
|
||||
sa, err := e.mapSimilarArtists(ctx, similar, limit, includeNotPresent)
|
||||
log.Debug(ctx, "Mapped Similar Artists", "artist", artistName, "numSimilar", len(sa), "elapsed", time.Since(start))
|
||||
log.Debug(ctx, "Mapped Similar Artists", "artist", artist.Name, "numSimilar", len(sa), "elapsed", time.Since(start))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
@@ -634,51 +572,36 @@ func (e *provider) mapSimilarArtists(ctx context.Context, similar []agents.Artis
|
||||
var result model.Artists
|
||||
var notPresent []string
|
||||
|
||||
// Load artists by ID (highest priority)
|
||||
idMatches, err := e.loadArtistsByID(ctx, similar)
|
||||
artistNames := slice.Map(similar, func(artist agents.Artist) string { return artist.Name })
|
||||
|
||||
// Query all artists at once
|
||||
clauses := slice.Map(artistNames, func(name string) squirrel.Sqlizer {
|
||||
return squirrel.Like{"artist.name": name}
|
||||
})
|
||||
artists, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Or(clauses),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Load artists by MBID (second priority)
|
||||
mbidMatches, err := e.loadArtistsByMBID(ctx, similar, idMatches)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Load artists by name (lowest priority, fallback)
|
||||
nameMatches, err := e.loadArtistsByName(ctx, similar, idMatches, mbidMatches)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
// Create a map for quick lookup
|
||||
artistMap := make(map[string]model.Artist)
|
||||
for _, artist := range artists {
|
||||
artistMap[artist.Name] = artist
|
||||
}
|
||||
|
||||
count := 0
|
||||
|
||||
// Process the similar artists using priority: ID → MBID → Name
|
||||
// Process the similar artists
|
||||
for _, s := range similar {
|
||||
if count >= limit {
|
||||
break
|
||||
}
|
||||
// Try ID match first
|
||||
if s.ID != "" {
|
||||
if artist, found := idMatches[s.ID]; found {
|
||||
result = append(result, artist)
|
||||
count++
|
||||
continue
|
||||
}
|
||||
}
|
||||
// Try MBID match second
|
||||
if s.MBID != "" {
|
||||
if artist, found := mbidMatches[s.MBID]; found {
|
||||
result = append(result, artist)
|
||||
count++
|
||||
continue
|
||||
}
|
||||
}
|
||||
// Fall back to name match
|
||||
if artist, found := nameMatches[s.Name]; found {
|
||||
if artist, found := artistMap[s.Name]; found {
|
||||
result = append(result, artist)
|
||||
count++
|
||||
|
||||
if count >= limit {
|
||||
break
|
||||
}
|
||||
} else {
|
||||
notPresent = append(notPresent, s.Name)
|
||||
}
|
||||
@@ -701,95 +624,6 @@ func (e *provider) mapSimilarArtists(ctx context.Context, similar []agents.Artis
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (e *provider) loadArtistsByID(ctx context.Context, similar []agents.Artist) (map[string]model.Artist, error) {
|
||||
var ids []string
|
||||
for _, s := range similar {
|
||||
if s.ID != "" {
|
||||
ids = append(ids, s.ID)
|
||||
}
|
||||
}
|
||||
matches := map[string]model.Artist{}
|
||||
if len(ids) == 0 {
|
||||
return matches, nil
|
||||
}
|
||||
res, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Eq{"artist.id": ids},
|
||||
})
|
||||
if err != nil {
|
||||
return matches, err
|
||||
}
|
||||
for _, a := range res {
|
||||
if _, ok := matches[a.ID]; !ok {
|
||||
matches[a.ID] = a
|
||||
}
|
||||
}
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func (e *provider) loadArtistsByMBID(ctx context.Context, similar []agents.Artist, idMatches map[string]model.Artist) (map[string]model.Artist, error) {
|
||||
var mbids []string
|
||||
for _, s := range similar {
|
||||
// Skip if already matched by ID
|
||||
if s.ID != "" && idMatches[s.ID].ID != "" {
|
||||
continue
|
||||
}
|
||||
if s.MBID != "" {
|
||||
mbids = append(mbids, s.MBID)
|
||||
}
|
||||
}
|
||||
matches := map[string]model.Artist{}
|
||||
if len(mbids) == 0 {
|
||||
return matches, nil
|
||||
}
|
||||
res, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Eq{"mbz_artist_id": mbids},
|
||||
})
|
||||
if err != nil {
|
||||
return matches, err
|
||||
}
|
||||
for _, a := range res {
|
||||
if id := a.MbzArtistID; id != "" {
|
||||
if _, ok := matches[id]; !ok {
|
||||
matches[id] = a
|
||||
}
|
||||
}
|
||||
}
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func (e *provider) loadArtistsByName(ctx context.Context, similar []agents.Artist, idMatches map[string]model.Artist, mbidMatches map[string]model.Artist) (map[string]model.Artist, error) {
|
||||
var names []string
|
||||
for _, s := range similar {
|
||||
// Skip if already matched by ID or MBID
|
||||
if s.ID != "" && idMatches[s.ID].ID != "" {
|
||||
continue
|
||||
}
|
||||
if s.MBID != "" && mbidMatches[s.MBID].ID != "" {
|
||||
continue
|
||||
}
|
||||
names = append(names, s.Name)
|
||||
}
|
||||
matches := map[string]model.Artist{}
|
||||
if len(names) == 0 {
|
||||
return matches, nil
|
||||
}
|
||||
clauses := slice.Map(names, func(name string) squirrel.Sqlizer {
|
||||
return squirrel.Like{"artist.name": name}
|
||||
})
|
||||
res, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Or(clauses),
|
||||
})
|
||||
if err != nil {
|
||||
return matches, err
|
||||
}
|
||||
for _, a := range res {
|
||||
if _, ok := matches[a.Name]; !ok {
|
||||
matches[a.Name] = a
|
||||
}
|
||||
}
|
||||
return matches, nil
|
||||
}
|
||||
|
||||
func (e *provider) findArtistByName(ctx context.Context, artistName string) (*auxArtist, error) {
|
||||
artists, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Like{"artist.name": artistName},
|
||||
@@ -801,7 +635,11 @@ func (e *provider) findArtistByName(ctx context.Context, artistName string) (*au
|
||||
if len(artists) == 0 {
|
||||
return nil, model.ErrNotFound
|
||||
}
|
||||
return &auxArtist{Artist: artists[0]}, nil
|
||||
artist := &auxArtist{
|
||||
Artist: artists[0],
|
||||
Name: str.Clear(artists[0].Name),
|
||||
}
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *provider) loadSimilar(ctx context.Context, artist *auxArtist, count int, includeNotPresent bool) error {
|
||||
@@ -817,7 +655,7 @@ func (e *provider) loadSimilar(ctx context.Context, artist *auxArtist, count int
|
||||
Filters: squirrel.Eq{"artist.id": ids},
|
||||
})
|
||||
if err != nil {
|
||||
log.Error("Error loading similar artists", "id", artist.ID, "name", artist.Name(), err)
|
||||
log.Error("Error loading similar artists", "id", artist.ID, "name", artist.Name, err)
|
||||
return err
|
||||
}
|
||||
|
||||
|
||||
core/external/provider_albumimage_test.go (63 changes, vendored)
@@ -260,69 +260,6 @@ var _ = Describe("Provider - AlbumImage", func() {
|
||||
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "not-found")
|
||||
mockAlbumAgent.AssertNotCalled(GinkgoT(), "GetAlbumImages", mock.Anything, mock.Anything, mock.Anything)
|
||||
})
|
||||
|
||||
Context("Unicode handling in album names", func() {
|
||||
var albumWithEnDash *model.Album
|
||||
var expectedURL *url.URL
|
||||
|
||||
const (
|
||||
originalAlbumName = "Raising Hell–Deluxe" // Album name with en dash
|
||||
normalizedAlbumName = "Raising Hell-Deluxe" // Normalized version with hyphen
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
// Test with en dash (–) in album name
|
||||
albumWithEnDash = &model.Album{ID: "album-endash", Name: originalAlbumName, AlbumArtistID: "artist-1"}
|
||||
mockArtistRepo.Mock = mock.Mock{} // Reset default expectations
|
||||
mockAlbumRepo.Mock = mock.Mock{} // Reset default expectations
|
||||
mockArtistRepo.On("Get", "album-endash").Return(nil, model.ErrNotFound).Once()
|
||||
mockAlbumRepo.On("Get", "album-endash").Return(albumWithEnDash, nil).Once()
|
||||
|
||||
expectedURL, _ = url.Parse("http://example.com/album.jpg")
|
||||
|
||||
// Mock the album agent to return an image for the album
|
||||
mockAlbumAgent.On("GetAlbumImages", ctx, mock.AnythingOfType("string"), "", "").
|
||||
Return([]agents.ExternalImage{
|
||||
{URL: "http://example.com/album.jpg", Size: 1000},
|
||||
}, nil).Once()
|
||||
})
|
||||
|
||||
When("DevPreserveUnicodeInExternalCalls is true", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.DevPreserveUnicodeInExternalCalls = true
|
||||
})
|
||||
|
||||
It("preserves Unicode characters in album names", func() {
|
||||
// Act
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-endash")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-endash")
|
||||
// This is the key assertion: ensure the original Unicode name is used
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, originalAlbumName, "", "")
|
||||
})
|
||||
})
|
||||
|
||||
When("DevPreserveUnicodeInExternalCalls is false", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.DevPreserveUnicodeInExternalCalls = false
|
||||
})
|
||||
|
||||
It("normalizes Unicode characters", func() {
|
||||
// Act
|
||||
imgURL, err := provider.AlbumImage(ctx, "album-endash")
|
||||
|
||||
// Assert
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(imgURL).To(Equal(expectedURL))
|
||||
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-endash")
|
||||
// This assertion ensures the normalized name is used (en dash → hyphen)
|
||||
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, normalizedAlbumName, "", "")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
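The assertions above depend on the Unicode normalization applied to names before external calls. A toy stand-in for that behavior (illustrative only; the project uses str.Clear, whose rules may be broader than this single replacement):

import "strings"

// normalizeForExternalCall maps an en dash to a plain hyphen, which is the specific
// case these tests assert when DevPreserveUnicodeInExternalCalls is false.
func normalizeForExternalCall(name string) string {
	return strings.ReplaceAll(name, "\u2013", "-")
}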
// mockAlbumInfoAgent implementation
|
||||
|
||||
core/external/provider_artistimage_test.go (61 changes, vendored)
@@ -265,67 +265,6 @@ var _ = Describe("Provider - ArtistImage", func() {
			mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
			mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
		})

		Context("Unicode handling in artist names", func() {
			var artistWithEnDash *model.Artist
			var expectedURL *url.URL

			const (
				originalArtistName   = "Run–D.M.C." // Artist name with en dash
				normalizedArtistName = "Run-D.M.C." // Normalized version with hyphen
			)

			BeforeEach(func() {
				// Test with en dash (–) in artist name like "Run–D.M.C."
				artistWithEnDash = &model.Artist{ID: "artist-endash", Name: originalArtistName}
				mockArtistRepo.Mock = mock.Mock{} // Reset default expectations
				mockArtistRepo.On("Get", "artist-endash").Return(artistWithEnDash, nil).Once()

				expectedURL, _ = url.Parse("http://example.com/rundmc.jpg")

				// Mock the image agent to return an image for the artist
				mockImageAgent.On("GetArtistImages", ctx, "artist-endash", mock.AnythingOfType("string"), "").
					Return([]agents.ExternalImage{
						{URL: "http://example.com/rundmc.jpg", Size: 1000},
					}, nil).Once()
			})

			When("DevPreserveUnicodeInExternalCalls is true", func() {
				BeforeEach(func() {
					conf.Server.DevPreserveUnicodeInExternalCalls = true
				})
				It("preserves Unicode characters in artist names", func() {
					// Act
					imgURL, err := provider.ArtistImage(ctx, "artist-endash")

					// Assert
					Expect(err).ToNot(HaveOccurred())
					Expect(imgURL).To(Equal(expectedURL))
					mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-endash")
					// This is the key assertion: ensure the original Unicode name is used
					mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-endash", originalArtistName, "")
				})
			})

			When("DevPreserveUnicodeInExternalCalls is false", func() {
				BeforeEach(func() {
					conf.Server.DevPreserveUnicodeInExternalCalls = false
				})

				It("normalizes Unicode characters", func() {
					// Act
					imgURL, err := provider.ArtistImage(ctx, "artist-endash")

					// Assert
					Expect(err).ToNot(HaveOccurred())
					Expect(imgURL).To(Equal(expectedURL))
					mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-endash")
					// This assertion ensures the normalized name is used (en dash → hyphen)
					mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-endash", normalizedArtistName, "")
				})
			})
		})
	})

// mockArtistImageAgent implementation using testify/mock
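For reference, a minimal sketch of a testify-based image agent mock like the one referenced above (the package name, method signature, and interface it satisfies are inferred from the calls in these tests, not copied from the real helper):

package external_test

import (
	"context"

	"github.com/navidrome/navidrome/core/agents"
	"github.com/stretchr/testify/mock"
)

// mockArtistImageAgent records calls and returns whatever the test configured with On(...).
type mockArtistImageAgent struct {
	mock.Mock
}

func (m *mockArtistImageAgent) GetArtistImages(ctx context.Context, id, name, mbid string) ([]agents.ExternalImage, error) {
	args := m.Called(ctx, id, name, mbid)
	if imgs, ok := args.Get(0).([]agents.ExternalImage); ok {
		return imgs, args.Error(1)
	}
	return nil, args.Error(1)
}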
11 core/external/provider_artistradio_test.go (vendored)
@@ -4,7 +4,6 @@ import (
	"context"
	"errors"

	"github.com/Masterminds/squirrel"
	"github.com/navidrome/navidrome/core/agents"
	. "github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/model"
@@ -68,16 +67,8 @@ var _ = Describe("Provider - ArtistRadio", func() {
			mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
				Return(similarAgentsResp, nil).Once()

			// Mock the three-phase artist lookup: ID (skipped - no IDs), MBID, then Name
			// MBID lookup returns empty (no match)
			artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
				_, ok := opt.Filters.(squirrel.Eq)
				return opt.Max == 0 && ok
			})).Return(model.Artists{}, nil).Once()
			// Name lookup returns the similar artist
			artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
				_, ok := opt.Filters.(squirrel.Or)
				return opt.Max == 0 && ok
				return opt.Max == 0 && opt.Filters != nil
			})).Return(model.Artists{similarArtist}, nil).Once()

			mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
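The two GetAll expectations above are distinguished purely by the concrete type of the query filter, which is what mock.MatchedBy inspects. A condensed, illustrative sketch of that pattern (helper names are made up for this note):

package external_test

import (
	"github.com/Masterminds/squirrel"
	"github.com/navidrome/navidrome/model"
	"github.com/stretchr/testify/mock"
)

// filterOfType builds a testify argument matcher that accepts a GetAll call only
// when its QueryOptions carry a filter of the given kind. This is how an MBID
// lookup (squirrel.Eq) and a name lookup (squirrel.Or) can be told apart.
func filterOfType(isMatch func(any) bool) any {
	return mock.MatchedBy(func(opt model.QueryOptions) bool {
		return isMatch(opt.Filters)
	})
}

func isEq(f any) bool { _, ok := f.(squirrel.Eq); return ok }
func isOr(f any) bool { _, ok := f.(squirrel.Or); return ok }

// Usage inside a test:
//   artistRepo.On("GetAll", filterOfType(isEq)).Return(model.Artists{}, nil).Once()
//   artistRepo.On("GetAll", filterOfType(isOr)).Return(model.Artists{similarArtist}, nil).Once()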
62 core/external/provider_topsongs_test.go (vendored)
@@ -4,10 +4,10 @@ import (
	"context"
	"errors"

	_ "github.com/navidrome/navidrome/adapters/lastfm"
	_ "github.com/navidrome/navidrome/adapters/listenbrainz"
	_ "github.com/navidrome/navidrome/adapters/spotify"
	"github.com/navidrome/navidrome/core/agents"
	_ "github.com/navidrome/navidrome/core/agents/lastfm"
	_ "github.com/navidrome/navidrome/core/agents/listenbrainz"
	_ "github.com/navidrome/navidrome/core/agents/spotify"
	. "github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
@@ -271,60 +271,4 @@ var _ = Describe("Provider - TopSongs", func() {
		ag.AssertExpectations(GinkgoT())
		mediaFileRepo.AssertExpectations(GinkgoT())
	})

	It("matches songs by ID first when agent provides IDs", func() {
		// Mock finding the artist
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
		artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()

		// Mock agent response with IDs provided (highest priority matching)
		// Note: Songs have no MBID to ensure only ID matching is used
		agentSongs := []agents.Song{
			{ID: "song-1", Name: "Song One"},
			{ID: "song-2", Name: "Song Two"},
		}
		ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 2).Return(agentSongs, nil).Once()

		// Mock ID lookup (first query - should match both songs directly)
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1"}
		song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1"}
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1, song2}, nil).Once()

		songs, err := p.TopSongs(ctx, "Artist One", 2)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(2))
		Expect(songs[0].ID).To(Equal("song-1"))
		Expect(songs[1].ID).To(Equal("song-2"))
		artistRepo.AssertExpectations(GinkgoT())
		ag.AssertExpectations(GinkgoT())
		mediaFileRepo.AssertExpectations(GinkgoT())
	})

	It("falls back to MBID when ID is not found", func() {
		// Mock finding the artist
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
		artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()

		// Mock agent response with ID that won't be found, but MBID that will
		agentSongs := []agents.Song{
			{ID: "non-existent-id", Name: "Song One", MBID: "mbid-song-1"},
		}
		ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 1).Return(agentSongs, nil).Once()

		// Mock ID lookup - returns empty (ID not found)
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{}, nil).Once()
		// Mock MBID lookup - finds the song
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()

		songs, err := p.TopSongs(ctx, "Artist One", 1)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(1))
		Expect(songs[0].ID).To(Equal("song-1"))
		artistRepo.AssertExpectations(GinkgoT())
		ag.AssertExpectations(GinkgoT())
		mediaFileRepo.AssertExpectations(GinkgoT())
	})
})
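Both tests above exercise the same matching ladder: an agent-provided song is matched against library files first by ID, then by MusicBrainz recording ID, then by name. A compact sketch of that ladder, using simplified local types rather than the real provider code:

package sketch

import "strings"

type agentSong struct{ ID, MBID, Name string }
type mediaFile struct{ ID, MbzRecordingID, Title string }

// matchSong returns the first library file that matches the agent song,
// preferring ID, then MBID, then a case-insensitive title comparison.
// Illustrative only; the provider's real matching works over batched queries.
func matchSong(s agentSong, files []mediaFile) (mediaFile, bool) {
	for _, f := range files {
		if s.ID != "" && f.ID == s.ID {
			return f, true
		}
	}
	for _, f := range files {
		if s.MBID != "" && f.MbzRecordingID == s.MBID {
			return f, true
		}
	}
	for _, f := range files {
		if strings.EqualFold(f.Title, s.Name) {
			return f, true
		}
	}
	return mediaFile{}, false
}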
84 core/external/provider_updateartistinfo_test.go (vendored)
@@ -226,88 +226,4 @@ var _ = Describe("Provider - UpdateArtistInfo", func() {
|
||||
Expect(updatedArtist.ID).To(Equal("ar-agent-fail"))
|
||||
ag.AssertExpectations(GinkgoT())
|
||||
})
|
||||
|
||||
It("matches similar artists by ID first when agent provides IDs", func() {
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-id-match",
|
||||
Name: "ID Match Artist",
|
||||
}
|
||||
similarByID := model.Artist{ID: "ar-similar-by-id", Name: "Similar By ID", MbzArtistID: "mbid-similar"}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarByID})
|
||||
|
||||
// Agent returns similar artist with ID (highest priority matching)
|
||||
rawSimilar := []agents.Artist{
|
||||
{ID: "ar-similar-by-id", Name: "Different Name", MBID: "different-mbid"},
|
||||
}
|
||||
|
||||
ag.On("GetArtistMBID", ctx, "ar-id-match", "ID Match Artist").Return("", nil).Once()
|
||||
ag.On("GetArtistImages", ctx, "ar-id-match", "ID Match Artist", mock.Anything).Return(nil, nil).Maybe()
|
||||
ag.On("GetArtistBiography", ctx, "ar-id-match", "ID Match Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetArtistURL", ctx, "ar-id-match", "ID Match Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetSimilarArtists", ctx, "ar-id-match", "ID Match Artist", mock.Anything, 100).Return(rawSimilar, nil).Once()
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-id-match", 10, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
|
||||
// Should match by ID, not by name or MBID
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal("ar-similar-by-id"))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal("Similar By ID"))
|
||||
})
|
||||
|
||||
It("matches similar artists by MBID when ID is empty", func() {
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-mbid-match",
|
||||
Name: "MBID Match Artist",
|
||||
}
|
||||
similarByMBID := model.Artist{ID: "ar-similar-by-mbid", Name: "Similar By MBID", MbzArtistID: "mbid-similar"}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarByMBID})
|
||||
|
||||
// Agent returns similar artist with only MBID (no ID)
|
||||
rawSimilar := []agents.Artist{
|
||||
{Name: "Different Name", MBID: "mbid-similar"},
|
||||
}
|
||||
|
||||
ag.On("GetArtistMBID", ctx, "ar-mbid-match", "MBID Match Artist").Return("", nil).Once()
|
||||
ag.On("GetArtistImages", ctx, "ar-mbid-match", "MBID Match Artist", mock.Anything).Return(nil, nil).Maybe()
|
||||
ag.On("GetArtistBiography", ctx, "ar-mbid-match", "MBID Match Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetArtistURL", ctx, "ar-mbid-match", "MBID Match Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetSimilarArtists", ctx, "ar-mbid-match", "MBID Match Artist", mock.Anything, 100).Return(rawSimilar, nil).Once()
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-mbid-match", 10, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
|
||||
// Should match by MBID since ID was empty
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal("ar-similar-by-mbid"))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal("Similar By MBID"))
|
||||
})
|
||||
|
||||
It("falls back to name matching when ID and MBID don't match", func() {
|
||||
originalArtist := &model.Artist{
|
||||
ID: "ar-name-match",
|
||||
Name: "Name Match Artist",
|
||||
}
|
||||
similarByName := model.Artist{ID: "ar-similar-by-name", Name: "Similar By Name"}
|
||||
mockArtistRepo.SetData(model.Artists{*originalArtist, similarByName})
|
||||
|
||||
// Agent returns similar artist with non-matching ID and MBID
|
||||
rawSimilar := []agents.Artist{
|
||||
{ID: "non-existent-id", Name: "Similar By Name", MBID: "non-existent-mbid"},
|
||||
}
|
||||
|
||||
ag.On("GetArtistMBID", ctx, "ar-name-match", "Name Match Artist").Return("", nil).Once()
|
||||
ag.On("GetArtistImages", ctx, "ar-name-match", "Name Match Artist", mock.Anything).Return(nil, nil).Maybe()
|
||||
ag.On("GetArtistBiography", ctx, "ar-name-match", "Name Match Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetArtistURL", ctx, "ar-name-match", "Name Match Artist", mock.Anything).Return("", nil).Maybe()
|
||||
ag.On("GetSimilarArtists", ctx, "ar-name-match", "Name Match Artist", mock.Anything, 100).Return(rawSimilar, nil).Once()
|
||||
|
||||
updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-name-match", 10, false)
|
||||
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
|
||||
// Should fall back to name matching since ID and MBID didn't match
|
||||
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal("ar-similar-by-name"))
|
||||
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal("Similar By Name"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -112,7 +112,7 @@ func (e *ffmpeg) start(ctx context.Context, args []string) (io.ReadCloser, error
	log.Trace(ctx, "Executing ffmpeg command", "cmd", args)
	j := &ffCmd{args: args}
	j.PipeReader, j.out = io.Pipe()
	err := j.start(ctx)
	err := j.start()
	if err != nil {
		return nil, err
	}
@@ -127,8 +127,8 @@ type ffCmd struct {
	cmd *exec.Cmd
}

func (j *ffCmd) start(ctx context.Context) error {
	cmd := exec.CommandContext(ctx, j.args[0], j.args[1:]...) // #nosec
func (j *ffCmd) start() error {
	cmd := exec.Command(j.args[0], j.args[1:]...) // #nosec
	cmd.Stdout = j.out
	if log.IsGreaterOrEqualTo(log.LevelTrace) {
		cmd.Stderr = os.Stderr
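The hunks above toggle between a context-aware process start (exec.CommandContext, whose process is killed when the request context is cancelled) and a plain exec.Command. A minimal sketch of the context-aware variant, independent of the ffCmd plumbing in this file (names here are illustrative):

package sketch

import (
	"context"
	"io"
	"os/exec"
)

// startCancellable runs a command whose process is terminated automatically when
// ctx is cancelled; subsequent reads from the returned pipe then fail with an error.
func startCancellable(ctx context.Context, args []string) (io.ReadCloser, error) {
	r, w := io.Pipe()
	cmd := exec.CommandContext(ctx, args[0], args[1:]...) // #nosec G204 -- args assumed to come from trusted config
	cmd.Stdout = w
	if err := cmd.Start(); err != nil {
		return nil, err
	}
	go func() {
		// Propagate the exit (or kill) status to the reader side of the pipe.
		w.CloseWithError(cmd.Wait())
	}()
	return r, nil
}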
@@ -1,11 +1,7 @@
|
||||
package ffmpeg
|
||||
|
||||
import (
|
||||
"context"
|
||||
"runtime"
|
||||
sync "sync"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
@@ -69,98 +65,4 @@ var _ = Describe("ffmpeg", func() {
|
||||
Expect(args).To(Equal([]string{"/usr/bin/with spaces/ffmpeg.exe", "-i", "one.mp3", "-f", "ffmetadata"}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("FFmpeg", func() {
|
||||
Context("when FFmpeg is available", func() {
|
||||
var ff FFmpeg
|
||||
|
||||
BeforeEach(func() {
|
||||
ffOnce = sync.Once{}
|
||||
ff = New()
|
||||
// Skip if FFmpeg is not available
|
||||
if !ff.IsAvailable() {
|
||||
Skip("FFmpeg not available on this system")
|
||||
}
|
||||
})
|
||||
|
||||
It("should interrupt transcoding when context is cancelled", func() {
|
||||
ctx, cancel := context.WithTimeout(GinkgoT().Context(), 5*time.Second)
|
||||
defer cancel()
|
||||
|
||||
// Use a command that generates audio indefinitely
|
||||
// -f lavfi uses FFmpeg's built-in audio source
|
||||
// -t 0 means no time limit (runs forever)
|
||||
command := "ffmpeg -f lavfi -i sine=frequency=1000:duration=0 -f mp3 -"
|
||||
|
||||
// The input file is not used here, but we need to provide a valid path to the Transcode function
|
||||
stream, err := ff.Transcode(ctx, command, "tests/fixtures/test.mp3", 128, 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
defer stream.Close()
|
||||
|
||||
// Read some data first to ensure FFmpeg is running
|
||||
buf := make([]byte, 1024)
|
||||
_, err = stream.Read(buf)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Cancel the context
|
||||
cancel()
|
||||
|
||||
// Next read should fail due to cancelled context
|
||||
_, err = stream.Read(buf)
|
||||
Expect(err).To(HaveOccurred())
|
||||
})
|
||||
|
||||
It("should handle immediate context cancellation", func() {
|
||||
ctx, cancel := context.WithCancel(GinkgoT().Context())
|
||||
cancel() // Cancel immediately
|
||||
|
||||
// This should fail immediately
|
||||
_, err := ff.Transcode(ctx, "ffmpeg -i %s -f mp3 -", "tests/fixtures/test.mp3", 128, 0)
|
||||
Expect(err).To(MatchError(context.Canceled))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with mock process behavior", func() {
|
||||
var longRunningCmd string
|
||||
BeforeEach(func() {
|
||||
// Use a long-running command for testing cancellation
|
||||
switch runtime.GOOS {
|
||||
case "windows":
|
||||
// Use PowerShell's Start-Sleep
|
||||
ffmpegPath = "powershell"
|
||||
longRunningCmd = "powershell -Command Start-Sleep -Seconds 10"
|
||||
default:
|
||||
// Use sleep on Unix-like systems
|
||||
ffmpegPath = "sleep"
|
||||
longRunningCmd = "sleep 10"
|
||||
}
|
||||
})
|
||||
|
||||
It("should terminate the underlying process when context is cancelled", func() {
|
||||
ff := New()
|
||||
ctx, cancel := context.WithTimeout(GinkgoT().Context(), 5*time.Second)
|
||||
defer cancel()
|
||||
|
||||
// Start a process that will run for a while
|
||||
stream, err := ff.Transcode(ctx, longRunningCmd, "tests/fixtures/test.mp3", 0, 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
defer stream.Close()
|
||||
|
||||
// Give the process time to start
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
|
||||
// Cancel the context
|
||||
cancel()
|
||||
|
||||
// Try to read from the stream, which should fail
|
||||
buf := make([]byte, 100)
|
||||
_, err = stream.Read(buf)
|
||||
Expect(err).To(HaveOccurred(), "Expected stream to be closed due to process termination")
|
||||
|
||||
// Verify the stream is closed by attempting another read
|
||||
_, err = stream.Read(buf)
|
||||
Expect(err).To(HaveOccurred())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -37,21 +37,19 @@ type Library interface {
|
||||
}
|
||||
|
||||
type libraryService struct {
|
||||
ds model.DataStore
|
||||
scanner model.Scanner
|
||||
watcher Watcher
|
||||
broker events.Broker
|
||||
pluginManager PluginUnloader
|
||||
ds model.DataStore
|
||||
scanner model.Scanner
|
||||
watcher Watcher
|
||||
broker events.Broker
|
||||
}
|
||||
|
||||
// NewLibrary creates a new Library service
|
||||
func NewLibrary(ds model.DataStore, scanner model.Scanner, watcher Watcher, broker events.Broker, pluginManager PluginUnloader) Library {
|
||||
func NewLibrary(ds model.DataStore, scanner model.Scanner, watcher Watcher, broker events.Broker) Library {
|
||||
return &libraryService{
|
||||
ds: ds,
|
||||
scanner: scanner,
|
||||
watcher: watcher,
|
||||
broker: broker,
|
||||
pluginManager: pluginManager,
|
||||
ds: ds,
|
||||
scanner: scanner,
|
||||
watcher: watcher,
|
||||
broker: broker,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -143,7 +141,6 @@ func (s *libraryService) NewRepository(ctx context.Context) rest.Repository {
|
||||
scanner: s.scanner,
|
||||
watcher: s.watcher,
|
||||
broker: s.broker,
|
||||
pluginManager: s.pluginManager,
|
||||
}
|
||||
return wrapper
|
||||
}
|
||||
@@ -151,12 +148,11 @@ func (s *libraryService) NewRepository(ctx context.Context) rest.Repository {
|
||||
type libraryRepositoryWrapper struct {
|
||||
rest.Repository
|
||||
model.LibraryRepository
|
||||
ctx context.Context
|
||||
ds model.DataStore
|
||||
scanner model.Scanner
|
||||
watcher Watcher
|
||||
broker events.Broker
|
||||
pluginManager PluginUnloader
|
||||
ctx context.Context
|
||||
ds model.DataStore
|
||||
scanner model.Scanner
|
||||
watcher Watcher
|
||||
broker events.Broker
|
||||
}
|
||||
|
||||
func (r *libraryRepositoryWrapper) Save(entity interface{}) (string, error) {
|
||||
@@ -276,10 +272,6 @@ func (r *libraryRepositoryWrapper) Delete(id string) error {
|
||||
log.Debug(r.ctx, "Library deleted - sent refresh event", "libraryID", libID, "name", lib.Name)
|
||||
}
|
||||
|
||||
// After successful deletion, check if any plugins were auto-disabled
|
||||
// and need to be unloaded from memory
|
||||
r.pluginManager.UnloadDisabledPlugins(r.ctx)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -32,7 +32,6 @@ var _ = Describe("Library Service", func() {
|
||||
var scanner *tests.MockScanner
|
||||
var watcherManager *mockWatcherManager
|
||||
var broker *mockEventBroker
|
||||
var pluginManager *mockPluginUnloader
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
@@ -51,9 +50,7 @@ var _ = Describe("Library Service", func() {
|
||||
}
|
||||
// Create a mock event broker
|
||||
broker = &mockEventBroker{}
|
||||
// Create a mock plugin unloader
|
||||
pluginManager = &mockPluginUnloader{}
|
||||
service = core.NewLibrary(ds, scanner, watcherManager, broker, pluginManager)
|
||||
service = core.NewLibrary(ds, scanner, watcherManager, broker)
|
||||
ctx = context.Background()
|
||||
|
||||
// Create a temporary directory for testing valid paths
|
||||
@@ -872,45 +869,8 @@ var _ = Describe("Library Service", func() {
|
||||
Expect(broker.Events).To(HaveLen(1))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Plugin Manager Integration", func() {
|
||||
var repo rest.Persistable
|
||||
|
||||
BeforeEach(func() {
|
||||
// Reset the call count for each test
|
||||
pluginManager.unloadCalls = 0
|
||||
r := service.NewRepository(ctx)
|
||||
repo = r.(rest.Persistable)
|
||||
})
|
||||
|
||||
It("calls UnloadDisabledPlugins after successful library deletion", func() {
|
||||
libraryRepo.SetData(model.Libraries{
|
||||
{ID: 2, Name: "Library to Delete", Path: tempDir},
|
||||
})
|
||||
|
||||
err := repo.Delete("2")
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(pluginManager.unloadCalls).To(Equal(1))
|
||||
})
|
||||
|
||||
It("does not call UnloadDisabledPlugins when library deletion fails", func() {
|
||||
// Try to delete non-existent library
|
||||
err := repo.Delete("999")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(pluginManager.unloadCalls).To(Equal(0))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// mockPluginUnloader is a simple mock for testing UnloadDisabledPlugins calls
|
||||
type mockPluginUnloader struct {
|
||||
unloadCalls int
|
||||
}
|
||||
|
||||
func (m *mockPluginUnloader) UnloadDisabledPlugins(ctx context.Context) {
|
||||
m.unloadCalls++
|
||||
}
|
||||
|
||||
// mockWatcherManager provides a simple mock implementation of core.Watcher for testing
|
||||
type mockWatcherManager struct {
|
||||
StartedWatchers []model.Library
|
||||
|
||||
@@ -204,20 +204,7 @@ func NewTranscodingCache() TranscodingCache {
|
||||
log.Error(ctx, "Error loading transcoding command", "format", job.format, err)
|
||||
return nil, os.ErrInvalid
|
||||
}
|
||||
|
||||
// Choose the appropriate context based on EnableTranscodingCancellation configuration.
|
||||
// This is where we decide whether transcoding processes should be cancellable or not.
|
||||
var transcodingCtx context.Context
|
||||
if conf.Server.EnableTranscodingCancellation {
|
||||
// Use the request context directly, allowing cancellation when client disconnects
|
||||
transcodingCtx = ctx
|
||||
} else {
|
||||
// Use background context with request values preserved.
|
||||
// This prevents cancellation but maintains request metadata (user, client, etc.)
|
||||
transcodingCtx = request.AddValues(context.Background(), ctx)
|
||||
}
|
||||
|
||||
out, err := job.ms.transcoder.Transcode(transcodingCtx, t.Command, job.filePath, job.bitRate, job.offset)
|
||||
out, err := job.ms.transcoder.Transcode(ctx, t.Command, job.filePath, job.bitRate, job.offset)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error starting transcoder", "id", job.mf.ID, err)
|
||||
return nil, os.ErrInvalid
|
||||
|
||||
@@ -23,8 +23,7 @@ import (
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
"github.com/navidrome/navidrome/plugins"
|
||||
"github.com/navidrome/navidrome/server/events"
|
||||
"github.com/navidrome/navidrome/plugins/schema"
|
||||
"github.com/navidrome/navidrome/utils/singleton"
|
||||
)
|
||||
|
||||
@@ -38,12 +37,18 @@ var (
|
||||
)
|
||||
|
||||
type insightsCollector struct {
|
||||
ds model.DataStore
|
||||
lastRun atomic.Int64
|
||||
lastStatus atomic.Bool
|
||||
ds model.DataStore
|
||||
pluginLoader PluginLoader
|
||||
lastRun atomic.Int64
|
||||
lastStatus atomic.Bool
|
||||
}
|
||||
|
||||
func GetInstance(ds model.DataStore) Insights {
|
||||
// PluginLoader defines an interface for loading plugins
|
||||
type PluginLoader interface {
|
||||
PluginList() map[string]schema.PluginManifest
|
||||
}
|
||||
|
||||
func GetInstance(ds model.DataStore, pluginLoader PluginLoader) Insights {
|
||||
return singleton.GetInstance(func() *insightsCollector {
|
||||
id, err := ds.Property(context.TODO()).Get(consts.InsightsIDKey)
|
||||
if err != nil {
|
||||
@@ -55,7 +60,7 @@ func GetInstance(ds model.DataStore) Insights {
|
||||
}
|
||||
}
|
||||
insightsID = id
|
||||
return &insightsCollector{ds: ds}
|
||||
return &insightsCollector{ds: ds, pluginLoader: pluginLoader}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -218,7 +223,7 @@ var staticData = sync.OnceValue(func() insights.Data {
|
||||
data.Config.ScanSchedule = conf.Server.Scanner.Schedule
|
||||
data.Config.ScanWatcherWait = uint64(math.Trunc(conf.Server.Scanner.WatcherWait.Seconds()))
|
||||
data.Config.ScanOnStartup = conf.Server.Scanner.ScanOnStartup
|
||||
data.Config.ReverseProxyConfigured = conf.Server.ExtAuth.TrustedSources != ""
|
||||
data.Config.ReverseProxyConfigured = conf.Server.ReverseProxyWhitelist != ""
|
||||
data.Config.HasCustomPID = conf.Server.PID.Track != "" || conf.Server.PID.Album != ""
|
||||
data.Config.HasCustomTags = len(conf.Server.Tags) > 0
|
||||
|
||||
@@ -314,16 +319,12 @@ func (c *insightsCollector) hasSmartPlaylists(ctx context.Context) (bool, error)
|
||||
|
||||
// collectPlugins collects information about installed plugins
|
||||
func (c *insightsCollector) collectPlugins(_ context.Context) map[string]insights.PluginInfo {
|
||||
// TODO Fix import/inject cycles
|
||||
manager := plugins.GetManager(c.ds, events.GetBroker(), nil)
|
||||
info := manager.GetPluginInfo()
|
||||
|
||||
result := make(map[string]insights.PluginInfo, len(info))
|
||||
for name, p := range info {
|
||||
result[name] = insights.PluginInfo{
|
||||
Name: p.Name,
|
||||
Version: p.Version,
|
||||
plugins := make(map[string]insights.PluginInfo)
|
||||
for id, manifest := range c.pluginLoader.PluginList() {
|
||||
plugins[id] = insights.PluginInfo{
|
||||
Name: manifest.Name,
|
||||
Version: manifest.Version,
|
||||
}
|
||||
}
|
||||
return result
|
||||
return plugins
|
||||
}
|
||||
|
||||
@@ -42,7 +42,6 @@ type MountInfo struct {
|
||||
|
||||
var fsTypeMap = map[int64]string{
|
||||
0x5346414f: "afs",
|
||||
0x187: "autofs",
|
||||
0x61756673: "aufs",
|
||||
0x9123683E: "btrfs",
|
||||
0xc36400: "ceph",
|
||||
@@ -56,11 +55,9 @@ var fsTypeMap = map[int64]string{
|
||||
0x6a656a63: "fakeowner", // FS inside a container
|
||||
0x65735546: "fuse",
|
||||
0x4244: "hfs",
|
||||
0x482b: "hfs+",
|
||||
0x9660: "iso9660",
|
||||
0x3153464a: "jfs",
|
||||
0x00006969: "nfs",
|
||||
0x5346544e: "ntfs", // NTFS_SB_MAGIC
|
||||
0x7366746e: "ntfs",
|
||||
0x794c7630: "overlayfs",
|
||||
0x9fa0: "proc",
|
||||
@@ -72,16 +69,8 @@ var fsTypeMap = map[int64]string{
|
||||
0x01021997: "v9fs",
|
||||
0x786f4256: "vboxsf",
|
||||
0x4d44: "vfat",
|
||||
0xca451a4e: "virtiofs",
|
||||
0x58465342: "xfs",
|
||||
0x2FC12FC1: "zfs",
|
||||
0x7c7c6673: "prlfs", // Parallels Shared Folders
|
||||
|
||||
// Signed/unsigned conversion issues (negative hex values converted to uint32)
|
||||
-0x6edc97c2: "btrfs", // 0x9123683e
|
||||
-0x1acb2be: "smb2", // 0xfe534d42
|
||||
-0xacb2be: "cifs", // 0xff534d42
|
||||
-0xd0adff0: "f2fs", // 0xf2f52010
|
||||
}
|
||||
|
||||
func getFilesystemType(path string) (string, error) {
|
||||
|
||||
@@ -1,28 +1,27 @@
|
||||
package tests
|
||||
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/deluan/rest"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
)
|
||||
|
||||
// MockLibraryService provides a simple wrapper around MockLibraryRepo
|
||||
// that implements the core.Library interface for testing.
|
||||
// Returns concrete type to avoid import cycles - callers assign to core.Library.
|
||||
type MockLibraryService struct {
|
||||
*MockLibraryRepo
|
||||
// MockLibraryWrapper provides a simple wrapper around MockLibraryRepo
|
||||
// that implements the core.Library interface for testing
|
||||
type MockLibraryWrapper struct {
|
||||
*tests.MockLibraryRepo
|
||||
}
|
||||
|
||||
// MockLibraryRestAdapter adapts MockLibraryRepo to rest.Repository interface
|
||||
type MockLibraryRestAdapter struct {
|
||||
*MockLibraryRepo
|
||||
*tests.MockLibraryRepo
|
||||
}
|
||||
|
||||
// NewMockLibraryService creates a new mock library service for testing.
|
||||
// Returns concrete type - assign to core.Library at call site.
|
||||
func NewMockLibraryService() *MockLibraryService {
|
||||
repo := &MockLibraryRepo{
|
||||
// NewMockLibraryService creates a new mock library service for testing
|
||||
func NewMockLibraryService() Library {
|
||||
repo := &tests.MockLibraryRepo{
|
||||
Data: make(map[int]model.Library),
|
||||
}
|
||||
// Set up default test data
|
||||
@@ -30,10 +29,10 @@ func NewMockLibraryService() *MockLibraryService {
|
||||
{ID: 1, Name: "Test Library 1", Path: "/music/library1"},
|
||||
{ID: 2, Name: "Test Library 2", Path: "/music/library2"},
|
||||
})
|
||||
return &MockLibraryService{MockLibraryRepo: repo}
|
||||
return &MockLibraryWrapper{MockLibraryRepo: repo}
|
||||
}
|
||||
|
||||
func (m *MockLibraryService) NewRepository(ctx context.Context) rest.Repository {
|
||||
func (m *MockLibraryWrapper) NewRepository(ctx context.Context) rest.Repository {
|
||||
return &MockLibraryRestAdapter{MockLibraryRepo: m.MockLibraryRepo}
|
||||
}
|
||||
|
||||
@@ -42,3 +41,6 @@ func (m *MockLibraryService) NewRepository(ctx context.Context) rest.Repository
|
||||
func (a *MockLibraryRestAdapter) Delete(id string) error {
|
||||
return a.DeleteByStringID(id)
|
||||
}
|
||||
|
||||
var _ Library = (*MockLibraryWrapper)(nil)
|
||||
var _ rest.Repository = (*MockLibraryRestAdapter)(nil)
|
||||
@@ -1,7 +1,6 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
@@ -10,7 +9,7 @@ import (
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -168,11 +167,6 @@ func (s *playlists) parseNSP(_ context.Context, pls *model.Playlist, reader io.R
|
||||
if nsp.Comment != "" {
|
||||
pls.Comment = nsp.Comment
|
||||
}
|
||||
if nsp.Public != nil {
|
||||
pls.Public = *nsp.Public
|
||||
} else {
|
||||
pls.Public = conf.Server.DefaultPlaylistPublicVisibility
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -200,35 +194,22 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
|
||||
}
|
||||
filteredLines = append(filteredLines, line)
|
||||
}
|
||||
resolvedPaths, err := s.resolvePaths(ctx, folder, filteredLines)
|
||||
paths, err := s.normalizePaths(ctx, pls, folder, filteredLines)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Error resolving paths in playlist", "playlist", pls.Name, err)
|
||||
log.Warn(ctx, "Error normalizing paths in playlist", "playlist", pls.Name, err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Normalize to NFD for filesystem compatibility (macOS). Database stores paths in NFD.
|
||||
// See https://github.com/navidrome/navidrome/issues/4663
|
||||
resolvedPaths = slice.Map(resolvedPaths, func(path string) string {
|
||||
return strings.ToLower(norm.NFD.String(path))
|
||||
})
|
||||
|
||||
found, err := mediaFileRepository.FindByPaths(resolvedPaths)
|
||||
found, err := mediaFileRepository.FindByPaths(paths)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Error reading files from DB", "playlist", pls.Name, err)
|
||||
continue
|
||||
}
|
||||
// Build lookup map with library-qualified keys, normalized for comparison
|
||||
existing := make(map[string]int, len(found))
|
||||
for idx := range found {
|
||||
// Normalize to lowercase for case-insensitive comparison
|
||||
// Key format: "libraryID:path"
|
||||
key := fmt.Sprintf("%d:%s", found[idx].LibraryID, strings.ToLower(found[idx].Path))
|
||||
existing[key] = idx
|
||||
existing[normalizePathForComparison(found[idx].Path)] = idx
|
||||
}
|
||||
|
||||
// Find media files in the order of the resolved paths, to keep playlist order
|
||||
for _, path := range resolvedPaths {
|
||||
idx, ok := existing[path]
|
||||
for _, path := range paths {
|
||||
idx, ok := existing[normalizePathForComparison(path)]
|
||||
if ok {
|
||||
mfs = append(mfs, found[idx])
|
||||
} else {
|
||||
@@ -245,150 +226,69 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
|
||||
return nil
|
||||
}
|
||||
|
||||
// pathResolution holds the result of resolving a playlist path to a library-relative path.
|
||||
type pathResolution struct {
|
||||
absolutePath string
|
||||
libraryPath string
|
||||
libraryID int
|
||||
valid bool
|
||||
// normalizePathForComparison normalizes a file path to NFC form and converts to lowercase
|
||||
// for consistent comparison. This fixes Unicode normalization issues on macOS where
|
||||
// Apple Music creates playlists with NFC-encoded paths but the filesystem uses NFD.
|
||||
func normalizePathForComparison(path string) string {
|
||||
return strings.ToLower(norm.NFC.String(path))
|
||||
}
|
||||
|
||||
// ToQualifiedString converts the path resolution to a library-qualified string with forward slashes.
|
||||
// Format: "libraryID:relativePath" with forward slashes for path separators.
|
||||
func (r pathResolution) ToQualifiedString() (string, error) {
|
||||
if !r.valid {
|
||||
return "", fmt.Errorf("invalid path resolution")
|
||||
}
|
||||
relativePath, err := filepath.Rel(r.libraryPath, r.absolutePath)
|
||||
// TODO This won't work for multiple libraries
|
||||
func (s *playlists) normalizePaths(ctx context.Context, pls *model.Playlist, folder *model.Folder, lines []string) ([]string, error) {
|
||||
libRegex, err := s.compileLibraryPaths(ctx)
|
||||
if err != nil {
|
||||
return "", err
|
||||
return nil, err
|
||||
}
|
||||
// Convert path separators to forward slashes
|
||||
return fmt.Sprintf("%d:%s", r.libraryID, filepath.ToSlash(relativePath)), nil
|
||||
}
|
||||
|
||||
// libraryMatcher holds sorted libraries with cleaned paths for efficient path matching.
|
||||
type libraryMatcher struct {
|
||||
libraries model.Libraries
|
||||
cleanedPaths []string
|
||||
}
|
||||
res := make([]string, 0, len(lines))
|
||||
for idx, line := range lines {
|
||||
var libPath string
|
||||
var filePath string
|
||||
|
||||
// findLibraryForPath finds which library contains the given absolute path.
|
||||
// Returns library ID and path, or 0 and empty string if not found.
|
||||
func (lm *libraryMatcher) findLibraryForPath(absolutePath string) (int, string) {
|
||||
// Check sorted libraries (longest path first) to find the best match
|
||||
for i, cleanLibPath := range lm.cleanedPaths {
|
||||
// Check if absolutePath is under this library path
|
||||
if strings.HasPrefix(absolutePath, cleanLibPath) {
|
||||
// Ensure it's a proper path boundary (not just a prefix)
|
||||
if len(absolutePath) == len(cleanLibPath) || absolutePath[len(cleanLibPath)] == filepath.Separator {
|
||||
return lm.libraries[i].ID, cleanLibPath
|
||||
if folder != nil && !filepath.IsAbs(line) {
|
||||
libPath = folder.LibraryPath
|
||||
filePath = filepath.Join(folder.AbsolutePath(), line)
|
||||
} else {
|
||||
cleanLine := filepath.Clean(line)
|
||||
if libPath = libRegex.FindString(cleanLine); libPath != "" {
|
||||
filePath = cleanLine
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0, ""
|
||||
}
|
||||
|
||||
// newLibraryMatcher creates a libraryMatcher with libraries sorted by path length (longest first).
|
||||
// This ensures correct matching when library paths are prefixes of each other.
|
||||
// Example: /music-classical must be checked before /music
|
||||
// Otherwise, /music-classical/track.mp3 would match /music instead of /music-classical
|
||||
func newLibraryMatcher(libs model.Libraries) *libraryMatcher {
|
||||
// Sort libraries by path length (descending) to ensure longest paths match first.
|
||||
slices.SortFunc(libs, func(i, j model.Library) int {
|
||||
return cmp.Compare(len(j.Path), len(i.Path)) // Reverse order for descending
|
||||
})
|
||||
|
||||
// Pre-clean all library paths once for efficient matching
|
||||
cleanedPaths := make([]string, len(libs))
|
||||
for i, lib := range libs {
|
||||
cleanedPaths[i] = filepath.Clean(lib.Path)
|
||||
}
|
||||
return &libraryMatcher{
|
||||
libraries: libs,
|
||||
cleanedPaths: cleanedPaths,
|
||||
}
|
||||
}
|
||||
|
||||
// pathResolver handles path resolution logic for playlist imports.
|
||||
type pathResolver struct {
|
||||
matcher *libraryMatcher
|
||||
}
|
||||
|
||||
// newPathResolver creates a pathResolver with libraries loaded from the datastore.
|
||||
func newPathResolver(ctx context.Context, ds model.DataStore) (*pathResolver, error) {
|
||||
libs, err := ds.Library(ctx).GetAll()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
matcher := newLibraryMatcher(libs)
|
||||
return &pathResolver{matcher: matcher}, nil
|
||||
}
|
||||
|
||||
// resolvePath determines the absolute path and library path for a playlist entry.
|
||||
// For absolute paths, it uses them directly.
|
||||
// For relative paths, it resolves them relative to the playlist's folder location.
|
||||
// Example: playlist at /music/playlists/test.m3u with line "../songs/abc.mp3"
|
||||
//
|
||||
// resolves to /music/songs/abc.mp3
|
||||
func (r *pathResolver) resolvePath(line string, folder *model.Folder) pathResolution {
|
||||
var absolutePath string
|
||||
if folder != nil && !filepath.IsAbs(line) {
|
||||
// Resolve relative path to absolute path based on playlist location
|
||||
absolutePath = filepath.Clean(filepath.Join(folder.AbsolutePath(), line))
|
||||
} else {
|
||||
// Use absolute path directly after cleaning
|
||||
absolutePath = filepath.Clean(line)
|
||||
}
|
||||
|
||||
return r.findInLibraries(absolutePath)
|
||||
}
|
||||
|
||||
// findInLibraries matches an absolute path against all known libraries and returns
|
||||
// a pathResolution with the library information. Returns an invalid resolution if
|
||||
// the path is not found in any library.
|
||||
func (r *pathResolver) findInLibraries(absolutePath string) pathResolution {
|
||||
libID, libPath := r.matcher.findLibraryForPath(absolutePath)
|
||||
if libID == 0 {
|
||||
return pathResolution{valid: false}
|
||||
}
|
||||
return pathResolution{
|
||||
absolutePath: absolutePath,
|
||||
libraryPath: libPath,
|
||||
libraryID: libID,
|
||||
valid: true,
|
||||
}
|
||||
}
|
||||
|
||||
// resolvePaths converts playlist file paths to library-qualified paths (format: "libraryID:relativePath").
|
||||
// For relative paths, it resolves them to absolute paths first, then determines which
|
||||
// library they belong to. This allows playlists to reference files across library boundaries.
|
||||
func (s *playlists) resolvePaths(ctx context.Context, folder *model.Folder, lines []string) ([]string, error) {
|
||||
resolver, err := newPathResolver(ctx, s.ds)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
results := make([]string, 0, len(lines))
|
||||
for idx, line := range lines {
|
||||
resolution := resolver.resolvePath(line, folder)
|
||||
|
||||
if !resolution.valid {
|
||||
if libPath != "" {
|
||||
if rel, err := filepath.Rel(libPath, filePath); err == nil {
|
||||
res = append(res, rel)
|
||||
} else {
|
||||
log.Debug(ctx, "Error getting relative path", "playlist", pls.Name, "path", line, "libPath", libPath,
|
||||
"filePath", filePath, err)
|
||||
}
|
||||
} else {
|
||||
log.Warn(ctx, "Path in playlist not found in any library", "path", line, "line", idx)
|
||||
continue
|
||||
}
|
||||
}
|
||||
return slice.Map(res, filepath.ToSlash), nil
|
||||
}
|
||||
|
||||
qualifiedPath, err := resolution.ToQualifiedString()
|
||||
if err != nil {
|
||||
log.Debug(ctx, "Error getting library-qualified path", "path", line,
|
||||
"libPath", resolution.libraryPath, "filePath", resolution.absolutePath, err)
|
||||
continue
|
||||
}
|
||||
|
||||
results = append(results, qualifiedPath)
|
||||
func (s *playlists) compileLibraryPaths(ctx context.Context) (*regexp.Regexp, error) {
|
||||
libs, err := s.ds.Library(ctx).GetAll()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return results, nil
|
||||
// Create regex patterns for each library path
|
||||
patterns := make([]string, len(libs))
|
||||
for i, lib := range libs {
|
||||
cleanPath := filepath.Clean(lib.Path)
|
||||
escapedPath := regexp.QuoteMeta(cleanPath)
|
||||
patterns[i] = fmt.Sprintf("^%s(?:/|$)", escapedPath)
|
||||
}
|
||||
// Combine all patterns into a single regex
|
||||
combinedPattern := strings.Join(patterns, "|")
|
||||
re, err := regexp.Compile(combinedPattern)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("compiling library paths `%s`: %w", combinedPattern, err)
|
||||
}
|
||||
return re, nil
|
||||
}
|
||||
|
||||
func (s *playlists) updatePlaylist(ctx context.Context, newPls *model.Playlist) error {
|
||||
@@ -414,10 +314,7 @@ func (s *playlists) updatePlaylist(ctx context.Context, newPls *model.Playlist)
|
||||
} else {
|
||||
log.Info(ctx, "Adding synced playlist", "playlist", newPls.Name, "path", newPls.Path, "owner", owner.UserName)
|
||||
newPls.OwnerID = owner.ID
|
||||
// For NSP files, Public may already be set from the file; for M3U, use server default
|
||||
if !newPls.IsSmartPlaylist() {
|
||||
newPls.Public = conf.Server.DefaultPlaylistPublicVisibility
|
||||
}
|
||||
newPls.Public = conf.Server.DefaultPlaylistPublicVisibility
|
||||
}
|
||||
return s.ds.Playlist(ctx).Put(newPls)
|
||||
}
|
||||
@@ -481,7 +378,6 @@ type nspFile struct {
|
||||
criteria.Criteria
|
||||
Name string `json:"name"`
|
||||
Comment string `json:"comment"`
|
||||
Public *bool `json:"public"`
|
||||
}
|
||||
|
||||
func (i *nspFile) UnmarshalJSON(data []byte) error {
|
||||
@@ -492,8 +388,5 @@ func (i *nspFile) UnmarshalJSON(data []byte) error {
|
||||
}
|
||||
i.Name, _ = m["name"].(string)
|
||||
i.Comment, _ = m["comment"].(string)
|
||||
if public, ok := m["public"].(bool); ok {
|
||||
i.Public = &public
|
||||
}
|
||||
return json.Unmarshal(data, &i.Criteria)
|
||||
}
|
||||
|
||||
@@ -1,406 +0,0 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("libraryMatcher", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var mockLibRepo *tests.MockLibraryRepo
|
||||
ctx := context.Background()
|
||||
|
||||
BeforeEach(func() {
|
||||
mockLibRepo = &tests.MockLibraryRepo{}
|
||||
ds = &tests.MockDataStore{
|
||||
MockedLibrary: mockLibRepo,
|
||||
}
|
||||
})
|
||||
|
||||
// Helper function to create a libraryMatcher from the mock datastore
|
||||
createMatcher := func(ds model.DataStore) *libraryMatcher {
|
||||
libs, err := ds.Library(ctx).GetAll()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
return newLibraryMatcher(libs)
|
||||
}
|
||||
|
||||
Describe("Longest library path matching", func() {
|
||||
It("matches the longest library path when multiple libraries share a prefix", func() {
|
||||
// Setup libraries with prefix conflicts
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music"},
|
||||
{ID: 2, Path: "/music-classical"},
|
||||
{ID: 3, Path: "/music-classical/opera"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
// Test that longest path matches first and returns correct library ID
|
||||
testCases := []struct {
|
||||
path string
|
||||
expectedLibID int
|
||||
expectedLibPath string
|
||||
}{
|
||||
{"/music-classical/opera/track.mp3", 3, "/music-classical/opera"},
|
||||
{"/music-classical/track.mp3", 2, "/music-classical"},
|
||||
{"/music/track.mp3", 1, "/music"},
|
||||
{"/music-classical/opera/subdir/file.mp3", 3, "/music-classical/opera"},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
libID, libPath := matcher.findLibraryForPath(tc.path)
|
||||
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d, but got %d", tc.path, tc.expectedLibID, libID)
|
||||
Expect(libPath).To(Equal(tc.expectedLibPath), "Path %s should match library path %s, but got %s", tc.path, tc.expectedLibPath, libPath)
|
||||
}
|
||||
})
|
||||
|
||||
It("handles libraries with similar prefixes but different structures", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/home/user/music"},
|
||||
{ID: 2, Path: "/home/user/music-backup"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
// Test that music-backup library is matched correctly
|
||||
libID, libPath := matcher.findLibraryForPath("/home/user/music-backup/track.mp3")
|
||||
Expect(libID).To(Equal(2))
|
||||
Expect(libPath).To(Equal("/home/user/music-backup"))
|
||||
|
||||
// Test that music library is still matched correctly
|
||||
libID, libPath = matcher.findLibraryForPath("/home/user/music/track.mp3")
|
||||
Expect(libID).To(Equal(1))
|
||||
Expect(libPath).To(Equal("/home/user/music"))
|
||||
})
|
||||
|
||||
It("matches path that is exactly the library root", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music"},
|
||||
{ID: 2, Path: "/music-classical"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
// Exact library path should match
|
||||
libID, libPath := matcher.findLibraryForPath("/music-classical")
|
||||
Expect(libID).To(Equal(2))
|
||||
Expect(libPath).To(Equal("/music-classical"))
|
||||
})
|
||||
|
||||
It("handles complex nested library structures", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/media"},
|
||||
{ID: 2, Path: "/media/audio"},
|
||||
{ID: 3, Path: "/media/audio/classical"},
|
||||
{ID: 4, Path: "/media/audio/classical/baroque"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
testCases := []struct {
|
||||
path string
|
||||
expectedLibID int
|
||||
expectedLibPath string
|
||||
}{
|
||||
{"/media/audio/classical/baroque/bach/track.mp3", 4, "/media/audio/classical/baroque"},
|
||||
{"/media/audio/classical/mozart/track.mp3", 3, "/media/audio/classical"},
|
||||
{"/media/audio/rock/track.mp3", 2, "/media/audio"},
|
||||
{"/media/video/movie.mp4", 1, "/media"},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
libID, libPath := matcher.findLibraryForPath(tc.path)
|
||||
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d", tc.path, tc.expectedLibID)
|
||||
Expect(libPath).To(Equal(tc.expectedLibPath), "Path %s should match library path %s", tc.path, tc.expectedLibPath)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Edge cases", func() {
|
||||
It("handles empty library list", func() {
|
||||
mockLibRepo.SetData([]model.Library{})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
Expect(matcher).ToNot(BeNil())
|
||||
|
||||
// Should not match anything
|
||||
libID, libPath := matcher.findLibraryForPath("/music/track.mp3")
|
||||
Expect(libID).To(Equal(0))
|
||||
Expect(libPath).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("handles single library", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
libID, libPath := matcher.findLibraryForPath("/music/track.mp3")
|
||||
Expect(libID).To(Equal(1))
|
||||
Expect(libPath).To(Equal("/music"))
|
||||
})
|
||||
|
||||
It("handles libraries with special characters in paths", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music[test]"},
|
||||
{ID: 2, Path: "/music(backup)"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
Expect(matcher).ToNot(BeNil())
|
||||
|
||||
// Special characters should match literally
|
||||
libID, libPath := matcher.findLibraryForPath("/music[test]/track.mp3")
|
||||
Expect(libID).To(Equal(1))
|
||||
Expect(libPath).To(Equal("/music[test]"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Path matching order", func() {
|
||||
It("ensures longest paths match first", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/a"},
|
||||
{ID: 2, Path: "/ab"},
|
||||
{ID: 3, Path: "/abc"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
// Verify that longer paths match correctly (not cut off by shorter prefix)
|
||||
testCases := []struct {
|
||||
path string
|
||||
expectedLibID int
|
||||
}{
|
||||
{"/abc/file.mp3", 3},
|
||||
{"/ab/file.mp3", 2},
|
||||
{"/a/file.mp3", 1},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
libID, _ := matcher.findLibraryForPath(tc.path)
|
||||
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d", tc.path, tc.expectedLibID)
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
var _ = Describe("pathResolver", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var mockLibRepo *tests.MockLibraryRepo
|
||||
var resolver *pathResolver
|
||||
ctx := context.Background()
|
||||
|
||||
BeforeEach(func() {
|
||||
mockLibRepo = &tests.MockLibraryRepo{}
|
||||
ds = &tests.MockDataStore{
|
||||
MockedLibrary: mockLibRepo,
|
||||
}
|
||||
|
||||
// Setup test libraries
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music"},
|
||||
{ID: 2, Path: "/music-classical"},
|
||||
{ID: 3, Path: "/podcasts"},
|
||||
})
|
||||
|
||||
var err error
|
||||
resolver, err = newPathResolver(ctx, ds)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
|
||||
Describe("resolvePath", func() {
|
||||
It("resolves absolute paths", func() {
|
||||
resolution := resolver.resolvePath("/music/artist/album/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(1))
|
||||
Expect(resolution.libraryPath).To(Equal("/music"))
|
||||
Expect(resolution.absolutePath).To(Equal("/music/artist/album/track.mp3"))
|
||||
})
|
||||
|
||||
It("resolves relative paths when folder is provided", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
resolution := resolver.resolvePath("../artist/album/track.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(1))
|
||||
Expect(resolution.absolutePath).To(Equal("/music/artist/album/track.mp3"))
|
||||
})
|
||||
|
||||
It("returns invalid resolution for paths outside any library", func() {
|
||||
resolution := resolver.resolvePath("/outside/library/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("resolvePath", func() {
|
||||
Context("With absolute paths", func() {
|
||||
It("resolves path within a library", func() {
|
||||
resolution := resolver.resolvePath("/music/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(1))
|
||||
Expect(resolution.libraryPath).To(Equal("/music"))
|
||||
Expect(resolution.absolutePath).To(Equal("/music/track.mp3"))
|
||||
})
|
||||
|
||||
It("resolves path to the longest matching library", func() {
|
||||
resolution := resolver.resolvePath("/music-classical/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(2))
|
||||
Expect(resolution.libraryPath).To(Equal("/music-classical"))
|
||||
})
|
||||
|
||||
It("returns invalid resolution for path outside libraries", func() {
|
||||
resolution := resolver.resolvePath("/videos/movie.mp4", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeFalse())
|
||||
})
|
||||
|
||||
It("cleans the path before matching", func() {
|
||||
resolution := resolver.resolvePath("/music//artist/../artist/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.absolutePath).To(Equal("/music/artist/track.mp3"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("With relative paths", func() {
|
||||
It("resolves relative path within same library", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
resolution := resolver.resolvePath("../songs/track.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(1))
|
||||
Expect(resolution.absolutePath).To(Equal("/music/songs/track.mp3"))
|
||||
})
|
||||
|
||||
It("resolves relative path to different library", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
// Path goes up and into a different library
|
||||
resolution := resolver.resolvePath("../../podcasts/episode.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(3))
|
||||
Expect(resolution.libraryPath).To(Equal("/podcasts"))
|
||||
})
|
||||
|
||||
It("uses matcher to find correct library for resolved path", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
// This relative path resolves to music-classical library
|
||||
resolution := resolver.resolvePath("../../music-classical/track.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(2))
|
||||
Expect(resolution.libraryPath).To(Equal("/music-classical"))
|
||||
})
|
||||
|
||||
It("returns invalid for relative paths escaping all libraries", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
resolution := resolver.resolvePath("../../../../etc/passwd", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeFalse())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Cross-library resolution scenarios", func() {
|
||||
It("handles playlist in library A referencing file in library B", func() {
|
||||
// Playlist is in /music/playlists
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
// Relative path that goes to /podcasts library
|
||||
resolution := resolver.resolvePath("../../podcasts/show/episode.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(3), "Should resolve to podcasts library")
|
||||
Expect(resolution.libraryPath).To(Equal("/podcasts"))
|
||||
})
|
||||
|
||||
It("prefers longer library paths when resolving", func() {
|
||||
// Ensure /music-classical is matched instead of /music
|
||||
resolution := resolver.resolvePath("/music-classical/baroque/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(2), "Should match /music-classical, not /music")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
var _ = Describe("pathResolution", func() {
|
||||
Describe("ToQualifiedString", func() {
|
||||
It("converts valid resolution to qualified string with forward slashes", func() {
|
||||
resolution := pathResolution{
|
||||
absolutePath: "/music/artist/album/track.mp3",
|
||||
libraryPath: "/music",
|
||||
libraryID: 1,
|
||||
valid: true,
|
||||
}
|
||||
|
||||
qualifiedStr, err := resolution.ToQualifiedString()
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(qualifiedStr).To(Equal("1:artist/album/track.mp3"))
|
||||
})
|
||||
|
||||
It("handles Windows-style paths by converting to forward slashes", func() {
|
||||
resolution := pathResolution{
|
||||
absolutePath: "/music/artist/album/track.mp3",
|
||||
libraryPath: "/music",
|
||||
libraryID: 2,
|
||||
valid: true,
|
||||
}
|
||||
|
||||
qualifiedStr, err := resolution.ToQualifiedString()
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
// Should always use forward slashes regardless of OS
|
||||
Expect(qualifiedStr).To(ContainSubstring("2:"))
|
||||
Expect(qualifiedStr).ToNot(ContainSubstring("\\"))
|
||||
})
|
||||
|
||||
It("returns error for invalid resolution", func() {
|
||||
resolution := pathResolution{valid: false}
|
||||
|
||||
_, err := resolution.ToQualifiedString()
|
||||
|
||||
Expect(err).To(HaveOccurred())
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,4 +1,4 @@
|
||||
package core_test
|
||||
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -9,7 +9,6 @@ import (
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/criteria"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
@@ -21,7 +20,7 @@ import (
|
||||
|
||||
var _ = Describe("Playlists", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var ps core.Playlists
|
||||
var ps Playlists
|
||||
var mockPlsRepo mockedPlaylistRepo
|
||||
var mockLibRepo *tests.MockLibraryRepo
|
||||
ctx := context.Background()
|
||||
@@ -34,16 +33,16 @@ var _ = Describe("Playlists", func() {
|
||||
MockedLibrary: mockLibRepo,
|
||||
}
|
||||
ctx = request.WithUser(ctx, model.User{ID: "123"})
|
||||
// Path should be libPath, but we want to match the root folder referenced in the m3u, which is `/`
|
||||
mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/"}})
|
||||
})
|
||||
|
||||
Describe("ImportFile", func() {
|
||||
var folder *model.Folder
|
||||
BeforeEach(func() {
|
||||
ps = core.NewPlaylists(ds)
|
||||
ps = NewPlaylists(ds)
|
||||
ds.MockedMediaFile = &mockedMediaFileRepo{}
|
||||
libPath, _ := os.Getwd()
|
||||
// Set up library with the actual library path that matches the folder
|
||||
mockLibRepo.SetData([]model.Library{{ID: 1, Path: libPath}})
|
||||
folder = &model.Folder{
|
||||
ID: "1",
|
||||
LibraryID: 1,
|
||||
@@ -112,245 +111,6 @@ var _ = Describe("Playlists", func() {
|
||||
_, err := ps.ImportFile(ctx, folder, "invalid_json.nsp")
|
||||
Expect(err.Error()).To(ContainSubstring("line 19, column 1: invalid character '\\n'"))
|
||||
})
|
||||
It("parses NSP with public: true and creates public playlist", func() {
|
||||
pls, err := ps.ImportFile(ctx, folder, "public_playlist.nsp")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Name).To(Equal("Public Playlist"))
|
||||
Expect(pls.Public).To(BeTrue())
|
||||
})
|
||||
It("parses NSP with public: false and creates private playlist", func() {
|
||||
pls, err := ps.ImportFile(ctx, folder, "private_playlist.nsp")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Name).To(Equal("Private Playlist"))
|
||||
Expect(pls.Public).To(BeFalse())
|
||||
})
|
||||
It("uses server default when public field is absent", func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.DefaultPlaylistPublicVisibility = true
|
||||
|
||||
pls, err := ps.ImportFile(ctx, folder, "recently_played.nsp")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Name).To(Equal("Recently Played"))
|
||||
Expect(pls.Public).To(BeTrue()) // Should be true since server default is true
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Cross-library relative paths", func() {
|
||||
var tmpDir, plsDir, songsDir string
|
||||
|
||||
BeforeEach(func() {
|
||||
// Create temp directory structure
|
||||
tmpDir = GinkgoT().TempDir()
|
||||
plsDir = tmpDir + "/playlists"
|
||||
songsDir = tmpDir + "/songs"
|
||||
Expect(os.Mkdir(plsDir, 0755)).To(Succeed())
|
||||
Expect(os.Mkdir(songsDir, 0755)).To(Succeed())
|
||||
|
||||
// Setup two different libraries with paths matching our temp structure
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: songsDir},
|
||||
{ID: 2, Path: plsDir},
|
||||
})
|
||||
|
||||
// Create a mock media file repository that returns files for both libraries
|
||||
// Note: The paths are relative to their respective library roots
|
||||
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
|
||||
data: []string{
|
||||
"abc.mp3", // This is songs/abc.mp3 relative to songsDir
|
||||
"def.mp3", // This is playlists/def.mp3 relative to plsDir
|
||||
},
|
||||
}
|
||||
ps = core.NewPlaylists(ds)
|
||||
})
|
||||
|
||||
It("handles relative paths that reference files in other libraries", func() {
|
||||
// Create a temporary playlist file with relative path
|
||||
plsContent := "#PLAYLIST:Cross Library Test\n../songs/abc.mp3\ndef.mp3"
|
||||
plsFile := plsDir + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
// Playlist is in the Playlists library folder
|
||||
// Important: Path should be relative to LibraryPath, and Name is the folder name
|
||||
plsFolder := &model.Folder{
|
||||
ID: "2",
|
||||
LibraryID: 2,
|
||||
LibraryPath: plsDir,
|
||||
Path: "",
|
||||
Name: "",
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3")) // From songsDir library
|
||||
Expect(pls.Tracks[1].Path).To(Equal("def.mp3")) // From plsDir library
|
||||
})
|
||||
|
||||
It("ignores paths that point outside all libraries", func() {
|
||||
// Create a temporary playlist file with path outside libraries
|
||||
plsContent := "#PLAYLIST:Outside Test\n../../outside.mp3\nabc.mp3"
|
||||
plsFile := plsDir + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
plsFolder := &model.Folder{
|
||||
ID: "2",
|
||||
LibraryID: 2,
|
||||
LibraryPath: plsDir,
|
||||
Path: "",
|
||||
Name: "",
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
// Should only find abc.mp3, not outside.mp3
|
||||
Expect(pls.Tracks).To(HaveLen(1))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3"))
|
||||
})
|
||||
|
||||
It("handles relative paths with multiple '../' components", func() {
|
||||
// Create a nested structure: tmpDir/playlists/subfolder/test.m3u
|
||||
subFolder := plsDir + "/subfolder"
|
||||
Expect(os.Mkdir(subFolder, 0755)).To(Succeed())
|
||||
|
||||
// Create the media file in the subfolder directory
|
||||
// The mock will return it as "def.mp3" relative to plsDir
|
||||
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
|
||||
data: []string{
|
||||
"abc.mp3", // From songsDir library
|
||||
"def.mp3", // From plsDir library root
|
||||
},
|
||||
}
|
||||
|
||||
// From subfolder, ../../songs/abc.mp3 should resolve to songs library
|
||||
// ../def.mp3 should resolve to plsDir/def.mp3
|
||||
plsContent := "#PLAYLIST:Nested Test\n../../songs/abc.mp3\n../def.mp3"
|
||||
plsFile := subFolder + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
// The folder: AbsolutePath = LibraryPath + Path + Name
|
||||
// So for /playlists/subfolder: LibraryPath=/playlists, Path="", Name="subfolder"
|
||||
plsFolder := &model.Folder{
|
||||
ID: "2",
|
||||
LibraryID: 2,
|
||||
LibraryPath: plsDir,
|
||||
Path: "", // Empty because subfolder is directly under library root
|
||||
Name: "subfolder", // The folder name
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3")) // From songsDir library
|
||||
Expect(pls.Tracks[1].Path).To(Equal("def.mp3")) // From plsDir library root
|
||||
})
|
||||
|
||||
It("correctly resolves libraries when one path is a prefix of another", func() {
|
||||
// This tests the bug where /music would match before /music-classical
|
||||
// Create temp directory structure with prefix conflict
|
||||
tmpDir := GinkgoT().TempDir()
|
||||
musicDir := tmpDir + "/music"
|
||||
musicClassicalDir := tmpDir + "/music-classical"
|
||||
Expect(os.Mkdir(musicDir, 0755)).To(Succeed())
|
||||
Expect(os.Mkdir(musicClassicalDir, 0755)).To(Succeed())
|
||||
|
||||
// Setup two libraries where one is a prefix of the other
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: musicDir}, // /tmp/xxx/music
|
||||
{ID: 2, Path: musicClassicalDir}, // /tmp/xxx/music-classical
|
||||
})
|
||||
|
||||
// Mock will return tracks from both libraries
|
||||
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
|
||||
data: []string{
|
||||
"rock.mp3", // From music library
|
||||
"bach.mp3", // From music-classical library
|
||||
},
|
||||
}
|
||||
|
||||
// Create playlist in music library that references music-classical
|
||||
plsContent := "#PLAYLIST:Cross Prefix Test\nrock.mp3\n../music-classical/bach.mp3"
|
||||
plsFile := musicDir + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
plsFolder := &model.Folder{
|
||||
ID: "1",
|
||||
LibraryID: 1,
|
||||
LibraryPath: musicDir,
|
||||
Path: "",
|
||||
Name: "",
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("rock.mp3")) // From music library
|
||||
Expect(pls.Tracks[1].Path).To(Equal("bach.mp3")) // From music-classical library (not music!)
|
||||
})
|
||||
|
||||
It("correctly handles identical relative paths from different libraries", func() {
|
||||
// This tests the bug where two libraries have files at the same relative path
|
||||
// and only one appears in the playlist
|
||||
tmpDir := GinkgoT().TempDir()
|
||||
musicDir := tmpDir + "/music"
|
||||
classicalDir := tmpDir + "/classical"
|
||||
Expect(os.Mkdir(musicDir, 0755)).To(Succeed())
|
||||
Expect(os.Mkdir(classicalDir, 0755)).To(Succeed())
|
||||
Expect(os.MkdirAll(musicDir+"/album", 0755)).To(Succeed())
|
||||
Expect(os.MkdirAll(classicalDir+"/album", 0755)).To(Succeed())
|
||||
// Create placeholder files so paths resolve correctly
|
||||
Expect(os.WriteFile(musicDir+"/album/track.mp3", []byte{}, 0600)).To(Succeed())
|
||||
Expect(os.WriteFile(classicalDir+"/album/track.mp3", []byte{}, 0600)).To(Succeed())
|
||||
|
||||
// Both libraries have a file at "album/track.mp3"
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: musicDir},
|
||||
{ID: 2, Path: classicalDir},
|
||||
})
|
||||
|
||||
// Mock returns files with same relative path but different IDs and library IDs
|
||||
// Keys use the library-qualified format: "libraryID:path"
|
||||
ds.MockedMediaFile = &mockedMediaFileRepo{
|
||||
data: map[string]model.MediaFile{
|
||||
"1:album/track.mp3": {ID: "music-track", Path: "album/track.mp3", LibraryID: 1, Title: "Rock Song"},
|
||||
"2:album/track.mp3": {ID: "classical-track", Path: "album/track.mp3", LibraryID: 2, Title: "Classical Piece"},
|
||||
},
|
||||
}
|
||||
// Recreate playlists service to pick up new mock
|
||||
ps = core.NewPlaylists(ds)
|
||||
|
||||
// Create playlist in music library that references both tracks
|
||||
plsContent := "#PLAYLIST:Same Path Test\nalbum/track.mp3\n../classical/album/track.mp3"
|
||||
plsFile := musicDir + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
plsFolder := &model.Folder{
|
||||
ID: "1",
|
||||
LibraryID: 1,
|
||||
LibraryPath: musicDir,
|
||||
Path: "",
|
||||
Name: "",
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Should have BOTH tracks, not just one
|
||||
Expect(pls.Tracks).To(HaveLen(2), "Playlist should contain both tracks with same relative path")
|
||||
|
||||
// Verify we got tracks from DIFFERENT libraries (the key fix!)
|
||||
// Collect the library IDs
|
||||
libIDs := make(map[int]bool)
|
||||
for _, track := range pls.Tracks {
|
||||
libIDs[track.LibraryID] = true
|
||||
}
|
||||
Expect(libIDs).To(HaveLen(2), "Tracks should come from two different libraries")
|
||||
Expect(libIDs[1]).To(BeTrue(), "Should have track from library 1")
|
||||
Expect(libIDs[2]).To(BeTrue(), "Should have track from library 2")
|
||||
|
||||
// Both tracks should have the same relative path
|
||||
Expect(pls.Tracks[0].Path).To(Equal("album/track.mp3"))
|
||||
Expect(pls.Tracks[1].Path).To(Equal("album/track.mp3"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -359,7 +119,7 @@ var _ = Describe("Playlists", func() {
        BeforeEach(func() {
            repo = &mockedMediaFileFromListRepo{}
            ds.MockedMediaFile = repo
            ps = core.NewPlaylists(ds)
            ps = NewPlaylists(ds)
            mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/music"}, {ID: 2, Path: "/new"}})
            ctx = request.WithUser(ctx, model.User{ID: "123"})
        })
@@ -446,23 +206,53 @@ var _ = Describe("Playlists", func() {
            Expect(pls.Tracks[0].Path).To(Equal("abc/tEsT1.Mp3"))
        })

        It("handles Unicode normalization when comparing paths (NFD vs NFC)", func() {
            // Simulate macOS filesystem: stores paths in NFD (decomposed) form
            // "è" (U+00E8) in NFC becomes "e" + "◌̀" (U+0065 + U+0300) in NFD
            nfdPath := "artist/Mich" + string([]rune{'e', '\u0300'}) + "le/song.mp3" // NFD: e + combining grave
        It("handles Unicode normalization when comparing paths", func() {
            // Test case for Apple Music playlists that use NFC encoding vs macOS filesystem NFD
            // The character "è" can be represented as NFC (single codepoint) or NFD (e + combining accent)

            const pathWithAccents = "artist/Michèle Desrosiers/album/Noël.m4a"

            // Simulate a database entry with NFD encoding (as stored by macOS filesystem)
            nfdPath := norm.NFD.String(pathWithAccents)
            repo.data = []string{nfdPath}

            // Simulate Apple Music M3U: uses NFC (composed) form
            nfcPath := "/music/artist/Mich\u00E8le/song.mp3" // NFC: single è character
            m3u := nfcPath + "\n"
            // Simulate an Apple Music M3U playlist entry with NFC encoding
            nfcPath := norm.NFC.String("/music/" + pathWithAccents)
            m3u := strings.Join([]string{
                nfcPath,
            }, "\n")
            f := strings.NewReader(m3u)

            pls, err := ps.ImportM3U(ctx, f)
            Expect(err).ToNot(HaveOccurred())
            Expect(pls.Tracks).To(HaveLen(1))
            // Should match despite different Unicode normalization forms
            Expect(pls.Tracks).To(HaveLen(1), "Should find the track despite Unicode normalization differences")
            Expect(pls.Tracks[0].Path).To(Equal(nfdPath))
        })
    })

    Describe("normalizePathForComparison", func() {
        It("normalizes Unicode characters to NFC form and converts to lowercase", func() {
            // Test with NFD (decomposed) input - as would come from macOS filesystem
            nfdPath := norm.NFD.String("Michèle") // Explicitly convert to NFD form
            normalized := normalizePathForComparison(nfdPath)
            Expect(normalized).To(Equal("michèle"))

            // Test with NFC (composed) input - as would come from Apple Music M3U
            nfcPath := "Michèle" // This might be in NFC form
            normalizedNfc := normalizePathForComparison(nfcPath)

            // Ensure the two paths are not equal in their original forms
            Expect(nfdPath).ToNot(Equal(nfcPath))

            // Both should normalize to the same result
            Expect(normalized).To(Equal(normalizedNfc))
        })

        It("handles paths with mixed case and Unicode characters", func() {
            path := "Artist/Noël Coward/Album/Song.mp3"
            normalized := normalizePathForComparison(path)
            Expect(normalized).To(Equal("artist/noël coward/album/song.mp3"))
        })
    })

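The two specs above imply that path comparison normalizes Unicode to NFC and lowercases the result. A minimal sketch consistent with those expectations follows; it assumes golang.org/x/text/unicode/norm (which the tests already use) and is not necessarily the repository's actual implementation:

// Sketch only, consistent with the test expectations above.
package sketch

import (
    "strings"

    "golang.org/x/text/unicode/norm"
)

func normalizePathForComparison(path string) string {
    // NFC so that NFD input (macOS filesystem) and NFC input (e.g. Apple Music M3U)
    // compare equal; lowercase to mimic the case-insensitive path collation.
    return strings.ToLower(norm.NFC.String(path))
}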
Describe("InPlaylistsPath", func() {
|
||||
@@ -479,27 +269,27 @@ var _ = Describe("Playlists", func() {
|
||||
|
||||
It("returns true if PlaylistsPath is empty", func() {
|
||||
conf.Server.PlaylistsPath = ""
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
|
||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns true if PlaylistsPath is any (**/**)", func() {
|
||||
conf.Server.PlaylistsPath = "**/**"
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
|
||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns true if folder is in PlaylistsPath", func() {
|
||||
conf.Server.PlaylistsPath = "other/**:playlists/**"
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
|
||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns false if folder is not in PlaylistsPath", func() {
|
||||
conf.Server.PlaylistsPath = "other"
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeFalse())
|
||||
Expect(InPlaylistsPath(folder)).To(BeFalse())
|
||||
})
|
||||
|
||||
It("returns true if for a playlist in root of MusicFolder if PlaylistsPath is '.'", func() {
|
||||
conf.Server.PlaylistsPath = "."
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeFalse())
|
||||
Expect(InPlaylistsPath(folder)).To(BeFalse())
|
||||
|
||||
folder2 := model.Folder{
|
||||
LibraryPath: "/music",
|
||||
@@ -507,47 +297,22 @@ var _ = Describe("Playlists", func() {
|
||||
Name: ".",
|
||||
}
|
||||
|
||||
Expect(core.InPlaylistsPath(folder2)).To(BeTrue())
|
||||
Expect(InPlaylistsPath(folder2)).To(BeTrue())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// mockedMediaFileRepo's FindByPaths method returns MediaFiles for the given paths.
// If data map is provided, looks up files by key; otherwise creates them from paths.
// mockedMediaFileRepo's FindByPaths method returns a list of MediaFiles with the same paths as the input
type mockedMediaFileRepo struct {
    model.MediaFileRepository
    data map[string]model.MediaFile
}

func (r *mockedMediaFileRepo) FindByPaths(paths []string) (model.MediaFiles, error) {
    var mfs model.MediaFiles

    // If data map provided, look up files
    if r.data != nil {
        for _, path := range paths {
            if mf, ok := r.data[path]; ok {
                mfs = append(mfs, mf)
            }
        }
        return mfs, nil
    }

    // Otherwise, create MediaFiles from paths
    for idx, path := range paths {
        // Strip library qualifier if present (format: "libraryID:path")
        actualPath := path
        libraryID := 1
        if parts := strings.SplitN(path, ":", 2); len(parts) == 2 {
            if id, err := strconv.Atoi(parts[0]); err == nil {
                libraryID = id
                actualPath = parts[1]
            }
        }

        mfs = append(mfs, model.MediaFile{
            ID:        strconv.Itoa(idx),
            Path:      actualPath,
            LibraryID: libraryID,
            ID:   strconv.Itoa(idx),
            Path: path,
        })
    }
    return mfs, nil
@@ -559,38 +324,13 @@ type mockedMediaFileFromListRepo struct {
    data []string
}

func (r *mockedMediaFileFromListRepo) FindByPaths(paths []string) (model.MediaFiles, error) {
func (r *mockedMediaFileFromListRepo) FindByPaths([]string) (model.MediaFiles, error) {
    var mfs model.MediaFiles

    for idx, dataPath := range r.data {
        // Normalize the data path to NFD (simulates macOS filesystem storage)
        normalizedDataPath := norm.NFD.String(dataPath)

        for _, requestPath := range paths {
            // Strip library qualifier if present (format: "libraryID:path")
            actualPath := requestPath
            libraryID := 1
            if parts := strings.SplitN(requestPath, ":", 2); len(parts) == 2 {
                if id, err := strconv.Atoi(parts[0]); err == nil {
                    libraryID = id
                    actualPath = parts[1]
                }
            }

            // The request path should already be normalized to NFD by production code
            // before calling FindByPaths (to match DB storage)
            normalizedRequestPath := norm.NFD.String(actualPath)

            // Case-insensitive comparison (like SQL's "collate nocase")
            if strings.EqualFold(normalizedRequestPath, normalizedDataPath) {
                mfs = append(mfs, model.MediaFile{
                    ID:        strconv.Itoa(idx),
                    Path:      dataPath, // Return original path from DB
                    LibraryID: libraryID,
                })
                break
            }
        }
    for idx, path := range r.data {
        mfs = append(mfs, model.MediaFile{
            ID:   strconv.Itoa(idx),
            Path: path,
        })
    }
    return mfs, nil
}

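Both mocks parse lookup keys of the form "libraryID:path". How such keys might be built on the caller side is sketched below; the helper and type names are hypothetical and not taken from the repository. Matching via filepath.Rel also sidesteps the prefix problem exercised in the tests above (e.g. /music vs /music-classical):

// Hypothetical helper, for illustration only.
package sketch

import (
    "fmt"
    "path/filepath"
    "strings"
)

type library struct {
    ID   int
    Path string
}

// qualifiedKeys maps absolute file paths to "libraryID:relative/path" keys.
func qualifiedKeys(libs []library, absPaths []string) []string {
    keys := make([]string, 0, len(absPaths))
    for _, abs := range absPaths {
        for _, lib := range libs {
            rel, err := filepath.Rel(lib.Path, abs)
            if err != nil || strings.HasPrefix(rel, "..") {
                continue // not inside this library
            }
            keys = append(keys, fmt.Sprintf("%d:%s", lib.ID, filepath.ToSlash(rel)))
            break
        }
    }
    return keys
}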
@@ -1,81 +0,0 @@
package publicurl

import (
    "cmp"
    "net/http"
    "net/url"
    "path"
    "strconv"
    "strings"

    "github.com/navidrome/navidrome/conf"
    "github.com/navidrome/navidrome/consts"
    "github.com/navidrome/navidrome/core/auth"
    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/model"
)

// ImageURL generates a public URL for artwork images.
// It creates a signed token for the artwork ID and builds a complete public URL.
func ImageURL(req *http.Request, artID model.ArtworkID, size int) string {
    token, _ := auth.CreatePublicToken(map[string]any{"id": artID.String()})
    uri := path.Join(consts.URLPathPublicImages, token)
    params := url.Values{}
    if size > 0 {
        params.Add("size", strconv.Itoa(size))
    }
    return PublicURL(req, uri, params)
}

// PublicURL builds a full URL for public-facing resources.
// It uses ShareURL from config if available, otherwise falls back to extracting
// the scheme and host from the provided http.Request.
// If req is nil and ShareURL is not set, it defaults to http://localhost.
func PublicURL(req *http.Request, u string, params url.Values) string {
    if conf.Server.ShareURL == "" {
        return AbsoluteURL(req, u, params)
    }
    shareUrl, err := url.Parse(conf.Server.ShareURL)
    if err != nil {
        return AbsoluteURL(req, u, params)
    }
    buildUrl, err := url.Parse(u)
    if err != nil {
        return AbsoluteURL(req, u, params)
    }
    buildUrl.Scheme = shareUrl.Scheme
    buildUrl.Host = shareUrl.Host
    if len(params) > 0 {
        buildUrl.RawQuery = params.Encode()
    }
    return buildUrl.String()
}

// AbsoluteURL builds an absolute URL from a relative path.
// It uses BaseHost/BaseScheme from config if available, otherwise extracts
// the scheme and host from the http.Request.
// If req is nil and BaseHost is not set, it defaults to http://localhost.
func AbsoluteURL(req *http.Request, u string, params url.Values) string {
    buildUrl, err := url.Parse(u)
    if err != nil {
        log.Error(req.Context(), "Failed to parse URL path", "url", u, err)
        return ""
    }
    if strings.HasPrefix(u, "/") {
        buildUrl.Path = path.Join(conf.Server.BasePath, buildUrl.Path)
        if conf.Server.BaseHost != "" {
            buildUrl.Scheme = cmp.Or(conf.Server.BaseScheme, "http")
            buildUrl.Host = conf.Server.BaseHost
        } else if req != nil {
            buildUrl.Scheme = req.URL.Scheme
            buildUrl.Host = req.Host
        } else {
            buildUrl.Scheme = "http"
            buildUrl.Host = "localhost"
        }
    }
    if len(params) > 0 {
        buildUrl.RawQuery = params.Encode()
    }
    return buildUrl.String()
}
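For context, the functions in this removed file were used roughly as below. Host names and paths are invented for illustration, and the snippet assumes the server configuration has already been initialized:

// Illustrative usage only; values are made up.
package main

import (
    "fmt"
    "net/url"

    "github.com/navidrome/navidrome/conf"
    "github.com/navidrome/navidrome/core/publicurl"
)

func main() {
    // With ShareURL set, PublicURL rewrites scheme/host to the share host.
    conf.Server.ShareURL = "https://share.example.com"
    params := url.Values{"size": []string{"300"}}
    fmt.Println(publicurl.PublicURL(nil, "/share/img/some-token", params))
    // -> https://share.example.com/share/img/some-token?size=300

    // Without ShareURL, AbsoluteURL falls back to BaseHost/BaseScheme (or the request).
    conf.Server.ShareURL = ""
    conf.Server.BaseHost = "music.example.com"
    conf.Server.BaseScheme = "https"
    fmt.Println(publicurl.AbsoluteURL(nil, "/app/artwork", nil))
    // -> https://music.example.com/app/artwork
}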
@@ -1,174 +0,0 @@
|
||||
package publicurl_test
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"github.com/go-chi/jwtauth/v5"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/auth"
|
||||
"github.com/navidrome/navidrome/core/publicurl"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
func TestPublicURL(t *testing.T) {
|
||||
tests.Init(t, false)
|
||||
log.SetLevel(log.LevelFatal)
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Public URL Suite")
|
||||
}
|
||||
|
||||
var _ = Describe("Public URL Utilities", func() {
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
})
|
||||
|
||||
Describe("PublicURL", func() {
|
||||
When("ShareURL is set", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.ShareURL = "https://share.example.com"
|
||||
})
|
||||
|
||||
It("uses ShareURL as the base", func() {
|
||||
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
|
||||
result := publicurl.PublicURL(r, "/path/to/resource", nil)
|
||||
Expect(result).To(Equal("https://share.example.com/path/to/resource"))
|
||||
})
|
||||
|
||||
It("includes query parameters", func() {
|
||||
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
|
||||
params := url.Values{"size": []string{"300"}, "format": []string{"png"}}
|
||||
result := publicurl.PublicURL(r, "/image/123", params)
|
||||
Expect(result).To(ContainSubstring("https://share.example.com/image/123"))
|
||||
Expect(result).To(ContainSubstring("size=300"))
|
||||
Expect(result).To(ContainSubstring("format=png"))
|
||||
})
|
||||
|
||||
It("works without a request", func() {
|
||||
result := publicurl.PublicURL(nil, "/path/to/resource", nil)
|
||||
Expect(result).To(Equal("https://share.example.com/path/to/resource"))
|
||||
})
|
||||
})
|
||||
|
||||
When("ShareURL is not set", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.ShareURL = ""
|
||||
})
|
||||
|
||||
It("falls back to AbsoluteURL with request", func() {
|
||||
r, _ := http.NewRequest("GET", "https://myserver.com/test", nil)
|
||||
r.Host = "myserver.com"
|
||||
result := publicurl.PublicURL(r, "/path/to/resource", nil)
|
||||
Expect(result).To(Equal("https://myserver.com/path/to/resource"))
|
||||
})
|
||||
|
||||
It("falls back to localhost without request", func() {
|
||||
result := publicurl.PublicURL(nil, "/path/to/resource", nil)
|
||||
Expect(result).To(Equal("http://localhost/path/to/resource"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("AbsoluteURL", func() {
|
||||
When("BaseHost is set", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.BaseHost = "configured.example.com"
|
||||
conf.Server.BaseScheme = "https"
|
||||
conf.Server.BasePath = ""
|
||||
})
|
||||
|
||||
It("uses BaseHost and BaseScheme", func() {
|
||||
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
|
||||
result := publicurl.AbsoluteURL(r, "/path/to/resource", nil)
|
||||
Expect(result).To(Equal("https://configured.example.com/path/to/resource"))
|
||||
})
|
||||
|
||||
It("defaults to http scheme if BaseScheme is empty", func() {
|
||||
conf.Server.BaseScheme = ""
|
||||
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
|
||||
result := publicurl.AbsoluteURL(r, "/path/to/resource", nil)
|
||||
Expect(result).To(Equal("http://configured.example.com/path/to/resource"))
|
||||
})
|
||||
})
|
||||
|
||||
When("BaseHost is not set", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.BaseHost = ""
|
||||
conf.Server.BasePath = ""
|
||||
})
|
||||
|
||||
It("extracts host from request", func() {
|
||||
r, _ := http.NewRequest("GET", "https://request.example.com/test", nil)
|
||||
r.Host = "request.example.com"
|
||||
result := publicurl.AbsoluteURL(r, "/path/to/resource", nil)
|
||||
Expect(result).To(Equal("https://request.example.com/path/to/resource"))
|
||||
})
|
||||
|
||||
It("falls back to localhost without request", func() {
|
||||
result := publicurl.AbsoluteURL(nil, "/path/to/resource", nil)
|
||||
Expect(result).To(Equal("http://localhost/path/to/resource"))
|
||||
})
|
||||
})
|
||||
|
||||
When("BasePath is set", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.BasePath = "/navidrome"
|
||||
conf.Server.BaseHost = "example.com"
|
||||
conf.Server.BaseScheme = "https"
|
||||
})
|
||||
|
||||
It("prepends BasePath to the URL", func() {
|
||||
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
|
||||
result := publicurl.AbsoluteURL(r, "/path/to/resource", nil)
|
||||
Expect(result).To(Equal("https://example.com/navidrome/path/to/resource"))
|
||||
})
|
||||
})
|
||||
|
||||
It("passes through absolute URLs unchanged", func() {
|
||||
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
|
||||
result := publicurl.AbsoluteURL(r, "https://other.example.com/path", nil)
|
||||
Expect(result).To(Equal("https://other.example.com/path"))
|
||||
})
|
||||
|
||||
It("includes query parameters", func() {
|
||||
conf.Server.BaseHost = "example.com"
|
||||
conf.Server.BaseScheme = "https"
|
||||
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
|
||||
params := url.Values{"key": []string{"value"}}
|
||||
result := publicurl.AbsoluteURL(r, "/path", params)
|
||||
Expect(result).To(Equal("https://example.com/path?key=value"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("ImageURL", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.ShareURL = "https://share.example.com"
|
||||
// Initialize JWT auth for token generation
|
||||
auth.TokenAuth = jwtauth.New("HS256", []byte("test secret"), nil)
|
||||
})
|
||||
|
||||
It("generates a URL with the artwork token", func() {
|
||||
artID := model.NewArtworkID(model.KindAlbumArtwork, "album-123", nil)
|
||||
result := publicurl.ImageURL(nil, artID, 0)
|
||||
Expect(result).To(HavePrefix("https://share.example.com/share/img/"))
|
||||
})
|
||||
|
||||
It("includes size parameter when provided", func() {
|
||||
artID := model.NewArtworkID(model.KindArtistArtwork, "artist-1", nil)
|
||||
result := publicurl.ImageURL(nil, artID, 300)
|
||||
Expect(result).To(ContainSubstring("size=300"))
|
||||
})
|
||||
|
||||
It("omits size parameter when zero", func() {
|
||||
artID := model.NewArtworkID(model.KindMediaFileArtwork, "track-1", nil)
|
||||
result := publicurl.ImageURL(nil, artID, 0)
|
||||
Expect(result).ToNot(ContainSubstring("size="))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -9,27 +9,11 @@ import (
    "github.com/navidrome/navidrome/model"
)

// Loader is a function that loads a scrobbler by name.
// It returns the scrobbler and true if found, or nil and false if not available.
// This allows the buffered scrobbler to always get the current plugin instance.
type Loader func() (Scrobbler, bool)

// newBufferedScrobbler creates a buffered scrobbler that wraps a static scrobbler instance.
// Use this for builtin scrobblers that don't change.
func newBufferedScrobbler(ds model.DataStore, s Scrobbler, service string) *bufferedScrobbler {
    return newBufferedScrobblerWithLoader(ds, service, func() (Scrobbler, bool) {
        return s, true
    })
}

// newBufferedScrobblerWithLoader creates a buffered scrobbler that dynamically loads
// the underlying scrobbler on each call. Use this for plugin scrobblers that may be
// reloaded (e.g., after configuration changes).
func newBufferedScrobblerWithLoader(ds model.DataStore, service string, loader Loader) *bufferedScrobbler {
    ctx, cancel := context.WithCancel(context.Background())
    b := &bufferedScrobbler{
        ds:         ds,
        loader:     loader,
        wrapped:    s,
        service:    service,
        wakeSignal: make(chan struct{}, 1),
        ctx:        ctx,
@@ -41,7 +25,7 @@ func newBufferedScrobblerWithLoader(ds model.DataStore, service string, loader L

type bufferedScrobbler struct {
    ds         model.DataStore
    loader     Loader
    wrapped    Scrobbler
    service    string
    wakeSignal chan struct{}
    ctx        context.Context
@@ -55,19 +39,11 @@ func (b *bufferedScrobbler) Stop() {
}

func (b *bufferedScrobbler) IsAuthorized(ctx context.Context, userId string) bool {
    s, ok := b.loader()
    if !ok {
        return false
    }
    return s.IsAuthorized(ctx, userId)
    return b.wrapped.IsAuthorized(ctx, userId)
}

func (b *bufferedScrobbler) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
    s, ok := b.loader()
    if !ok {
        return errors.New("scrobbler not available")
    }
    return s.NowPlaying(ctx, userId, track, position)
    return b.wrapped.NowPlaying(ctx, userId, track, position)
}

func (b *bufferedScrobbler) Scrobble(ctx context.Context, userId string, s Scrobble) error {
@@ -131,13 +107,8 @@ func (b *bufferedScrobbler) processUserQueue(ctx context.Context, userId string)
    if entry == nil {
        return true
    }
    s, ok := b.loader()
    if !ok {
        log.Warn(ctx, "Scrobbler not available, will retry later", "scrobbler", b.service)
        return false
    }
    log.Debug(ctx, "Sending scrobble", "scrobbler", b.service, "track", entry.Title, "artist", entry.Artist)
    err = s.Scrobble(ctx, entry.UserID, Scrobble{
    err = b.wrapped.Scrobble(ctx, entry.UserID, Scrobble{
        MediaFile: entry.MediaFile,
        TimeStamp: entry.PlayTime,
    })

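The Loader indirection above lets the buffered queue resolve the underlying scrobbler on every call instead of holding a fixed instance. A sketch of how a plugin-backed scrobbler might be wired through it; the registry interface is hypothetical, while the constructor and Loader signature are the ones shown in this diff, and the snippet assumes it lives in the same package:

// Sketch only; pluginRegistry stands in for whatever component can load plugins by name.
type pluginRegistry interface {
    LoadScrobbler(name string) (Scrobbler, bool)
}

func newPluginBufferedScrobbler(ds model.DataStore, reg pluginRegistry, name string) *bufferedScrobbler {
    return newBufferedScrobblerWithLoader(ds, name, func() (Scrobbler, bool) {
        // Resolved on each call, so a plugin reloaded after a config change is picked
        // up transparently; returning false makes the queue retry the entry later.
        return reg.LoadScrobbler(name)
    })
}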
@@ -38,9 +38,9 @@ var _ = Describe("BufferedScrobbler", func() {
    It("forwards NowPlaying calls", func() {
        track := &model.MediaFile{ID: "123", Title: "Test Track"}
        Expect(bs.NowPlaying(ctx, "user1", track, 0)).To(Succeed())
        Expect(scr.GetNowPlayingCalled()).To(BeTrue())
        Expect(scr.GetUserID()).To(Equal("user1"))
        Expect(scr.GetTrack()).To(Equal(track))
        Expect(scr.NowPlayingCalled).To(BeTrue())
        Expect(scr.UserID).To(Equal("user1"))
        Expect(scr.Track).To(Equal(track))
    })

    It("enqueues scrobbles to buffer", func() {
@@ -51,10 +51,9 @@ var _ = Describe("BufferedScrobbler", func() {
        Expect(scr.ScrobbleCalled.Load()).To(BeFalse())

        Expect(bs.Scrobble(ctx, "user1", scrobble)).To(Succeed())
        Expect(buffer.Length()).To(Equal(int64(1)))

        // Wait for the background goroutine to process the scrobble.
        // We don't check buffer.Length() here because the background goroutine
        // may dequeue the entry before we can observe it.
        // Wait for the scrobble to be sent
        Eventually(scr.ScrobbleCalled.Load).Should(BeTrue())

        lastScrobble := scr.LastScrobble.Load()

@@ -31,13 +31,6 @@ type Submission struct {
|
||||
Timestamp time.Time
|
||||
}
|
||||
|
||||
type nowPlayingEntry struct {
|
||||
ctx context.Context
|
||||
userId string
|
||||
track *model.MediaFile
|
||||
position int
|
||||
}
|
||||
|
||||
type PlayTracker interface {
|
||||
NowPlaying(ctx context.Context, playerId string, playerName string, trackId string, position int) error
|
||||
GetNowPlaying(ctx context.Context) ([]NowPlayingInfo, error)
|
||||
@@ -59,11 +52,6 @@ type playTracker struct {
|
||||
pluginScrobblers map[string]Scrobbler
|
||||
pluginLoader PluginLoader
|
||||
mu sync.RWMutex
|
||||
npQueue map[string]nowPlayingEntry
|
||||
npMu sync.Mutex
|
||||
npSignal chan struct{}
|
||||
shutdown chan struct{}
|
||||
workerDone chan struct{}
|
||||
}
|
||||
|
||||
func GetPlayTracker(ds model.DataStore, broker events.Broker, pluginManager PluginLoader) PlayTracker {
|
||||
@@ -83,10 +71,6 @@ func newPlayTracker(ds model.DataStore, broker events.Broker, pluginManager Plug
|
||||
builtinScrobblers: make(map[string]Scrobbler),
|
||||
pluginScrobblers: make(map[string]Scrobbler),
|
||||
pluginLoader: pluginManager,
|
||||
npQueue: make(map[string]nowPlayingEntry),
|
||||
npSignal: make(chan struct{}, 1),
|
||||
shutdown: make(chan struct{}),
|
||||
workerDone: make(chan struct{}),
|
||||
}
|
||||
if conf.Server.EnableNowPlaying {
|
||||
m.OnExpiration(func(_ string, _ NowPlayingInfo) {
|
||||
@@ -106,17 +90,10 @@ func newPlayTracker(ds model.DataStore, broker events.Broker, pluginManager Plug
|
||||
p.builtinScrobblers[name] = s
|
||||
}
|
||||
log.Debug("List of builtin scrobblers enabled", "names", enabled)
|
||||
go p.nowPlayingWorker()
|
||||
return p
|
||||
}
|
||||
|
||||
// stopNowPlayingWorker stops the background worker. This is primarily for testing.
|
||||
func (p *playTracker) stopNowPlayingWorker() {
|
||||
close(p.shutdown)
|
||||
<-p.workerDone // Wait for worker to finish
|
||||
}
|
||||
|
||||
// pluginNamesMatchScrobblers returns true if the set of pluginNames matches the keys in pluginScrobblers.
|
||||
// pluginNamesMatchScrobblers returns true if the set of pluginNames matches the keys in pluginScrobblers
|
||||
func pluginNamesMatchScrobblers(pluginNames []string, scrobblers map[string]Scrobbler) bool {
|
||||
if len(pluginNames) != len(scrobblers) {
|
||||
return false
|
||||
@@ -129,9 +106,7 @@ func pluginNamesMatchScrobblers(pluginNames []string, scrobblers map[string]Scro
|
||||
return true
|
||||
}
|
||||
|
||||
// refreshPluginScrobblers updates the pluginScrobblers map to match the current set of plugin scrobblers.
|
||||
// The buffered scrobblers use a loader function to dynamically get the current plugin instance,
|
||||
// so we only need to add/remove scrobblers when plugins are added/removed (not when reloaded).
|
||||
// refreshPluginScrobblers updates the pluginScrobblers map to match the current set of plugin scrobblers
|
||||
func (p *playTracker) refreshPluginScrobblers() {
|
||||
p.mu.Lock()
|
||||
defer p.mu.Unlock()
|
||||
@@ -150,16 +125,15 @@ func (p *playTracker) refreshPluginScrobblers() {
|
||||
// Build a set of current plugins for faster lookups
|
||||
current := make(map[string]struct{}, len(pluginNames))
|
||||
|
||||
// Process additions - add new plugins with a loader that dynamically fetches the current instance
|
||||
// Process additions - add new plugins
|
||||
for _, name := range pluginNames {
|
||||
current[name] = struct{}{}
|
||||
// Only create a new scrobbler if it doesn't exist
|
||||
if _, exists := p.pluginScrobblers[name]; !exists {
|
||||
// Capture the name for the closure
|
||||
pluginName := name
|
||||
loader := p.pluginLoader
|
||||
p.pluginScrobblers[name] = newBufferedScrobblerWithLoader(p.ds, name, func() (Scrobbler, bool) {
|
||||
return loader.LoadScrobbler(pluginName)
|
||||
})
|
||||
s, ok := p.pluginLoader.LoadScrobbler(name)
|
||||
if ok && s != nil {
|
||||
p.pluginScrobblers[name] = newBufferedScrobbler(p.ds, s, name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -224,60 +198,11 @@ func (p *playTracker) NowPlaying(ctx context.Context, playerId string, playerNam
|
||||
}
|
||||
player, _ := request.PlayerFrom(ctx)
|
||||
if player.ScrobbleEnabled {
|
||||
p.enqueueNowPlaying(ctx, playerId, user.ID, mf, position)
|
||||
p.dispatchNowPlaying(ctx, user.ID, mf, position)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *playTracker) enqueueNowPlaying(ctx context.Context, playerId string, userId string, track *model.MediaFile, position int) {
|
||||
p.npMu.Lock()
|
||||
defer p.npMu.Unlock()
|
||||
ctx = context.WithoutCancel(ctx) // Prevent cancellation from affecting background processing
|
||||
p.npQueue[playerId] = nowPlayingEntry{
|
||||
ctx: ctx,
|
||||
userId: userId,
|
||||
track: track,
|
||||
position: position,
|
||||
}
|
||||
p.sendNowPlayingSignal()
|
||||
}
|
||||
|
||||
func (p *playTracker) sendNowPlayingSignal() {
|
||||
// Don't block if the previous signal was not read yet
|
||||
select {
|
||||
case p.npSignal <- struct{}{}:
|
||||
default:
|
||||
}
|
||||
}
|
||||
|
||||
func (p *playTracker) nowPlayingWorker() {
|
||||
defer close(p.workerDone)
|
||||
for {
|
||||
select {
|
||||
case <-p.shutdown:
|
||||
return
|
||||
case <-time.After(time.Second):
|
||||
case <-p.npSignal:
|
||||
}
|
||||
|
||||
p.npMu.Lock()
|
||||
if len(p.npQueue) == 0 {
|
||||
p.npMu.Unlock()
|
||||
continue
|
||||
}
|
||||
|
||||
// Keep a copy of the entries to process and clear the queue
|
||||
entries := p.npQueue
|
||||
p.npQueue = make(map[string]nowPlayingEntry)
|
||||
p.npMu.Unlock()
|
||||
|
||||
// Process entries without holding lock
|
||||
for _, entry := range entries {
|
||||
p.dispatchNowPlaying(entry.ctx, entry.userId, entry.track, entry.position)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (p *playTracker) dispatchNowPlaying(ctx context.Context, userId string, t *model.MediaFile, position int) {
|
||||
if t.Artist == consts.UnknownArtist {
|
||||
log.Debug(ctx, "Ignoring external NowPlaying update for track with unknown artist", "track", t.Title, "artist", t.Artist)
|
||||
@@ -351,14 +276,8 @@ func (p *playTracker) incPlay(ctx context.Context, track *model.MediaFile, times
|
||||
}
|
||||
for _, artist := range track.Participants[model.RoleArtist] {
|
||||
err = tx.Artist(ctx).IncPlayCount(artist.ID, timestamp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if conf.Server.EnableScrobbleHistory {
|
||||
return tx.Scrobble(ctx).RecordScrobble(track.ID, timestamp)
|
||||
}
|
||||
return nil
|
||||
return err
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -24,26 +24,15 @@ import (
|
||||
// Moved to top-level scope to avoid linter issues
|
||||
|
||||
type mockPluginLoader struct {
|
||||
mu sync.RWMutex
|
||||
names []string
|
||||
scrobblers map[string]Scrobbler
|
||||
}
|
||||
|
||||
func (m *mockPluginLoader) PluginNames(service string) []string {
|
||||
m.mu.RLock()
|
||||
defer m.mu.RUnlock()
|
||||
return m.names
|
||||
}
|
||||
|
||||
func (m *mockPluginLoader) SetNames(names []string) {
|
||||
m.mu.Lock()
|
||||
defer m.mu.Unlock()
|
||||
m.names = names
|
||||
}
|
||||
|
||||
func (m *mockPluginLoader) LoadScrobbler(name string) (Scrobbler, bool) {
|
||||
m.mu.RLock()
|
||||
defer m.mu.RUnlock()
|
||||
s, ok := m.scrobblers[name]
|
||||
return s, ok
|
||||
}
|
||||
@@ -57,24 +46,24 @@ var _ = Describe("PlayTracker", func() {
|
||||
var album model.Album
|
||||
var artist1 model.Artist
|
||||
var artist2 model.Artist
|
||||
var fake *fakeScrobbler
|
||||
var fake fakeScrobbler
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
ctx = GinkgoT().Context()
|
||||
ctx = context.Background()
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1"})
|
||||
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
|
||||
ds = &tests.MockDataStore{}
|
||||
fake = &fakeScrobbler{Authorized: true}
|
||||
fake = fakeScrobbler{Authorized: true}
|
||||
Register("fake", func(model.DataStore) Scrobbler {
|
||||
return fake
|
||||
return &fake
|
||||
})
|
||||
Register("disabled", func(model.DataStore) Scrobbler {
|
||||
return nil
|
||||
})
|
||||
eventBroker = &fakeEventBroker{}
|
||||
tracker = newPlayTracker(ds, eventBroker, nil)
|
||||
tracker.(*playTracker).builtinScrobblers["fake"] = fake // Bypass buffering for tests
|
||||
tracker.(*playTracker).builtinScrobblers["fake"] = &fake // Bypass buffering for tests
|
||||
|
||||
track = model.MediaFile{
|
||||
ID: "123",
|
||||
@@ -97,11 +86,6 @@ var _ = Describe("PlayTracker", func() {
|
||||
_ = ds.Album(ctx).(*tests.MockAlbumRepo).Put(&album)
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
// Stop the worker goroutine to prevent data races between tests
|
||||
tracker.(*playTracker).stopNowPlayingWorker()
|
||||
})
|
||||
|
||||
It("does not register disabled scrobblers", func() {
|
||||
Expect(tracker.(*playTracker).builtinScrobblers).To(HaveKey("fake"))
|
||||
Expect(tracker.(*playTracker).builtinScrobblers).ToNot(HaveKey("disabled"))
|
||||
@@ -111,10 +95,10 @@ var _ = Describe("PlayTracker", func() {
|
||||
It("sends track to agent", func() {
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Eventually(func() bool { return fake.GetNowPlayingCalled() }).Should(BeTrue())
|
||||
Expect(fake.GetUserID()).To(Equal("u-1"))
|
||||
Expect(fake.GetTrack().ID).To(Equal("123"))
|
||||
Expect(fake.GetTrack().Participants).To(Equal(track.Participants))
|
||||
Expect(fake.NowPlayingCalled).To(BeTrue())
|
||||
Expect(fake.UserID).To(Equal("u-1"))
|
||||
Expect(fake.Track.ID).To(Equal("123"))
|
||||
Expect(fake.Track.Participants).To(Equal(track.Participants))
|
||||
})
|
||||
It("does not send track to agent if user has not authorized", func() {
|
||||
fake.Authorized = false
|
||||
@@ -122,7 +106,7 @@ var _ = Describe("PlayTracker", func() {
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(fake.GetNowPlayingCalled()).To(BeFalse())
|
||||
Expect(fake.NowPlayingCalled).To(BeFalse())
|
||||
})
|
||||
It("does not send track to agent if player is not enabled to send scrobbles", func() {
|
||||
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: false})
|
||||
@@ -130,7 +114,7 @@ var _ = Describe("PlayTracker", func() {
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(fake.GetNowPlayingCalled()).To(BeFalse())
|
||||
Expect(fake.NowPlayingCalled).To(BeFalse())
|
||||
})
|
||||
It("does not send track to agent if artist is unknown", func() {
|
||||
track.Artist = consts.UnknownArtist
|
||||
@@ -138,7 +122,7 @@ var _ = Describe("PlayTracker", func() {
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(fake.GetNowPlayingCalled()).To(BeFalse())
|
||||
Expect(fake.NowPlayingCalled).To(BeFalse())
|
||||
})
|
||||
|
||||
It("stores position when greater than zero", func() {
|
||||
@@ -146,12 +130,11 @@ var _ = Describe("PlayTracker", func() {
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", pos)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
Eventually(func() int { return fake.GetPosition() }).Should(Equal(pos))
|
||||
|
||||
playing, err := tracker.GetNowPlaying(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(playing).To(HaveLen(1))
|
||||
Expect(playing[0].Position).To(Equal(pos))
|
||||
Expect(fake.Position).To(Equal(pos))
|
||||
})
|
||||
|
||||
It("sends event with count", func() {
|
||||
@@ -170,17 +153,6 @@ var _ = Describe("PlayTracker", func() {
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(eventBroker.getEvents()).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("passes user to scrobbler via context (fix for issue #4787)", func() {
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "testuser"})
|
||||
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
|
||||
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Eventually(func() bool { return fake.GetNowPlayingCalled() }).Should(BeTrue())
|
||||
// Verify the username was passed through async dispatch via context
|
||||
Eventually(func() string { return fake.GetUsername() }).Should(Equal("testuser"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetNowPlaying", func() {
|
||||
@@ -188,9 +160,9 @@ var _ = Describe("PlayTracker", func() {
|
||||
track2 := track
|
||||
track2.ID = "456"
|
||||
_ = ds.MediaFile(ctx).Put(&track2)
|
||||
ctx = request.WithUser(GinkgoT().Context(), model.User{UserName: "user-1"})
|
||||
ctx = request.WithUser(context.Background(), model.User{UserName: "user-1"})
|
||||
_ = tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
ctx = request.WithUser(GinkgoT().Context(), model.User{UserName: "user-2"})
|
||||
ctx = request.WithUser(context.Background(), model.User{UserName: "user-2"})
|
||||
_ = tracker.NowPlaying(ctx, "player-2", "player-two", "456", 0)
|
||||
|
||||
playing, err := tracker.GetNowPlaying(ctx)
|
||||
@@ -238,7 +210,7 @@ var _ = Describe("PlayTracker", func() {
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(fake.ScrobbleCalled.Load()).To(BeTrue())
|
||||
Expect(fake.GetUserID()).To(Equal("u-1"))
|
||||
Expect(fake.UserID).To(Equal("u-1"))
|
||||
lastScrobble := fake.LastScrobble.Load()
|
||||
Expect(lastScrobble.TimeStamp).To(BeTemporally("~", ts, 1*time.Second))
|
||||
Expect(lastScrobble.ID).To(Equal("123"))
|
||||
@@ -302,82 +274,49 @@ var _ = Describe("PlayTracker", func() {
|
||||
Expect(artist1.PlayCount).To(Equal(int64(1)))
|
||||
Expect(artist2.PlayCount).To(Equal(int64(1)))
|
||||
})
|
||||
|
||||
Context("Scrobble History", func() {
|
||||
It("records scrobble in repository", func() {
|
||||
conf.Server.EnableScrobbleHistory = true
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
|
||||
ts := time.Now()
|
||||
|
||||
err := tracker.Submit(ctx, []Submission{{TrackID: "123", Timestamp: ts}})
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
mockDS := ds.(*tests.MockDataStore)
|
||||
mockScrobble := mockDS.Scrobble(ctx).(*tests.MockScrobbleRepo)
|
||||
Expect(mockScrobble.RecordedScrobbles).To(HaveLen(1))
|
||||
Expect(mockScrobble.RecordedScrobbles[0].MediaFileID).To(Equal("123"))
|
||||
Expect(mockScrobble.RecordedScrobbles[0].UserID).To(Equal("u-1"))
|
||||
Expect(mockScrobble.RecordedScrobbles[0].SubmissionTime).To(Equal(ts))
|
||||
})
|
||||
|
||||
It("does not record scrobble when history is disabled", func() {
|
||||
conf.Server.EnableScrobbleHistory = false
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
|
||||
ts := time.Now()
|
||||
|
||||
err := tracker.Submit(ctx, []Submission{{TrackID: "123", Timestamp: ts}})
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
mockDS := ds.(*tests.MockDataStore)
|
||||
mockScrobble := mockDS.Scrobble(ctx).(*tests.MockScrobbleRepo)
|
||||
Expect(mockScrobble.RecordedScrobbles).To(HaveLen(0))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Plugin scrobbler logic", func() {
|
||||
var pluginLoader *mockPluginLoader
|
||||
var pluginFake *fakeScrobbler
|
||||
var pluginFake fakeScrobbler
|
||||
|
||||
BeforeEach(func() {
|
||||
pluginFake = &fakeScrobbler{Authorized: true}
|
||||
pluginFake = fakeScrobbler{Authorized: true}
|
||||
pluginLoader = &mockPluginLoader{
|
||||
names: []string{"plugin1"},
|
||||
scrobblers: map[string]Scrobbler{"plugin1": pluginFake},
|
||||
scrobblers: map[string]Scrobbler{"plugin1": &pluginFake},
|
||||
}
|
||||
tracker = newPlayTracker(ds, events.GetBroker(), pluginLoader)
|
||||
|
||||
// Bypass buffering for both built-in and plugin scrobblers
|
||||
tracker.(*playTracker).builtinScrobblers["fake"] = fake
|
||||
tracker.(*playTracker).pluginScrobblers["plugin1"] = pluginFake
|
||||
tracker.(*playTracker).builtinScrobblers["fake"] = &fake
|
||||
tracker.(*playTracker).pluginScrobblers["plugin1"] = &pluginFake
|
||||
})
|
||||
|
||||
It("registers and uses plugin scrobbler for NowPlaying", func() {
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Eventually(func() bool { return pluginFake.GetNowPlayingCalled() }).Should(BeTrue())
|
||||
Expect(pluginFake.NowPlayingCalled).To(BeTrue())
|
||||
})
|
||||
|
||||
It("removes plugin scrobbler if not present anymore", func() {
|
||||
// First call: plugin present
|
||||
_ = tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
Eventually(func() bool { return pluginFake.GetNowPlayingCalled() }).Should(BeTrue())
|
||||
pluginFake.nowPlayingCalled.Store(false)
|
||||
Expect(pluginFake.NowPlayingCalled).To(BeTrue())
|
||||
pluginFake.NowPlayingCalled = false
|
||||
// Remove plugin
|
||||
pluginLoader.SetNames([]string{})
|
||||
pluginLoader.names = []string{}
|
||||
_ = tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
// Should not be called since plugin was removed
|
||||
Consistently(func() bool { return pluginFake.GetNowPlayingCalled() }).Should(BeFalse())
|
||||
Expect(pluginFake.NowPlayingCalled).To(BeFalse())
|
||||
})
|
||||
|
||||
It("calls both builtin and plugin scrobblers for NowPlaying", func() {
|
||||
fake.nowPlayingCalled.Store(false)
|
||||
pluginFake.nowPlayingCalled.Store(false)
|
||||
fake.NowPlayingCalled = false
|
||||
pluginFake.NowPlayingCalled = false
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Eventually(func() bool { return fake.GetNowPlayingCalled() }).Should(BeTrue())
|
||||
Eventually(func() bool { return pluginFake.GetNowPlayingCalled() }).Should(BeTrue())
|
||||
Expect(fake.NowPlayingCalled).To(BeTrue())
|
||||
Expect(pluginFake.NowPlayingCalled).To(BeTrue())
|
||||
})
|
||||
|
||||
It("calls plugin scrobbler for Submit", func() {
|
||||
@@ -395,7 +334,7 @@ var _ = Describe("PlayTracker", func() {
|
||||
var mockedBS *mockBufferedScrobbler
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = GinkgoT().Context()
|
||||
ctx = context.Background()
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1"})
|
||||
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
|
||||
ds = &tests.MockDataStore{}
|
||||
@@ -420,7 +359,7 @@ var _ = Describe("PlayTracker", func() {
|
||||
|
||||
It("calls Stop on scrobblers when removing them", func() {
|
||||
// Change the plugin names to simulate a plugin being removed
|
||||
mockPlugin.SetNames([]string{})
|
||||
mockPlugin.names = []string{}
|
||||
|
||||
// Call refreshPluginScrobblers which should detect the removed plugin
|
||||
pTracker.refreshPluginScrobblers()
|
||||
@@ -432,189 +371,36 @@ var _ = Describe("PlayTracker", func() {
|
||||
Expect(pTracker.pluginScrobblers).NotTo(HaveKey("plugin1"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Plugin reload (config update) behavior", func() {
|
||||
var mockPlugin *mockPluginLoader
|
||||
var pTracker *playTracker
|
||||
var originalScrobbler *fakeScrobbler
|
||||
var reloadedScrobbler *fakeScrobbler
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = GinkgoT().Context()
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1"})
|
||||
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
|
||||
ds = &tests.MockDataStore{}
|
||||
|
||||
// Setup initial plugin scrobbler
|
||||
originalScrobbler = &fakeScrobbler{Authorized: true}
|
||||
reloadedScrobbler = &fakeScrobbler{Authorized: true}
|
||||
|
||||
mockPlugin = &mockPluginLoader{
|
||||
names: []string{"plugin1"},
|
||||
scrobblers: map[string]Scrobbler{"plugin1": originalScrobbler},
|
||||
}
|
||||
|
||||
// Create tracker - this will create buffered scrobblers with loaders
|
||||
pTracker = newPlayTracker(ds, events.GetBroker(), mockPlugin)
|
||||
|
||||
// Trigger initial plugin registration
|
||||
pTracker.refreshPluginScrobblers()
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
pTracker.stopNowPlayingWorker()
|
||||
})
|
||||
|
||||
It("uses the new plugin instance after reload (simulating config update)", func() {
|
||||
// First call should use the original scrobbler
|
||||
scrobblers := pTracker.getActiveScrobblers()
|
||||
pluginScr := scrobblers["plugin1"]
|
||||
Expect(pluginScr).ToNot(BeNil())
|
||||
|
||||
err := pluginScr.NowPlaying(ctx, "u-1", &track, 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(originalScrobbler.GetNowPlayingCalled()).To(BeTrue())
|
||||
Expect(reloadedScrobbler.GetNowPlayingCalled()).To(BeFalse())
|
||||
|
||||
// Simulate plugin reload (config update): replace the scrobbler in the loader
|
||||
// This is what happens when UpdatePluginConfig is called - the plugin manager
|
||||
// unloads the old plugin and loads a new instance
|
||||
mockPlugin.mu.Lock()
|
||||
mockPlugin.scrobblers["plugin1"] = reloadedScrobbler
|
||||
mockPlugin.mu.Unlock()
|
||||
|
||||
// Reset call tracking
|
||||
originalScrobbler.nowPlayingCalled.Store(false)
|
||||
|
||||
// Get scrobblers again - should still return the same buffered scrobbler
|
||||
// but subsequent calls should use the new plugin instance via the loader
|
||||
scrobblers = pTracker.getActiveScrobblers()
|
||||
pluginScr = scrobblers["plugin1"]
|
||||
|
||||
err = pluginScr.NowPlaying(ctx, "u-1", &track, 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
    // The new scrobbler should be called, not the old one
    Expect(reloadedScrobbler.GetNowPlayingCalled()).To(BeTrue())
    Expect(originalScrobbler.GetNowPlayingCalled()).To(BeFalse())
})

It("handles plugin becoming unavailable temporarily", func() {
    // First verify plugin works
    scrobblers := pTracker.getActiveScrobblers()
    pluginScr := scrobblers["plugin1"]

    err := pluginScr.NowPlaying(ctx, "u-1", &track, 0)
    Expect(err).ToNot(HaveOccurred())
    Expect(originalScrobbler.GetNowPlayingCalled()).To(BeTrue())

    // Simulate plugin becoming unavailable (e.g., during reload)
    mockPlugin.mu.Lock()
    delete(mockPlugin.scrobblers, "plugin1")
    mockPlugin.mu.Unlock()

    originalScrobbler.nowPlayingCalled.Store(false)

    // NowPlaying should return error when plugin unavailable
    err = pluginScr.NowPlaying(ctx, "u-1", &track, 0)
    Expect(err).To(HaveOccurred())
    Expect(originalScrobbler.GetNowPlayingCalled()).To(BeFalse())

    // Simulate plugin becoming available again
    mockPlugin.mu.Lock()
    mockPlugin.scrobblers["plugin1"] = reloadedScrobbler
    mockPlugin.mu.Unlock()

    // Should work again with new instance
    err = pluginScr.NowPlaying(ctx, "u-1", &track, 0)
    Expect(err).ToNot(HaveOccurred())
    Expect(reloadedScrobbler.GetNowPlayingCalled()).To(BeTrue())
})

It("IsAuthorized uses the current plugin instance", func() {
    scrobblers := pTracker.getActiveScrobblers()
    pluginScr := scrobblers["plugin1"]

    // Original is authorized
    Expect(pluginScr.IsAuthorized(ctx, "u-1")).To(BeTrue())

    // Replace with unauthorized scrobbler
    unauthorizedScrobbler := &fakeScrobbler{Authorized: false}
    mockPlugin.mu.Lock()
    mockPlugin.scrobblers["plugin1"] = unauthorizedScrobbler
    mockPlugin.mu.Unlock()

    // Should reflect the new scrobbler's authorization status
    Expect(pluginScr.IsAuthorized(ctx, "u-1")).To(BeFalse())
})
})
})

type fakeScrobbler struct {
    Authorized bool
    nowPlayingCalled atomic.Bool
    NowPlayingCalled bool
    ScrobbleCalled atomic.Bool
    userID atomic.Pointer[string]
    username atomic.Pointer[string]
    track atomic.Pointer[model.MediaFile]
    position atomic.Int32
    UserID string
    Track *model.MediaFile
    Position int
    LastScrobble atomic.Pointer[Scrobble]
    Error error
}

func (f *fakeScrobbler) GetNowPlayingCalled() bool {
    return f.nowPlayingCalled.Load()
}

func (f *fakeScrobbler) GetUserID() string {
    if p := f.userID.Load(); p != nil {
        return *p
    }
    return ""
}

func (f *fakeScrobbler) GetTrack() *model.MediaFile {
    return f.track.Load()
}

func (f *fakeScrobbler) GetPosition() int {
    return int(f.position.Load())
}

func (f *fakeScrobbler) GetUsername() string {
    if p := f.username.Load(); p != nil {
        return *p
    }
    return ""
}

func (f *fakeScrobbler) IsAuthorized(ctx context.Context, userId string) bool {
    return f.Error == nil && f.Authorized
}

func (f *fakeScrobbler) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
    f.nowPlayingCalled.Store(true)
    f.NowPlayingCalled = true
    if f.Error != nil {
        return f.Error
    }
    f.userID.Store(&userId)
    // Capture username from context (this is what plugin scrobblers do)
    username, _ := request.UsernameFrom(ctx)
    if username == "" {
        if u, ok := request.UserFrom(ctx); ok {
            username = u.UserName
        }
    }
    if username != "" {
        f.username.Store(&username)
    }
    f.track.Store(track)
    f.position.Store(int32(position))
    f.UserID = userId
    f.Track = track
    f.Position = position
    return nil
}

func (f *fakeScrobbler) Scrobble(ctx context.Context, userId string, s Scrobble) error {
    f.userID.Store(&userId)
    f.UserID = userId
    f.LastScrobble.Store(&s)
    f.ScrobbleCalled.Store(true)
    if f.Error != nil {
76 core/user.go
@@ -1,76 +0,0 @@
package core

import (
    "context"

    "github.com/deluan/rest"
    "github.com/navidrome/navidrome/model"
)

// PluginUnloader defines the interface for unloading disabled plugins.
// This is satisfied by plugins.Manager but defined here to avoid import cycles.
type PluginUnloader interface {
    UnloadDisabledPlugins(ctx context.Context)
}

// User provides business logic for user management with plugin coordination.
type User interface {
    NewRepository(ctx context.Context) rest.Repository
}

type userService struct {
    ds model.DataStore
    pluginManager PluginUnloader
}

// NewUser creates a new User service
func NewUser(ds model.DataStore, pluginManager PluginUnloader) User {
    return &userService{
        ds: ds,
        pluginManager: pluginManager,
    }
}

// NewRepository returns a REST repository wrapper for user operations.
// The wrapper intercepts Delete operations to coordinate plugin unloading.
func (s *userService) NewRepository(ctx context.Context) rest.Repository {
    repo := s.ds.User(ctx)
    wrapper := &userRepositoryWrapper{
        ctx: ctx,
        UserRepository: repo,
        pluginManager: s.pluginManager,
    }
    return wrapper
}

type userRepositoryWrapper struct {
    model.UserRepository
    ctx context.Context
    pluginManager PluginUnloader
}

// Save implements rest.Persistable by delegating to the underlying repository.
func (r *userRepositoryWrapper) Save(entity interface{}) (string, error) {
    return r.UserRepository.(rest.Persistable).Save(entity)
}

// Update implements rest.Persistable by delegating to the underlying repository.
func (r *userRepositoryWrapper) Update(id string, entity interface{}, cols ...string) error {
    return r.UserRepository.(rest.Persistable).Update(id, entity, cols...)
}

// Delete implements rest.Persistable and coordinates plugin unloading.
func (r *userRepositoryWrapper) Delete(id string) error {
    // The underlying repository Delete handles the database cleanup
    // including calling cleanupPluginUserReferences
    err := r.UserRepository.(rest.Persistable).Delete(id)
    if err != nil {
        return err
    }

    // After successful deletion, check if any plugins were auto-disabled
    // and need to be unloaded from memory
    r.pluginManager.UnloadDisabledPlugins(r.ctx)

    return nil
}
@@ -1,86 +0,0 @@
package core_test

import (
    "context"
    "errors"

    "github.com/deluan/rest"
    "github.com/navidrome/navidrome/core"
    "github.com/navidrome/navidrome/model"
    "github.com/navidrome/navidrome/tests"
    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"
)

var _ = Describe("User Service", func() {
    var service core.User
    var ds *tests.MockDataStore
    var userRepo *tests.MockedUserRepo
    var pluginManager *mockPluginUnloader
    var ctx context.Context

    BeforeEach(func() {
        ds = &tests.MockDataStore{}
        userRepo = tests.CreateMockUserRepo()
        ds.MockedUser = userRepo
        pluginManager = &mockPluginUnloader{}
        service = core.NewUser(ds, pluginManager)
        ctx = GinkgoT().Context()
    })

    Describe("NewRepository", func() {
        It("returns a rest.Persistable", func() {
            repo := service.NewRepository(ctx)
            _, ok := repo.(rest.Persistable)
            Expect(ok).To(BeTrue())
        })
    })

    Describe("Delete", func() {
        var repo rest.Persistable

        BeforeEach(func() {
            r := service.NewRepository(ctx)
            repo = r.(rest.Persistable)

            // Add a test user
            user := &model.User{
                ID: "user-123",
                UserName: "testuser",
                IsAdmin: false,
            }
            user.NewPassword = "password"
            Expect(userRepo.Put(user)).To(Succeed())
        })

        It("deletes the user successfully", func() {
            err := repo.Delete("user-123")
            Expect(err).NotTo(HaveOccurred())

            // Verify user is deleted
            _, err = userRepo.Get("user-123")
            Expect(err).To(Equal(model.ErrNotFound))
        })

        It("calls UnloadDisabledPlugins after successful deletion", func() {
            err := repo.Delete("user-123")
            Expect(err).NotTo(HaveOccurred())
            Expect(pluginManager.unloadCalls).To(Equal(1))
        })

        It("does not call UnloadDisabledPlugins when deletion fails", func() {
            // Try to delete non-existent user
            err := repo.Delete("non-existent")
            Expect(err).To(HaveOccurred())
            Expect(pluginManager.unloadCalls).To(Equal(0))
        })

        It("returns error when repository fails", func() {
            userRepo.Error = errors.New("database error")
            err := repo.Delete("user-123")
            Expect(err).To(HaveOccurred())
            Expect(err.Error()).To(ContainSubstring("database error"))
            Expect(pluginManager.unloadCalls).To(Equal(0))
        })
    })
})
@@ -18,7 +18,6 @@ var Set = wire.NewSet(
    NewShare,
    NewPlaylists,
    NewLibrary,
    NewUser,
    NewMaintenance,
    agents.GetAgents,
    external.NewProvider,
@@ -1,20 +0,0 @@
-- +goose Up
-- +goose StatementBegin
CREATE TABLE scrobbles(
    media_file_id VARCHAR(255) NOT NULL
        REFERENCES media_file(id)
            ON DELETE CASCADE
            ON UPDATE CASCADE,
    user_id VARCHAR(255) NOT NULL
        REFERENCES user(id)
            ON DELETE CASCADE
            ON UPDATE CASCADE,
    submission_time INTEGER NOT NULL
);
CREATE INDEX scrobbles_date ON scrobbles (submission_time);
-- +goose StatementEnd

-- +goose Down
-- +goose StatementBegin
DROP TABLE scrobbles;
-- +goose StatementEnd
@@ -1,99 +0,0 @@
-- +goose Up
-- Fix case-insensitive sorting for playlist names
create table playlist_dg_tmp
(
    id varchar(255) not null
        primary key,
    name varchar(255) collate NOCASE default '' not null,
    comment varchar(255) default '' not null,
    duration real default 0 not null,
    song_count integer default 0 not null,
    public bool default FALSE not null,
    created_at datetime,
    updated_at datetime,
    path string default '' not null,
    sync bool default false not null,
    size integer default 0 not null,
    rules varchar,
    evaluated_at datetime,
    owner_id varchar(255) not null
        constraint playlist_user_user_id_fk
            references user
            on update cascade on delete cascade
);

insert into playlist_dg_tmp(id, name, comment, duration, song_count, public, created_at, updated_at, path, sync, size,
                            rules, evaluated_at, owner_id)
select id, name, comment, duration, song_count, public, created_at, updated_at, path, sync, size, rules, evaluated_at,
       owner_id
from playlist;

drop table playlist;

alter table playlist_dg_tmp
    rename to playlist;

create index playlist_name
    on playlist (name);

create index playlist_created_at
    on playlist (created_at);

create index playlist_updated_at
    on playlist (updated_at);

create index playlist_evaluated_at
    on playlist (evaluated_at);

create index playlist_size
    on playlist (size);

-- +goose Down
-- Note: Downgrade loses the collation but preserves data
create table playlist_dg_tmp
(
    id varchar(255) not null
        primary key,
    name varchar(255) default '' not null,
    comment varchar(255) default '' not null,
    duration real default 0 not null,
    song_count integer default 0 not null,
    public bool default FALSE not null,
    created_at datetime,
    updated_at datetime,
    path string default '' not null,
    sync bool default false not null,
    size integer default 0 not null,
    rules varchar,
    evaluated_at datetime,
    owner_id varchar(255) not null
        constraint playlist_user_user_id_fk
            references user
            on update cascade on delete cascade
);

insert into playlist_dg_tmp(id, name, comment, duration, song_count, public, created_at, updated_at, path, sync, size,
                            rules, evaluated_at, owner_id)
select id, name, comment, duration, song_count, public, created_at, updated_at, path, sync, size, rules, evaluated_at,
       owner_id
from playlist;

drop table playlist;

alter table playlist_dg_tmp
    rename to playlist;

create index playlist_name
    on playlist (name);

create index playlist_created_at
    on playlist (created_at);

create index playlist_updated_at
    on playlist (updated_at);

create index playlist_evaluated_at
    on playlist (evaluated_at);

create index playlist_size
    on playlist (size);
@@ -1,19 +0,0 @@
-- +goose Up
CREATE TABLE IF NOT EXISTS plugin (
    id TEXT PRIMARY KEY,
    path TEXT NOT NULL,
    manifest JSONB NOT NULL,
    config JSONB,
    users JSONB,
    all_users BOOL NOT NULL DEFAULT false,
    libraries JSONB,
    all_libraries BOOL NOT NULL DEFAULT false,
    enabled BOOL NOT NULL DEFAULT false,
    last_error TEXT,
    sha256 TEXT NOT NULL,
    created_at DATETIME NOT NULL,
    updated_at DATETIME NOT NULL
);

-- +goose Down
DROP TABLE IF EXISTS plugin;
67
go.mod
67
go.mod
@@ -5,13 +5,11 @@ go 1.25
|
||||
// Fork to fix https://github.com/navidrome/navidrome/issues/3254
|
||||
replace github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 => github.com/deluan/tag v0.0.0-20241002021117-dfe5e6ea396d
|
||||
|
||||
replace go.senan.xyz/taglib => github.com/deluan/go-taglib v0.0.0-20260117215539-f414ba45cbd9
|
||||
|
||||
require (
|
||||
github.com/Masterminds/squirrel v1.5.4
|
||||
github.com/RaveNoX/go-jsoncommentstrip v1.0.0
|
||||
github.com/andybalholm/cascadia v1.3.3
|
||||
github.com/bmatcuk/doublestar/v4 v4.9.2
|
||||
github.com/bmatcuk/doublestar/v4 v4.9.1
|
||||
github.com/bradleyjkemp/cupaloy/v2 v2.8.0
|
||||
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf
|
||||
github.com/deluan/sanitize v0.0.0-20241120162836-fdfd8fdfaa55
|
||||
@@ -23,9 +21,8 @@ require (
|
||||
github.com/djherbis/stream v1.4.0
|
||||
github.com/djherbis/times v1.6.0
|
||||
github.com/dustin/go-humanize v1.0.1
|
||||
github.com/extism/go-sdk v1.7.1
|
||||
github.com/fatih/structs v1.1.0
|
||||
github.com/go-chi/chi/v5 v5.2.4
|
||||
github.com/go-chi/chi/v5 v5.2.3
|
||||
github.com/go-chi/cors v1.2.2
|
||||
github.com/go-chi/httprate v0.15.0
|
||||
github.com/go-chi/jwtauth/v5 v5.3.3
|
||||
@@ -39,15 +36,16 @@ require (
|
||||
github.com/jellydator/ttlcache/v3 v3.4.0
|
||||
github.com/kardianos/service v1.2.4
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51
|
||||
github.com/knqyf263/go-plugin v0.9.0
|
||||
github.com/kr/pretty v0.3.1
|
||||
github.com/lestrrat-go/jwx/v2 v2.1.6
|
||||
github.com/maruel/natural v1.3.0
|
||||
github.com/maruel/natural v1.2.1
|
||||
github.com/matoous/go-nanoid/v2 v2.1.0
|
||||
github.com/mattn/go-sqlite3 v1.14.33
|
||||
github.com/mattn/go-sqlite3 v1.14.32
|
||||
github.com/microcosm-cc/bluemonday v1.0.27
|
||||
github.com/mileusna/useragent v1.3.5
|
||||
github.com/onsi/ginkgo/v2 v2.27.5
|
||||
github.com/onsi/gomega v1.39.0
|
||||
github.com/onsi/ginkgo/v2 v2.27.2
|
||||
github.com/onsi/gomega v1.38.2
|
||||
github.com/pelletier/go-toml/v2 v2.2.4
|
||||
github.com/pocketbase/dbx v1.11.0
|
||||
github.com/pressly/goose/v3 v3.26.0
|
||||
@@ -56,21 +54,21 @@ require (
|
||||
github.com/robfig/cron/v3 v3.0.1
|
||||
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
|
||||
github.com/sirupsen/logrus v1.9.3
|
||||
github.com/spf13/cobra v1.10.2
|
||||
github.com/spf13/cobra v1.10.1
|
||||
github.com/spf13/viper v1.21.0
|
||||
github.com/stretchr/testify v1.11.1
|
||||
github.com/tetratelabs/wazero v1.11.0
|
||||
github.com/tetratelabs/wazero v1.10.1
|
||||
github.com/unrolled/secure v1.17.0
|
||||
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342
|
||||
go.senan.xyz/taglib v0.0.0-00010101000000-000000000000
|
||||
go.uber.org/goleak v1.3.0
|
||||
golang.org/x/image v0.35.0
|
||||
golang.org/x/net v0.49.0
|
||||
golang.org/x/sync v0.19.0
|
||||
golang.org/x/sys v0.40.0
|
||||
golang.org/x/term v0.39.0
|
||||
golang.org/x/text v0.33.0
|
||||
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
|
||||
golang.org/x/image v0.33.0
|
||||
golang.org/x/net v0.47.0
|
||||
golang.org/x/sync v0.18.0
|
||||
golang.org/x/sys v0.38.0
|
||||
golang.org/x/text v0.31.0
|
||||
golang.org/x/time v0.14.0
|
||||
google.golang.org/protobuf v1.36.10
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
@@ -82,23 +80,20 @@ require (
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/cespare/reflex v0.3.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/creack/pty v1.1.24 // indirect
|
||||
github.com/creack/pty v1.1.11 // indirect
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
|
||||
github.com/dylibso/observe-sdk/go v0.0.0-20240828172851-9145d8ad07e1 // indirect
|
||||
github.com/fsnotify/fsnotify v1.9.0 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
|
||||
github.com/gobwas/glob v0.2.3 // indirect
|
||||
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/goccy/go-yaml v1.19.2 // indirect
|
||||
github.com/goccy/go-yaml v1.18.0 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/pprof v0.0.0-20260111202518-71be6bfdd440 // indirect
|
||||
github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8 // indirect
|
||||
github.com/google/subcommands v1.2.0 // indirect
|
||||
github.com/gorilla/css v1.0.1 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20251118225945-96ee0021ea0f // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
||||
github.com/kr/text v0.2.0 // indirect
|
||||
@@ -116,8 +111,8 @@ require (
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/common v0.67.5 // indirect
|
||||
github.com/prometheus/procfs v0.19.2 // indirect
|
||||
github.com/prometheus/common v0.66.1 // indirect
|
||||
github.com/prometheus/procfs v0.16.1 // indirect
|
||||
github.com/rogpeppe/go-internal v1.14.1 // indirect
|
||||
github.com/sagikazarmark/locafero v0.12.0 // indirect
|
||||
github.com/sanity-io/litter v1.5.8 // indirect
|
||||
@@ -127,21 +122,17 @@ require (
|
||||
github.com/spf13/afero v1.15.0 // indirect
|
||||
github.com/spf13/cast v1.10.0 // indirect
|
||||
github.com/spf13/pflag v1.0.10 // indirect
|
||||
github.com/stretchr/objx v0.5.3 // indirect
|
||||
github.com/stretchr/objx v0.5.2 // indirect
|
||||
github.com/subosito/gotenv v1.6.0 // indirect
|
||||
github.com/tetratelabs/wabin v0.0.0-20230304001439-f6f874872834 // indirect
|
||||
github.com/zeebo/xxh3 v1.0.2 // indirect
|
||||
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.3 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.2 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/crypto v0.47.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 // indirect
|
||||
golang.org/x/mod v0.32.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20260109210033-bd525da824e2 // indirect
|
||||
golang.org/x/tools v0.41.0 // indirect
|
||||
google.golang.org/protobuf v1.36.11 // indirect
|
||||
gopkg.in/ini.v1 v1.67.1 // indirect
|
||||
golang.org/x/crypto v0.45.0 // indirect
|
||||
golang.org/x/mod v0.30.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 // indirect
|
||||
golang.org/x/tools v0.39.0 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
|
||||
)
|
||||
|
||||
|
||||
132
go.sum
132
go.sum
@@ -16,8 +16,8 @@ github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuP
|
||||
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
|
||||
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
|
||||
github.com/bmatcuk/doublestar/v4 v4.9.2 h1:b0mc6WyRSYLjzofB2v/0cuDUZ+MqoGyH3r0dVij35GI=
|
||||
github.com/bmatcuk/doublestar/v4 v4.9.2/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
|
||||
github.com/bmatcuk/doublestar/v4 v4.9.1 h1:X8jg9rRZmJd4yRy7ZeNDRnM+T3ZfHv15JiBJ/avrEXE=
|
||||
github.com/bmatcuk/doublestar/v4 v4.9.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
|
||||
github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M=
|
||||
github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0=
|
||||
github.com/cespare/reflex v0.3.1 h1:N4Y/UmRrjwOkNT0oQQnYsdr6YBxvHqtSfPB4mqOyAKk=
|
||||
@@ -26,9 +26,8 @@ github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UF
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw=
|
||||
github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s=
|
||||
github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE=
|
||||
github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@@ -36,8 +35,6 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc=
|
||||
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40=
|
||||
github.com/deluan/go-taglib v0.0.0-20260117215539-f414ba45cbd9 h1:h2YB9dUm8wQ1ERSQCFuIJp3tecnm+AOJiRFrw3zJa3E=
|
||||
github.com/deluan/go-taglib v0.0.0-20260117215539-f414ba45cbd9/go.mod h1:sKDN0U4qXDlq6LFK+aOAkDH4Me5nDV1V/A4B+B69xBA=
|
||||
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf h1:tb246l2Zmpt/GpF9EcHCKTtwzrd0HGfEmoODFA/qnk4=
|
||||
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf/go.mod h1:tSgDythFsl0QgS/PFWfIZqcJKnkADWneY80jaVRlqK8=
|
||||
github.com/deluan/sanitize v0.0.0-20241120162836-fdfd8fdfaa55 h1:wSCnggTs2f2ji6nFwQmfwgINcmSMj0xF0oHnoyRSPe4=
|
||||
@@ -58,10 +55,6 @@ github.com/djherbis/times v1.6.0 h1:w2ctJ92J8fBvWPxugmXIv7Nz7Q3iDMKNx9v5ocVH20c=
|
||||
github.com/djherbis/times v1.6.0/go.mod h1:gOHeRAz2h+VJNZ5Gmc/o7iD9k4wW7NMVqieYCY99oc0=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/dylibso/observe-sdk/go v0.0.0-20240828172851-9145d8ad07e1 h1:idfl8M8rPW93NehFw5H1qqH8yG158t5POr+LX9avbJY=
|
||||
github.com/dylibso/observe-sdk/go v0.0.0-20240828172851-9145d8ad07e1/go.mod h1:C8DzXehI4zAbrdlbtOByKX6pfivJTBiV9Jjqv56Yd9Q=
|
||||
github.com/extism/go-sdk v1.7.1 h1:lWJos6uY+tRFdlIHR+SJjwFDApY7OypS/2nMhiVQ9Sw=
|
||||
github.com/extism/go-sdk v1.7.1/go.mod h1:IT+Xdg5AZM9hVtpFUA+uZCJMge/hbvshl8bwzLtFyKA=
|
||||
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
|
||||
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
|
||||
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
||||
@@ -75,8 +68,8 @@ github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZ
|
||||
github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk=
|
||||
github.com/gkampitakis/go-snaps v0.5.15 h1:amyJrvM1D33cPHwVrjo9jQxX8g/7E2wYdZ+01KS3zGE=
|
||||
github.com/gkampitakis/go-snaps v0.5.15/go.mod h1:HNpx/9GoKisdhw9AFOBT1N7DBs9DiHo/hGheFGBZ+mc=
|
||||
github.com/go-chi/chi/v5 v5.2.4 h1:WtFKPHwlywe8Srng8j2BhOD9312j9cGUxG1SP4V2cR4=
|
||||
github.com/go-chi/chi/v5 v5.2.4/go.mod h1:X7Gx4mteadT3eDOMTsXzmI4/rwUpOwBHLpAfupzFJP0=
|
||||
github.com/go-chi/chi/v5 v5.2.3 h1:WQIt9uxdsAbgIYgid+BpYc+liqQZGMHRaUwp0JUcvdE=
|
||||
github.com/go-chi/chi/v5 v5.2.3/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
|
||||
github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE=
|
||||
github.com/go-chi/cors v1.2.2/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
|
||||
github.com/go-chi/httprate v0.15.0 h1:j54xcWV9KGmPf/X4H32/aTH+wBlrvxL7P+SdnRqxh5g=
|
||||
@@ -92,14 +85,12 @@ github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1v
|
||||
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
|
||||
github.com/go-viper/encoding/ini v0.1.1 h1:MVWY7B2XNw7lnOqHutGRc97bF3rP7omOdgjdMPAJgbs=
|
||||
github.com/go-viper/encoding/ini v0.1.1/go.mod h1:Pfi4M2V1eAGJVZ5q6FrkHPhtHED2YgLlXhvgMVrB+YQ=
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0 h1:vM5IJoUAy3d7zRSVtIwQgBj7BiWtMPfmPEgAXnvj1Ro=
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
|
||||
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
|
||||
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
|
||||
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
|
||||
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM=
|
||||
github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
|
||||
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/gohugoio/hashstructure v0.6.0 h1:7wMB/2CfXoThFYhdWRGv3u3rUM761Cq29CxUW+NltUg=
|
||||
github.com/gohugoio/hashstructure v0.6.0/go.mod h1:lapVLk9XidheHG1IQ4ZSbyYrXcaILU1ZEP/+vno5rBQ=
|
||||
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
@@ -108,8 +99,8 @@ github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc h1:hd+uUVsB1vdxohPneMrhGH2YfQuH5hRIK9u4/XCeUtw=
|
||||
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc/go.mod h1:SL66SJVysrh7YbDCP9tH30b8a9o/N2HeiQNUm85EKhc=
|
||||
github.com/google/pprof v0.0.0-20260111202518-71be6bfdd440 h1:oKBqR+eQXiIM7X8K1JEg9aoTEePLq/c6Awe484abOuA=
|
||||
github.com/google/pprof v0.0.0-20260111202518-71be6bfdd440/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI=
|
||||
github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8 h1:3DsUAV+VNEQa2CUVLxCY3f87278uWfIDhJnbdvDjvmE=
|
||||
github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8/go.mod h1:I6V7YzU0XDpsHqbsyrghnFZLO1gwK6NPTNvmetQIk9U=
|
||||
github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE=
|
||||
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
@@ -127,8 +118,6 @@ github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY
|
||||
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
|
||||
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20251118225945-96ee0021ea0f h1:Fnl4pzx8SR7k7JuzyW8lEtSFH6EQ8xgcypgIn8pcGIE=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20251118225945-96ee0021ea0f/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jellydator/ttlcache/v3 v3.4.0 h1:YS4P125qQS0tNhtL6aeYkheEaB/m8HCqdMMP4mnWdTY=
|
||||
@@ -145,6 +134,8 @@ github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zt
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
|
||||
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/knqyf263/go-plugin v0.9.0 h1:CQs2+lOPIlkZVtcb835ZYDEoyyWJWLbSTWeCs0EwTwI=
|
||||
github.com/knqyf263/go-plugin v0.9.0/go.mod h1:2z5lCO1/pez6qGo8CvCxSlBFSEat4MEp1DrnA+f7w8Q=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
@@ -171,14 +162,14 @@ github.com/lestrrat-go/jwx/v2 v2.1.6 h1:hxM1gfDILk/l5ylers6BX/Eq1m/pnxe9NBwW6lVf
|
||||
github.com/lestrrat-go/jwx/v2 v2.1.6/go.mod h1:Y722kU5r/8mV7fYDifjug0r8FK8mZdw0K0GpJw/l8pU=
|
||||
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
|
||||
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
|
||||
github.com/maruel/natural v1.3.0 h1:VsmCsBmEyrR46RomtgHs5hbKADGRVtliHTyCOLFBpsg=
|
||||
github.com/maruel/natural v1.3.0/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
|
||||
github.com/maruel/natural v1.2.1 h1:G/y4pwtTA07lbQsMefvsmEO0VN0NfqpxprxXDM4R/4o=
|
||||
github.com/maruel/natural v1.2.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
|
||||
github.com/matoous/go-nanoid/v2 v2.1.0 h1:P64+dmq21hhWdtvZfEAofnvJULaRR1Yib0+PnU669bE=
|
||||
github.com/matoous/go-nanoid/v2 v2.1.0/go.mod h1:KlbGNQ+FhrUNIHUxZdL63t7tl4LaPkZNpUULS8H4uVM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-sqlite3 v1.14.33 h1:A5blZ5ulQo2AtayQ9/limgHEkFreKj1Dv226a1K73s0=
|
||||
github.com/mattn/go-sqlite3 v1.14.33/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
|
||||
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
|
||||
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
|
||||
github.com/mfridman/tparse v0.18.0 h1:wh6dzOKaIwkUGyKgOntDW4liXSo37qg5AXbIhkMV3vE=
|
||||
@@ -195,10 +186,10 @@ github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdh
|
||||
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/ogier/pflag v0.0.1 h1:RW6JSWSu/RkSatfcLtogGfFgpim5p7ARQ10ECk5O750=
|
||||
github.com/ogier/pflag v0.0.1/go.mod h1:zkFki7tvTa0tafRvTBIZTvzYyAu6kQhPZFnshFFPE+g=
|
||||
github.com/onsi/ginkgo/v2 v2.27.5 h1:ZeVgZMx2PDMdJm/+w5fE/OyG6ILo1Y3e+QX4zSR0zTE=
|
||||
github.com/onsi/ginkgo/v2 v2.27.5/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
|
||||
github.com/onsi/gomega v1.39.0 h1:y2ROC3hKFmQZJNFeGAMeHZKkjBL65mIZcvrLQBF9k6Q=
|
||||
github.com/onsi/gomega v1.39.0/go.mod h1:ZCU1pkQcXDO5Sl9/VVEGlDyp+zm0m1cmeG5TOzLgdh4=
|
||||
github.com/onsi/ginkgo/v2 v2.27.2 h1:LzwLj0b89qtIy6SSASkzlNvX6WktqurSHwkk2ipF/Ns=
|
||||
github.com/onsi/ginkgo/v2 v2.27.2/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
|
||||
github.com/onsi/gomega v1.38.2 h1:eZCjf2xjZAqe+LeWvKb5weQ+NcPwX84kqJ0cZNxok2A=
|
||||
github.com/onsi/gomega v1.38.2/go.mod h1:W2MJcYxRGV63b418Ai34Ud0hEdTVXq9NW9+Sx6uXf3k=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
@@ -216,10 +207,10 @@ github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h
|
||||
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
|
||||
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
|
||||
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
|
||||
github.com/prometheus/common v0.67.5 h1:pIgK94WWlQt1WLwAC5j2ynLaBRDiinoAb86HZHTUGI4=
|
||||
github.com/prometheus/common v0.67.5/go.mod h1:SjE/0MzDEEAyrdr5Gqc6G+sXI67maCxzaT3A2+HqjUw=
|
||||
github.com/prometheus/procfs v0.19.2 h1:zUMhqEW66Ex7OXIiDkll3tl9a1ZdilUOd/F6ZXw4Vws=
|
||||
github.com/prometheus/procfs v0.19.2/go.mod h1:M0aotyiemPhBCM0z5w87kL22CxfcH05ZpYlu+b4J7mw=
|
||||
github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
|
||||
github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
|
||||
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
|
||||
github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/rjeczalik/notify v0.9.3 h1:6rJAzHTGKXGj76sbRgDiDcYj/HniypXmSJo1SWakZeY=
|
||||
@@ -253,8 +244,8 @@ github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
|
||||
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
|
||||
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
|
||||
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
|
||||
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
|
||||
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
|
||||
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
|
||||
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
|
||||
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
|
||||
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
@@ -262,27 +253,20 @@ github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU=
|
||||
github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/objx v0.5.3 h1:jmXUvGomnU1o3W/V5h2VEradbpJDwGrzugQQvL0POH4=
|
||||
github.com/stretchr/objx v0.5.3/go.mod h1:rDQraq+vQZU7Fde9LOZLr8Tax6zZvy4kuNKF+QYS+U0=
|
||||
github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
|
||||
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
|
||||
github.com/tetratelabs/wabin v0.0.0-20230304001439-f6f874872834 h1:ZF+QBjOI+tILZjBaFj3HgFonKXUcwgJ4djLb6i42S3Q=
|
||||
github.com/tetratelabs/wabin v0.0.0-20230304001439-f6f874872834/go.mod h1:m9ymHTgNSEjuxvw8E7WWe4Pl4hZQHXONY8wE6dMLaRk=
|
||||
github.com/tetratelabs/wazero v1.11.0 h1:+gKemEuKCTevU4d7ZTzlsvgd1uaToIDtlQlmNbwqYhA=
|
||||
github.com/tetratelabs/wazero v1.11.0/go.mod h1:eV28rsN8Q+xwjogd7f4/Pp4xFxO7uOGbLcD/LzB1wiU=
|
||||
github.com/tetratelabs/wazero v1.10.1 h1:2DugeJf6VVk58KTPszlNfeeN8AhhpwcZqkJj2wwFuH8=
|
||||
github.com/tetratelabs/wazero v1.10.1/go.mod h1:DRm5twOQ5Gr1AoEdSi0CLjDQF1J9ZAuyqFIjl1KKfQU=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
@@ -300,14 +284,12 @@ github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
|
||||
github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
|
||||
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
|
||||
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
|
||||
go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A=
|
||||
go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||
go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0=
|
||||
go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8=
|
||||
go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
|
||||
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
@@ -316,20 +298,20 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
|
||||
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU=
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU=
|
||||
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6 h1:zfMcR1Cs4KNuomFFgGefv5N0czO2XZpUbxGUy8i8ug0=
|
||||
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
|
||||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/image v0.35.0 h1:LKjiHdgMtO8z7Fh18nGY6KDcoEtVfsgLDPeLyguqb7I=
|
||||
golang.org/x/image v0.35.0/go.mod h1:MwPLTVgvxSASsxdLzKrl8BRFuyqMyGhLwmC+TO1Sybk=
|
||||
golang.org/x/image v0.33.0 h1:LXRZRnv1+zGd5XBUVRFmYEphyyKJjQjCRiOuAP3sZfQ=
|
||||
golang.org/x/image v0.33.0/go.mod h1:DD3OsTYT9chzuzTQt+zMcOlBHgfoKQb1gry8p76Y1sc=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
|
||||
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
|
||||
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
|
||||
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
@@ -341,8 +323,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
|
||||
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
|
||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@@ -350,8 +332,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
||||
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180926160741-c2ed4eda69e7/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -368,11 +350,11 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
|
||||
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/telemetry v0.0.0-20260109210033-bd525da824e2 h1:O1cMQHRfwNpDfDJerqRoE2oD+AFlyid87D40L/OkkJo=
|
||||
golang.org/x/telemetry v0.0.0-20260109210033-bd525da824e2/go.mod h1:b7fPSJ0pKZ3ccUh8gnTONJxhn3c/PS6tyzQvyqw4iA8=
|
||||
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 h1:E2/AqCUMZGgd73TQkxUMcMla25GB9i/5HOdLr+uH7Vo=
|
||||
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
@@ -381,8 +363,6 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
|
||||
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
@@ -393,8 +373,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
@@ -404,17 +384,17 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
|
||||
golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
||||
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||
google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE=
|
||||
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/ini.v1 v1.67.1 h1:tVBILHy0R6e4wkYOn3XmiITt/hEVH4TFMYvAX2Ytz6k=
|
||||
gopkg.in/ini.v1 v1.67.1/go.mod h1:x/cyOwCgZqOkJoDIJ3c1KNHMo10+nLGAhh+kn3Zizss=
|
||||
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
|
||||
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce h1:+JknDZhAj8YMt7GC73Ei8pv4MzjDUNPHgQWJdtMAaDU=
|
||||
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce/go.mod h1:5AcXVHNjg+BDxry382+8OKon8SEWiKktQR07RKPsv1c=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
|
||||
24 log/log.go
@@ -29,8 +29,8 @@ var redacted = &Hook{
    "(Secret:\")[\\w]*",
    "(Spotify.*ID:\")[\\w]*",
    "(PasswordEncryptionKey:[\\s]*\")[^\"]*",
    "(UserHeader:[\\s]*\")[^\"]*",
    "(TrustedSources:[\\s]*\")[^\"]*",
    "(ReverseProxyUserHeader:[\\s]*\")[^\"]*",
    "(ReverseProxyWhitelist:[\\s]*\")[^\"]*",
    "(MetricsPath:[\\s]*\")[^\"]*",
    "(DevAutoCreateAdminPassword:[\\s]*\")[^\"]*",
    "(DevAutoLoginUsername:[\\s]*\")[^\"]*",
@@ -88,11 +88,11 @@ func SetLevel(l Level) {
}

func SetLevelString(l string) {
    level := ParseLogLevel(l)
    level := levelFromString(l)
    SetLevel(level)
}

func ParseLogLevel(l string) Level {
func levelFromString(l string) Level {
    envLevel := strings.ToLower(l)
    var level Level
    switch envLevel {
@@ -118,7 +118,7 @@ func SetLogLevels(levels map[string]string) {
    defer loggerMu.Unlock()
    logLevels = nil
    for k, v := range levels {
        logLevels = append(logLevels, levelPath{path: k, level: ParseLogLevel(v)})
        logLevels = append(logLevels, levelPath{path: k, level: levelFromString(v)})
    }
    sort.Slice(logLevels, func(i, j int) bool {
        return logLevels[i].path > logLevels[j].path
@@ -185,31 +185,31 @@ func IsGreaterOrEqualTo(level Level) bool {
}

func Fatal(args ...interface{}) {
    Log(LevelFatal, args...)
    log(LevelFatal, args...)
    os.Exit(1)
}

func Error(args ...interface{}) {
    Log(LevelError, args...)
    log(LevelError, args...)
}

func Warn(args ...interface{}) {
    Log(LevelWarn, args...)
    log(LevelWarn, args...)
}

func Info(args ...interface{}) {
    Log(LevelInfo, args...)
    log(LevelInfo, args...)
}

func Debug(args ...interface{}) {
    Log(LevelDebug, args...)
    log(LevelDebug, args...)
}

func Trace(args ...interface{}) {
    Log(LevelTrace, args...)
    log(LevelTrace, args...)
}

func Log(level Level, args ...interface{}) {
func log(level Level, args ...interface{}) {
    if !shouldLog(level, 3) {
        return
    }
@@ -38,8 +38,6 @@ type DataStore interface {
    User(ctx context.Context) UserRepository
    UserProps(ctx context.Context) UserPropsRepository
    ScrobbleBuffer(ctx context.Context) ScrobbleBufferRepository
    Scrobble(ctx context.Context) ScrobbleRepository
    Plugin(ctx context.Context) PluginRepository

    Resource(ctx context.Context, model interface{}) ResourceRepository

@@ -1,30 +0,0 @@
package model

import "time"

type Plugin struct {
    ID string `structs:"id" json:"id"`
    Path string `structs:"path" json:"path"`
    Manifest string `structs:"manifest" json:"manifest"`
    Config string `structs:"config" json:"config,omitempty"`
    Users string `structs:"users" json:"users,omitempty"`
    AllUsers bool `structs:"all_users" json:"allUsers,omitempty"`
    Libraries string `structs:"libraries" json:"libraries,omitempty"`
    AllLibraries bool `structs:"all_libraries" json:"allLibraries,omitempty"`
    Enabled bool `structs:"enabled" json:"enabled"`
    LastError string `structs:"last_error" json:"lastError,omitempty"`
    SHA256 string `structs:"sha256" json:"sha256"`
    CreatedAt time.Time `structs:"created_at" json:"createdAt"`
    UpdatedAt time.Time `structs:"updated_at" json:"updatedAt"`
}

type Plugins []Plugin

type PluginRepository interface {
    ResourceRepository
    CountAll(options ...QueryOptions) (int64, error)
    Delete(id string) error
    Get(id string) (*Plugin, error)
    GetAll(options ...QueryOptions) (Plugins, error)
    Put(p *Plugin) error
}
@@ -1,13 +0,0 @@
package model

import "time"

type Scrobble struct {
    MediaFileID string
    UserID string
    SubmissionTime time.Time
}

type ScrobbleRepository interface {
    RecordScrobble(mediaFileID string, submissionTime time.Time) error
}
@@ -42,11 +42,8 @@ func (u User) HasLibraryAccess(libraryID int) bool {
type Users []User

type UserRepository interface {
    ResourceRepository
    CountAll(...QueryOptions) (int64, error)
    Delete(id string) error
    Get(id string) (*User, error)
    GetAll(options ...QueryOptions) (Users, error)
    Put(*User) error
    UpdateLastLoginAt(id string) error
    UpdateLastAccessAt(id string) error
@@ -512,70 +512,6 @@ var _ = Describe("AlbumRepository", func() {
        // Clean up the test album created for this test
        _, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
    })

    It("removes stale role associations when artist role changes", func() {
        // Regression test for issue #4242: Composers displayed in albumartist list
        // This happens when an artist's role changes (e.g., was both albumartist and composer,
        // now only composer) and the old role association isn't properly removed.

        // Create an artist that will have changing roles
        artist := &model.Artist{
            ID: "role-change-artist-1",
            Name: "Role Change Artist",
            OrderArtistName: "role change artist",
        }
        err := createArtistWithLibrary(artistRepo, artist, 1)
        Expect(err).ToNot(HaveOccurred())

        // Create album with artist as both albumartist and composer
        album := &model.Album{
            LibraryID: 1,
            ID: "test-album-role-change",
            Name: "Test Album Role Change",
            AlbumArtistID: "role-change-artist-1",
            AlbumArtist: "Role Change Artist",
            Participants: model.Participants{
                model.RoleAlbumArtist: {
                    {Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
                },
                model.RoleComposer: {
                    {Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
                },
            },
        }

        err = albumRepo.Put(album)
        Expect(err).ToNot(HaveOccurred())

        // Verify initial state: artist has both albumartist and composer roles
        expected := []albumArtistRecord{
            {ArtistID: "role-change-artist-1", Role: "albumartist", SubRole: ""},
            {ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
        }
        verifyAlbumArtists(album.ID, expected)

        // Now update album so artist is ONLY a composer (remove albumartist role)
        album.Participants = model.Participants{
            model.RoleComposer: {
                {Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
            },
        }

        err = albumRepo.Put(album)
        Expect(err).ToNot(HaveOccurred())

        // Verify that the albumartist role was removed - only composer should remain
        // This is the key test: before the fix, the albumartist role would remain
        // causing composers to appear in the albumartist filter
        expectedAfter := []albumArtistRecord{
            {ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
        }
        verifyAlbumArtists(album.ID, expectedAfter)

        // Clean up
        _, _ = artistRepo.executeSQL(squirrel.Delete("artist").Where(squirrel.Eq{"id": artist.ID}))
        _, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
    })
})
})
Some files were not shown because too many files have changed in this diff.