Compare commits


1 Commit

Author: copilot-swe-agent[bot]    SHA1: d05b5f5eb2    Message: Initial plan    Date: 2025-11-08 19:00:39 +00:00
656 changed files with 37347 additions and 52117 deletions

View File

@@ -9,19 +9,12 @@ ARG INSTALL_NODE="true"
ARG NODE_VERSION="lts/*"
RUN if [ "${INSTALL_NODE}" = "true" ]; then su vscode -c "source /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi
# Install additional OS packages
# [Optional] Uncomment this section to install additional OS packages.
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends ffmpeg
&& apt-get -y install --no-install-recommends libtag1-dev ffmpeg
# Install TagLib from cross-taglib releases
ARG CROSS_TAGLIB_VERSION="2.1.1-1"
ARG TARGETARCH
RUN DOWNLOAD_ARCH="linux-${TARGETARCH}" \
&& wget -q "https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/taglib-${DOWNLOAD_ARCH}.tar.gz" -O /tmp/cross-taglib.tar.gz \
&& tar -xzf /tmp/cross-taglib.tar.gz -C /usr --strip-components=1 \
&& mv /usr/include/taglib/* /usr/include/ \
&& rmdir /usr/include/taglib \
&& rm /tmp/cross-taglib.tar.gz /usr/provenance.json
# [Optional] Uncomment the next line to use go get to install anything else you need
# RUN go get -x <your-dependency-or-tool>
# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1

View File

@@ -7,8 +7,7 @@
"VARIANT": "1.25",
// Options
"INSTALL_NODE": "true",
"NODE_VERSION": "v24",
"CROSS_TAGLIB_VERSION": "2.1.1-1"
"NODE_VERSION": "v24"
}
},
"workspaceMount": "",
@@ -55,10 +54,12 @@
4533,
4633
],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "make setup-dev",
// Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode",
"remoteEnv": {
"ND_MUSICFOLDER": "./music",
"ND_DATAFOLDER": "./data"
}
}
}

View File

@@ -25,7 +25,7 @@ jobs:
git_tag: ${{ steps.git-version.outputs.GIT_TAG }}
git_sha: ${{ steps.git-version.outputs.GIT_SHA }}
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
with:
fetch-depth: 0
fetch-tags: true
@@ -63,7 +63,7 @@ jobs:
name: Lint Go code
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Download TagLib
uses: ./.github/actions/download-taglib
@@ -71,7 +71,7 @@ jobs:
version: ${{ env.CROSS_TAGLIB_VERSION }}
- name: golangci-lint
uses: golangci/golangci-lint-action@v9
uses: golangci/golangci-lint-action@v8
with:
version: latest
problem-matchers: true
@@ -88,22 +88,12 @@ jobs:
exit 1
fi
- name: Run go generate
run: go generate ./...
- name: Verify no changes from go generate
run: |
git status --porcelain
if [ -n "$(git status --porcelain)" ]; then
echo 'Generated code is out of date. Run "make gen" and commit the changes'
exit 1
fi
go:
name: Test Go code
runs-on: ubuntu-latest
steps:
- name: Check out code into the Go module directory
uses: actions/checkout@v6
uses: actions/checkout@v5
- name: Download TagLib
uses: ./.github/actions/download-taglib
@@ -124,7 +114,7 @@ jobs:
env:
NODE_OPTIONS: "--max_old_space_size=4096"
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- uses: actions/setup-node@v6
with:
node-version: 24
@@ -155,7 +145,7 @@ jobs:
name: Lint i18n files
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- run: |
set -e
for file in resources/i18n/*.json; do
@@ -201,7 +191,7 @@ jobs:
PLATFORM=$(echo ${{ matrix.platform }} | tr '/' '_')
echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Prepare Docker Buildx
uses: ./.github/actions/prepare-docker
@@ -227,7 +217,7 @@ jobs:
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
- name: Upload Binaries
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: navidrome-${{ env.PLATFORM }}
path: ./output
@@ -258,7 +248,7 @@ jobs:
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
with:
name: digests-${{ env.PLATFORM }}
@@ -266,55 +256,18 @@ jobs:
if-no-files-found: error
retention-days: 1
push-manifest-ghcr:
name: Push to GHCR
permissions:
contents: read
packages: write
push-manifest:
name: Push Docker manifest
runs-on: ubuntu-latest
needs: [build, check-push-enabled]
if: needs.check-push-enabled.outputs.is_enabled == 'true'
env:
REGISTRY_IMAGE: ghcr.io/${{ github.repository }}
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Download digests
uses: actions/download-artifact@v7
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: Prepare Docker Buildx
uses: ./.github/actions/prepare-docker
id: docker
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
- name: Create manifest list and push to ghcr.io
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io"))) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image in ghcr.io
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}
push-manifest-dockerhub:
name: Push to Docker Hub
runs-on: ubuntu-latest
permissions:
contents: read
needs: [build, check-push-enabled]
if: needs.check-push-enabled.outputs.is_enabled == 'true' && vars.DOCKER_HUB_REPO != ''
continue-on-error: true
steps:
- uses: actions/checkout@v6
- name: Download digests
uses: actions/download-artifact@v7
uses: actions/download-artifact@v6
with:
path: /tmp/digests
pattern: digests-*
@@ -329,27 +282,28 @@ jobs:
hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
- name: Create manifest list and push to ghcr.io
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Create manifest list and push to Docker Hub
uses: nick-fields/retry@v3
with:
timeout_minutes: 5
max_attempts: 3
retry_wait_seconds: 30
command: |
cd /tmp/digests
docker buildx imagetools create $(jq -cr '.tags | map(select(startswith("ghcr.io") | not)) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf 'ghcr.io/${{ github.repository }}@sha256:%s ' *)
working-directory: /tmp/digests
if: vars.DOCKER_HUB_REPO != ''
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ vars.DOCKER_HUB_REPO }}@sha256:%s ' *)
- name: Inspect image in ghcr.io
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.docker.outputs.version }}
- name: Inspect image in Docker Hub
if: vars.DOCKER_HUB_REPO != ''
run: |
docker buildx imagetools inspect ${{ vars.DOCKER_HUB_REPO }}:${{ steps.docker.outputs.version }}
cleanup-digests:
name: Cleanup digest artifacts
runs-on: ubuntu-latest
needs: [push-manifest-ghcr, push-manifest-dockerhub]
if: always() && needs.push-manifest-ghcr.result == 'success'
steps:
- name: Delete unnecessary digest artifacts
env:
GH_TOKEN: ${{ github.token }}
@@ -364,9 +318,9 @@ jobs:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- uses: actions/download-artifact@v7
- uses: actions/download-artifact@v6
with:
path: ./binaries
pattern: navidrome-windows*
@@ -385,7 +339,7 @@ jobs:
du -h binaries/msi/*.msi
- name: Upload MSI files
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: navidrome-windows-installers
path: binaries/msi/*.msi
@@ -398,12 +352,12 @@ jobs:
outputs:
package_list: ${{ steps.set-package-list.outputs.package_list }}
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
with:
fetch-depth: 0
fetch-tags: true
- uses: actions/download-artifact@v7
- uses: actions/download-artifact@v6
with:
path: ./binaries
pattern: navidrome-*
@@ -429,7 +383,7 @@ jobs:
rm ./dist/*.tar.gz ./dist/*.zip
- name: Upload all-packages artifact
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: packages
path: dist/navidrome_0*
@@ -452,13 +406,13 @@ jobs:
item: ${{ fromJson(needs.release.outputs.package_list) }}
steps:
- name: Download all-packages artifact
uses: actions/download-artifact@v7
uses: actions/download-artifact@v6
with:
name: packages
path: ./dist
- name: Upload all-packages artifact
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: navidrome_linux_${{ matrix.item }}
path: dist/navidrome_0*_linux_${{ matrix.item }}
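
The manifest-push steps above assemble their -t arguments by filtering the metadata JSON (presumably produced by docker/metadata-action) with jq. A small worked example of the two filters, using hypothetical tag values rather than this repository's actual metadata:

DOCKER_METADATA_OUTPUT_JSON='{"tags":["ghcr.io/navidrome/navidrome:develop","deluan/navidrome:develop"]}'
# Tags destined for GHCR:
jq -cr '.tags | map(select(startswith("ghcr.io"))) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
# -> -t ghcr.io/navidrome/navidrome:develop
# Remaining tags, destined for Docker Hub:
jq -cr '.tags | map(select(startswith("ghcr.io") | not)) | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
# -> -t deluan/navidrome:develop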

View File

@@ -12,7 +12,7 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v6
- uses: dessant/lock-threads@v5
with:
process-only: 'issues, prs'
issue-inactive-days: 120

View File

@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'navidrome' }}
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Get updated translations
id: poeditor
env:
@@ -24,7 +24,7 @@ jobs:
git status --porcelain
git diff
- name: Create Pull Request
uses: peter-evans/create-pull-request@v8
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.PAT }}
author: "navidrome-bot <navidrome-bot@navidrome.org>"

.gitignore (vendored, 6 changed lines)

@@ -17,7 +17,6 @@ master.zip
testDB
cache/*
*.swp
coverage.out
dist
music
*.db*
@@ -26,13 +25,10 @@ docker-compose.yml
!contrib/docker-compose.yml
binaries
navidrome-*
/ndpgen
AGENTS.md
.github/prompts
.github/instructions
.github/git-commit-instructions.md
*.exe
*.test
*.wasm
*.ndp
openspec/
*.wasm

View File

@@ -2,10 +2,10 @@ FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcros
########################################################################################################################
### Build xx (original image: tonistiigi/xx)
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS xx-build
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS xx-build
# v1.9.0
ENV XX_VERSION=a5592eab7a57895e8d385394ff12241bc65ecd50
# v1.5.0
ENV XX_VERSION=b4e4c451c778822e6742bfc9d9a91d7c7d885c8a
RUN apk add -U --no-cache git
RUN git clone https://github.com/tonistiigi/xx && \
@@ -26,7 +26,7 @@ COPY --from=xx-build /out/ /usr/bin/
########################################################################################################################
### Get TagLib
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS taglib-build
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS taglib-build
ARG TARGETPLATFORM
ARG CROSS_TAGLIB_VERSION=2.1.1-1
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/
@@ -122,7 +122,7 @@ COPY --from=build /out /
########################################################################################################################
### Build Final Image
FROM public.ecr.aws/docker/library/alpine:3.20 AS final
FROM public.ecr.aws/docker/library/alpine:3.19 AS final
LABEL maintainer="deluan@navidrome.org"
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"
@@ -137,6 +137,7 @@ ENV ND_MUSICFOLDER=/music
ENV ND_DATAFOLDER=/data
ENV ND_CONFIGFILE=/data/navidrome.toml
ENV ND_PORT=4533
ENV GODEBUG="asyncpreemptoff=1"
RUN touch /.nddockerenv
EXPOSE ${ND_PORT}

View File

@@ -16,7 +16,7 @@ DOCKER_TAG ?= deluan/navidrome:develop
# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
CROSS_TAGLIB_VERSION ?= 2.1.1-1
GOLANGCI_LINT_VERSION ?= v2.7.2
GOLANGCI_LINT_VERSION ?= v2.5.0
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")
@@ -50,11 +50,11 @@ test: ##@Development Run Go tests. Use PKG variable to specify packages to test,
go test -tags netgo $(PKG)
.PHONY: test
testall: test test-i18n test-js ##@Development Run Go and JS tests
testall: test-race test-i18n test-js ##@Development Run Go and JS tests
.PHONY: testall
test-race: ##@Development Run Go tests with race detector
go test -tags netgo -race -shuffle=on $(PKG)
go test -tags netgo -race -shuffle=on ./...
.PHONY: test-race
test-js: ##@Development Run JS tests
@@ -85,7 +85,7 @@ install-golangci-lint: ##@Development Install golangci-lint if not present
.PHONY: install-golangci-lint
lint: install-golangci-lint ##@Development Lint Go code
PATH=$$PATH:./bin golangci-lint run --timeout 5m
PATH=$$PATH:./bin golangci-lint run -v --timeout 5m
.PHONY: lint
lintall: lint ##@Development Lint Go and JS code
@@ -103,15 +103,6 @@ wire: check_go_env ##@Development Update Dependency Injection
go tool wire gen -tags=netgo ./...
.PHONY: wire
gen: check_go_env ##@Development Run go generate for code generation
go generate ./...
cd plugins/cmd/ndpgen && go run . -host-wrappers -input=../../host -package=host
cd plugins/cmd/ndpgen && go run . -input=../../host -output=../../pdk -go -python -rust
cd plugins/cmd/ndpgen && go run . -capability-only -input=../../capabilities -output=../../pdk -go -rust
cd plugins/cmd/ndpgen && go run . -schemas -input=../../capabilities
go mod tidy -C plugins/pdk/go
.PHONY: gen
snapshots: ##@Development Update (GoLang) Snapshot tests
UPDATE_SNAPSHOTS=true go tool ginkgo ./server/subsonic/responses/...
.PHONY: snapshots
@@ -275,6 +266,24 @@ deprecated:
@echo "WARNING: This target is deprecated and will be removed in future releases. Use 'make build' instead."
.PHONY: deprecated
# Generate Go code from plugins/api/api.proto
plugin-gen: check_go_env ##@Development Generate Go code from plugins protobuf files
go generate ./plugins/...
.PHONY: plugin-gen
plugin-examples: check_go_env ##@Development Build all example plugins
$(MAKE) -C plugins/examples clean all
.PHONY: plugin-examples
plugin-clean: check_go_env ##@Development Clean all plugins
$(MAKE) -C plugins/examples clean
$(MAKE) -C plugins/testdata clean
.PHONY: plugin-clean
plugin-tests: check_go_env ##@Development Build all test plugins
$(MAKE) -C plugins/testdata clean all
.PHONY: plugin-tests
.DEFAULT_GOAL := help
HELP_FUN = \

View File

@@ -10,8 +10,11 @@ import (
"strconv"
"github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/core/auth"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/persistence"
"github.com/spf13/cobra"
)
@@ -49,7 +52,7 @@ var (
Short: "Export playlists",
Long: "Export Navidrome playlists to M3U files",
Run: func(cmd *cobra.Command, args []string) {
runExporter(cmd.Context())
runExporter()
},
}
@@ -57,13 +60,15 @@ var (
Use: "list",
Short: "List playlists",
Run: func(cmd *cobra.Command, args []string) {
runList(cmd.Context())
runList()
},
}
)
func runExporter(ctx context.Context) {
ds, ctx := getAdminContext(ctx)
func runExporter() {
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx := auth.WithAdminUser(context.Background(), ds)
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
if err != nil && !errors.Is(err, model.ErrNotFound) {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
@@ -95,19 +100,31 @@ func runExporter(ctx context.Context) {
}
}
func runList(ctx context.Context) {
func runList() {
if outputFormat != "csv" && outputFormat != "json" {
log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
}
ds, ctx := getAdminContext(ctx)
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx := auth.WithAdminUser(context.Background(), ds)
options := model.QueryOptions{Sort: "owner_name"}
if userID != "" {
user, err := getUser(ctx, userID, ds)
if err != nil {
log.Fatal(ctx, "Error retrieving user", "username or id", userID)
user, err := ds.User(ctx).FindByUsername(userID)
if err != nil && !errors.Is(err, model.ErrNotFound) {
log.Fatal("Error retrieving user by name", "name", userID, err)
}
if errors.Is(err, model.ErrNotFound) {
user, err = ds.User(ctx).Get(userID)
if err != nil {
log.Fatal("Error retrieving user by id", "id", userID, err)
}
}
options.Filters = squirrel.Eq{"owner_id": user.ID}
}

cmd/plugin.go (new file, 716 lines)

@@ -0,0 +1,716 @@
package cmd
import (
"cmp"
"crypto/sha256"
"encoding/hex"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"text/tabwriter"
"time"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/plugins"
"github.com/navidrome/navidrome/plugins/schema"
"github.com/navidrome/navidrome/utils"
"github.com/navidrome/navidrome/utils/slice"
"github.com/spf13/cobra"
)
const (
pluginPackageExtension = ".ndp"
pluginDirPermissions = 0700
pluginFilePermissions = 0600
)
func init() {
pluginCmd := &cobra.Command{
Use: "plugin",
Short: "Manage Navidrome plugins",
Long: "Commands for managing Navidrome plugins",
}
listCmd := &cobra.Command{
Use: "list",
Short: "List installed plugins",
Long: "List all installed plugins with their metadata",
Run: pluginList,
}
infoCmd := &cobra.Command{
Use: "info [pluginPackage|pluginName]",
Short: "Show details of a plugin",
Long: "Show detailed information about a plugin package (.ndp file) or an installed plugin",
Args: cobra.ExactArgs(1),
Run: pluginInfo,
}
installCmd := &cobra.Command{
Use: "install [pluginPackage]",
Short: "Install a plugin from a .ndp file",
Long: "Install a Navidrome Plugin Package (.ndp) file",
Args: cobra.ExactArgs(1),
Run: pluginInstall,
}
removeCmd := &cobra.Command{
Use: "remove [pluginName]",
Short: "Remove an installed plugin",
Long: "Remove a plugin by name",
Args: cobra.ExactArgs(1),
Run: pluginRemove,
}
updateCmd := &cobra.Command{
Use: "update [pluginPackage]",
Short: "Update an existing plugin",
Long: "Update an installed plugin with a new version from a .ndp file",
Args: cobra.ExactArgs(1),
Run: pluginUpdate,
}
refreshCmd := &cobra.Command{
Use: "refresh [pluginName]",
Short: "Reload a plugin without restarting Navidrome",
Long: "Reload and recompile a plugin without needing to restart Navidrome",
Args: cobra.ExactArgs(1),
Run: pluginRefresh,
}
devCmd := &cobra.Command{
Use: "dev [folder_path]",
Short: "Create symlink to development folder",
Long: "Create a symlink from a plugin development folder to the plugins directory for easier development",
Args: cobra.ExactArgs(1),
Run: pluginDev,
}
pluginCmd.AddCommand(listCmd, infoCmd, installCmd, removeCmd, updateCmd, refreshCmd, devCmd)
rootCmd.AddCommand(pluginCmd)
}
// Validation helpers
func validatePluginPackageFile(path string) error {
if !utils.FileExists(path) {
return fmt.Errorf("plugin package not found: %s", path)
}
if filepath.Ext(path) != pluginPackageExtension {
return fmt.Errorf("not a valid plugin package: %s (expected %s extension)", path, pluginPackageExtension)
}
return nil
}
func validatePluginDirectory(pluginsDir, pluginName string) (string, error) {
pluginDir := filepath.Join(pluginsDir, pluginName)
if !utils.FileExists(pluginDir) {
return "", fmt.Errorf("plugin not found: %s (path: %s)", pluginName, pluginDir)
}
return pluginDir, nil
}
func resolvePluginPath(pluginDir string) (resolvedPath string, isSymlink bool, err error) {
// Check if it's a directory or a symlink
lstat, err := os.Lstat(pluginDir)
if err != nil {
return "", false, fmt.Errorf("failed to stat plugin: %w", err)
}
isSymlink = lstat.Mode()&os.ModeSymlink != 0
if isSymlink {
// Resolve the symlink target
targetDir, err := os.Readlink(pluginDir)
if err != nil {
return "", true, fmt.Errorf("failed to resolve symlink: %w", err)
}
// If target is a relative path, make it absolute
if !filepath.IsAbs(targetDir) {
targetDir = filepath.Join(filepath.Dir(pluginDir), targetDir)
}
// Verify the target exists and is a directory
targetInfo, err := os.Stat(targetDir)
if err != nil {
return "", true, fmt.Errorf("failed to access symlink target %s: %w", targetDir, err)
}
if !targetInfo.IsDir() {
return "", true, fmt.Errorf("symlink target is not a directory: %s", targetDir)
}
return targetDir, true, nil
} else if !lstat.IsDir() {
return "", false, fmt.Errorf("not a valid plugin directory: %s", pluginDir)
}
return pluginDir, false, nil
}
// Package handling helpers
func loadAndValidatePackage(ndpPath string) (*plugins.PluginPackage, error) {
if err := validatePluginPackageFile(ndpPath); err != nil {
return nil, err
}
pkg, err := plugins.LoadPackage(ndpPath)
if err != nil {
return nil, fmt.Errorf("failed to load plugin package: %w", err)
}
return pkg, nil
}
func extractAndSetupPlugin(ndpPath, targetDir string) error {
if err := plugins.ExtractPackage(ndpPath, targetDir); err != nil {
return fmt.Errorf("failed to extract plugin package: %w", err)
}
ensurePluginDirPermissions(targetDir)
return nil
}
// Display helpers
func displayPluginTableRow(w *tabwriter.Writer, discovery plugins.PluginDiscoveryEntry) {
if discovery.Error != nil {
// Handle global errors (like directory read failure)
if discovery.ID == "" {
log.Error("Failed to read plugins directory", "folder", conf.Server.Plugins.Folder, discovery.Error)
return
}
// Handle individual plugin errors - show them in the table
fmt.Fprintf(w, "%s\tERROR\tERROR\tERROR\tERROR\t%v\n", discovery.ID, discovery.Error)
return
}
// Mark symlinks with an indicator
nameDisplay := discovery.Manifest.Name
if discovery.IsSymlink {
nameDisplay = nameDisplay + " (dev)"
}
// Convert capabilities to strings
capabilities := slice.Map(discovery.Manifest.Capabilities, func(cap schema.PluginManifestCapabilitiesElem) string {
return string(cap)
})
fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%s\n",
discovery.ID,
nameDisplay,
cmp.Or(discovery.Manifest.Author, "-"),
cmp.Or(discovery.Manifest.Version, "-"),
strings.Join(capabilities, ", "),
cmp.Or(discovery.Manifest.Description, "-"))
}
func displayTypedPermissions(permissions schema.PluginManifestPermissions, indent string) {
if permissions.Http != nil {
fmt.Printf("%shttp:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Http.Reason)
fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Http.AllowLocalNetwork)
fmt.Printf("%s Allowed URLs:\n", indent)
for urlPattern, methodEnums := range permissions.Http.AllowedUrls {
methods := make([]string, len(methodEnums))
for i, methodEnum := range methodEnums {
methods[i] = string(methodEnum)
}
fmt.Printf("%s %s: [%s]\n", indent, urlPattern, strings.Join(methods, ", "))
}
fmt.Println()
}
if permissions.Config != nil {
fmt.Printf("%sconfig:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Config.Reason)
fmt.Println()
}
if permissions.Scheduler != nil {
fmt.Printf("%sscheduler:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Scheduler.Reason)
fmt.Println()
}
if permissions.Websocket != nil {
fmt.Printf("%swebsocket:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Websocket.Reason)
fmt.Printf("%s Allow Local Network: %t\n", indent, permissions.Websocket.AllowLocalNetwork)
fmt.Printf("%s Allowed URLs: [%s]\n", indent, strings.Join(permissions.Websocket.AllowedUrls, ", "))
fmt.Println()
}
if permissions.Cache != nil {
fmt.Printf("%scache:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Cache.Reason)
fmt.Println()
}
if permissions.Artwork != nil {
fmt.Printf("%sartwork:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Artwork.Reason)
fmt.Println()
}
if permissions.Subsonicapi != nil {
allowedUsers := "All Users"
if len(permissions.Subsonicapi.AllowedUsernames) > 0 {
allowedUsers = strings.Join(permissions.Subsonicapi.AllowedUsernames, ", ")
}
fmt.Printf("%ssubsonicapi:\n", indent)
fmt.Printf("%s Reason: %s\n", indent, permissions.Subsonicapi.Reason)
fmt.Printf("%s Allow Admins: %t\n", indent, permissions.Subsonicapi.AllowAdmins)
fmt.Printf("%s Allowed Usernames: [%s]\n", indent, allowedUsers)
fmt.Println()
}
}
func displayPluginDetails(manifest *schema.PluginManifest, fileInfo *pluginFileInfo, permInfo *pluginPermissionInfo) {
fmt.Println("\nPlugin Information:")
fmt.Printf(" Name: %s\n", manifest.Name)
fmt.Printf(" Author: %s\n", manifest.Author)
fmt.Printf(" Version: %s\n", manifest.Version)
fmt.Printf(" Description: %s\n", manifest.Description)
fmt.Print(" Capabilities: ")
capabilities := make([]string, len(manifest.Capabilities))
for i, cap := range manifest.Capabilities {
capabilities[i] = string(cap)
}
fmt.Print(strings.Join(capabilities, ", "))
fmt.Println()
// Display manifest permissions using the typed permissions
fmt.Println(" Required Permissions:")
displayTypedPermissions(manifest.Permissions, " ")
// Print file information if available
if fileInfo != nil {
fmt.Println("Package Information:")
fmt.Printf(" File: %s\n", fileInfo.path)
fmt.Printf(" Size: %d bytes (%.2f KB)\n", fileInfo.size, float64(fileInfo.size)/1024)
fmt.Printf(" SHA-256: %s\n", fileInfo.hash)
fmt.Printf(" Modified: %s\n", fileInfo.modTime.Format(time.RFC3339))
}
// Print file permissions information if available
if permInfo != nil {
fmt.Println("File Permissions:")
fmt.Printf(" Plugin Directory: %s (%s)\n", permInfo.dirPath, permInfo.dirMode)
if permInfo.isSymlink {
fmt.Printf(" Symlink Target: %s (%s)\n", permInfo.targetPath, permInfo.targetMode)
}
fmt.Printf(" Manifest File: %s\n", permInfo.manifestMode)
if permInfo.wasmMode != "" {
fmt.Printf(" WASM File: %s\n", permInfo.wasmMode)
}
}
}
type pluginFileInfo struct {
path string
size int64
hash string
modTime time.Time
}
type pluginPermissionInfo struct {
dirPath string
dirMode string
isSymlink bool
targetPath string
targetMode string
manifestMode string
wasmMode string
}
func getFileInfo(path string) *pluginFileInfo {
fileInfo, err := os.Stat(path)
if err != nil {
log.Error("Failed to get file information", err)
return nil
}
return &pluginFileInfo{
path: path,
size: fileInfo.Size(),
hash: calculateSHA256(path),
modTime: fileInfo.ModTime(),
}
}
func getPermissionInfo(pluginDir string) *pluginPermissionInfo {
// Get plugin directory permissions
dirInfo, err := os.Lstat(pluginDir)
if err != nil {
log.Error("Failed to get plugin directory permissions", err)
return nil
}
permInfo := &pluginPermissionInfo{
dirPath: pluginDir,
dirMode: dirInfo.Mode().String(),
}
// Check if it's a symlink
if dirInfo.Mode()&os.ModeSymlink != 0 {
permInfo.isSymlink = true
// Get target path and permissions
targetPath, err := os.Readlink(pluginDir)
if err == nil {
if !filepath.IsAbs(targetPath) {
targetPath = filepath.Join(filepath.Dir(pluginDir), targetPath)
}
permInfo.targetPath = targetPath
if targetInfo, err := os.Stat(targetPath); err == nil {
permInfo.targetMode = targetInfo.Mode().String()
}
}
}
// Get manifest file permissions
manifestPath := filepath.Join(pluginDir, "manifest.json")
if manifestInfo, err := os.Stat(manifestPath); err == nil {
permInfo.manifestMode = manifestInfo.Mode().String()
}
// Get WASM file permissions (look for .wasm files)
entries, err := os.ReadDir(pluginDir)
if err == nil {
for _, entry := range entries {
if filepath.Ext(entry.Name()) == ".wasm" {
wasmPath := filepath.Join(pluginDir, entry.Name())
if wasmInfo, err := os.Stat(wasmPath); err == nil {
permInfo.wasmMode = wasmInfo.Mode().String()
break // Just show the first WASM file found
}
}
}
}
return permInfo
}
// Command implementations
func pluginList(cmd *cobra.Command, args []string) {
discoveries := plugins.DiscoverPlugins(conf.Server.Plugins.Folder)
w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
fmt.Fprintln(w, "ID\tNAME\tAUTHOR\tVERSION\tCAPABILITIES\tDESCRIPTION")
for _, discovery := range discoveries {
displayPluginTableRow(w, discovery)
}
w.Flush()
}
func pluginInfo(cmd *cobra.Command, args []string) {
path := args[0]
pluginsDir := conf.Server.Plugins.Folder
var manifest *schema.PluginManifest
var fileInfo *pluginFileInfo
var permInfo *pluginPermissionInfo
if filepath.Ext(path) == pluginPackageExtension {
// It's a package file
pkg, err := loadAndValidatePackage(path)
if err != nil {
log.Fatal("Failed to load plugin package", err)
}
manifest = pkg.Manifest
fileInfo = getFileInfo(path)
// No permission info for package files
} else {
// It's a plugin name
pluginDir, err := validatePluginDirectory(pluginsDir, path)
if err != nil {
log.Fatal("Plugin validation failed", err)
}
manifest, err = plugins.LoadManifest(pluginDir)
if err != nil {
log.Fatal("Failed to load plugin manifest", err)
}
// Get permission info for installed plugins
permInfo = getPermissionInfo(pluginDir)
}
displayPluginDetails(manifest, fileInfo, permInfo)
}
func pluginInstall(cmd *cobra.Command, args []string) {
ndpPath := args[0]
pluginsDir := conf.Server.Plugins.Folder
pkg, err := loadAndValidatePackage(ndpPath)
if err != nil {
log.Fatal("Package validation failed", err)
}
// Create target directory based on plugin name
targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)
// Check if plugin already exists
if utils.FileExists(targetDir) {
log.Fatal("Plugin already installed", "name", pkg.Manifest.Name, "path", targetDir,
"use", "navidrome plugin update")
}
if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
log.Fatal("Plugin installation failed", err)
}
fmt.Printf("Plugin '%s' v%s installed successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
}
func pluginRemove(cmd *cobra.Command, args []string) {
pluginName := args[0]
pluginsDir := conf.Server.Plugins.Folder
pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
if err != nil {
log.Fatal("Plugin validation failed", err)
}
_, isSymlink, err := resolvePluginPath(pluginDir)
if err != nil {
log.Fatal("Failed to resolve plugin path", err)
}
if isSymlink {
// For symlinked plugins (dev mode), just remove the symlink
if err := os.Remove(pluginDir); err != nil {
log.Fatal("Failed to remove plugin symlink", "name", pluginName, err)
}
fmt.Printf("Development plugin symlink '%s' removed successfully (target directory preserved)\n", pluginName)
} else {
// For regular plugins, remove the entire directory
if err := os.RemoveAll(pluginDir); err != nil {
log.Fatal("Failed to remove plugin directory", "name", pluginName, err)
}
fmt.Printf("Plugin '%s' removed successfully\n", pluginName)
}
}
func pluginUpdate(cmd *cobra.Command, args []string) {
ndpPath := args[0]
pluginsDir := conf.Server.Plugins.Folder
pkg, err := loadAndValidatePackage(ndpPath)
if err != nil {
log.Fatal("Package validation failed", err)
}
// Check if plugin exists
targetDir := filepath.Join(pluginsDir, pkg.Manifest.Name)
if !utils.FileExists(targetDir) {
log.Fatal("Plugin not found", "name", pkg.Manifest.Name, "path", targetDir,
"use", "navidrome plugin install")
}
// Create a backup of the existing plugin
backupDir := targetDir + ".bak." + time.Now().Format("20060102150405")
if err := os.Rename(targetDir, backupDir); err != nil {
log.Fatal("Failed to backup existing plugin", err)
}
// Extract the new package
if err := extractAndSetupPlugin(ndpPath, targetDir); err != nil {
// Restore backup if extraction failed
os.RemoveAll(targetDir)
_ = os.Rename(backupDir, targetDir) // Ignore error as we're already in a fatal path
log.Fatal("Plugin update failed", err)
}
// Remove the backup
os.RemoveAll(backupDir)
fmt.Printf("Plugin '%s' updated to v%s successfully\n", pkg.Manifest.Name, pkg.Manifest.Version)
}
func pluginRefresh(cmd *cobra.Command, args []string) {
pluginName := args[0]
pluginsDir := conf.Server.Plugins.Folder
pluginDir, err := validatePluginDirectory(pluginsDir, pluginName)
if err != nil {
log.Fatal("Plugin validation failed", err)
}
resolvedPath, isSymlink, err := resolvePluginPath(pluginDir)
if err != nil {
log.Fatal("Failed to resolve plugin path", err)
}
if isSymlink {
log.Debug("Processing symlinked plugin", "name", pluginName, "link", pluginDir, "target", resolvedPath)
}
fmt.Printf("Refreshing plugin '%s'...\n", pluginName)
// Get the plugin manager and refresh
mgr := GetPluginManager(cmd.Context())
log.Debug("Scanning plugins directory", "path", pluginsDir)
mgr.ScanPlugins()
log.Info("Waiting for plugin compilation to complete", "name", pluginName)
// Wait for compilation to complete
if err := mgr.EnsureCompiled(pluginName); err != nil {
log.Fatal("Failed to compile refreshed plugin", "name", pluginName, err)
}
log.Info("Plugin compilation completed successfully", "name", pluginName)
fmt.Printf("Plugin '%s' refreshed successfully\n", pluginName)
}
func pluginDev(cmd *cobra.Command, args []string) {
sourcePath, err := filepath.Abs(args[0])
if err != nil {
log.Fatal("Invalid path", "path", args[0], err)
}
pluginsDir := conf.Server.Plugins.Folder
// Validate source directory and manifest
if err := validateDevSource(sourcePath); err != nil {
log.Fatal("Source validation failed", err)
}
// Load manifest to get plugin name
manifest, err := plugins.LoadManifest(sourcePath)
if err != nil {
log.Fatal("Failed to load plugin manifest", "path", filepath.Join(sourcePath, "manifest.json"), err)
}
pluginName := cmp.Or(manifest.Name, filepath.Base(sourcePath))
targetPath := filepath.Join(pluginsDir, pluginName)
// Handle existing target
if err := handleExistingTarget(targetPath, sourcePath); err != nil {
log.Fatal("Failed to handle existing target", err)
}
// Create target directory if needed
if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil {
log.Fatal("Failed to create plugins directory", "path", filepath.Dir(targetPath), err)
}
// Create the symlink
if err := os.Symlink(sourcePath, targetPath); err != nil {
log.Fatal("Failed to create symlink", "source", sourcePath, "target", targetPath, err)
}
fmt.Printf("Development symlink created: '%s' -> '%s'\n", targetPath, sourcePath)
fmt.Println("Plugin can be refreshed with: navidrome plugin refresh", pluginName)
}
// Utility functions
func validateDevSource(sourcePath string) error {
sourceInfo, err := os.Stat(sourcePath)
if err != nil {
return fmt.Errorf("source folder not found: %s (%w)", sourcePath, err)
}
if !sourceInfo.IsDir() {
return fmt.Errorf("source path is not a directory: %s", sourcePath)
}
manifestPath := filepath.Join(sourcePath, "manifest.json")
if !utils.FileExists(manifestPath) {
return fmt.Errorf("source folder missing manifest.json: %s", sourcePath)
}
return nil
}
func handleExistingTarget(targetPath, sourcePath string) error {
if !utils.FileExists(targetPath) {
return nil // Nothing to handle
}
// Check if it's already a symlink to our source
existingLink, err := os.Readlink(targetPath)
if err == nil && existingLink == sourcePath {
fmt.Printf("Symlink already exists and points to the correct source\n")
return fmt.Errorf("symlink already exists") // This will cause early return in caller
}
// Handle case where target exists but is not a symlink to our source
fmt.Printf("Target path '%s' already exists.\n", targetPath)
fmt.Print("Do you want to replace it? (y/N): ")
var response string
_, err = fmt.Scanln(&response)
if err != nil || strings.ToLower(response) != "y" {
if err != nil {
log.Debug("Error reading input, assuming 'no'", err)
}
return fmt.Errorf("operation canceled")
}
// Remove existing target
if err := os.RemoveAll(targetPath); err != nil {
return fmt.Errorf("failed to remove existing target %s: %w", targetPath, err)
}
return nil
}
func ensurePluginDirPermissions(dir string) {
if err := os.Chmod(dir, pluginDirPermissions); err != nil {
log.Error("Failed to set plugin directory permissions", "dir", dir, err)
}
// Apply permissions to all files in the directory
entries, err := os.ReadDir(dir)
if err != nil {
log.Error("Failed to read plugin directory", "dir", dir, err)
return
}
for _, entry := range entries {
path := filepath.Join(dir, entry.Name())
info, err := os.Stat(path)
if err != nil {
log.Error("Failed to stat file", "path", path, err)
continue
}
mode := os.FileMode(pluginFilePermissions) // Files
if info.IsDir() {
mode = os.FileMode(pluginDirPermissions) // Directories
ensurePluginDirPermissions(path) // Recursive
}
if err := os.Chmod(path, mode); err != nil {
log.Error("Failed to set file permissions", "path", path, err)
}
}
}
func calculateSHA256(filePath string) string {
file, err := os.Open(filePath)
if err != nil {
log.Error("Failed to open file for hashing", err)
return "N/A"
}
defer file.Close()
hasher := sha256.New()
if _, err := io.Copy(hasher, file); err != nil {
log.Error("Failed to calculate hash", err)
return "N/A"
}
return hex.EncodeToString(hasher.Sum(nil))
}
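
The cobra commands defined above map onto a CLI session roughly like the following (a hypothetical sketch; the plugin name and package path are made up):

navidrome plugin list                    # table of installed plugins (ID, NAME, AUTHOR, VERSION, CAPABILITIES, DESCRIPTION)
navidrome plugin info ./lastfm.ndp       # inspect a package file (or an installed plugin, by name)
navidrome plugin install ./lastfm.ndp    # extract the .ndp into the plugins folder
navidrome plugin update ./lastfm.ndp     # replace an installed plugin (a backup is kept while extracting)
navidrome plugin dev ~/src/my-plugin     # symlink a development folder into the plugins folder
navidrome plugin refresh my-plugin       # rescan and recompile without restarting Navidrome
navidrome plugin remove my-plugin        # delete the plugin directory (only the symlink, for dev plugins)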

cmd/plugin_test.go (new file, 193 lines)

@@ -0,0 +1,193 @@
package cmd
import (
"io"
"os"
"path/filepath"
"strings"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/conf/configtest"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/spf13/cobra"
)
var _ = Describe("Plugin CLI Commands", func() {
var tempDir string
var cmd *cobra.Command
var stdOut *os.File
var origStdout *os.File
var outReader *os.File
// Helper to create a test plugin with the given name and details
createTestPlugin := func(name, author, version string, capabilities []string) string {
pluginDir := filepath.Join(tempDir, name)
Expect(os.MkdirAll(pluginDir, 0755)).To(Succeed())
// Create a properly formatted capabilities JSON array
capabilitiesJSON := `"` + strings.Join(capabilities, `", "`) + `"`
manifest := `{
"name": "` + name + `",
"author": "` + author + `",
"version": "` + version + `",
"description": "Plugin for testing",
"website": "https://test.navidrome.org/` + name + `",
"capabilities": [` + capabilitiesJSON + `],
"permissions": {}
}`
Expect(os.WriteFile(filepath.Join(pluginDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())
// Create a dummy WASM file
wasmContent := []byte("dummy wasm content for testing")
Expect(os.WriteFile(filepath.Join(pluginDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())
return pluginDir
}
// Helper to execute a command and return captured output
captureOutput := func(reader io.Reader) string {
stdOut.Close()
outputBytes, err := io.ReadAll(reader)
Expect(err).NotTo(HaveOccurred())
return string(outputBytes)
}
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
tempDir = GinkgoT().TempDir()
// Setup config
conf.Server.Plugins.Enabled = true
conf.Server.Plugins.Folder = tempDir
// Create a command for testing
cmd = &cobra.Command{Use: "test"}
// Setup stdout capture
origStdout = os.Stdout
var err error
outReader, stdOut, err = os.Pipe()
Expect(err).NotTo(HaveOccurred())
os.Stdout = stdOut
DeferCleanup(func() {
os.Stdout = origStdout
})
})
AfterEach(func() {
os.Stdout = origStdout
if stdOut != nil {
stdOut.Close()
}
if outReader != nil {
outReader.Close()
}
})
Describe("Plugin list command", func() {
It("should list installed plugins", func() {
// Create test plugins
createTestPlugin("plugin1", "Test Author", "1.0.0", []string{"MetadataAgent"})
createTestPlugin("plugin2", "Another Author", "2.1.0", []string{"Scrobbler"})
// Execute command
pluginList(cmd, []string{})
// Verify output
output := captureOutput(outReader)
Expect(output).To(ContainSubstring("plugin1"))
Expect(output).To(ContainSubstring("Test Author"))
Expect(output).To(ContainSubstring("1.0.0"))
Expect(output).To(ContainSubstring("MetadataAgent"))
Expect(output).To(ContainSubstring("plugin2"))
Expect(output).To(ContainSubstring("Another Author"))
Expect(output).To(ContainSubstring("2.1.0"))
Expect(output).To(ContainSubstring("Scrobbler"))
})
})
Describe("Plugin info command", func() {
It("should display information about an installed plugin", func() {
// Create test plugin with multiple capabilities
createTestPlugin("test-plugin", "Test Author", "1.0.0",
[]string{"MetadataAgent", "Scrobbler"})
// Execute command
pluginInfo(cmd, []string{"test-plugin"})
// Verify output
output := captureOutput(outReader)
Expect(output).To(ContainSubstring("Name: test-plugin"))
Expect(output).To(ContainSubstring("Author: Test Author"))
Expect(output).To(ContainSubstring("Version: 1.0.0"))
Expect(output).To(ContainSubstring("Description: Plugin for testing"))
Expect(output).To(ContainSubstring("Capabilities: MetadataAgent, Scrobbler"))
})
})
Describe("Plugin remove command", func() {
It("should remove a regular plugin directory", func() {
// Create test plugin
pluginDir := createTestPlugin("regular-plugin", "Test Author", "1.0.0",
[]string{"MetadataAgent"})
// Execute command
pluginRemove(cmd, []string{"regular-plugin"})
// Verify output
output := captureOutput(outReader)
Expect(output).To(ContainSubstring("Plugin 'regular-plugin' removed successfully"))
// Verify directory is actually removed
_, err := os.Stat(pluginDir)
Expect(os.IsNotExist(err)).To(BeTrue())
})
It("should remove only the symlink for a development plugin", func() {
// Create a real source directory
sourceDir := filepath.Join(GinkgoT().TempDir(), "dev-plugin-source")
Expect(os.MkdirAll(sourceDir, 0755)).To(Succeed())
manifest := `{
"name": "dev-plugin",
"author": "Dev Author",
"version": "0.1.0",
"description": "Development plugin for testing",
"website": "https://test.navidrome.org/dev-plugin",
"capabilities": ["Scrobbler"],
"permissions": {}
}`
Expect(os.WriteFile(filepath.Join(sourceDir, "manifest.json"), []byte(manifest), 0600)).To(Succeed())
// Create a dummy WASM file
wasmContent := []byte("dummy wasm content for testing")
Expect(os.WriteFile(filepath.Join(sourceDir, "plugin.wasm"), wasmContent, 0600)).To(Succeed())
// Create a symlink in the plugins directory
symlinkPath := filepath.Join(tempDir, "dev-plugin")
Expect(os.Symlink(sourceDir, symlinkPath)).To(Succeed())
// Execute command
pluginRemove(cmd, []string{"dev-plugin"})
// Verify output
output := captureOutput(outReader)
Expect(output).To(ContainSubstring("Development plugin symlink 'dev-plugin' removed successfully"))
Expect(output).To(ContainSubstring("target directory preserved"))
// Verify the symlink is removed but source directory exists
_, err := os.Lstat(symlinkPath)
Expect(os.IsNotExist(err)).To(BeTrue())
_, err = os.Stat(sourceDir)
Expect(err).NotTo(HaveOccurred())
})
})
})

View File

@@ -330,20 +330,23 @@ func startPlaybackServer(ctx context.Context) func() error {
// startPluginManager starts the plugin manager, if configured.
func startPluginManager(ctx context.Context) func() error {
return func() error {
manager := GetPluginManager(ctx)
if !conf.Server.Plugins.Enabled {
log.Debug("Plugin system is DISABLED")
log.Debug("Plugins are DISABLED")
return nil
}
log.Info(ctx, "Starting plugin manager")
return manager.Start(ctx)
// Get the manager instance and scan for plugins
manager := GetPluginManager(ctx)
manager.ScanPlugins()
return nil
}
}
// TODO: Implement some struct tags to map flags to viper
func init() {
cobra.OnInitialize(func() {
conf.InitConfig(cfgFile, true)
conf.InitConfig(cfgFile)
})
rootCmd.PersistentFlags().StringVarP(&cfgFile, "configfile", "c", "", `config file (default "./navidrome.toml")`)
@@ -371,7 +374,6 @@ func init() {
rootCmd.Flags().Duration("scaninterval", viper.GetDuration("scaninterval"), "how frequently to scan for changes in your music library")
rootCmd.Flags().String("uiloginbackgroundurl", viper.GetString("uiloginbackgroundurl"), "URL to a backaground image used in the Login page")
rootCmd.Flags().Bool("enabletranscodingconfig", viper.GetBool("enabletranscodingconfig"), "enables transcoding configuration in the UI")
rootCmd.Flags().Bool("enabletranscodingcancellation", viper.GetBool("enabletranscodingcancellation"), "enables transcoding context cancellation")
rootCmd.Flags().String("transcodingcachesize", viper.GetString("transcodingcachesize"), "size of transcoding cache")
rootCmd.Flags().String("imagecachesize", viper.GetString("imagecachesize"), "size of image (art work) cache. set to 0 to disable cache")
rootCmd.Flags().String("albumplaycountmode", viper.GetString("albumplaycountmode"), "how to compute playcount for albums. absolute (default) or normalized")
@@ -395,7 +397,6 @@ func init() {
_ = viper.BindPFlag("prometheus.metricspath", rootCmd.Flags().Lookup("prometheus.metricspath"))
_ = viper.BindPFlag("enabletranscodingconfig", rootCmd.Flags().Lookup("enabletranscodingconfig"))
_ = viper.BindPFlag("enabletranscodingcancellation", rootCmd.Flags().Lookup("enabletranscodingcancellation"))
_ = viper.BindPFlag("transcodingcachesize", rootCmd.Flags().Lookup("transcodingcachesize"))
_ = viper.BindPFlag("imagecachesize", rootCmd.Flags().Lookup("imagecachesize"))
}

View File

@@ -1,17 +1,13 @@
package cmd
import (
"bufio"
"context"
"encoding/gob"
"fmt"
"os"
"strings"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/persistence"
"github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/utils/pl"
@@ -21,15 +17,11 @@ import (
var (
fullScan bool
subprocess bool
targets []string
targetFile string
)
func init() {
scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
scanCmd.Flags().StringArrayVarP(&targets, "target", "t", []string{}, "list of libraryID:folderPath pairs, can be repeated (e.g., \"-t 1:Music/Rock -t 1:Music/Jazz -t 2:Classical\")")
scanCmd.Flags().StringVar(&targetFile, "target-file", "", "path to file containing targets (one libraryID:folderPath per line)")
rootCmd.AddCommand(scanCmd)
}
@@ -76,25 +68,7 @@ func runScanner(ctx context.Context) {
ds := persistence.New(sqlDB)
pls := core.NewPlaylists(ds)
// Parse targets from command line or file
var scanTargets []model.ScanTarget
var err error
if targetFile != "" {
scanTargets, err = readTargetsFromFile(targetFile)
if err != nil {
log.Fatal(ctx, "Failed to read targets from file", err)
}
log.Info(ctx, "Scanning specific folders from file", "numTargets", len(scanTargets))
} else if len(targets) > 0 {
scanTargets, err = model.ParseTargets(targets)
if err != nil {
log.Fatal(ctx, "Failed to parse targets", err)
}
log.Info(ctx, "Scanning specific folders", "numTargets", len(scanTargets))
}
progress, err := scanner.CallScan(ctx, ds, pls, fullScan, scanTargets)
progress, err := scanner.CallScan(ctx, ds, pls, fullScan)
if err != nil {
log.Fatal(ctx, "Failed to scan", err)
}
@@ -106,31 +80,3 @@ func runScanner(ctx context.Context) {
trackScanInteractively(ctx, progress)
}
}
// readTargetsFromFile reads scan targets from a file, one per line.
// Each line should be in the format "libraryID:folderPath".
// Empty lines and lines starting with # are ignored.
func readTargetsFromFile(filePath string) ([]model.ScanTarget, error) {
file, err := os.Open(filePath)
if err != nil {
return nil, fmt.Errorf("failed to open target file: %w", err)
}
defer file.Close()
var targetStrings []string
scanner := bufio.NewScanner(file)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
// Skip empty lines and comments
if line == "" {
continue
}
targetStrings = append(targetStrings, line)
}
if err := scanner.Err(); err != nil {
return nil, fmt.Errorf("failed to read target file: %w", err)
}
return model.ParseTargets(targetStrings)
}
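
For reference, the --target/--target-file flags that disappear in this diff took libraryID:folderPath pairs; a hypothetical invocation matching the format parsed by readTargetsFromFile above:

cat > targets.txt <<'EOF'
1:Music/Rock
1:Music/Jazz
2:Classical
EOF
navidrome scan --target-file targets.txt
# equivalent to: navidrome scan -t 1:Music/Rock -t 1:Music/Jazz -t 2:Classical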

View File

@@ -1,89 +0,0 @@
package cmd
import (
"os"
"path/filepath"
"github.com/navidrome/navidrome/model"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("readTargetsFromFile", func() {
var tempDir string
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "navidrome-test-")
Expect(err).ToNot(HaveOccurred())
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
It("reads valid targets from file", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "1:Music/Rock\n2:Music/Jazz\n3:Classical\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(3))
Expect(targets[0]).To(Equal(model.ScanTarget{LibraryID: 1, FolderPath: "Music/Rock"}))
Expect(targets[1]).To(Equal(model.ScanTarget{LibraryID: 2, FolderPath: "Music/Jazz"}))
Expect(targets[2]).To(Equal(model.ScanTarget{LibraryID: 3, FolderPath: "Classical"}))
})
It("skips empty lines", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "1:Music/Rock\n\n2:Music/Jazz\n\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
})
It("trims whitespace", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := " 1:Music/Rock \n\t2:Music/Jazz\t\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
Expect(targets[0].FolderPath).To(Equal("Music/Rock"))
Expect(targets[1].FolderPath).To(Equal("Music/Jazz"))
})
It("returns error for non-existent file", func() {
_, err := readTargetsFromFile("/nonexistent/file.txt")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("failed to open target file"))
})
It("returns error for invalid target format", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "invalid-format\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
_, err = readTargetsFromFile(filePath)
Expect(err).To(HaveOccurred())
})
It("handles mixed valid and empty lines", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "\n1:Music/Rock\n\n\n2:Music/Jazz\n\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
})
})

View File

@@ -1,477 +0,0 @@
package cmd
import (
"context"
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"os"
"strconv"
"strings"
"syscall"
"time"
"github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/spf13/cobra"
"golang.org/x/term"
)
var (
email string
libraryIds []int
name string
removeEmail bool
removeName bool
setAdmin bool
setPassword bool
setRegularUser bool
)
func init() {
rootCmd.AddCommand(userRoot)
userCreateCommand.Flags().StringVarP(&userID, "username", "u", "", "username")
userCreateCommand.Flags().StringVarP(&email, "email", "e", "", "New user email")
userCreateCommand.Flags().IntSliceVarP(&libraryIds, "library-ids", "i", []int{}, "Comma-separated list of library IDs. Set the user's accessible libraries. If empty, the user can access all libraries. This is incompatible with admin, as admin can always access all libraries")
userCreateCommand.Flags().BoolVarP(&setAdmin, "admin", "a", false, "If set, make the user an admin. This user will have access to every library")
userCreateCommand.Flags().StringVar(&name, "name", "", "New user's name (this is separate from username used to log in)")
_ = userCreateCommand.MarkFlagRequired("username")
userRoot.AddCommand(userCreateCommand)
userDeleteCommand.Flags().StringVarP(&userID, "user", "u", "", "username or id")
_ = userDeleteCommand.MarkFlagRequired("user")
userRoot.AddCommand(userDeleteCommand)
userEditCommand.Flags().StringVarP(&userID, "user", "u", "", "username or id")
userEditCommand.Flags().BoolVar(&setAdmin, "set-admin", false, "If set, make the user an admin")
userEditCommand.Flags().BoolVar(&setRegularUser, "set-regular", false, "If set, make the user a non-admin")
userEditCommand.MarkFlagsMutuallyExclusive("set-admin", "set-regular")
userEditCommand.Flags().BoolVar(&removeEmail, "remove-email", false, "If set, clear the user's email")
userEditCommand.Flags().StringVarP(&email, "email", "e", "", "New user email")
userEditCommand.MarkFlagsMutuallyExclusive("email", "remove-email")
userEditCommand.Flags().BoolVar(&removeName, "remove-name", false, "If set, clear the user's name")
userEditCommand.Flags().StringVar(&name, "name", "", "New user name (this is separate from username used to log in)")
userEditCommand.MarkFlagsMutuallyExclusive("name", "remove-name")
userEditCommand.Flags().BoolVar(&setPassword, "set-password", false, "If set, the user's new password will be prompted on the CLI")
userEditCommand.Flags().IntSliceVarP(&libraryIds, "library-ids", "i", []int{}, "Comma-separated list of library IDs. Set the user's accessible libraries by id")
_ = userEditCommand.MarkFlagRequired("user")
userRoot.AddCommand(userEditCommand)
userListCommand.Flags().StringVarP(&outputFormat, "format", "f", "csv", "output format [supported values: csv, json]")
userRoot.AddCommand(userListCommand)
}
var (
userRoot = &cobra.Command{
Use: "user",
Short: "Administer users",
Long: "Create, delete, list, or update users",
}
userCreateCommand = &cobra.Command{
Use: "create",
Aliases: []string{"c"},
Short: "Create a new user",
Run: func(cmd *cobra.Command, args []string) {
runCreateUser(cmd.Context())
},
}
userDeleteCommand = &cobra.Command{
Use: "delete",
Aliases: []string{"d"},
Short: "Deletes an existing user",
Run: func(cmd *cobra.Command, args []string) {
runDeleteUser(cmd.Context())
},
}
userEditCommand = &cobra.Command{
Use: "edit",
Aliases: []string{"e"},
Short: "Edit a user",
Long: "Edit the password, admin status, and/or library access",
Run: func(cmd *cobra.Command, args []string) {
runUserEdit(cmd.Context())
},
}
userListCommand = &cobra.Command{
Use: "list",
Short: "List users",
Run: func(cmd *cobra.Command, args []string) {
runUserList(cmd.Context())
},
}
)
func promptPassword() string {
for {
fmt.Print("Enter new password (press enter with no password to cancel): ")
// This cast is necessary for some platforms
password, err := term.ReadPassword(int(syscall.Stdin)) //nolint:unconvert
if err != nil {
log.Fatal("Error getting password", err)
}
fmt.Print("\nConfirm new password (press enter with no password to cancel): ")
confirmation, err := term.ReadPassword(int(syscall.Stdin)) //nolint:unconvert
if err != nil {
log.Fatal("Error getting password confirmation", err)
}
// clear the line.
fmt.Println()
pass := string(password)
confirm := string(confirmation)
if pass == "" {
return ""
}
if pass == confirm {
return pass
}
fmt.Println("Password and password confirmation do not match")
}
}
func libraryError(libraries model.Libraries) error {
ids := make([]int, len(libraries))
for idx, library := range libraries {
ids[idx] = library.ID
}
return fmt.Errorf("not all available libraries found. Requested ids: %v, Found libraries: %v", libraryIds, ids)
}
func runCreateUser(ctx context.Context) {
password := promptPassword()
if password == "" {
log.Fatal("Empty password provided, user creation cancelled")
}
user := model.User{
UserName: userID,
Email: email,
Name: name,
IsAdmin: setAdmin,
NewPassword: password,
}
if user.Name == "" {
user.Name = userID
}
ds, ctx := getAdminContext(ctx)
err := ds.WithTx(func(tx model.DataStore) error {
existingUser, err := tx.User(ctx).FindByUsername(userID)
if existingUser != nil {
return fmt.Errorf("existing user '%s'", userID)
}
if err != nil && !errors.Is(err, model.ErrNotFound) {
return fmt.Errorf("failed to check existing username: %w", err)
}
if len(libraryIds) > 0 && !setAdmin {
user.Libraries, err = tx.Library(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"id": libraryIds}})
if err != nil {
return err
}
if len(user.Libraries) != len(libraryIds) {
return libraryError(user.Libraries)
}
} else {
user.Libraries, err = tx.Library(ctx).GetAll()
if err != nil {
return err
}
}
err = tx.User(ctx).Put(&user)
if err != nil {
return err
}
updatedIds := make([]int, len(user.Libraries))
for idx, lib := range user.Libraries {
updatedIds[idx] = lib.ID
}
err = tx.User(ctx).SetUserLibraries(user.ID, updatedIds)
return err
})
if err != nil {
log.Fatal(ctx, err)
}
log.Info(ctx, "Successfully created user", "id", user.ID, "username", user.UserName)
}
func runDeleteUser(ctx context.Context) {
ds, ctx := getAdminContext(ctx)
var err error
var user *model.User
err = ds.WithTx(func(tx model.DataStore) error {
count, err := tx.User(ctx).CountAll()
if err != nil {
return err
}
if count == 1 {
return errors.New("refusing to delete the last user")
}
user, err = getUser(ctx, userID, tx)
if err != nil {
return err
}
return tx.User(ctx).Delete(user.ID)
})
if err != nil {
log.Fatal(ctx, "Failed to delete user", err)
}
log.Info(ctx, "Deleted user", "username", user.UserName)
}
func runUserEdit(ctx context.Context) {
ds, ctx := getAdminContext(ctx)
var err error
var user *model.User
changes := []string{}
err = ds.WithTx(func(tx model.DataStore) error {
var newLibraries model.Libraries
user, err = getUser(ctx, userID, tx)
if err != nil {
return err
}
if len(libraryIds) > 0 && !setAdmin {
libraries, err := tx.Library(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"id": libraryIds}})
if err != nil {
return err
}
if len(libraries) != len(libraryIds) {
return libraryError(libraries)
}
newLibraries = libraries
changes = append(changes, "updated library ids")
}
if setAdmin && !user.IsAdmin {
libraries, err := tx.Library(ctx).GetAll()
if err != nil {
return err
}
user.IsAdmin = true
user.Libraries = libraries
changes = append(changes, "set admin")
newLibraries = libraries
}
if setRegularUser && user.IsAdmin {
user.IsAdmin = false
changes = append(changes, "set regular user")
}
if setPassword {
password := promptPassword()
if password != "" {
user.NewPassword = password
changes = append(changes, "updated password")
}
}
if email != "" && email != user.Email {
user.Email = email
changes = append(changes, "updated email")
} else if removeEmail && user.Email != "" {
user.Email = ""
changes = append(changes, "removed email")
}
if name != "" && name != user.Name {
user.Name = name
changes = append(changes, "updated name")
} else if removeName && user.Name != "" {
user.Name = ""
changes = append(changes, "removed name")
}
if len(changes) == 0 {
return nil
}
err := tx.User(ctx).Put(user)
if err != nil {
return err
}
if len(newLibraries) > 0 {
updatedIds := make([]int, len(newLibraries))
for idx, lib := range newLibraries {
updatedIds[idx] = lib.ID
}
err := tx.User(ctx).SetUserLibraries(user.ID, updatedIds)
if err != nil {
return err
}
}
return nil
})
if err != nil {
log.Fatal(ctx, "Failed to update user", err)
}
if len(changes) == 0 {
log.Info(ctx, "No changes for user", "user", user.UserName)
} else {
log.Info(ctx, "Updated user", "user", user.UserName, "changes", strings.Join(changes, ", "))
}
}
type displayLibrary struct {
ID int `json:"id"`
Path string `json:"path"`
}
type displayUser struct {
Id string `json:"id"`
Username string `json:"username"`
Name string `json:"name"`
Email string `json:"email"`
Admin bool `json:"admin"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
LastAccess *time.Time `json:"lastAccess"`
LastLogin *time.Time `json:"lastLogin"`
Libraries []displayLibrary `json:"libraries"`
}
func runUserList(ctx context.Context) {
if outputFormat != "csv" && outputFormat != "json" {
log.Fatal("Invalid output format. Must be one of csv, json", "format", outputFormat)
}
ds, ctx := getAdminContext(ctx)
users, err := ds.User(ctx).ReadAll()
if err != nil {
log.Fatal(ctx, "Failed to retrieve users", err)
}
userList := users.(model.Users)
if outputFormat == "csv" {
w := csv.NewWriter(os.Stdout)
_ = w.Write([]string{
"user id",
"username",
"user's name",
"user email",
"admin",
"created at",
"updated at",
"last access",
"last login",
"libraries",
})
for _, user := range userList {
paths := make([]string, len(user.Libraries))
for idx, library := range user.Libraries {
paths[idx] = fmt.Sprintf("%d:%s", library.ID, library.Path)
}
var lastAccess, lastLogin string
if user.LastAccessAt != nil {
lastAccess = user.LastAccessAt.Format(time.RFC3339Nano)
} else {
lastAccess = "never"
}
if user.LastLoginAt != nil {
lastLogin = user.LastLoginAt.Format(time.RFC3339Nano)
} else {
lastLogin = "never"
}
_ = w.Write([]string{
user.ID,
user.UserName,
user.Name,
user.Email,
strconv.FormatBool(user.IsAdmin),
user.CreatedAt.Format(time.RFC3339Nano),
user.UpdatedAt.Format(time.RFC3339Nano),
lastAccess,
lastLogin,
fmt.Sprintf("'%s'", strings.Join(paths, "|")),
})
}
w.Flush()
} else {
users := make([]displayUser, len(userList))
for idx, user := range userList {
paths := make([]displayLibrary, len(user.Libraries))
for idx, library := range user.Libraries {
paths[idx].ID = library.ID
paths[idx].Path = library.Path
}
users[idx].Id = user.ID
users[idx].Username = user.UserName
users[idx].Name = user.Name
users[idx].Email = user.Email
users[idx].Admin = user.IsAdmin
users[idx].CreatedAt = user.CreatedAt
users[idx].UpdatedAt = user.UpdatedAt
users[idx].LastAccess = user.LastAccessAt
users[idx].LastLogin = user.LastLoginAt
users[idx].Libraries = paths
}
j, _ := json.Marshal(users)
fmt.Printf("%s\n", j)
}
}
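As a hedged illustration (not part of the diff), the CSV branch above produces rows shaped like the following; the user id, email, timestamps, and library paths here are made up:
// user id,username,user's name,user email,admin,created at,updated at,last access,last login,libraries
// 7f0b2c44-9e1d-4a6b-8c3f-2d5e9a1b7c60,jdoe,John Doe,jdoe@example.com,false,2025-01-01T10:00:00Z,2025-01-02T10:00:00Z,never,never,'1:/music|2:/podcasts'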

View File

@@ -1,42 +0,0 @@
package cmd
import (
"context"
"errors"
"fmt"
"github.com/navidrome/navidrome/core/auth"
"github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/persistence"
)
func getAdminContext(ctx context.Context) (model.DataStore, context.Context) {
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx = auth.WithAdminUser(ctx, ds)
u, _ := request.UserFrom(ctx)
if !u.IsAdmin {
log.Fatal(ctx, "There must be at least one admin user to run this command.")
}
return ds, ctx
}
func getUser(ctx context.Context, id string, ds model.DataStore) (*model.User, error) {
user, err := ds.User(ctx).FindByUsername(id)
if err != nil && !errors.Is(err, model.ErrNotFound) {
return nil, fmt.Errorf("finding user by name: %w", err)
}
if errors.Is(err, model.ErrNotFound) {
user, err = ds.User(ctx).Get(id)
if err != nil {
return nil, fmt.Errorf("finding user by id: %w", err)
}
}
return user, nil
}

View File

@@ -47,7 +47,9 @@ func CreateServer() *server.Server {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
broker := events.GetBroker()
insights := metrics.GetInstance(dataStore)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
insights := metrics.GetInstance(dataStore, manager)
serverServer := server.New(dataStore, broker, insights)
return serverServer
}
@@ -57,21 +59,20 @@ func CreateNativeAPIRouter(ctx context.Context) *nativeapi.Router {
dataStore := persistence.New(sqlDB)
share := core.NewShare(dataStore)
playlists := core.NewPlaylists(dataStore)
insights := metrics.GetInstance(dataStore)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
insights := metrics.GetInstance(dataStore, manager)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
broker := events.GetBroker()
manager := plugins.GetManager(dataStore, broker)
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, modelScanner)
library := core.NewLibrary(dataStore, modelScanner, watcher, broker)
maintenance := core.NewMaintenance(dataStore)
router := nativeapi.New(dataStore, share, playlists, insights, library, maintenance, manager)
broker := events.GetBroker()
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, scannerScanner)
library := core.NewLibrary(dataStore, scannerScanner, watcher, broker)
router := nativeapi.New(dataStore, share, playlists, insights, library)
return router
}
@@ -80,8 +81,8 @@ func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
dataStore := persistence.New(sqlDB)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
broker := events.GetBroker()
manager := plugins.GetManager(dataStore, broker)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
@@ -91,12 +92,12 @@ func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
archiver := core.NewArchiver(mediaStreamer, dataStore, share)
players := core.NewPlayers(dataStore)
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
playlists := core.NewPlaylists(dataStore)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
playTracker := scrobbler.GetPlayTracker(dataStore, broker, manager)
playbackServer := playback.GetInstance(dataStore)
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, modelScanner, broker, playlists, playTracker, share, playbackServer, metricsMetrics)
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, scannerScanner, broker, playlists, playTracker, share, playbackServer, metricsMetrics)
return router
}
@@ -105,8 +106,8 @@ func CreatePublicRouter() *public.Router {
dataStore := persistence.New(sqlDB)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
broker := events.GetBroker()
manager := plugins.GetManager(dataStore, broker)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
@@ -135,7 +136,9 @@ func CreateListenBrainzRouter() *listenbrainz.Router {
func CreateInsights() metrics.Insights {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
insights := metrics.GetInstance(dataStore)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
insights := metrics.GetInstance(dataStore, manager)
return insights
}
@@ -146,21 +149,21 @@ func CreatePrometheus() metrics.Metrics {
return metricsMetrics
}
func CreateScanner(ctx context.Context) model.Scanner {
func CreateScanner(ctx context.Context) scanner.Scanner {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
broker := events.GetBroker()
manager := plugins.GetManager(dataStore, broker)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
playlists := core.NewPlaylists(dataStore)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
return modelScanner
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
return scannerScanner
}
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
@@ -168,16 +171,16 @@ func CreateScanWatcher(ctx context.Context) scanner.Watcher {
dataStore := persistence.New(sqlDB)
fileCache := artwork.GetImageCache()
fFmpeg := ffmpeg.New()
broker := events.GetBroker()
manager := plugins.GetManager(dataStore, broker)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
agentsAgents := agents.GetAgents(dataStore, manager)
provider := external.NewProvider(dataStore, agentsAgents)
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
broker := events.GetBroker()
playlists := core.NewPlaylists(dataStore)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, modelScanner)
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
watcher := scanner.GetWatcher(dataStore, scannerScanner)
return watcher
}
@@ -188,19 +191,19 @@ func GetPlaybackServer() playback.PlaybackServer {
return playbackServer
}
func getPluginManager() *plugins.Manager {
func getPluginManager() plugins.Manager {
sqlDB := db.Db()
dataStore := persistence.New(sqlDB)
broker := events.GetBroker()
manager := plugins.GetManager(dataStore, broker)
metricsMetrics := metrics.GetPrometheusInstance(dataStore)
manager := plugins.GetManager(dataStore, metricsMetrics)
return manager
}
// wire_injectors.go:
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.GetWatcher, metrics.GetPrometheusInstance, db.Db, plugins.GetManager, wire.Bind(new(agents.PluginLoader), new(*plugins.Manager)), wire.Bind(new(scrobbler.PluginLoader), new(*plugins.Manager)), wire.Bind(new(nativeapi.PluginManager), new(*plugins.Manager)), wire.Bind(new(core.Watcher), new(scanner.Watcher)))
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.GetWatcher, plugins.GetManager, metrics.GetPrometheusInstance, db.Db, wire.Bind(new(agents.PluginLoader), new(plugins.Manager)), wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)), wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)), wire.Bind(new(core.Scanner), new(scanner.Scanner)), wire.Bind(new(core.Watcher), new(scanner.Watcher)))
func GetPluginManager(ctx context.Context) *plugins.Manager {
func GetPluginManager(ctx context.Context) plugins.Manager {
manager := getPluginManager()
manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
return manager

View File

@@ -39,12 +39,13 @@ var allProviders = wire.NewSet(
events.GetBroker,
scanner.New,
scanner.GetWatcher,
plugins.GetManager,
metrics.GetPrometheusInstance,
db.Db,
plugins.GetManager,
wire.Bind(new(agents.PluginLoader), new(*plugins.Manager)),
wire.Bind(new(scrobbler.PluginLoader), new(*plugins.Manager)),
wire.Bind(new(nativeapi.PluginManager), new(*plugins.Manager)),
wire.Bind(new(agents.PluginLoader), new(plugins.Manager)),
wire.Bind(new(scrobbler.PluginLoader), new(plugins.Manager)),
wire.Bind(new(metrics.PluginLoader), new(plugins.Manager)),
wire.Bind(new(core.Scanner), new(scanner.Scanner)),
wire.Bind(new(core.Watcher), new(scanner.Watcher)),
)
@@ -102,7 +103,7 @@ func CreatePrometheus() metrics.Metrics {
))
}
func CreateScanner(ctx context.Context) model.Scanner {
func CreateScanner(ctx context.Context) scanner.Scanner {
panic(wire.Build(
allProviders,
))
@@ -120,13 +121,13 @@ func GetPlaybackServer() playback.PlaybackServer {
))
}
func getPluginManager() *plugins.Manager {
func getPluginManager() plugins.Manager {
panic(wire.Build(
allProviders,
))
}
func GetPluginManager(ctx context.Context) *plugins.Manager {
func GetPluginManager(ctx context.Context) plugins.Manager {
manager := getPluginManager()
manager.SetSubsonicRouter(CreateSubsonicAPIRouter(ctx))
return manager

View File

@@ -41,7 +41,6 @@ type configOptions struct {
UIWelcomeMessage string
MaxSidebarPlaylists int
EnableTranscodingConfig bool
EnableTranscodingCancellation bool
EnableDownloads bool
EnableExternalServices bool
EnableInsightsCollector bool
@@ -87,9 +86,11 @@ type configOptions struct {
AuthRequestLimit int
AuthWindowLength time.Duration
PasswordEncryptionKey string
ExtAuth extAuthOptions
ReverseProxyUserHeader string
ReverseProxyWhitelist string
Plugins pluginsOptions
HTTPHeaders httpHeaderOptions `json:",omitzero"`
PluginConfig map[string]map[string]string
HTTPSecurityHeaders secureOptions `json:",omitzero"`
Prometheus prometheusOptions `json:",omitzero"`
Scanner scannerOptions `json:",omitzero"`
Jukebox jukeboxOptions `json:",omitzero"`
@@ -101,38 +102,34 @@ type configOptions struct {
Spotify spotifyOptions `json:",omitzero"`
Deezer deezerOptions `json:",omitzero"`
ListenBrainz listenBrainzOptions `json:",omitzero"`
EnableScrobbleHistory bool
Tags map[string]TagConf `json:",omitempty"`
Tags map[string]TagConf `json:",omitempty"`
Agents string
// DevFlags. These are used to enable/disable debugging and incomplete features
DevLogLevels map[string]string `json:",omitempty"`
DevLogSourceLine bool
DevEnableProfiler bool
DevAutoCreateAdminPassword string
DevAutoLoginUsername string
DevActivityPanel bool
DevActivityPanelUpdateRate time.Duration
DevSidebarPlaylists bool
DevShowArtistPage bool
DevUIShowConfig bool
DevNewEventStream bool
DevOffsetOptimize int
DevArtworkMaxRequests int
DevArtworkThrottleBacklogLimit int
DevArtworkThrottleBacklogTimeout time.Duration
DevArtistInfoTimeToLive time.Duration
DevAlbumInfoTimeToLive time.Duration
DevExternalScanner bool
DevScannerThreads uint
DevSelectiveWatcher bool
DevInsightsInitialDelay time.Duration
DevEnablePlayerInsights bool
DevEnablePluginsInsights bool
DevPluginCompilationTimeout time.Duration
DevExternalArtistFetchMultiplier float64
DevOptimizeDB bool
DevPreserveUnicodeInExternalCalls bool
DevLogLevels map[string]string `json:",omitempty"`
DevLogSourceLine bool
DevEnableProfiler bool
DevAutoCreateAdminPassword string
DevAutoLoginUsername string
DevActivityPanel bool
DevActivityPanelUpdateRate time.Duration
DevSidebarPlaylists bool
DevShowArtistPage bool
DevUIShowConfig bool
DevNewEventStream bool
DevOffsetOptimize int
DevArtworkMaxRequests int
DevArtworkThrottleBacklogLimit int
DevArtworkThrottleBacklogTimeout time.Duration
DevArtistInfoTimeToLive time.Duration
DevAlbumInfoTimeToLive time.Duration
DevExternalScanner bool
DevScannerThreads uint
DevInsightsInitialDelay time.Duration
DevEnablePlayerInsights bool
DevEnablePluginsInsights bool
DevPluginCompilationTimeout time.Duration
DevExternalArtistFetchMultiplier float64
}
type scannerOptions struct {
@@ -178,8 +175,7 @@ type spotifyOptions struct {
}
type deezerOptions struct {
Enabled bool
Language string
Enabled bool
}
type listenBrainzOptions struct {
@@ -187,8 +183,8 @@ type listenBrainzOptions struct {
BaseURL string
}
type httpHeaderOptions struct {
FrameOptions string
type secureOptions struct {
CustomFrameOptionsValue string
}
type prometheusOptions struct {
@@ -225,16 +221,9 @@ type inspectOptions struct {
}
type pluginsOptions struct {
Enabled bool
Folder string
CacheSize string
AutoReload bool
LogLevel string
}
type extAuthOptions struct {
TrustedSources string
UserHeader string
Enabled bool
Folder string
CacheSize string
}
var (
@@ -255,11 +244,6 @@ func LoadFromFile(confFile string) {
func Load(noConfigDump bool) {
parseIniFileConfiguration()
// Map deprecated options to their new names for backwards compatibility
mapDeprecatedOption("ReverseProxyWhitelist", "ExtAuth.TrustedSources")
mapDeprecatedOption("ReverseProxyUserHeader", "ExtAuth.UserHeader")
mapDeprecatedOption("HTTPSecurityHeaders.CustomFrameOptionsValue", "HTTPHeaders.FrameOptions")
err := viper.Unmarshal(&Server)
if err != nil {
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)
@@ -343,18 +327,9 @@ func Load(noConfigDump bool) {
Server.BaseScheme = u.Scheme
}
// Log configuration source
if Server.ConfigFile != "" {
log.Info("Loaded configuration", "file", Server.ConfigFile)
} else if hasNDEnvVars() {
log.Info("No configuration file found. Loaded configuration only from environment variables")
} else {
log.Warn("No configuration file found. Using default values. To specify a config file, use the --configfile flag or set the ND_CONFIGFILE environment variable.")
}
// Print current configuration if log level is Debug
if log.IsGreaterOrEqualTo(log.LevelDebug) && !noConfigDump {
prettyConf := pretty.Sprintf("Configuration: %# v", Server)
prettyConf := pretty.Sprintf("Loaded configuration from '%s': %# v", Server.ConfigFile, Server)
if Server.EnableLogRedacting {
prettyConf = log.Redact(prettyConf)
}
@@ -369,12 +344,9 @@ func Load(noConfigDump bool) {
log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
Server.Scanner.Extractor = consts.DefaultScannerExtractor
}
logDeprecatedOptions("Scanner.GenreSeparators", "")
logDeprecatedOptions("Scanner.GroupAlbumReleases", "")
logDeprecatedOptions("DevEnableBufferedScrobble", "") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
logDeprecatedOptions("ReverseProxyWhitelist", "ExtAuth.TrustedSources")
logDeprecatedOptions("ReverseProxyUserHeader", "ExtAuth.UserHeader")
logDeprecatedOptions("HTTPSecurityHeaders.CustomFrameOptionsValue", "HTTPHeaders.FrameOptions")
logDeprecatedOptions("Scanner.GenreSeparators")
logDeprecatedOptions("Scanner.GroupAlbumReleases")
logDeprecatedOptions("DevEnableBufferedScrobble") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
// Call init hooks
for _, hook := range hooks {
@@ -382,29 +354,15 @@ func Load(noConfigDump bool) {
}
}
func logDeprecatedOptions(oldName, newName string) {
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(oldName, ".", "_"))
newEnvVar := "ND_" + strings.ToUpper(strings.ReplaceAll(newName, ".", "_"))
logWarning := func(oldName, newName string) {
if newName != "" {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release. Please use the new '%s'", oldName, newName))
} else {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", oldName))
func logDeprecatedOptions(options ...string) {
for _, option := range options {
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
if os.Getenv(envVar) != "" {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
}
if viper.InConfig(option) {
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
}
}
if os.Getenv(envVar) != "" {
logWarning(envVar, newEnvVar)
}
if viper.InConfig(oldName) {
logWarning(oldName, newName)
}
}
// mapDeprecatedOption is used to provide backwards compatibility for deprecated options. It should be called after
// the config has been read by viper, but before unmarshalling it into the Config struct.
func mapDeprecatedOption(legacyName, newName string) {
if viper.IsSet(legacyName) {
viper.Set(newName, viper.Get(legacyName))
}
}
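A minimal sketch (not part of the diff) of how this deprecation mapping behaves, using one of the mappings listed above and a made-up legacy value:
package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// Simulate an old config that still sets the legacy key.
	viper.Set("ReverseProxyUserHeader", "X-Auth-User")

	// Equivalent to mapDeprecatedOption("ReverseProxyUserHeader", "ExtAuth.UserHeader"):
	// copy the legacy value to the new key before unmarshalling into the config struct.
	if viper.IsSet("ReverseProxyUserHeader") {
		viper.Set("ExtAuth.UserHeader", viper.Get("ReverseProxyUserHeader"))
	}

	fmt.Println(viper.GetString("ExtAuth.UserHeader")) // prints "X-Auth-User"
}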
@@ -467,7 +425,7 @@ func validatePurgeMissingOption() error {
}
}
if !valid {
err := fmt.Errorf("invalid Scanner.PurgeMissing value: '%s'. Must be one of: %v", Server.Scanner.PurgeMissing, allowedValues)
err := fmt.Errorf("Invalid Scanner.PurgeMissing value: '%s'. Must be one of: %v", Server.Scanner.PurgeMissing, allowedValues)
log.Error(err.Error())
Server.Scanner.PurgeMissing = consts.PurgeMissingNever
return err
@@ -514,16 +472,6 @@ func AddHook(hook func()) {
hooks = append(hooks, hook)
}
// hasNDEnvVars checks if any ND_ prefixed environment variables are set (excluding ND_CONFIGFILE)
func hasNDEnvVars() bool {
for _, env := range os.Environ() {
if strings.HasPrefix(env, "ND_") && !strings.HasPrefix(env, "ND_CONFIGFILE=") {
return true
}
}
return false
}
func setViperDefaults() {
viper.SetDefault("musicfolder", filepath.Join(".", "music"))
viper.SetDefault("cachefolder", "")
@@ -541,7 +489,6 @@ func setViperDefaults() {
viper.SetDefault("uiwelcomemessage", "")
viper.SetDefault("maxsidebarplaylists", consts.DefaultMaxSidebarPlaylists)
viper.SetDefault("enabletranscodingconfig", false)
viper.SetDefault("enabletranscodingcancellation", false)
viper.SetDefault("transcodingcachesize", "100MB")
viper.SetDefault("imagecachesize", "100MB")
viper.SetDefault("albumplaycountmode", consts.AlbumPlayCountModeAbsolute)
@@ -586,8 +533,8 @@ func setViperDefaults() {
viper.SetDefault("authrequestlimit", 5)
viper.SetDefault("authwindowlength", 20*time.Second)
viper.SetDefault("passwordencryptionkey", "")
viper.SetDefault("extauth.userheader", "Remote-User")
viper.SetDefault("extauth.trustedsources", "")
viper.SetDefault("reverseproxyuserheader", "Remote-User")
viper.SetDefault("reverseproxywhitelist", "")
viper.SetDefault("prometheus.enabled", false)
viper.SetDefault("prometheus.metricspath", consts.PrometheusDefaultPath)
viper.SetDefault("prometheus.password", "")
@@ -608,7 +555,7 @@ func setViperDefaults() {
viper.SetDefault("subsonic.appendsubtitle", true)
viper.SetDefault("subsonic.artistparticipations", false)
viper.SetDefault("subsonic.defaultreportrealpath", false)
viper.SetDefault("subsonic.legacyclients", "DSub,SubMusic")
viper.SetDefault("subsonic.legacyclients", "DSub")
viper.SetDefault("agents", "lastfm,spotify,deezer")
viper.SetDefault("lastfm.enabled", true)
viper.SetDefault("lastfm.language", "en")
@@ -618,11 +565,9 @@ func setViperDefaults() {
viper.SetDefault("spotify.id", "")
viper.SetDefault("spotify.secret", "")
viper.SetDefault("deezer.enabled", true)
viper.SetDefault("deezer.language", "en")
viper.SetDefault("listenbrainz.enabled", true)
viper.SetDefault("listenbrainz.baseurl", "https://api.listenbrainz.org/1/")
viper.SetDefault("enablescrobblehistory", true)
viper.SetDefault("httpheaders.frameoptions", "DENY")
viper.SetDefault("httpsecurityheaders.customframeoptionsvalue", "DENY")
viper.SetDefault("backup.path", "")
viper.SetDefault("backup.schedule", "")
viper.SetDefault("backup.count", 0)
@@ -634,8 +579,7 @@ func setViperDefaults() {
viper.SetDefault("inspect.backlogtimeout", consts.RequestThrottleBacklogTimeout)
viper.SetDefault("plugins.folder", "")
viper.SetDefault("plugins.enabled", false)
viper.SetDefault("plugins.cachesize", "200MB")
viper.SetDefault("plugins.autoreload", false)
viper.SetDefault("plugins.cachesize", "100MB")
// DevFlags. These are used to enable/disable debugging and incomplete features
viper.SetDefault("devlogsourceline", false)
@@ -656,28 +600,20 @@ func setViperDefaults() {
viper.SetDefault("devalbuminfotimetolive", consts.AlbumInfoTimeToLive)
viper.SetDefault("devexternalscanner", true)
viper.SetDefault("devscannerthreads", 5)
viper.SetDefault("devselectivewatcher", true)
viper.SetDefault("devinsightsinitialdelay", consts.InsightsInitialDelay)
viper.SetDefault("devenableplayerinsights", true)
viper.SetDefault("devenablepluginsinsights", true)
viper.SetDefault("devplugincompilationtimeout", time.Minute)
viper.SetDefault("devexternalartistfetchmultiplier", 1.5)
viper.SetDefault("devoptimizedb", true)
viper.SetDefault("devpreserveunicodeinexternalcalls", false)
}
func init() {
setViperDefaults()
}
func InitConfig(cfgFile string, loadEnvVars bool) {
func InitConfig(cfgFile string) {
codecRegistry := viper.NewCodecRegistry()
_ = codecRegistry.RegisterCodec("ini", ini.Codec{
LoadOptions: ini.LoadOptions{
UnescapeValueDoubleQuotes: true,
UnescapeValueCommentSymbols: true,
},
})
_ = codecRegistry.RegisterCodec("ini", ini.Codec{})
viper.SetOptions(viper.WithCodecRegistry(codecRegistry))
cfgFile = getConfigFile(cfgFile)
@@ -691,12 +627,10 @@ func InitConfig(cfgFile string, loadEnvVars bool) {
}
_ = viper.BindEnv("port")
if loadEnvVars {
viper.SetEnvPrefix("ND")
replacer := strings.NewReplacer(".", "_")
viper.SetEnvKeyReplacer(replacer)
viper.AutomaticEnv()
}
viper.SetEnvPrefix("ND")
replacer := strings.NewReplacer(".", "_")
viper.SetEnvKeyReplacer(replacer)
viper.AutomaticEnv()
err := viper.ReadInConfig()
if viper.ConfigFileUsed() != "" && err != nil {

View File

@@ -31,7 +31,7 @@ var _ = Describe("Configuration", func() {
filename := filepath.Join("testdata", "cfg."+format)
// Initialize config with the test file
conf.InitConfig(filename, false)
conf.InitConfig(filename)
// Load the configuration (with noConfigDump=true)
conf.Load(true)
@@ -39,10 +39,6 @@ var _ = Describe("Configuration", func() {
Expect(conf.Server.MusicFolder).To(Equal(fmt.Sprintf("/%s/music", format)))
Expect(conf.Server.UIWelcomeMessage).To(Equal("Welcome " + format))
Expect(conf.Server.Tags["custom"].Aliases).To(Equal([]string{format, "test"}))
Expect(conf.Server.Tags["artist"].Split).To(Equal([]string{";"}))
// Check deprecated option mapping
Expect(conf.Server.ExtAuth.UserHeader).To(Equal("X-Auth-User"))
// The config file used should be the one we created
Expect(conf.Server.ConfigFile).To(Equal(filename))

View File

@@ -1,8 +1,6 @@
[default]
MusicFolder = /ini/music
UIWelcomeMessage = 'Welcome ini' ; Just a comment to test the LoadOptions
ReverseProxyUserHeader = 'X-Auth-User'
UIWelcomeMessage = Welcome ini
[Tags]
Custom.Aliases = ini,test
artist.Split = ";" # Should be able to read ; as a separator
Custom.Aliases = ini,test

View File

@@ -1,11 +1,7 @@
{
"musicFolder": "/json/music",
"uiWelcomeMessage": "Welcome json",
"reverseProxyUserHeader": "X-Auth-User",
"Tags": {
"artist": {
"split": ";"
},
"custom": {
"aliases": [
"json",

View File

@@ -1,8 +1,5 @@
musicFolder = "/toml/music"
uiWelcomeMessage = "Welcome toml"
ReverseProxyUserHeader = "X-Auth-User"
Tags.artist.Split = ';'
[Tags.custom]
aliases = ["toml", "test"]

View File

@@ -1,9 +1,6 @@
musicFolder: "/yaml/music"
uiWelcomeMessage: "Welcome yaml"
reverseProxyUserHeader: "X-Auth-User"
Tags:
artist:
split: [";"]
custom:
aliases:
- yaml

View File

@@ -150,8 +150,6 @@ var (
}
)
var HTTPUserAgent = "Navidrome" + "/" + Version
var (
VariousArtists = "Various Artists"
// TODO This will be dynamic when using disambiguation

View File

@@ -64,7 +64,6 @@ func (a *Agents) getEnabledAgentNames() []enabledAgent {
if a.pluginLoader != nil {
availablePlugins = a.pluginLoader.PluginNames("MetadataAgent")
}
log.Trace("Available MetadataAgent plugins", "plugins", availablePlugins)
configuredAgents := strings.Split(conf.Server.Agents, ",")
@@ -88,7 +87,7 @@ func (a *Agents) getEnabledAgentNames() []enabledAgent {
} else if isPlugin {
validAgents = append(validAgents, enabledAgent{name: name, isPlugin: true})
} else {
log.Debug("Unknown agent ignored", "name", name)
log.Warn("Unknown agent ignored", "name", name)
}
}
return validAgents
@@ -355,9 +354,6 @@ func (a *Agents) GetAlbumImages(ctx context.Context, name, artist, mbid string)
continue
}
images, err := retriever.GetAlbumImages(ctx, name, artist, mbid)
if err != nil {
log.Trace(ctx, "Agent GetAlbumImages failed", "agent", ag.AgentName(), "album", name, "artist", artist, "mbid", mbid, err)
}
if len(images) > 0 && err == nil {
log.Debug(ctx, "Got Album Images", "agent", ag.AgentName(), "album", name, "artist", artist,
"mbid", mbid, "elapsed", time.Since(start))

View File

@@ -1,7 +1,6 @@
package deezer
import (
bytes "bytes"
"context"
"encoding/json"
"errors"
@@ -10,14 +9,11 @@ import (
"net/http"
"net/url"
"strconv"
"strings"
"github.com/microcosm-cc/bluemonday"
"github.com/navidrome/navidrome/log"
)
const apiBaseURL = "https://api.deezer.com"
const authBaseURL = "https://auth.deezer.com"
var (
ErrNotFound = errors.New("deezer: not found")
@@ -29,21 +25,15 @@ type httpDoer interface {
type client struct {
httpDoer httpDoer
language string
jwt jwtToken
}
func newClient(hc httpDoer, language string) *client {
return &client{
httpDoer: hc,
language: language,
}
func newClient(hc httpDoer) *client {
return &client{hc}
}
func (c *client) searchArtists(ctx context.Context, name string, limit int) ([]Artist, error) {
params := url.Values{}
params.Add("q", name)
params.Add("order", "RANKING")
params.Add("limit", strconv.Itoa(limit))
req, err := http.NewRequestWithContext(ctx, "GET", apiBaseURL+"/search/artist", nil)
if err != nil {
@@ -63,7 +53,7 @@ func (c *client) searchArtists(ctx context.Context, name string, limit int) ([]A
return results.Data, nil
}
func (c *client) makeRequest(req *http.Request, response any) error {
func (c *client) makeRequest(req *http.Request, response interface{}) error {
log.Trace(req.Context(), fmt.Sprintf("Sending Deezer %s request", req.Method), "url", req.URL)
resp, err := c.httpDoer.Do(req)
if err != nil {
@@ -91,129 +81,3 @@ func (c *client) parseError(data []byte) error {
}
return fmt.Errorf("deezer error(%d): %s", deezerError.Error.Code, deezerError.Error.Message)
}
func (c *client) getRelatedArtists(ctx context.Context, artistID int) ([]Artist, error) {
req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("%s/artist/%d/related", apiBaseURL, artistID), nil)
if err != nil {
return nil, err
}
var results RelatedArtists
err = c.makeRequest(req, &results)
if err != nil {
return nil, err
}
return results.Data, nil
}
func (c *client) getTopTracks(ctx context.Context, artistID int, limit int) ([]Track, error) {
params := url.Values{}
params.Add("limit", strconv.Itoa(limit))
req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("%s/artist/%d/top", apiBaseURL, artistID), nil)
if err != nil {
return nil, err
}
req.URL.RawQuery = params.Encode()
var results TopTracks
err = c.makeRequest(req, &results)
if err != nil {
return nil, err
}
return results.Data, nil
}
const pipeAPIURL = "https://pipe.deezer.com/api"
var strictPolicy = bluemonday.StrictPolicy()
func (c *client) getArtistBio(ctx context.Context, artistID int) (string, error) {
jwt, err := c.getJWT(ctx)
if err != nil {
return "", fmt.Errorf("deezer: failed to get JWT: %w", err)
}
query := map[string]any{
"operationName": "ArtistBio",
"variables": map[string]any{
"artistId": strconv.Itoa(artistID),
},
"query": `query ArtistBio($artistId: String!) {
artist(artistId: $artistId) {
bio {
full
}
}
}`,
}
body, err := json.Marshal(query)
if err != nil {
return "", err
}
req, err := http.NewRequestWithContext(ctx, "POST", pipeAPIURL, bytes.NewReader(body))
if err != nil {
return "", err
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Accept-Language", c.language)
req.Header.Set("Authorization", "Bearer "+jwt)
log.Trace(ctx, "Fetching Deezer artist biography via GraphQL", "artistId", artistID, "language", c.language)
resp, err := c.httpDoer.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return "", fmt.Errorf("deezer: failed to fetch biography: %s", resp.Status)
}
data, err := io.ReadAll(resp.Body)
if err != nil {
return "", err
}
type graphQLResponse struct {
Data struct {
Artist struct {
Bio struct {
Full string `json:"full"`
} `json:"bio"`
} `json:"artist"`
} `json:"data"`
Errors []struct {
Message string `json:"message"`
}
}
var result graphQLResponse
if err := json.Unmarshal(data, &result); err != nil {
return "", fmt.Errorf("deezer: failed to parse GraphQL response: %w", err)
}
if len(result.Errors) > 0 {
var errs []error
for m := range result.Errors {
errs = append(errs, errors.New(result.Errors[m].Message))
}
err := errors.Join(errs...)
return "", fmt.Errorf("deezer: GraphQL error: %w", err)
}
if result.Data.Artist.Bio.Full == "" {
return "", errors.New("deezer: biography not found")
}
return cleanBio(result.Data.Artist.Bio.Full), nil
}
func cleanBio(bio string) string {
bio = strings.ReplaceAll(bio, "</p>", "\n")
return strictPolicy.Sanitize(bio)
}
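An illustrative sketch (not part of the diff) of what cleanBio above does to an HTML biography; the input string is made up and the exact output escaping is approximate:
package main

import (
	"fmt"
	"strings"

	"github.com/microcosm-cc/bluemonday"
)

func main() {
	bio := "<p>Schoolmates Thomas and Guy-Manuel formed the duo.</p><p>They later went <b>electronic</b>.</p>"
	bio = strings.ReplaceAll(bio, "</p>", "\n")          // keep paragraph breaks as newlines
	fmt.Println(bluemonday.StrictPolicy().Sanitize(bio)) // remaining tags are stripped, leaving plain text
}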

View File

@@ -1,101 +0,0 @@
package deezer
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"sync"
"time"
"github.com/lestrrat-go/jwx/v2/jwt"
"github.com/navidrome/navidrome/log"
)
type jwtToken struct {
token string
expiresAt time.Time
mu sync.RWMutex
}
func (j *jwtToken) get() (string, bool) {
j.mu.RLock()
defer j.mu.RUnlock()
if time.Now().Before(j.expiresAt) {
return j.token, true
}
return "", false
}
func (j *jwtToken) set(token string, expiresIn time.Duration) {
j.mu.Lock()
defer j.mu.Unlock()
j.token = token
j.expiresAt = time.Now().Add(expiresIn)
}
func (c *client) getJWT(ctx context.Context) (string, error) {
// Check if we have a valid cached token
if token, valid := c.jwt.get(); valid {
return token, nil
}
// Fetch a new anonymous token
req, err := http.NewRequestWithContext(ctx, "GET", authBaseURL+"/login/anonymous?jo=p&rto=c", nil)
if err != nil {
return "", err
}
req.Header.Set("Accept", "application/json")
resp, err := c.httpDoer.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return "", fmt.Errorf("deezer: failed to get JWT token: %s", resp.Status)
}
data, err := io.ReadAll(resp.Body)
if err != nil {
return "", err
}
type authResponse struct {
JWT string `json:"jwt"`
}
var result authResponse
if err := json.Unmarshal(data, &result); err != nil {
return "", fmt.Errorf("deezer: failed to parse auth response: %w", err)
}
if result.JWT == "" {
return "", errors.New("deezer: no JWT token in response")
}
// Parse JWT to get actual expiration time
token, err := jwt.ParseString(result.JWT, jwt.WithVerify(false), jwt.WithValidate(false))
if err != nil {
return "", fmt.Errorf("deezer: failed to parse JWT token: %w", err)
}
// Calculate TTL with a 1-minute buffer for clock skew and network delays
expiresAt := token.Expiration()
if expiresAt.IsZero() {
return "", errors.New("deezer: JWT token has no expiration time")
}
ttl := time.Until(expiresAt) - 1*time.Minute
if ttl <= 0 {
return "", errors.New("deezer: JWT token already expired or expires too soon")
}
c.jwt.set(result.JWT, ttl)
log.Trace(ctx, "Fetched new Deezer JWT token", "expiresAt", expiresAt, "ttl", ttl)
return result.JWT, nil
}
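A small sketch (not part of the diff) of the TTL arithmetic above, showing why a token expiring within the 1-minute buffer is rejected while a longer-lived one is cached:
package main

import (
	"fmt"
	"time"
)

func main() {
	// Token expiring in 5 minutes: cached for roughly 4 minutes after the 1-minute buffer.
	ttl := time.Until(time.Now().Add(5*time.Minute)) - 1*time.Minute
	fmt.Println(ttl > 0) // true

	// Token expiring in 30 seconds: rejected as expiring too soon, as in the tests below.
	ttl = time.Until(time.Now().Add(30*time.Second)) - 1*time.Minute
	fmt.Println(ttl > 0) // false
}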

View File

@@ -1,293 +0,0 @@
package deezer
import (
"bytes"
"context"
"fmt"
"io"
"net/http"
"sync"
"time"
"github.com/lestrrat-go/jwx/v2/jwt"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("JWT Authentication", func() {
var httpClient *fakeHttpClient
var client *client
var ctx context.Context
BeforeEach(func() {
httpClient = &fakeHttpClient{}
client = newClient(httpClient, "en")
ctx = context.Background()
})
Describe("getJWT", func() {
Context("with a valid JWT response", func() {
It("successfully fetches and caches a JWT token", func() {
testJWT := createTestJWT(5 * time.Minute)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
})
token, err := client.getJWT(ctx)
Expect(err).To(BeNil())
Expect(token).To(Equal(testJWT))
})
It("returns the cached token on subsequent calls", func() {
testJWT := createTestJWT(5 * time.Minute)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
})
// First call should fetch from API
token1, err := client.getJWT(ctx)
Expect(err).To(BeNil())
Expect(token1).To(Equal(testJWT))
Expect(httpClient.lastRequest.URL.Path).To(Equal("/login/anonymous"))
// Second call should return cached token without hitting API
httpClient.lastRequest = nil // Clear last request to verify no new request is made
token2, err := client.getJWT(ctx)
Expect(err).To(BeNil())
Expect(token2).To(Equal(testJWT))
Expect(httpClient.lastRequest).To(BeNil()) // No new request made
})
It("parses the JWT expiration time correctly", func() {
expectedExpiration := time.Now().Add(5 * time.Minute)
testToken, err := jwt.NewBuilder().
Expiration(expectedExpiration).
Build()
Expect(err).To(BeNil())
testJWT, err := jwt.Sign(testToken, jwt.WithInsecureNoSignature())
Expect(err).To(BeNil())
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, string(testJWT)))),
})
token, err := client.getJWT(ctx)
Expect(err).To(BeNil())
Expect(token).ToNot(BeEmpty())
// Verify the token is cached until close to expiration
// The cache should expire 1 minute before the JWT expires
expectedCacheExpiry := expectedExpiration.Add(-1 * time.Minute)
Expect(client.jwt.expiresAt).To(BeTemporally("~", expectedCacheExpiry, 2*time.Second))
})
})
Context("with JWT tokens that expire soon", func() {
It("rejects tokens that expire in less than 1 minute", func() {
// Create a token that expires in 30 seconds (less than 1-minute buffer)
testJWT := createTestJWT(30 * time.Second)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
})
_, err := client.getJWT(ctx)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("JWT token already expired or expires too soon"))
})
It("rejects already expired tokens", func() {
// Create a token that expired 1 minute ago
testJWT := createTestJWT(-1 * time.Minute)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
})
_, err := client.getJWT(ctx)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("JWT token already expired or expires too soon"))
})
It("accepts tokens that expire in more than 1 minute", func() {
// Create a token that expires in 2 minutes (just over the 1-minute buffer)
testJWT := createTestJWT(2 * time.Minute)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, testJWT))),
})
token, err := client.getJWT(ctx)
Expect(err).To(BeNil())
Expect(token).ToNot(BeEmpty())
})
})
Context("with invalid responses", func() {
It("handles HTTP error responses", func() {
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 500,
Body: io.NopCloser(bytes.NewBufferString(`{"error":"Internal server error"}`)),
})
_, err := client.getJWT(ctx)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("failed to get JWT token"))
})
It("handles malformed JSON responses", func() {
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(`{invalid json}`)),
})
_, err := client.getJWT(ctx)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("failed to parse auth response"))
})
It("handles responses with empty JWT field", func() {
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(`{"jwt":""}`)),
})
_, err := client.getJWT(ctx)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(Equal("deezer: no JWT token in response"))
})
It("handles invalid JWT tokens", func() {
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(`{"jwt":"not-a-valid-jwt"}`)),
})
_, err := client.getJWT(ctx)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("failed to parse JWT token"))
})
It("rejects JWT tokens without expiration", func() {
// Create a JWT without expiration claim
testToken, err := jwt.NewBuilder().
Claim("custom", "value").
Build()
Expect(err).To(BeNil())
// Verify token has no expiration
Expect(testToken.Expiration().IsZero()).To(BeTrue())
testJWT, err := jwt.Sign(testToken, jwt.WithInsecureNoSignature())
Expect(err).To(BeNil())
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, string(testJWT)))),
})
_, err = client.getJWT(ctx)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(Equal("deezer: JWT token has no expiration time"))
})
})
Context("token caching behavior", func() {
It("fetches a new token when the cached token expires", func() {
// First token expires in 5 minutes
firstJWT := createTestJWT(5 * time.Minute)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, firstJWT))),
})
token1, err := client.getJWT(ctx)
Expect(err).To(BeNil())
Expect(token1).To(Equal(firstJWT))
// Manually expire the cached token
client.jwt.expiresAt = time.Now().Add(-1 * time.Second)
// Second token with different expiration (10 minutes)
secondJWT := createTestJWT(10 * time.Minute)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s"}`, secondJWT))),
})
token2, err := client.getJWT(ctx)
Expect(err).To(BeNil())
Expect(token2).To(Equal(secondJWT))
Expect(token2).ToNot(Equal(token1))
})
})
})
Describe("jwtToken cache", func() {
var cache *jwtToken
BeforeEach(func() {
cache = &jwtToken{}
})
It("returns false for expired tokens", func() {
cache.set("test-token", -1*time.Second) // Already expired
token, valid := cache.get()
Expect(valid).To(BeFalse())
Expect(token).To(BeEmpty())
})
It("returns true for valid tokens", func() {
cache.set("test-token", 4*time.Minute)
token, valid := cache.get()
Expect(valid).To(BeTrue())
Expect(token).To(Equal("test-token"))
})
It("is thread-safe for concurrent access", func() {
wg := sync.WaitGroup{}
// Writer goroutine
wg.Go(func() {
for i := 0; i < 100; i++ {
cache.set(fmt.Sprintf("token-%d", i), 1*time.Hour)
time.Sleep(1 * time.Millisecond)
}
})
// Reader goroutine
wg.Go(func() {
for i := 0; i < 100; i++ {
cache.get()
time.Sleep(1 * time.Millisecond)
}
})
// Wait for both goroutines to complete
wg.Wait()
// Verify final state is valid
token, valid := cache.get()
Expect(valid).To(BeTrue())
Expect(token).To(HavePrefix("token-"))
})
})
})
// createTestJWT creates a valid JWT token for testing purposes
func createTestJWT(expiresIn time.Duration) string {
token, err := jwt.NewBuilder().
Expiration(time.Now().Add(expiresIn)).
Build()
if err != nil {
panic(fmt.Sprintf("failed to create test JWT: %v", err))
}
signed, err := jwt.Sign(token, jwt.WithInsecureNoSignature())
if err != nil {
panic(fmt.Sprintf("failed to sign test JWT: %v", err))
}
return string(signed)
}

View File

@@ -2,11 +2,10 @@ package deezer
import (
"bytes"
"fmt"
"context"
"io"
"net/http"
"os"
"time"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
@@ -18,7 +17,7 @@ var _ = Describe("client", func() {
BeforeEach(func() {
httpClient = &fakeHttpClient{}
client = newClient(httpClient, "en")
client = newClient(httpClient)
})
Describe("ArtistImages", func() {
@@ -27,7 +26,7 @@ var _ = Describe("client", func() {
Expect(err).To(BeNil())
httpClient.mock("https://api.deezer.com/search/artist", http.Response{Body: f, StatusCode: 200})
artists, err := client.searchArtists(GinkgoT().Context(), "Michael Jackson", 20)
artists, err := client.searchArtists(context.TODO(), "Michael Jackson", 20)
Expect(err).To(BeNil())
Expect(artists).To(HaveLen(17))
Expect(artists[0].Name).To(Equal("Michael Jackson"))
@@ -40,136 +39,10 @@ var _ = Describe("client", func() {
Body: io.NopCloser(bytes.NewBufferString(`{"data":[],"total":0}`)),
})
_, err := client.searchArtists(GinkgoT().Context(), "Michael Jackson", 20)
_, err := client.searchArtists(context.TODO(), "Michael Jackson", 20)
Expect(err).To(MatchError(ErrNotFound))
})
})
Describe("ArtistBio", func() {
BeforeEach(func() {
// Mock the JWT token endpoint with a valid JWT that expires in 5 minutes
testJWT := createTestJWT(5 * time.Minute)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s","refresh_token":""}`, testJWT))),
})
})
It("returns artist bio from a successful request", func() {
f, err := os.Open("tests/fixtures/deezer.artist.bio.json")
Expect(err).To(BeNil())
httpClient.mock("https://pipe.deezer.com/api", http.Response{Body: f, StatusCode: 200})
bio, err := client.getArtistBio(GinkgoT().Context(), 27)
Expect(err).To(BeNil())
Expect(bio).To(ContainSubstring("Schoolmates Thomas and Guy-Manuel"))
Expect(bio).ToNot(ContainSubstring("<p>"))
Expect(bio).ToNot(ContainSubstring("</p>"))
})
It("uses the configured language", func() {
client = newClient(httpClient, "fr")
// Mock JWT token for the new client instance with a valid JWT
testJWT := createTestJWT(5 * time.Minute)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s","refresh_token":""}`, testJWT))),
})
f, err := os.Open("tests/fixtures/deezer.artist.bio.json")
Expect(err).To(BeNil())
httpClient.mock("https://pipe.deezer.com/api", http.Response{Body: f, StatusCode: 200})
_, err = client.getArtistBio(GinkgoT().Context(), 27)
Expect(err).To(BeNil())
Expect(httpClient.lastRequest.Header.Get("Accept-Language")).To(Equal("fr"))
})
It("includes the JWT token in the request", func() {
f, err := os.Open("tests/fixtures/deezer.artist.bio.json")
Expect(err).To(BeNil())
httpClient.mock("https://pipe.deezer.com/api", http.Response{Body: f, StatusCode: 200})
_, err = client.getArtistBio(GinkgoT().Context(), 27)
Expect(err).To(BeNil())
// Verify that the Authorization header has the Bearer token format
authHeader := httpClient.lastRequest.Header.Get("Authorization")
Expect(authHeader).To(HavePrefix("Bearer "))
Expect(len(authHeader)).To(BeNumerically(">", 20)) // JWT tokens are longer than 20 chars
})
It("handles GraphQL errors", func() {
errorResponse := `{
"data": {
"artist": {
"bio": {
"full": ""
}
}
},
"errors": [
{
"message": "Artist not found"
},
{
"message": "Invalid artist ID"
}
]
}`
httpClient.mock("https://pipe.deezer.com/api", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(errorResponse)),
})
_, err := client.getArtistBio(GinkgoT().Context(), 999)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("GraphQL error"))
Expect(err.Error()).To(ContainSubstring("Artist not found"))
Expect(err.Error()).To(ContainSubstring("Invalid artist ID"))
})
It("handles empty biography", func() {
emptyBioResponse := `{
"data": {
"artist": {
"bio": {
"full": ""
}
}
}
}`
httpClient.mock("https://pipe.deezer.com/api", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(emptyBioResponse)),
})
_, err := client.getArtistBio(GinkgoT().Context(), 27)
Expect(err).To(MatchError("deezer: biography not found"))
})
It("handles JWT token fetch failure", func() {
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 500,
Body: io.NopCloser(bytes.NewBufferString(`{"error":"Internal server error"}`)),
})
_, err := client.getArtistBio(GinkgoT().Context(), 27)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("failed to get JWT"))
})
It("handles JWT token that expires too soon", func() {
// Create a JWT that expires in 30 seconds (less than the 1-minute buffer)
expiredJWT := createTestJWT(30 * time.Second)
httpClient.mock("https://auth.deezer.com/login/anonymous", http.Response{
StatusCode: 200,
Body: io.NopCloser(bytes.NewBufferString(fmt.Sprintf(`{"jwt":"%s","refresh_token":""}`, expiredJWT))),
})
_, err := client.getArtistBio(GinkgoT().Context(), 27)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("JWT token already expired or expires too soon"))
})
})
})
type fakeHttpClient struct {

View File

@@ -3,7 +3,6 @@ package deezer
import (
"context"
"errors"
"fmt"
"net/http"
"strings"
@@ -13,7 +12,6 @@ import (
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/utils/cache"
"github.com/navidrome/navidrome/utils/slice"
)
const deezerAgentName = "deezer"
@@ -34,7 +32,7 @@ func deezerConstructor(dataStore model.DataStore) agents.Interface {
Timeout: consts.DefaultHttpClientTimeOut,
}
cachedHttpClient := cache.NewHTTPClient(httpClient, consts.DefaultHttpClientTimeOut)
agent.client = newClient(cachedHttpClient, conf.Server.Deezer.Language)
agent.client = newClient(cachedHttpClient)
return agent
}
@@ -83,73 +81,13 @@ func (s *deezerAgent) searchArtist(ctx context.Context, name string) (*Artist, e
return nil, err
}
log.Trace(ctx, "Artists found", "count", len(artists), "searched_name", name)
for i := range artists {
log.Trace(ctx, fmt.Sprintf("Artists found #%d", i), "name", artists[i].Name, "id", artists[i].ID, "link", artists[i].Link)
if i > 2 {
break
}
}
// If the first one has the same name, that's the one
if !strings.EqualFold(artists[0].Name, name) {
log.Trace(ctx, "Top artist do not match", "searched_name", name, "found_name", artists[0].Name)
return nil, agents.ErrNotFound
}
log.Trace(ctx, "Found artist", "name", artists[0].Name, "id", artists[0].ID, "link", artists[0].Link)
return &artists[0], err
}
func (s *deezerAgent) GetSimilarArtists(ctx context.Context, _, name, _ string, limit int) ([]agents.Artist, error) {
artist, err := s.searchArtist(ctx, name)
if err != nil {
return nil, err
}
related, err := s.client.getRelatedArtists(ctx, artist.ID)
if err != nil {
return nil, err
}
res := slice.Map(related, func(r Artist) agents.Artist {
return agents.Artist{
Name: r.Name,
}
})
if len(res) > limit {
res = res[:limit]
}
return res, nil
}
func (s *deezerAgent) GetArtistTopSongs(ctx context.Context, _, artistName, _ string, count int) ([]agents.Song, error) {
artist, err := s.searchArtist(ctx, artistName)
if err != nil {
return nil, err
}
tracks, err := s.client.getTopTracks(ctx, artist.ID, count)
if err != nil {
return nil, err
}
res := slice.Map(tracks, func(r Track) agents.Song {
return agents.Song{
Name: r.Title,
}
})
return res, nil
}
func (s *deezerAgent) GetArtistBiography(ctx context.Context, _, name, _ string) (string, error) {
artist, err := s.searchArtist(ctx, name)
if err != nil {
return "", err
}
return s.client.getArtistBio(ctx, artist.ID)
}
func init() {
conf.AddHook(func() {
if conf.Server.Deezer.Enabled {

View File

@@ -29,38 +29,3 @@ type Error struct {
Code int `json:"code"`
} `json:"error"`
}
type RelatedArtists struct {
Data []Artist `json:"data"`
Total int `json:"total"`
}
type TopTracks struct {
Data []Track `json:"data"`
Total int `json:"total"`
Next string `json:"next"`
}
type Track struct {
ID int `json:"id"`
Title string `json:"title"`
Link string `json:"link"`
Duration int `json:"duration"`
Rank int `json:"rank"`
Preview string `json:"preview"`
Artist Artist `json:"artist"`
Album Album `json:"album"`
Contributors []Artist `json:"contributors"`
}
type Album struct {
ID int `json:"id"`
Title string `json:"title"`
Cover string `json:"cover"`
CoverSmall string `json:"cover_small"`
CoverMedium string `json:"cover_medium"`
CoverBig string `json:"cover_big"`
CoverXl string `json:"cover_xl"`
Tracklist string `json:"tracklist"`
Type string `json:"type"`
}

View File

@@ -35,35 +35,4 @@ var _ = Describe("Responses", func() {
Expect(errorResp.Error.Message).To(Equal("Missing parameters: q"))
})
})
Describe("Related Artists", func() {
It("parses the related artists response correctly", func() {
var resp RelatedArtists
body, err := os.ReadFile("tests/fixtures/deezer.artist.related.json")
Expect(err).To(BeNil())
err = json.Unmarshal(body, &resp)
Expect(err).To(BeNil())
Expect(resp.Data).To(HaveLen(20))
justice := resp.Data[0]
Expect(justice.Name).To(Equal("Justice"))
Expect(justice.ID).To(Equal(6404))
})
})
Describe("Top Tracks", func() {
It("parses the top tracks response correctly", func() {
var resp TopTracks
body, err := os.ReadFile("tests/fixtures/deezer.artist.top.json")
Expect(err).To(BeNil())
err = json.Unmarshal(body, &resp)
Expect(err).To(BeNil())
Expect(resp.Data).To(HaveLen(5))
track := resp.Data[0]
Expect(track.Title).To(Equal("Instant Crush (feat. Julian Casablancas)"))
Expect(track.ID).To(Equal(67238732))
Expect(track.Album.Title).To(Equal("Random Access Memories"))
})
})
})

View File

@@ -38,7 +38,6 @@ type lastfmAgent struct {
secret string
lang string
client *client
httpClient httpDoer
getInfoMutex sync.Mutex
}
@@ -57,7 +56,6 @@ func lastFMConstructor(ds model.DataStore) *lastfmAgent {
Timeout: consts.DefaultHttpClientTimeOut,
}
chc := cache.NewHTTPClient(hc, consts.DefaultHttpClientTimeOut)
l.httpClient = chc
l.client = newClient(l.apiKey, l.secret, l.lang, chc)
return l
}
@@ -192,13 +190,13 @@ func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbi
return res, nil
}
var (
artistOpenGraphQuery = cascadia.MustCompile(`html > head > meta[property="og:image"]`)
artistIgnoredImage = "2a96cbd8b46e442fc41c2b86b821562f" // Last.fm artist placeholder image name
)
var artistOpenGraphQuery = cascadia.MustCompile(`html > head > meta[property="og:image"]`)
func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string) ([]agents.ExternalImage, error) {
log.Debug(ctx, "Getting artist images from Last.fm", "name", name)
hc := http.Client{
Timeout: consts.DefaultHttpClientTimeOut,
}
a, err := l.callArtistGetInfo(ctx, name)
if err != nil {
return nil, fmt.Errorf("get artist info: %w", err)
@@ -207,7 +205,7 @@ func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string)
if err != nil {
return nil, fmt.Errorf("create artist image request: %w", err)
}
resp, err := l.httpClient.Do(req)
resp, err := hc.Do(req)
if err != nil {
return nil, fmt.Errorf("get artist url: %w", err)
}
@@ -224,16 +222,11 @@ func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string)
return res, nil
}
for _, attr := range n.Attr {
if attr.Key != "content" {
continue
}
if strings.Contains(attr.Val, artistIgnoredImage) {
log.Debug(ctx, "Artist image is ignored default image", "name", name, "url", attr.Val)
return res, nil
}
res = []agents.ExternalImage{
{URL: attr.Val},
if attr.Key == "content" {
res = []agents.ExternalImage{
{URL: attr.Val},
}
break
}
}
return res, nil
@@ -290,11 +283,11 @@ func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName str
return t.Track, nil
}
func (l *lastfmAgent) getArtistForScrobble(track *model.MediaFile, role model.Role, displayName string) string {
if conf.Server.LastFM.ScrobbleFirstArtistOnly && len(track.Participants[role]) > 0 {
return track.Participants[role][0].Name
func (l *lastfmAgent) getArtistForScrobble(track *model.MediaFile) string {
if conf.Server.LastFM.ScrobbleFirstArtistOnly && len(track.Participants[model.RoleArtist]) > 0 {
return track.Participants[model.RoleArtist][0].Name
}
return displayName
return track.Artist
}
func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
@@ -304,13 +297,13 @@ func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *mode
}
err = l.client.updateNowPlaying(ctx, sk, ScrobbleInfo{
artist: l.getArtistForScrobble(track, model.RoleArtist, track.Artist),
artist: l.getArtistForScrobble(track),
track: track.Title,
album: track.Album,
trackNumber: track.TrackNumber,
mbid: track.MbzRecordingID,
duration: int(track.Duration),
albumArtist: l.getArtistForScrobble(track, model.RoleAlbumArtist, track.AlbumArtist),
albumArtist: track.AlbumArtist,
})
if err != nil {
log.Warn(ctx, "Last.fm client.updateNowPlaying returned error", "track", track.Title, err)
@@ -330,13 +323,13 @@ func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.S
return nil
}
err = l.client.scrobble(ctx, sk, ScrobbleInfo{
artist: l.getArtistForScrobble(&s.MediaFile, model.RoleArtist, s.Artist),
artist: l.getArtistForScrobble(&s.MediaFile),
track: s.Title,
album: s.Album,
trackNumber: s.TrackNumber,
mbid: s.MbzRecordingID,
duration: int(s.Duration),
albumArtist: l.getArtistForScrobble(&s.MediaFile, model.RoleAlbumArtist, s.AlbumArtist),
albumArtist: s.AlbumArtist,
timestamp: s.TimeStamp,
})
if err == nil {

View File

@@ -201,10 +201,6 @@ var _ = Describe("lastfmAgent", func() {
{Artist: model.Artist{ID: "ar-1", Name: "First Artist"}},
{Artist: model.Artist{ID: "ar-2", Name: "Second Artist"}},
},
model.RoleAlbumArtist: []model.Participant{
{Artist: model.Artist{ID: "ar-1", Name: "First Album Artist"}},
{Artist: model.Artist{ID: "ar-2", Name: "Second Album Artist"}},
},
},
}
})
@@ -233,23 +229,6 @@ var _ = Describe("lastfmAgent", func() {
err := agent.NowPlaying(ctx, "user-2", track, 0)
Expect(err).To(MatchError(scrobbler.ErrNotAuthorized))
})
When("ScrobbleFirstArtistOnly is true", func() {
BeforeEach(func() {
conf.Server.LastFM.ScrobbleFirstArtistOnly = true
})
It("uses only the first artist", func() {
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString("{}")), StatusCode: 200}
err := agent.NowPlaying(ctx, "user-1", track, 0)
Expect(err).ToNot(HaveOccurred())
sentParams := httpClient.SavedRequest.URL.Query()
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
})
})
})
Describe("scrobble", func() {
@@ -288,7 +267,6 @@ var _ = Describe("lastfmAgent", func() {
Expect(err).ToNot(HaveOccurred())
sentParams := httpClient.SavedRequest.URL.Query()
Expect(sentParams.Get("artist")).To(Equal("First Artist"))
Expect(sentParams.Get("albumArtist")).To(Equal("First Album Artist"))
})
})
@@ -415,73 +393,4 @@ var _ = Describe("lastfmAgent", func() {
})
})
})
Describe("GetArtistImages", func() {
var agent *lastfmAgent
var apiClient *tests.FakeHttpClient
var httpClient *tests.FakeHttpClient
BeforeEach(func() {
apiClient = &tests.FakeHttpClient{}
httpClient = &tests.FakeHttpClient{}
client := newClient("API_KEY", "SECRET", "pt", apiClient)
agent = lastFMConstructor(ds)
agent.client = client
agent.httpClient = httpClient
})
It("returns the artist image from the page", func() {
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.html")
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).ToNot(HaveOccurred())
Expect(images).To(HaveLen(1))
Expect(images[0].URL).To(Equal("https://lastfm.freetls.fastly.net/i/u/ar0/818148bf682d429dc21b59a73ef6f68e.png"))
})
It("returns empty list if image is the ignored default image", func() {
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.ignored.html")
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).ToNot(HaveOccurred())
Expect(images).To(BeEmpty())
})
It("returns empty list if page has no meta tags", func() {
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
fScraper, _ := os.Open("tests/fixtures/lastfm.artist.page.no_meta.html")
httpClient.Res = http.Response{Body: fScraper, StatusCode: 200}
images, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).ToNot(HaveOccurred())
Expect(images).To(BeEmpty())
})
It("returns error if API call fails", func() {
apiClient.Err = errors.New("api error")
_, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("get artist info"))
})
It("returns error if scraper call fails", func() {
fApi, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
apiClient.Res = http.Response{Body: fApi, StatusCode: 200}
httpClient.Err = errors.New("scraper error")
_, err := agent.GetArtistImages(ctx, "123", "U2", "")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("get artist url"))
})
})
})

View File

@@ -90,7 +90,6 @@ var _ = Describe("CacheWarmer", func() {
})
It("deduplicates items in buffer", func() {
fc.SetReady(false) // Make cache unavailable so items stay in buffer
cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
cw.PreCache(model.MustParseArtworkID("al-1"))
cw.PreCache(model.MustParseArtworkID("al-1"))

View File

@@ -1,7 +1,6 @@
package artwork
import (
"cmp"
"context"
"crypto/md5"
"fmt"
@@ -12,7 +11,6 @@ import (
"time"
"github.com/Masterminds/squirrel"
"github.com/maruel/natural"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/core/external"
@@ -118,30 +116,8 @@ func loadAlbumFoldersPaths(ctx context.Context, ds model.DataStore, albums ...mo
}
// Sort image files to ensure consistent selection of cover art
// This prioritizes files without numeric suffixes (e.g., cover.jpg over cover.1.jpg)
// by comparing base filenames without extensions
slices.SortFunc(imgFiles, compareImageFiles)
// This prioritizes files from lower-numbered disc folders by sorting the paths
slices.Sort(imgFiles)
return paths, imgFiles, &updatedAt, nil
}
// compareImageFiles compares two image file paths for sorting.
// It extracts the base filename (without extension) and compares case-insensitively.
// This ensures that "cover.jpg" sorts before "cover.1.jpg" since "cover" < "cover.1".
// Note: This function is called O(n log n) times during sorting, but in practice albums
// typically have only 1-20 image files, making the repeated string operations negligible.
func compareImageFiles(a, b string) int {
// Case-insensitive comparison
a = strings.ToLower(a)
b = strings.ToLower(b)
// Extract base filenames without extensions
baseA := strings.TrimSuffix(filepath.Base(a), filepath.Ext(a))
baseB := strings.TrimSuffix(filepath.Base(b), filepath.Ext(b))
// Compare base names first, then full paths if equal
return cmp.Or(
natural.Compare(baseA, baseB),
natural.Compare(a, b),
)
}
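
For reference, a minimal stdlib-only sketch of the base-name-first ordering that the removed compareImageFiles implements; naturalCompare below is a hypothetical stand-in for natural.Compare and simply uses lexicographic order.

package main

import (
	"cmp"
	"fmt"
	"path/filepath"
	"slices"
	"strings"
)

// naturalCompare is a hypothetical stand-in for natural.Compare; plain
// lexicographic order is enough for this sketch.
func naturalCompare(a, b string) int { return cmp.Compare(a, b) }

// sortImageFiles orders paths by lowercased base name without extension
// ("cover" < "cover.1"), falling back to the full path on ties.
func sortImageFiles(paths []string) {
	slices.SortFunc(paths, func(a, b string) int {
		a, b = strings.ToLower(a), strings.ToLower(b)
		baseA := strings.TrimSuffix(filepath.Base(a), filepath.Ext(a))
		baseB := strings.TrimSuffix(filepath.Base(b), filepath.Ext(b))
		return cmp.Or(naturalCompare(baseA, baseB), naturalCompare(a, b))
	})
}

func main() {
	files := []string{"Disc1/cover.1.jpg", "Disc2/cover.jpg", "Disc1/back.jpg", "Disc1/cover.jpg"}
	sortImageFiles(files)
	fmt.Println(files) // [Disc1/back.jpg Disc1/cover.jpg Disc2/cover.jpg Disc1/cover.1.jpg]
}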

View File

@@ -27,7 +27,26 @@ var _ = Describe("Album Artwork Reader", func() {
expectedAt = now.Add(5 * time.Minute)
// Set up the test folders with image files
repo = &fakeFolderRepo{}
repo = &fakeFolderRepo{
result: []model.Folder{
{
Path: "Artist/Album/Disc1",
ImagesUpdatedAt: expectedAt,
ImageFiles: []string{"cover.jpg", "back.jpg"},
},
{
Path: "Artist/Album/Disc2",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.jpg"},
},
{
Path: "Artist/Album/Disc10",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.jpg"},
},
},
err: nil,
}
ds = &fakeDataStore{
folderRepo: repo,
}
@@ -39,82 +58,19 @@ var _ = Describe("Album Artwork Reader", func() {
})
It("returns sorted image files", func() {
repo.result = []model.Folder{
{
Path: "Artist/Album/Disc1",
ImagesUpdatedAt: expectedAt,
ImageFiles: []string{"cover.jpg", "back.jpg", "cover.1.jpg"},
},
{
Path: "Artist/Album/Disc2",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.jpg"},
},
{
Path: "Artist/Album/Disc10",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.jpg"},
},
}
_, imgFiles, imagesUpdatedAt, err := loadAlbumFoldersPaths(ctx, ds, album)
Expect(err).ToNot(HaveOccurred())
Expect(*imagesUpdatedAt).To(Equal(expectedAt))
// Check that image files are sorted by base name (without extension)
Expect(imgFiles).To(HaveLen(5))
// Check that image files are sorted alphabetically
Expect(imgFiles).To(HaveLen(4))
// Files should be sorted by base filename without extension, then by full path
// "back" < "cover", so back.jpg comes first
// Then all cover.jpg files, sorted by path
// The files should be sorted by full path
Expect(imgFiles[0]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/back.jpg")))
Expect(imgFiles[1]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/cover.jpg")))
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/Disc2/cover.jpg")))
Expect(imgFiles[3]).To(Equal(filepath.FromSlash("Artist/Album/Disc10/cover.jpg")))
Expect(imgFiles[4]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/cover.1.jpg")))
})
It("prioritizes files without numeric suffixes", func() {
// Test case for issue #4683: cover.jpg should come before cover.1.jpg
repo.result = []model.Folder{
{
Path: "Artist/Album",
ImagesUpdatedAt: now,
ImageFiles: []string{"cover.1.jpg", "cover.jpg", "cover.2.jpg"},
},
}
_, imgFiles, _, err := loadAlbumFoldersPaths(ctx, ds, album)
Expect(err).ToNot(HaveOccurred())
Expect(imgFiles).To(HaveLen(3))
// cover.jpg should come first because "cover" < "cover.1" < "cover.2"
Expect(imgFiles[0]).To(Equal(filepath.FromSlash("Artist/Album/cover.jpg")))
Expect(imgFiles[1]).To(Equal(filepath.FromSlash("Artist/Album/cover.1.jpg")))
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/cover.2.jpg")))
})
It("handles case-insensitive sorting", func() {
// Test that Cover.jpg and cover.jpg are treated as equivalent
repo.result = []model.Folder{
{
Path: "Artist/Album",
ImagesUpdatedAt: now,
ImageFiles: []string{"Folder.jpg", "cover.jpg", "BACK.jpg"},
},
}
_, imgFiles, _, err := loadAlbumFoldersPaths(ctx, ds, album)
Expect(err).ToNot(HaveOccurred())
Expect(imgFiles).To(HaveLen(3))
// Files should be sorted case-insensitively: BACK, cover, Folder
Expect(imgFiles[0]).To(Equal(filepath.FromSlash("Artist/Album/BACK.jpg")))
Expect(imgFiles[1]).To(Equal(filepath.FromSlash("Artist/Album/cover.jpg")))
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/Folder.jpg")))
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/Disc10/cover.jpg")))
Expect(imgFiles[3]).To(Equal(filepath.FromSlash("Artist/Album/Disc2/cover.jpg")))
})
})
})

View File

@@ -8,7 +8,6 @@ import (
"io/fs"
"os"
"path/filepath"
"slices"
"strings"
"time"
@@ -140,22 +139,11 @@ func findImageInFolder(ctx context.Context, folder, pattern string) (io.ReadClos
return nil, "", err
}
// Filter to valid image files
var imagePaths []string
for _, m := range matches {
if !model.IsImageFile(m) {
continue
}
imagePaths = append(imagePaths, m)
}
// Sort image files by prioritizing base filenames without numeric
// suffixes (e.g., artist.jpg before artist.1.jpg)
slices.SortFunc(imagePaths, compareImageFiles)
// Try to open files in sorted order
for _, p := range imagePaths {
filePath := filepath.Join(folder, p)
filePath := filepath.Join(folder, m)
f, err := os.Open(filePath)
if err != nil {
log.Warn(ctx, "Could not open cover art file", "file", filePath, err)

View File

@@ -240,79 +240,24 @@ var _ = Describe("artistArtworkReader", func() {
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
// Create multiple matching files
Expect(os.WriteFile(filepath.Join(artistDir, "artist.abc"), []byte("text file"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.png"), []byte("png image"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.jpg"), []byte("jpg image"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.png"), []byte("png image"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.txt"), []byte("text file"), 0600)).To(Succeed())
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
})
It("returns the first valid image file in sorted order", func() {
It("returns the first valid image file", func() {
reader, path, err := testFunc()
Expect(err).ToNot(HaveOccurred())
Expect(reader).ToNot(BeNil())
// Should return an image file.
// Files are sorted: jpg comes before png alphabetically.
// .abc comes first, but it's not an image.
Expect(path).To(ContainSubstring("artist.jpg"))
reader.Close()
})
})
When("prioritizing files without numeric suffixes", func() {
BeforeEach(func() {
// Test case for issue #4683: artist.jpg should come before artist.1.jpg
artistDir := filepath.Join(tempDir, "artist")
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
// Create multiple matches with and without numeric suffixes
Expect(os.WriteFile(filepath.Join(artistDir, "artist.1.jpg"), []byte("artist 1"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.jpg"), []byte("artist main"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.2.jpg"), []byte("artist 2"), 0600)).To(Succeed())
testFunc = fromArtistFolder(ctx, artistDir, "artist.*")
})
It("returns artist.jpg before artist.1.jpg and artist.2.jpg", func() {
reader, path, err := testFunc()
Expect(err).ToNot(HaveOccurred())
Expect(reader).ToNot(BeNil())
Expect(path).To(ContainSubstring("artist.jpg"))
// Verify it's the main file, not a numbered variant
data, err := io.ReadAll(reader)
Expect(err).ToNot(HaveOccurred())
Expect(string(data)).To(Equal("artist main"))
reader.Close()
})
})
When("handling case-insensitive sorting", func() {
BeforeEach(func() {
// Test case to ensure case-insensitive natural sorting
artistDir := filepath.Join(tempDir, "artist")
Expect(os.MkdirAll(artistDir, 0755)).To(Succeed())
// Create files with mixed case names
Expect(os.WriteFile(filepath.Join(artistDir, "Folder.jpg"), []byte("folder"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "artist.jpg"), []byte("artist"), 0600)).To(Succeed())
Expect(os.WriteFile(filepath.Join(artistDir, "BACK.jpg"), []byte("back"), 0600)).To(Succeed())
testFunc = fromArtistFolder(ctx, artistDir, "*.*")
})
It("sorts case-insensitively", func() {
reader, path, err := testFunc()
Expect(err).ToNot(HaveOccurred())
Expect(reader).ToNot(BeNil())
// Should return artist.jpg first (case-insensitive: "artist" < "back" < "folder")
Expect(path).To(ContainSubstring("artist.jpg"))
data, err := io.ReadAll(reader)
Expect(err).ToNot(HaveOccurred())
Expect(string(data)).To(Equal("artist"))
// Should return an image file, not the text file
Expect(path).To(SatisfyAny(
ContainSubstring("artist.jpg"),
ContainSubstring("artist.png"),
))
Expect(path).ToNot(ContainSubstring("artist.txt"))
reader.Close()
})
})

View File

@@ -182,7 +182,6 @@ func fromAlbumExternalSource(ctx context.Context, al model.Album, provider exter
func fromURL(ctx context.Context, imageUrl *url.URL) (io.ReadCloser, string, error) {
hc := http.Client{Timeout: 5 * time.Second}
req, _ := http.NewRequestWithContext(ctx, http.MethodGet, imageUrl.String(), nil)
req.Header.Set("User-Agent", consts.HTTPUserAgent)
resp, err := hc.Do(req)
if err != nil {
return nil, "", err

View File

@@ -113,9 +113,9 @@ func WithAdminUser(ctx context.Context, ds model.DataStore) context.Context {
if err != nil {
c, err := ds.User(ctx).CountAll()
if c == 0 && err == nil {
log.Debug(ctx, "No admin user yet!", err)
log.Debug(ctx, "Scanner: No admin user yet!", err)
} else {
log.Error(ctx, "No admin user found!", err)
log.Error(ctx, "Scanner: No admin user found!", err)
}
u = &model.User{}
}

View File

@@ -51,28 +51,12 @@ type provider struct {
type auxAlbum struct {
model.Album
}
// Name returns the appropriate album name for external API calls
// based on the DevPreserveUnicodeInExternalCalls configuration option
func (a *auxAlbum) Name() string {
if conf.Server.DevPreserveUnicodeInExternalCalls {
return a.Album.Name
}
return str.Clear(a.Album.Name)
Name string
}
type auxArtist struct {
model.Artist
}
// Name returns the appropriate artist name for external API calls
// based on the DevPreserveUnicodeInExternalCalls configuration option
func (a *auxArtist) Name() string {
if conf.Server.DevPreserveUnicodeInExternalCalls {
return a.Artist.Name
}
return str.Clear(a.Artist.Name)
Name string
}
type Agents interface {
@@ -104,6 +88,7 @@ func (e *provider) getAlbum(ctx context.Context, id string) (auxAlbum, error) {
switch v := entity.(type) {
case *model.Album:
album.Album = *v
album.Name = str.Clear(v.Name)
case *model.MediaFile:
return e.getAlbum(ctx, v.AlbumID)
default:
@@ -121,9 +106,8 @@ func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album
}
updatedAt := V(album.ExternalInfoUpdatedAt)
albumName := album.Name()
if updatedAt.IsZero() {
log.Debug(ctx, "AlbumInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", albumName)
log.Debug(ctx, "AlbumInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", album.Name)
album, err = e.populateAlbumInfo(ctx, album)
if err != nil {
return nil, err
@@ -132,7 +116,7 @@ func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album
// If info is expired, trigger a populateAlbumInfo in the background
if time.Since(updatedAt) > conf.Server.DevAlbumInfoTimeToLive {
log.Debug("Found expired cached AlbumInfo, refreshing in the background", "updatedAt", album.ExternalInfoUpdatedAt, "name", albumName)
log.Debug("Found expired cached AlbumInfo, refreshing in the background", "updatedAt", album.ExternalInfoUpdatedAt, "name", album.Name)
e.albumQueue.enqueue(&album)
}
@@ -141,13 +125,12 @@ func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album
func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAlbum, error) {
start := time.Now()
albumName := album.Name()
info, err := e.ag.GetAlbumInfo(ctx, albumName, album.AlbumArtist, album.MbzAlbumID)
info, err := e.ag.GetAlbumInfo(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
if errors.Is(err, agents.ErrNotFound) {
return album, nil
}
if err != nil {
log.Error("Error refreshing AlbumInfo", "id", album.ID, "name", albumName, "artist", album.AlbumArtist,
log.Error("Error refreshing AlbumInfo", "id", album.ID, "name", album.Name, "artist", album.AlbumArtist,
"elapsed", time.Since(start), err)
return album, err
}
@@ -159,7 +142,7 @@ func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAl
album.Description = info.Description
}
images, err := e.ag.GetAlbumImages(ctx, albumName, album.AlbumArtist, album.MbzAlbumID)
images, err := e.ag.GetAlbumImages(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
if err == nil && len(images) > 0 {
sort.Slice(images, func(i, j int) bool {
return images[i].Size > images[j].Size
@@ -178,7 +161,7 @@ func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAl
err = e.ds.Album(ctx).UpdateExternalInfo(&album.Album)
if err != nil {
log.Error(ctx, "Error trying to update album external information", "id", album.ID, "name", albumName,
log.Error(ctx, "Error trying to update album external information", "id", album.ID, "name", album.Name,
"elapsed", time.Since(start), err)
} else {
log.Trace(ctx, "AlbumInfo collected", "album", album, "elapsed", time.Since(start))
@@ -198,6 +181,7 @@ func (e *provider) getArtist(ctx context.Context, id string) (auxArtist, error)
switch v := entity.(type) {
case *model.Artist:
artist.Artist = *v
artist.Name = str.Clear(v.Name)
case *model.MediaFile:
return e.getArtist(ctx, v.ArtistID)
case *model.Album:
@@ -226,9 +210,8 @@ func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist,
// If we don't have any info, retrieves it now
updatedAt := V(artist.ExternalInfoUpdatedAt)
artistName := artist.Name()
if updatedAt.IsZero() {
log.Debug(ctx, "ArtistInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", artistName)
log.Debug(ctx, "ArtistInfo not cached. Retrieving it now", "updatedAt", updatedAt, "id", id, "name", artist.Name)
artist, err = e.populateArtistInfo(ctx, artist)
if err != nil {
return auxArtist{}, err
@@ -237,7 +220,7 @@ func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist,
// If info is expired, trigger a populateArtistInfo in the background
if time.Since(updatedAt) > conf.Server.DevArtistInfoTimeToLive {
log.Debug("Found expired cached ArtistInfo, refreshing in the background", "updatedAt", updatedAt, "name", artistName)
log.Debug("Found expired cached ArtistInfo, refreshing in the background", "updatedAt", updatedAt, "name", artist.Name)
e.artistQueue.enqueue(&artist)
}
return artist, nil
@@ -246,9 +229,8 @@ func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist,
func (e *provider) populateArtistInfo(ctx context.Context, artist auxArtist) (auxArtist, error) {
start := time.Now()
// Get MBID first, if it is not yet available
artistName := artist.Name()
if artist.MbzArtistID == "" {
mbid, err := e.ag.GetArtistMBID(ctx, artist.ID, artistName)
mbid, err := e.ag.GetArtistMBID(ctx, artist.ID, artist.Name)
if mbid != "" && err == nil {
artist.MbzArtistID = mbid
}
@@ -264,14 +246,14 @@ func (e *provider) populateArtistInfo(ctx context.Context, artist auxArtist) (au
_ = g.Wait()
if utils.IsCtxDone(ctx) {
log.Warn(ctx, "ArtistInfo update canceled", "id", artist.ID, "name", artistName, "elapsed", time.Since(start), ctx.Err())
log.Warn(ctx, "ArtistInfo update canceled", "elapsed", "id", artist.ID, "name", artist.Name, time.Since(start), ctx.Err())
return artist, ctx.Err()
}
artist.ExternalInfoUpdatedAt = P(time.Now())
err := e.ds.Artist(ctx).UpdateExternalInfo(&artist.Artist)
if err != nil {
log.Error(ctx, "Error trying to update artist external information", "id", artist.ID, "name", artistName,
log.Error(ctx, "Error trying to update artist external information", "id", artist.ID, "name", artist.Name,
"elapsed", time.Since(start), err)
} else {
log.Trace(ctx, "ArtistInfo collected", "artist", artist, "elapsed", time.Since(start))
@@ -299,7 +281,7 @@ func (e *provider) ArtistRadio(ctx context.Context, id string, count int) (model
}
topCount := max(count, 20)
topSongs, err := e.getMatchingTopSongs(ctx, e.ag, &auxArtist{Artist: a}, topCount)
topSongs, err := e.getMatchingTopSongs(ctx, e.ag, &auxArtist{Name: a.Name, Artist: a}, topCount)
if err != nil {
log.Warn(ctx, "Error getting artist's top songs", "artist", a.Name, err)
return nil
@@ -362,23 +344,22 @@ func (e *provider) AlbumImage(ctx context.Context, id string) (*url.URL, error)
return nil, err
}
albumName := album.Name()
images, err := e.ag.GetAlbumImages(ctx, albumName, album.AlbumArtist, album.MbzAlbumID)
images, err := e.ag.GetAlbumImages(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
if err != nil {
switch {
case errors.Is(err, agents.ErrNotFound):
log.Trace(ctx, "Album not found in agent", "albumID", id, "name", albumName, "artist", album.AlbumArtist)
log.Trace(ctx, "Album not found in agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
return nil, model.ErrNotFound
case errors.Is(err, context.Canceled):
log.Debug(ctx, "GetAlbumImages call canceled", err)
default:
log.Warn(ctx, "Error getting album images from agent", "albumID", id, "name", albumName, "artist", album.AlbumArtist, err)
log.Warn(ctx, "Error getting album images from agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist, err)
}
return nil, err
}
if len(images) == 0 {
log.Warn(ctx, "Agent returned no images without error", "albumID", id, "name", albumName, "artist", album.AlbumArtist)
log.Warn(ctx, "Agent returned no images without error", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
return nil, model.ErrNotFound
}
@@ -420,10 +401,9 @@ func (e *provider) TopSongs(ctx context.Context, artistName string, count int) (
}
func (e *provider) getMatchingTopSongs(ctx context.Context, agent agents.ArtistTopSongsRetriever, artist *auxArtist, count int) (model.MediaFiles, error) {
artistName := artist.Name()
songs, err := agent.GetArtistTopSongs(ctx, artist.ID, artistName, artist.MbzArtistID, count)
songs, err := agent.GetArtistTopSongs(ctx, artist.ID, artist.Name, artist.MbzArtistID, count)
if err != nil {
return nil, fmt.Errorf("failed to get top songs for artist %s: %w", artistName, err)
return nil, fmt.Errorf("failed to get top songs for artist %s: %w", artist.Name, err)
}
mbidMatches, err := e.loadTracksByMBID(ctx, songs)
@@ -435,13 +415,13 @@ func (e *provider) getMatchingTopSongs(ctx context.Context, agent agents.ArtistT
return nil, fmt.Errorf("failed to load tracks by title: %w", err)
}
log.Trace(ctx, "Top Songs loaded", "name", artistName, "numSongs", len(songs), "numMBIDMatches", len(mbidMatches), "numTitleMatches", len(titleMatches))
log.Trace(ctx, "Top Songs loaded", "name", artist.Name, "numSongs", len(songs), "numMBIDMatches", len(mbidMatches), "numTitleMatches", len(titleMatches))
mfs := e.selectTopSongs(songs, mbidMatches, titleMatches, count)
if len(mfs) == 0 {
log.Debug(ctx, "No matching top songs found", "name", artistName)
log.Debug(ctx, "No matching top songs found", "name", artist.Name)
} else {
log.Debug(ctx, "Found matching top songs", "name", artistName, "numSongs", len(mfs))
log.Debug(ctx, "Found matching top songs", "name", artist.Name, "numSongs", len(mfs))
}
return mfs, nil
@@ -538,7 +518,7 @@ func (e *provider) selectTopSongs(songs []agents.Song, byMBID, byTitle map[strin
}
func (e *provider) callGetURL(ctx context.Context, agent agents.ArtistURLRetriever, artist *auxArtist) {
artisURL, err := agent.GetArtistURL(ctx, artist.ID, artist.Name(), artist.MbzArtistID)
artisURL, err := agent.GetArtistURL(ctx, artist.ID, artist.Name, artist.MbzArtistID)
if err != nil {
return
}
@@ -546,7 +526,7 @@ func (e *provider) callGetURL(ctx context.Context, agent agents.ArtistURLRetriev
}
func (e *provider) callGetBiography(ctx context.Context, agent agents.ArtistBiographyRetriever, artist *auxArtist) {
bio, err := agent.GetArtistBiography(ctx, artist.ID, artist.Name(), artist.MbzArtistID)
bio, err := agent.GetArtistBiography(ctx, artist.ID, str.Clear(artist.Name), artist.MbzArtistID)
if err != nil {
return
}
@@ -556,7 +536,7 @@ func (e *provider) callGetBiography(ctx context.Context, agent agents.ArtistBiog
}
func (e *provider) callGetImage(ctx context.Context, agent agents.ArtistImageRetriever, artist *auxArtist) {
images, err := agent.GetArtistImages(ctx, artist.ID, artist.Name(), artist.MbzArtistID)
images, err := agent.GetArtistImages(ctx, artist.ID, artist.Name, artist.MbzArtistID)
if err != nil {
return
}
@@ -575,14 +555,13 @@ func (e *provider) callGetImage(ctx context.Context, agent agents.ArtistImageRet
func (e *provider) callGetSimilar(ctx context.Context, agent agents.ArtistSimilarRetriever, artist *auxArtist,
limit int, includeNotPresent bool) {
artistName := artist.Name()
similar, err := agent.GetSimilarArtists(ctx, artist.ID, artistName, artist.MbzArtistID, limit)
similar, err := agent.GetSimilarArtists(ctx, artist.ID, artist.Name, artist.MbzArtistID, limit)
if len(similar) == 0 || err != nil {
return
}
start := time.Now()
sa, err := e.mapSimilarArtists(ctx, similar, limit, includeNotPresent)
log.Debug(ctx, "Mapped Similar Artists", "artist", artistName, "numSimilar", len(sa), "elapsed", time.Since(start))
log.Debug(ctx, "Mapped Similar Artists", "artist", artist.Name, "numSimilar", len(sa), "elapsed", time.Since(start))
if err != nil {
return
}
@@ -656,7 +635,11 @@ func (e *provider) findArtistByName(ctx context.Context, artistName string) (*au
if len(artists) == 0 {
return nil, model.ErrNotFound
}
return &auxArtist{Artist: artists[0]}, nil
artist := &auxArtist{
Artist: artists[0],
Name: str.Clear(artists[0].Name),
}
return artist, nil
}
func (e *provider) loadSimilar(ctx context.Context, artist *auxArtist, count int, includeNotPresent bool) error {
@@ -672,7 +655,7 @@ func (e *provider) loadSimilar(ctx context.Context, artist *auxArtist, count int
Filters: squirrel.Eq{"artist.id": ids},
})
if err != nil {
log.Error("Error loading similar artists", "id", artist.ID, "name", artist.Name(), err)
log.Error("Error loading similar artists", "id", artist.ID, "name", artist.Name, err)
return err
}
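
The auxAlbum/auxArtist Name() accessors removed earlier in this file gate Unicode normalization behind DevPreserveUnicodeInExternalCalls. A minimal sketch of that pattern, with a strings.Replacer standing in for str.Clear:

package main

import (
	"fmt"
	"strings"
)

// preserveUnicode stands in for conf.Server.DevPreserveUnicodeInExternalCalls.
var preserveUnicode = false

// clearText is an illustrative stand-in for str.Clear: it maps a few "smart"
// punctuation characters to plain ASCII equivalents.
var clearText = strings.NewReplacer("–", "-", "—", "-", "’", "'").Replace

type auxArtist struct{ rawName string }

// Name returns the raw name when Unicode must be preserved for external
// calls, and the normalized form otherwise.
func (a auxArtist) Name() string {
	if preserveUnicode {
		return a.rawName
	}
	return clearText(a.rawName)
}

func main() {
	a := auxArtist{rawName: "Run–D.M.C."}
	fmt.Println(a.Name()) // Run-D.M.C.
	preserveUnicode = true
	fmt.Println(a.Name()) // Run–D.M.C.
}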

View File

@@ -260,69 +260,6 @@ var _ = Describe("Provider - AlbumImage", func() {
mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "not-found")
mockAlbumAgent.AssertNotCalled(GinkgoT(), "GetAlbumImages", mock.Anything, mock.Anything, mock.Anything)
})
Context("Unicode handling in album names", func() {
var albumWithEnDash *model.Album
var expectedURL *url.URL
const (
originalAlbumName = "Raising HellDeluxe" // Album name with en dash
normalizedAlbumName = "Raising Hell-Deluxe" // Normalized version with hyphen
)
BeforeEach(func() {
// Test with en dash (–) in album name
albumWithEnDash = &model.Album{ID: "album-endash", Name: originalAlbumName, AlbumArtistID: "artist-1"}
mockArtistRepo.Mock = mock.Mock{} // Reset default expectations
mockAlbumRepo.Mock = mock.Mock{} // Reset default expectations
mockArtistRepo.On("Get", "album-endash").Return(nil, model.ErrNotFound).Once()
mockAlbumRepo.On("Get", "album-endash").Return(albumWithEnDash, nil).Once()
expectedURL, _ = url.Parse("http://example.com/album.jpg")
// Mock the album agent to return an image for the album
mockAlbumAgent.On("GetAlbumImages", ctx, mock.AnythingOfType("string"), "", "").
Return([]agents.ExternalImage{
{URL: "http://example.com/album.jpg", Size: 1000},
}, nil).Once()
})
When("DevPreserveUnicodeInExternalCalls is true", func() {
BeforeEach(func() {
conf.Server.DevPreserveUnicodeInExternalCalls = true
})
It("preserves Unicode characters in album names", func() {
// Act
imgURL, err := provider.AlbumImage(ctx, "album-endash")
// Assert
Expect(err).ToNot(HaveOccurred())
Expect(imgURL).To(Equal(expectedURL))
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-endash")
// This is the key assertion: ensure the original Unicode name is used
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, originalAlbumName, "", "")
})
})
When("DevPreserveUnicodeInExternalCalls is false", func() {
BeforeEach(func() {
conf.Server.DevPreserveUnicodeInExternalCalls = false
})
It("normalizes Unicode characters", func() {
// Act
imgURL, err := provider.AlbumImage(ctx, "album-endash")
// Assert
Expect(err).ToNot(HaveOccurred())
Expect(imgURL).To(Equal(expectedURL))
mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-endash")
// This assertion ensures the normalized name is used (en dash → hyphen)
mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumImages", ctx, normalizedAlbumName, "", "")
})
})
})
})
// mockAlbumInfoAgent implementation

View File

@@ -265,67 +265,6 @@ var _ = Describe("Provider - ArtistImage", func() {
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
})
Context("Unicode handling in artist names", func() {
var artistWithEnDash *model.Artist
var expectedURL *url.URL
const (
originalArtistName = "RunD.M.C." // Artist name with en dash
normalizedArtistName = "Run-D.M.C." // Normalized version with hyphen
)
BeforeEach(func() {
// Test with en dash (–) in artist name like "Run–D.M.C."
artistWithEnDash = &model.Artist{ID: "artist-endash", Name: originalArtistName}
mockArtistRepo.Mock = mock.Mock{} // Reset default expectations
mockArtistRepo.On("Get", "artist-endash").Return(artistWithEnDash, nil).Once()
expectedURL, _ = url.Parse("http://example.com/rundmc.jpg")
// Mock the image agent to return an image for the artist
mockImageAgent.On("GetArtistImages", ctx, "artist-endash", mock.AnythingOfType("string"), "").
Return([]agents.ExternalImage{
{URL: "http://example.com/rundmc.jpg", Size: 1000},
}, nil).Once()
})
When("DevPreserveUnicodeInExternalCalls is true", func() {
BeforeEach(func() {
conf.Server.DevPreserveUnicodeInExternalCalls = true
})
It("preserves Unicode characters in artist names", func() {
// Act
imgURL, err := provider.ArtistImage(ctx, "artist-endash")
// Assert
Expect(err).ToNot(HaveOccurred())
Expect(imgURL).To(Equal(expectedURL))
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-endash")
// This is the key assertion: ensure the original Unicode name is used
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-endash", originalArtistName, "")
})
})
When("DevPreserveUnicodeInExternalCalls is false", func() {
BeforeEach(func() {
conf.Server.DevPreserveUnicodeInExternalCalls = false
})
It("normalizes Unicode characters", func() {
// Act
imgURL, err := provider.ArtistImage(ctx, "artist-endash")
// Assert
Expect(err).ToNot(HaveOccurred())
Expect(imgURL).To(Equal(expectedURL))
mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-endash")
// This assertion ensures the normalized name is used (en dash → hyphen)
mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-endash", normalizedArtistName, "")
})
})
})
})
// mockArtistImageAgent implementation using testify/mock

View File

@@ -112,7 +112,7 @@ func (e *ffmpeg) start(ctx context.Context, args []string) (io.ReadCloser, error
log.Trace(ctx, "Executing ffmpeg command", "cmd", args)
j := &ffCmd{args: args}
j.PipeReader, j.out = io.Pipe()
err := j.start(ctx)
err := j.start()
if err != nil {
return nil, err
}
@@ -127,8 +127,8 @@ type ffCmd struct {
cmd *exec.Cmd
}
func (j *ffCmd) start(ctx context.Context) error {
cmd := exec.CommandContext(ctx, j.args[0], j.args[1:]...) // #nosec
func (j *ffCmd) start() error {
cmd := exec.Command(j.args[0], j.args[1:]...) // #nosec
cmd.Stdout = j.out
if log.IsGreaterOrEqualTo(log.LevelTrace) {
cmd.Stderr = os.Stderr
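
This hunk replaces exec.CommandContext with exec.Command, so the transcoder is no longer tied to a context's lifetime. For reference, a generic sketch (assuming a Unix-like system with a sleep binary, not Navidrome's actual transcoder) of how CommandContext terminates the child once the context is cancelled:

package main

import (
	"context"
	"fmt"
	"os/exec"
	"time"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	// The child process is killed automatically when ctx is cancelled.
	cmd := exec.CommandContext(ctx, "sleep", "10")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	time.AfterFunc(100*time.Millisecond, cancel)
	err := cmd.Wait()
	fmt.Println("process ended:", err) // typically "signal: killed"
}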

View File

@@ -1,11 +1,7 @@
package ffmpeg
import (
"context"
"runtime"
sync "sync"
"testing"
"time"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/tests"
@@ -69,98 +65,4 @@ var _ = Describe("ffmpeg", func() {
Expect(args).To(Equal([]string{"/usr/bin/with spaces/ffmpeg.exe", "-i", "one.mp3", "-f", "ffmetadata"}))
})
})
Describe("FFmpeg", func() {
Context("when FFmpeg is available", func() {
var ff FFmpeg
BeforeEach(func() {
ffOnce = sync.Once{}
ff = New()
// Skip if FFmpeg is not available
if !ff.IsAvailable() {
Skip("FFmpeg not available on this system")
}
})
It("should interrupt transcoding when context is cancelled", func() {
ctx, cancel := context.WithTimeout(GinkgoT().Context(), 5*time.Second)
defer cancel()
// Use a command that generates audio indefinitely
// -f lavfi uses FFmpeg's built-in audio source
// -t 0 means no time limit (runs forever)
command := "ffmpeg -f lavfi -i sine=frequency=1000:duration=0 -f mp3 -"
// The input file is not used here, but we need to provide a valid path to the Transcode function
stream, err := ff.Transcode(ctx, command, "tests/fixtures/test.mp3", 128, 0)
Expect(err).ToNot(HaveOccurred())
defer stream.Close()
// Read some data first to ensure FFmpeg is running
buf := make([]byte, 1024)
_, err = stream.Read(buf)
Expect(err).ToNot(HaveOccurred())
// Cancel the context
cancel()
// Next read should fail due to cancelled context
_, err = stream.Read(buf)
Expect(err).To(HaveOccurred())
})
It("should handle immediate context cancellation", func() {
ctx, cancel := context.WithCancel(GinkgoT().Context())
cancel() // Cancel immediately
// This should fail immediately
_, err := ff.Transcode(ctx, "ffmpeg -i %s -f mp3 -", "tests/fixtures/test.mp3", 128, 0)
Expect(err).To(MatchError(context.Canceled))
})
})
Context("with mock process behavior", func() {
var longRunningCmd string
BeforeEach(func() {
// Use a long-running command for testing cancellation
switch runtime.GOOS {
case "windows":
// Use PowerShell's Start-Sleep
ffmpegPath = "powershell"
longRunningCmd = "powershell -Command Start-Sleep -Seconds 10"
default:
// Use sleep on Unix-like systems
ffmpegPath = "sleep"
longRunningCmd = "sleep 10"
}
})
It("should terminate the underlying process when context is cancelled", func() {
ff := New()
ctx, cancel := context.WithTimeout(GinkgoT().Context(), 5*time.Second)
defer cancel()
// Start a process that will run for a while
stream, err := ff.Transcode(ctx, longRunningCmd, "tests/fixtures/test.mp3", 0, 0)
Expect(err).ToNot(HaveOccurred())
defer stream.Close()
// Give the process time to start
time.Sleep(50 * time.Millisecond)
// Cancel the context
cancel()
// Try to read from the stream, which should fail
buf := make([]byte, 100)
_, err = stream.Read(buf)
Expect(err).To(HaveOccurred(), "Expected stream to be closed due to process termination")
// Verify the stream is closed by attempting another read
_, err = stream.Read(buf)
Expect(err).To(HaveOccurred())
})
})
})
})

View File

@@ -21,6 +21,11 @@ import (
"github.com/navidrome/navidrome/utils/slice"
)
// Scanner interface for triggering scans
type Scanner interface {
ScanAll(ctx context.Context, fullScan bool) (warnings []string, err error)
}
// Watcher interface for managing file system watchers
type Watcher interface {
Watch(ctx context.Context, lib *model.Library) error
@@ -38,13 +43,13 @@ type Library interface {
type libraryService struct {
ds model.DataStore
scanner model.Scanner
scanner Scanner
watcher Watcher
broker events.Broker
}
// NewLibrary creates a new Library service
func NewLibrary(ds model.DataStore, scanner model.Scanner, watcher Watcher, broker events.Broker) Library {
func NewLibrary(ds model.DataStore, scanner Scanner, watcher Watcher, broker events.Broker) Library {
return &libraryService{
ds: ds,
scanner: scanner,
@@ -150,7 +155,7 @@ type libraryRepositoryWrapper struct {
model.LibraryRepository
ctx context.Context
ds model.DataStore
scanner model.Scanner
scanner Scanner
watcher Watcher
broker events.Broker
}
@@ -187,7 +192,7 @@ func (r *libraryRepositoryWrapper) Save(entity interface{}) (string, error) {
return strconv.Itoa(lib.ID), nil
}
func (r *libraryRepositoryWrapper) Update(id string, entity interface{}, _ ...string) error {
func (r *libraryRepositoryWrapper) Update(id string, entity interface{}, cols ...string) error {
lib := entity.(*model.Library)
libID, err := strconv.Atoi(id)
if err != nil {

View File

@@ -29,7 +29,7 @@ var _ = Describe("Library Service", func() {
var userRepo *tests.MockedUserRepo
var ctx context.Context
var tempDir string
var scanner *tests.MockScanner
var scanner *mockScanner
var watcherManager *mockWatcherManager
var broker *mockEventBroker
@@ -43,7 +43,7 @@ var _ = Describe("Library Service", func() {
ds.MockedUser = userRepo
// Create a mock scanner that tracks calls
scanner = tests.NewMockScanner()
scanner = &mockScanner{}
// Create a mock watcher manager
watcherManager = &mockWatcherManager{
libraryStates: make(map[int]model.Library),
@@ -616,12 +616,11 @@ var _ = Describe("Library Service", func() {
// Wait briefly for the goroutine to complete
Eventually(func() int {
return scanner.GetScanAllCallCount()
return scanner.len()
}, "1s", "10ms").Should(Equal(1))
// Verify scan was called with correct parameters
calls := scanner.GetScanAllCalls()
Expect(calls[0].FullScan).To(BeFalse()) // Should be quick scan
Expect(scanner.ScanCalls[0].FullScan).To(BeFalse()) // Should be quick scan
})
It("triggers scan when updating library path", func() {
@@ -642,12 +641,11 @@ var _ = Describe("Library Service", func() {
// Wait briefly for the goroutine to complete
Eventually(func() int {
return scanner.GetScanAllCallCount()
return scanner.len()
}, "1s", "10ms").Should(Equal(1))
// Verify scan was called with correct parameters
calls := scanner.GetScanAllCalls()
Expect(calls[0].FullScan).To(BeFalse()) // Should be quick scan
Expect(scanner.ScanCalls[0].FullScan).To(BeFalse()) // Should be quick scan
})
It("does not trigger scan when updating library without path change", func() {
@@ -663,7 +661,7 @@ var _ = Describe("Library Service", func() {
// Wait a bit to ensure no scan was triggered
Consistently(func() int {
return scanner.GetScanAllCallCount()
return scanner.len()
}, "100ms", "10ms").Should(Equal(0))
})
@@ -676,7 +674,7 @@ var _ = Describe("Library Service", func() {
// Ensure no scan was triggered since creation failed
Consistently(func() int {
return scanner.GetScanAllCallCount()
return scanner.len()
}, "100ms", "10ms").Should(Equal(0))
})
@@ -693,7 +691,7 @@ var _ = Describe("Library Service", func() {
// Ensure no scan was triggered since update failed
Consistently(func() int {
return scanner.GetScanAllCallCount()
return scanner.len()
}, "100ms", "10ms").Should(Equal(0))
})
@@ -709,12 +707,11 @@ var _ = Describe("Library Service", func() {
// Wait briefly for the goroutine to complete
Eventually(func() int {
return scanner.GetScanAllCallCount()
return scanner.len()
}, "1s", "10ms").Should(Equal(1))
// Verify scan was called with correct parameters
calls := scanner.GetScanAllCalls()
Expect(calls[0].FullScan).To(BeFalse()) // Should be quick scan
Expect(scanner.ScanCalls[0].FullScan).To(BeFalse()) // Should be quick scan
})
It("does not trigger scan when library deletion fails", func() {
@@ -724,7 +721,7 @@ var _ = Describe("Library Service", func() {
// Ensure no scan was triggered since deletion failed
Consistently(func() int {
return scanner.GetScanAllCallCount()
return scanner.len()
}, "100ms", "10ms").Should(Equal(0))
})
@@ -871,6 +868,31 @@ var _ = Describe("Library Service", func() {
})
})
// mockScanner provides a simple mock implementation of core.Scanner for testing
type mockScanner struct {
ScanCalls []ScanCall
mu sync.RWMutex
}
type ScanCall struct {
FullScan bool
}
func (m *mockScanner) ScanAll(ctx context.Context, fullScan bool) (warnings []string, err error) {
m.mu.Lock()
defer m.mu.Unlock()
m.ScanCalls = append(m.ScanCalls, ScanCall{
FullScan: fullScan,
})
return []string{}, nil
}
func (m *mockScanner) len() int {
m.mu.RLock()
defer m.mu.RUnlock()
return len(m.ScanCalls)
}
// mockWatcherManager provides a simple mock implementation of core.Watcher for testing
type mockWatcherManager struct {
StartedWatchers []model.Library

View File

@@ -1,226 +0,0 @@
package core
import (
"context"
"fmt"
"slices"
"sync"
"time"
"github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/utils/slice"
)
type Maintenance interface {
// DeleteMissingFiles deletes specific missing files by their IDs
DeleteMissingFiles(ctx context.Context, ids []string) error
// DeleteAllMissingFiles deletes all files marked as missing
DeleteAllMissingFiles(ctx context.Context) error
}
type maintenanceService struct {
ds model.DataStore
wg sync.WaitGroup
}
func NewMaintenance(ds model.DataStore) Maintenance {
return &maintenanceService{
ds: ds,
}
}
func (s *maintenanceService) DeleteMissingFiles(ctx context.Context, ids []string) error {
return s.deleteMissing(ctx, ids)
}
func (s *maintenanceService) DeleteAllMissingFiles(ctx context.Context) error {
return s.deleteMissing(ctx, nil)
}
// deleteMissing handles the deletion of missing files and triggers necessary cleanup operations
func (s *maintenanceService) deleteMissing(ctx context.Context, ids []string) error {
// Track affected album IDs before deletion for refresh
affectedAlbumIDs, err := s.getAffectedAlbumIDs(ctx, ids)
if err != nil {
log.Warn(ctx, "Error tracking affected albums for refresh", err)
// Don't fail the operation, just log the warning
}
// Delete missing files within a transaction
err = s.ds.WithTx(func(tx model.DataStore) error {
if len(ids) == 0 {
_, err := tx.MediaFile(ctx).DeleteAllMissing()
return err
}
return tx.MediaFile(ctx).DeleteMissing(ids)
})
if err != nil {
log.Error(ctx, "Error deleting missing tracks from DB", "ids", ids, err)
return err
}
// Run garbage collection to clean up orphaned records
if err := s.ds.GC(ctx); err != nil {
log.Error(ctx, "Error running GC after deleting missing tracks", err)
return err
}
// Refresh statistics in background
s.refreshStatsAsync(ctx, affectedAlbumIDs)
return nil
}
// refreshAlbums recalculates album attributes (size, duration, song count, etc.) from media files.
// It uses batch queries to minimize database round-trips for efficiency.
func (s *maintenanceService) refreshAlbums(ctx context.Context, albumIDs []string) error {
if len(albumIDs) == 0 {
return nil
}
log.Debug(ctx, "Refreshing albums", "count", len(albumIDs))
// Process in chunks to avoid query size limits
const chunkSize = 100
for chunk := range slice.CollectChunks(slices.Values(albumIDs), chunkSize) {
if err := s.refreshAlbumChunk(ctx, chunk); err != nil {
return fmt.Errorf("refreshing album chunk: %w", err)
}
}
log.Debug(ctx, "Successfully refreshed albums", "count", len(albumIDs))
return nil
}
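
A stdlib-only sketch of the chunking loop used above; processChunk is a placeholder for the batched repository calls, and min is the Go 1.21 builtin.

package maintenancesketch

import "fmt"

// processInChunks splits ids into slices of at most chunkSize and hands each
// slice to processChunk, stopping at the first error.
func processInChunks(ids []string, chunkSize int, processChunk func([]string) error) error {
	for i := 0; i < len(ids); i += chunkSize {
		end := min(i+chunkSize, len(ids))
		if err := processChunk(ids[i:end]); err != nil {
			return fmt.Errorf("chunk starting at %d: %w", i, err)
		}
	}
	return nil
}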
// refreshAlbumChunk processes a single chunk of album IDs
func (s *maintenanceService) refreshAlbumChunk(ctx context.Context, albumIDs []string) error {
albumRepo := s.ds.Album(ctx)
mfRepo := s.ds.MediaFile(ctx)
// Batch load existing albums
albums, err := albumRepo.GetAll(model.QueryOptions{
Filters: squirrel.Eq{"album.id": albumIDs},
})
if err != nil {
return fmt.Errorf("loading albums: %w", err)
}
// Create a map for quick lookup
albumMap := make(map[string]*model.Album, len(albums))
for i := range albums {
albumMap[albums[i].ID] = &albums[i]
}
// Batch load all media files for these albums
mediaFiles, err := mfRepo.GetAll(model.QueryOptions{
Filters: squirrel.Eq{"album_id": albumIDs},
Sort: "album_id, path",
})
if err != nil {
return fmt.Errorf("loading media files: %w", err)
}
// Group media files by album ID
filesByAlbum := make(map[string]model.MediaFiles)
for i := range mediaFiles {
albumID := mediaFiles[i].AlbumID
filesByAlbum[albumID] = append(filesByAlbum[albumID], mediaFiles[i])
}
// Recalculate each album from its media files
for albumID, oldAlbum := range albumMap {
mfs, hasTracks := filesByAlbum[albumID]
if !hasTracks {
// Album has no tracks anymore, skip (will be cleaned up by GC)
log.Debug(ctx, "Skipping album with no tracks", "albumID", albumID)
continue
}
// Recalculate album from media files
newAlbum := mfs.ToAlbum()
// Only update if something changed (avoid unnecessary writes)
if !oldAlbum.Equals(newAlbum) {
// Preserve original timestamps
newAlbum.UpdatedAt = time.Now()
newAlbum.CreatedAt = oldAlbum.CreatedAt
if err := albumRepo.Put(&newAlbum); err != nil {
log.Error(ctx, "Error updating album during refresh", "albumID", albumID, err)
// Continue with other albums instead of failing entirely
continue
}
log.Trace(ctx, "Refreshed album", "albumID", albumID, "name", newAlbum.Name)
}
}
return nil
}
// getAffectedAlbumIDs returns distinct album IDs from missing media files
func (s *maintenanceService) getAffectedAlbumIDs(ctx context.Context, ids []string) ([]string, error) {
var filters squirrel.Sqlizer = squirrel.Eq{"missing": true}
if len(ids) > 0 {
filters = squirrel.And{
squirrel.Eq{"missing": true},
squirrel.Eq{"media_file.id": ids},
}
}
mfs, err := s.ds.MediaFile(ctx).GetAll(model.QueryOptions{
Filters: filters,
})
if err != nil {
return nil, err
}
// Extract unique album IDs
albumIDMap := make(map[string]struct{}, len(mfs))
for _, mf := range mfs {
if mf.AlbumID != "" {
albumIDMap[mf.AlbumID] = struct{}{}
}
}
albumIDs := make([]string, 0, len(albumIDMap))
for id := range albumIDMap {
albumIDs = append(albumIDs, id)
}
return albumIDs, nil
}
// refreshStatsAsync refreshes artist and album statistics in background goroutines
func (s *maintenanceService) refreshStatsAsync(ctx context.Context, affectedAlbumIDs []string) {
// Refresh artist stats in background
s.wg.Add(1)
go func() {
defer s.wg.Done()
bgCtx := request.AddValues(context.Background(), ctx)
if _, err := s.ds.Artist(bgCtx).RefreshStats(true); err != nil {
log.Error(bgCtx, "Error refreshing artist stats after deleting missing files", err)
} else {
log.Debug(bgCtx, "Successfully refreshed artist stats after deleting missing files")
}
// Refresh album stats in background if we have affected albums
if len(affectedAlbumIDs) > 0 {
if err := s.refreshAlbums(bgCtx, affectedAlbumIDs); err != nil {
log.Error(bgCtx, "Error refreshing album stats after deleting missing files", err)
} else {
log.Debug(bgCtx, "Successfully refreshed album stats after deleting missing files", "count", len(affectedAlbumIDs))
}
}
}()
}
// wait waits for all background goroutines to complete.
// WARNING: This method is ONLY for testing. Never call this in production code.
// Calling Wait() in production will block until ALL background operations complete
// and may cause race conditions with new operations starting.
func (s *maintenanceService) wait() {
s.wg.Wait()
}
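
The refreshStatsAsync/wait pair above is a fire-and-forget pattern where the WaitGroup exists only so tests can block until background work finishes. A minimal sketch of that shape:

package main

import (
	"fmt"
	"sync"
)

type service struct{ wg sync.WaitGroup }

// refreshAsync kicks off background work and returns immediately; the
// WaitGroup only tracks completion so tests can synchronize.
func (s *service) refreshAsync(ids []string) {
	s.wg.Add(1)
	go func() {
		defer s.wg.Done()
		fmt.Println("refreshing", len(ids), "albums in the background")
	}()
}

// wait blocks until all background work finishes; intended for tests only.
func (s *service) wait() { s.wg.Wait() }

func main() {
	s := &service{}
	s.refreshAsync([]string{"al-1", "al-2"})
	s.wait() // deterministic: the goroutine has finished before asserting anything
}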

View File

@@ -1,364 +0,0 @@
package core
import (
"context"
"errors"
"sync"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/sirupsen/logrus"
)
var _ = Describe("Maintenance", func() {
var ds *tests.MockDataStore
var mfRepo *extendedMediaFileRepo
var service Maintenance
var ctx context.Context
BeforeEach(func() {
ctx = context.Background()
ctx = request.WithUser(ctx, model.User{ID: "user1", IsAdmin: true})
ds = createTestDataStore()
mfRepo = ds.MockedMediaFile.(*extendedMediaFileRepo)
service = NewMaintenance(ds)
})
Describe("DeleteMissingFiles", func() {
Context("with specific IDs", func() {
It("deletes specific missing files and runs GC", func() {
// Setup: mock missing files with album IDs
mfRepo.SetData(model.MediaFiles{
{ID: "mf1", AlbumID: "album1", Missing: true},
{ID: "mf2", AlbumID: "album2", Missing: true},
})
err := service.DeleteMissingFiles(ctx, []string{"mf1", "mf2"})
Expect(err).ToNot(HaveOccurred())
Expect(mfRepo.deleteMissingCalled).To(BeTrue())
Expect(mfRepo.deletedIDs).To(Equal([]string{"mf1", "mf2"}))
Expect(ds.GCCalled).To(BeTrue(), "GC should be called after deletion")
})
It("triggers artist stats refresh and album refresh after deletion", func() {
artistRepo := ds.MockedArtist.(*extendedArtistRepo)
// Setup: mock missing files with albums
albumRepo := ds.MockedAlbum.(*extendedAlbumRepo)
albumRepo.SetData(model.Albums{
{ID: "album1", Name: "Test Album", SongCount: 5},
})
mfRepo.SetData(model.MediaFiles{
{ID: "mf1", AlbumID: "album1", Missing: true},
{ID: "mf2", AlbumID: "album1", Missing: false, Size: 1000, Duration: 180},
{ID: "mf3", AlbumID: "album1", Missing: false, Size: 2000, Duration: 200},
})
err := service.DeleteMissingFiles(ctx, []string{"mf1"})
Expect(err).ToNot(HaveOccurred())
// Wait for background goroutines to complete
service.(*maintenanceService).wait()
// RefreshStats should be called
Expect(artistRepo.IsRefreshStatsCalled()).To(BeTrue(), "Artist stats should be refreshed")
// Album should be updated with new calculated values
Expect(albumRepo.GetPutCallCount()).To(BeNumerically(">", 0), "Album.Put() should be called to refresh album data")
})
It("returns error if deletion fails", func() {
mfRepo.deleteMissingError = errors.New("delete failed")
err := service.DeleteMissingFiles(ctx, []string{"mf1"})
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("delete failed"))
})
It("continues even if album tracking fails", func() {
mfRepo.SetError(true)
err := service.DeleteMissingFiles(ctx, []string{"mf1"})
// Should not fail, just log warning
Expect(err).ToNot(HaveOccurred())
Expect(mfRepo.deleteMissingCalled).To(BeTrue())
})
It("returns error if GC fails", func() {
mfRepo.SetData(model.MediaFiles{
{ID: "mf1", AlbumID: "album1", Missing: true},
})
// Set GC to return error
ds.GCError = errors.New("gc failed")
err := service.DeleteMissingFiles(ctx, []string{"mf1"})
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("gc failed"))
})
})
Context("album ID extraction", func() {
It("extracts unique album IDs from missing files", func() {
mfRepo.SetData(model.MediaFiles{
{ID: "mf1", AlbumID: "album1", Missing: true},
{ID: "mf2", AlbumID: "album1", Missing: true},
{ID: "mf3", AlbumID: "album2", Missing: true},
})
err := service.DeleteMissingFiles(ctx, []string{"mf1", "mf2", "mf3"})
Expect(err).ToNot(HaveOccurred())
})
It("skips files without album IDs", func() {
mfRepo.SetData(model.MediaFiles{
{ID: "mf1", AlbumID: "", Missing: true},
{ID: "mf2", AlbumID: "album1", Missing: true},
})
err := service.DeleteMissingFiles(ctx, []string{"mf1", "mf2"})
Expect(err).ToNot(HaveOccurred())
})
})
})
Describe("DeleteAllMissingFiles", func() {
It("deletes all missing files and runs GC", func() {
mfRepo.SetData(model.MediaFiles{
{ID: "mf1", AlbumID: "album1", Missing: true},
{ID: "mf2", AlbumID: "album2", Missing: true},
{ID: "mf3", AlbumID: "album3", Missing: true},
})
err := service.DeleteAllMissingFiles(ctx)
Expect(err).ToNot(HaveOccurred())
Expect(ds.GCCalled).To(BeTrue(), "GC should be called after deletion")
})
It("returns error if deletion fails", func() {
mfRepo.SetError(true)
err := service.DeleteAllMissingFiles(ctx)
Expect(err).To(HaveOccurred())
})
It("handles empty result gracefully", func() {
mfRepo.SetData(model.MediaFiles{})
err := service.DeleteAllMissingFiles(ctx)
Expect(err).ToNot(HaveOccurred())
})
})
Describe("Album refresh logic", func() {
var albumRepo *extendedAlbumRepo
BeforeEach(func() {
albumRepo = ds.MockedAlbum.(*extendedAlbumRepo)
})
Context("when album has no tracks after deletion", func() {
It("skips the album without updating it", func() {
// Setup album with no remaining tracks
albumRepo.SetData(model.Albums{
{ID: "album1", Name: "Empty Album", SongCount: 1},
})
mfRepo.SetData(model.MediaFiles{
{ID: "mf1", AlbumID: "album1", Missing: true},
})
err := service.DeleteMissingFiles(ctx, []string{"mf1"})
Expect(err).ToNot(HaveOccurred())
// Wait for background goroutines to complete
service.(*maintenanceService).wait()
// Album should NOT be updated because it has no tracks left
Expect(albumRepo.GetPutCallCount()).To(Equal(0), "Album with no tracks should not be updated")
})
})
Context("when Put fails for one album", func() {
It("continues processing other albums", func() {
albumRepo.SetData(model.Albums{
{ID: "album1", Name: "Album 1"},
{ID: "album2", Name: "Album 2"},
})
mfRepo.SetData(model.MediaFiles{
{ID: "mf1", AlbumID: "album1", Missing: true},
{ID: "mf2", AlbumID: "album1", Missing: false, Size: 1000, Duration: 180},
{ID: "mf3", AlbumID: "album2", Missing: true},
{ID: "mf4", AlbumID: "album2", Missing: false, Size: 2000, Duration: 200},
})
// Make Put fail on first call but succeed on subsequent calls
albumRepo.putError = errors.New("put failed")
albumRepo.failOnce = true
err := service.DeleteMissingFiles(ctx, []string{"mf1", "mf3"})
// Should not fail even if one album's Put fails
Expect(err).ToNot(HaveOccurred())
// Wait for background goroutines to complete
service.(*maintenanceService).wait()
// Put should have been called multiple times
Expect(albumRepo.GetPutCallCount()).To(BeNumerically(">", 0), "Put should be attempted")
})
})
Context("when media file loading fails", func() {
It("logs warning but continues when tracking affected albums fails", func() {
// Set up log capturing
hook, cleanup := tests.LogHook()
defer cleanup()
albumRepo.SetData(model.Albums{
{ID: "album1", Name: "Album 1"},
})
mfRepo.SetData(model.MediaFiles{
{ID: "mf1", AlbumID: "album1", Missing: true},
})
// Make GetAll fail when loading media files
mfRepo.SetError(true)
err := service.DeleteMissingFiles(ctx, []string{"mf1"})
// Deletion should succeed despite the tracking error
Expect(err).ToNot(HaveOccurred())
Expect(mfRepo.deleteMissingCalled).To(BeTrue())
// Verify the warning was logged
Expect(hook.LastEntry()).ToNot(BeNil())
Expect(hook.LastEntry().Level).To(Equal(logrus.WarnLevel))
Expect(hook.LastEntry().Message).To(Equal("Error tracking affected albums for refresh"))
})
})
})
})
// Test helper to create a mock DataStore with controllable behavior
func createTestDataStore() *tests.MockDataStore {
ds := &tests.MockDataStore{}
// Create extended album repo with Put tracking
albumRepo := &extendedAlbumRepo{
MockAlbumRepo: tests.CreateMockAlbumRepo(),
}
ds.MockedAlbum = albumRepo
// Create extended artist repo with RefreshStats tracking
artistRepo := &extendedArtistRepo{
MockArtistRepo: tests.CreateMockArtistRepo(),
}
ds.MockedArtist = artistRepo
// Create extended media file repo with DeleteMissing support
mfRepo := &extendedMediaFileRepo{
MockMediaFileRepo: tests.CreateMockMediaFileRepo(),
}
ds.MockedMediaFile = mfRepo
return ds
}
// Extension of MockMediaFileRepo to add DeleteMissing method
type extendedMediaFileRepo struct {
*tests.MockMediaFileRepo
deleteMissingCalled bool
deletedIDs []string
deleteMissingError error
}
func (m *extendedMediaFileRepo) DeleteMissing(ids []string) error {
m.deleteMissingCalled = true
m.deletedIDs = ids
if m.deleteMissingError != nil {
return m.deleteMissingError
}
// Actually delete from the mock data
for _, id := range ids {
delete(m.Data, id)
}
return nil
}
// Extension of MockAlbumRepo to track Put calls
type extendedAlbumRepo struct {
*tests.MockAlbumRepo
mu sync.RWMutex
putCallCount int
lastPutData *model.Album
putError error
failOnce bool
}
func (m *extendedAlbumRepo) Put(album *model.Album) error {
m.mu.Lock()
m.putCallCount++
m.lastPutData = album
// Handle failOnce behavior
var err error
if m.putError != nil {
if m.failOnce {
err = m.putError
m.putError = nil // Clear error after first failure
m.mu.Unlock()
return err
}
err = m.putError
m.mu.Unlock()
return err
}
m.mu.Unlock()
return m.MockAlbumRepo.Put(album)
}
func (m *extendedAlbumRepo) GetPutCallCount() int {
m.mu.RLock()
defer m.mu.RUnlock()
return m.putCallCount
}
// Extension of MockArtistRepo to track RefreshStats calls
type extendedArtistRepo struct {
*tests.MockArtistRepo
mu sync.RWMutex
refreshStatsCalled bool
refreshStatsError error
}
func (m *extendedArtistRepo) RefreshStats(allArtists bool) (int64, error) {
m.mu.Lock()
m.refreshStatsCalled = true
err := m.refreshStatsError
m.mu.Unlock()
if err != nil {
return 0, err
}
return m.MockArtistRepo.RefreshStats(allArtists)
}
func (m *extendedArtistRepo) IsRefreshStatsCalled() bool {
m.mu.RLock()
defer m.mu.RUnlock()
return m.refreshStatsCalled
}

View File

@@ -204,20 +204,7 @@ func NewTranscodingCache() TranscodingCache {
log.Error(ctx, "Error loading transcoding command", "format", job.format, err)
return nil, os.ErrInvalid
}
// Choose the appropriate context based on EnableTranscodingCancellation configuration.
// This is where we decide whether transcoding processes should be cancellable or not.
var transcodingCtx context.Context
if conf.Server.EnableTranscodingCancellation {
// Use the request context directly, allowing cancellation when client disconnects
transcodingCtx = ctx
} else {
// Use background context with request values preserved.
// This prevents cancellation but maintains request metadata (user, client, etc.)
transcodingCtx = request.AddValues(context.Background(), ctx)
}
out, err := job.ms.transcoder.Transcode(transcodingCtx, t.Command, job.filePath, job.bitRate, job.offset)
out, err := job.ms.transcoder.Transcode(ctx, t.Command, job.filePath, job.bitRate, job.offset)
if err != nil {
log.Error(ctx, "Error starting transcoder", "id", job.mf.ID, err)
return nil, os.ErrInvalid
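The branch removed above illustrates a general pattern: detaching background work from the request's cancellation while keeping the request's values. A minimal standalone sketch of the same idea using the standard library's context.WithoutCancel (Go 1.21+) instead of Navidrome's request.AddValues helper:

package main

import (
	"context"
	"fmt"
)

type ctxKey string

func main() {
	// Request-scoped context carrying a value, as an HTTP handler would see it.
	req, cancel := context.WithCancel(context.WithValue(context.Background(), ctxKey("user"), "admin"))

	// Detach cancellation but keep values (Go 1.21+).
	detached := context.WithoutCancel(req)

	cancel() // simulate the client disconnecting mid-transcode

	fmt.Println(req.Err())                      // context canceled
	fmt.Println(detached.Err())                 // <nil>: background work is unaffected
	fmt.Println(detached.Value(ctxKey("user"))) // admin: request metadata preserved
}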

View File

@@ -6,7 +6,6 @@ import (
"encoding/json"
"math"
"net/http"
"os"
"path/filepath"
"runtime"
"runtime/debug"
@@ -22,9 +21,7 @@ import (
"github.com/navidrome/navidrome/core/metrics/insights"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/plugins"
"github.com/navidrome/navidrome/server/events"
"github.com/navidrome/navidrome/plugins/schema"
"github.com/navidrome/navidrome/utils/singleton"
)
@@ -38,12 +35,18 @@ var (
)
type insightsCollector struct {
ds model.DataStore
lastRun atomic.Int64
lastStatus atomic.Bool
ds model.DataStore
pluginLoader PluginLoader
lastRun atomic.Int64
lastStatus atomic.Bool
}
func GetInstance(ds model.DataStore) Insights {
// PluginLoader defines an interface for loading plugins
type PluginLoader interface {
PluginList() map[string]schema.PluginManifest
}
func GetInstance(ds model.DataStore, pluginLoader PluginLoader) Insights {
return singleton.GetInstance(func() *insightsCollector {
id, err := ds.Property(context.TODO()).Get(consts.InsightsIDKey)
if err != nil {
@@ -55,21 +58,14 @@ func GetInstance(ds model.DataStore) Insights {
}
}
insightsID = id
return &insightsCollector{ds: ds}
return &insightsCollector{ds: ds, pluginLoader: pluginLoader}
})
}
func (c *insightsCollector) Run(ctx context.Context) {
ctx = auth.WithAdminUser(ctx, c.ds)
for {
// Refresh admin context on each iteration to handle cases where
// admin user wasn't available on previous runs
insightsCtx := auth.WithAdminUser(ctx, c.ds)
u, _ := request.UserFrom(insightsCtx)
if !u.IsAdmin {
log.Trace(insightsCtx, "No admin user available, skipping insights collection")
} else {
c.sendInsights(insightsCtx)
}
c.sendInsights(ctx)
select {
case <-time.After(consts.InsightsUpdateInterval):
continue
@@ -164,13 +160,6 @@ var staticData = sync.OnceValue(func() insights.Data {
data.Build.Settings, data.Build.GoVersion = buildInfo()
data.OS.Containerized = consts.InContainer
// Install info
packageFilename := filepath.Join(conf.Server.DataFolder, ".package")
packageFileData, err := os.ReadFile(packageFilename)
if err == nil {
data.OS.Package = string(packageFileData)
}
// OS info
data.OS.Type = runtime.GOOS
data.OS.Arch = runtime.GOARCH
@@ -218,7 +207,7 @@ var staticData = sync.OnceValue(func() insights.Data {
data.Config.ScanSchedule = conf.Server.Scanner.Schedule
data.Config.ScanWatcherWait = uint64(math.Trunc(conf.Server.Scanner.WatcherWait.Seconds()))
data.Config.ScanOnStartup = conf.Server.Scanner.ScanOnStartup
data.Config.ReverseProxyConfigured = conf.Server.ExtAuth.TrustedSources != ""
data.Config.ReverseProxyConfigured = conf.Server.ReverseProxyWhitelist != ""
data.Config.HasCustomPID = conf.Server.PID.Track != "" || conf.Server.PID.Album != ""
data.Config.HasCustomTags = len(conf.Server.Tags) > 0
@@ -314,16 +303,12 @@ func (c *insightsCollector) hasSmartPlaylists(ctx context.Context) (bool, error)
// collectPlugins collects information about installed plugins
func (c *insightsCollector) collectPlugins(_ context.Context) map[string]insights.PluginInfo {
// TODO Fix import/inject cycles
manager := plugins.GetManager(c.ds, events.GetBroker())
info := manager.GetPluginInfo()
result := make(map[string]insights.PluginInfo, len(info))
for name, p := range info {
result[name] = insights.PluginInfo{
Name: p.Name,
Version: p.Version,
plugins := make(map[string]insights.PluginInfo)
for id, manifest := range c.pluginLoader.PluginList() {
plugins[id] = insights.PluginInfo{
Name: manifest.Name,
Version: manifest.Version,
}
}
return result
return plugins
}
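The new PluginLoader interface gives the collector a seam that can be stubbed in tests. A minimal standalone sketch of that shape; the types below are local stand-ins (the real schema.PluginManifest carries more fields, but collectPlugins only reads Name and Version), and the example manifest values are made up:

package main

import "fmt"

// Stand-in for schema.PluginManifest; only the fields read by collectPlugins.
type pluginManifest struct {
	Name    string
	Version string
}

// Stand-in for insights.PluginInfo.
type pluginInfo struct {
	Name    string
	Version string
}

// pluginLister mirrors the PluginLoader interface introduced above.
type pluginLister interface {
	PluginList() map[string]pluginManifest
}

type stubLoader struct{ manifests map[string]pluginManifest }

func (s stubLoader) PluginList() map[string]pluginManifest { return s.manifests }

// collect mirrors the new collectPlugins: copy only Name and Version per plugin ID.
func collect(l pluginLister) map[string]pluginInfo {
	out := make(map[string]pluginInfo, len(l.PluginList()))
	for id, m := range l.PluginList() {
		out[id] = pluginInfo{Name: m.Name, Version: m.Version}
	}
	return out
}

func main() {
	loader := stubLoader{manifests: map[string]pluginManifest{
		"lastfm": {Name: "LastFM Agent", Version: "1.0.0"}, // made-up example values
	}}
	fmt.Printf("%+v\n", collect(loader)) // map[lastfm:{Name:LastFM Agent Version:1.0.0}]
}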

View File

@@ -16,7 +16,6 @@ type Data struct {
Containerized bool `json:"containerized"`
Arch string `json:"arch"`
NumCPU int `json:"numCPU"`
Package string `json:"package,omitempty"`
} `json:"os"`
Mem struct {
Alloc uint64 `json:"alloc"`

View File

@@ -42,7 +42,6 @@ type MountInfo struct {
var fsTypeMap = map[int64]string{
0x5346414f: "afs",
0x187: "autofs",
0x61756673: "aufs",
0x9123683E: "btrfs",
0xc36400: "ceph",
@@ -56,11 +55,9 @@ var fsTypeMap = map[int64]string{
0x6a656a63: "fakeowner", // FS inside a container
0x65735546: "fuse",
0x4244: "hfs",
0x482b: "hfs+",
0x9660: "iso9660",
0x3153464a: "jfs",
0x00006969: "nfs",
0x5346544e: "ntfs", // NTFS_SB_MAGIC
0x7366746e: "ntfs",
0x794c7630: "overlayfs",
0x9fa0: "proc",
@@ -72,16 +69,8 @@ var fsTypeMap = map[int64]string{
0x01021997: "v9fs",
0x786f4256: "vboxsf",
0x4d44: "vfat",
0xca451a4e: "virtiofs",
0x58465342: "xfs",
0x2FC12FC1: "zfs",
0x7c7c6673: "prlfs", // Parallels Shared Folders
// Signed/unsigned conversion issues (negative hex values converted to uint32)
-0x6edc97c2: "btrfs", // 0x9123683e
-0x1acb2be: "smb2", // 0xfe534d42
-0xacb2be: "cifs", // 0xff534d42
-0xd0adff0: "f2fs", // 0xf2f52010
}
func getFilesystemType(path string) (string, error) {
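The deleted entries with negative keys exist because some platforms report the statfs filesystem type through a signed integer, so magic numbers with the high bit set come back negative. A small standalone worked example of that conversion (not Navidrome code):

package main

import "fmt"

func main() {
	// btrfs magic number as usually written (high bit set, does not fit in int32).
	var raw uint32 = 0x9123683E

	// On platforms where the statfs type field is signed, the same bits
	// read back as a negative value.
	asSigned := int32(raw)

	fmt.Printf("%d == -0x%X\n", asSigned, -int64(asSigned))
	// Output: -1859950530 == -0x6EDC97C2
	// which is why the int64-keyed map listed both 0x9123683E and -0x6edc97c2.
}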

View File

@@ -1,7 +1,6 @@
package core
import (
"cmp"
"context"
"encoding/json"
"errors"
@@ -10,7 +9,7 @@ import (
"net/url"
"os"
"path/filepath"
"slices"
"regexp"
"strings"
"time"
@@ -195,35 +194,22 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
}
filteredLines = append(filteredLines, line)
}
resolvedPaths, err := s.resolvePaths(ctx, folder, filteredLines)
paths, err := s.normalizePaths(ctx, pls, folder, filteredLines)
if err != nil {
log.Warn(ctx, "Error resolving paths in playlist", "playlist", pls.Name, err)
log.Warn(ctx, "Error normalizing paths in playlist", "playlist", pls.Name, err)
continue
}
// Normalize to NFD for filesystem compatibility (macOS). Database stores paths in NFD.
// See https://github.com/navidrome/navidrome/issues/4663
resolvedPaths = slice.Map(resolvedPaths, func(path string) string {
return strings.ToLower(norm.NFD.String(path))
})
found, err := mediaFileRepository.FindByPaths(resolvedPaths)
found, err := mediaFileRepository.FindByPaths(paths)
if err != nil {
log.Warn(ctx, "Error reading files from DB", "playlist", pls.Name, err)
continue
}
// Build lookup map with library-qualified keys, normalized for comparison
existing := make(map[string]int, len(found))
for idx := range found {
// Normalize to lowercase for case-insensitive comparison
// Key format: "libraryID:path"
key := fmt.Sprintf("%d:%s", found[idx].LibraryID, strings.ToLower(found[idx].Path))
existing[key] = idx
existing[normalizePathForComparison(found[idx].Path)] = idx
}
// Find media files in the order of the resolved paths, to keep playlist order
for _, path := range resolvedPaths {
idx, ok := existing[path]
for _, path := range paths {
idx, ok := existing[normalizePathForComparison(path)]
if ok {
mfs = append(mfs, found[idx])
} else {
@@ -240,150 +226,69 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
return nil
}
// pathResolution holds the result of resolving a playlist path to a library-relative path.
type pathResolution struct {
absolutePath string
libraryPath string
libraryID int
valid bool
// normalizePathForComparison normalizes a file path to NFC form and converts to lowercase
// for consistent comparison. This fixes Unicode normalization issues on macOS where
// Apple Music creates playlists with NFC-encoded paths but the filesystem uses NFD.
func normalizePathForComparison(path string) string {
return strings.ToLower(norm.NFC.String(path))
}
// ToQualifiedString converts the path resolution to a library-qualified string with forward slashes.
// Format: "libraryID:relativePath" with forward slashes for path separators.
func (r pathResolution) ToQualifiedString() (string, error) {
if !r.valid {
return "", fmt.Errorf("invalid path resolution")
}
relativePath, err := filepath.Rel(r.libraryPath, r.absolutePath)
// TODO This won't work for multiple libraries
func (s *playlists) normalizePaths(ctx context.Context, pls *model.Playlist, folder *model.Folder, lines []string) ([]string, error) {
libRegex, err := s.compileLibraryPaths(ctx)
if err != nil {
return "", err
return nil, err
}
// Convert path separators to forward slashes
return fmt.Sprintf("%d:%s", r.libraryID, filepath.ToSlash(relativePath)), nil
}
// libraryMatcher holds sorted libraries with cleaned paths for efficient path matching.
type libraryMatcher struct {
libraries model.Libraries
cleanedPaths []string
}
res := make([]string, 0, len(lines))
for idx, line := range lines {
var libPath string
var filePath string
// findLibraryForPath finds which library contains the given absolute path.
// Returns library ID and path, or 0 and empty string if not found.
func (lm *libraryMatcher) findLibraryForPath(absolutePath string) (int, string) {
// Check sorted libraries (longest path first) to find the best match
for i, cleanLibPath := range lm.cleanedPaths {
// Check if absolutePath is under this library path
if strings.HasPrefix(absolutePath, cleanLibPath) {
// Ensure it's a proper path boundary (not just a prefix)
if len(absolutePath) == len(cleanLibPath) || absolutePath[len(cleanLibPath)] == filepath.Separator {
return lm.libraries[i].ID, cleanLibPath
if folder != nil && !filepath.IsAbs(line) {
libPath = folder.LibraryPath
filePath = filepath.Join(folder.AbsolutePath(), line)
} else {
cleanLine := filepath.Clean(line)
if libPath = libRegex.FindString(cleanLine); libPath != "" {
filePath = cleanLine
}
}
}
return 0, ""
}
// newLibraryMatcher creates a libraryMatcher with libraries sorted by path length (longest first).
// This ensures correct matching when library paths are prefixes of each other.
// Example: /music-classical must be checked before /music
// Otherwise, /music-classical/track.mp3 would match /music instead of /music-classical
func newLibraryMatcher(libs model.Libraries) *libraryMatcher {
// Sort libraries by path length (descending) to ensure longest paths match first.
slices.SortFunc(libs, func(i, j model.Library) int {
return cmp.Compare(len(j.Path), len(i.Path)) // Reverse order for descending
})
// Pre-clean all library paths once for efficient matching
cleanedPaths := make([]string, len(libs))
for i, lib := range libs {
cleanedPaths[i] = filepath.Clean(lib.Path)
}
return &libraryMatcher{
libraries: libs,
cleanedPaths: cleanedPaths,
}
}
// pathResolver handles path resolution logic for playlist imports.
type pathResolver struct {
matcher *libraryMatcher
}
// newPathResolver creates a pathResolver with libraries loaded from the datastore.
func newPathResolver(ctx context.Context, ds model.DataStore) (*pathResolver, error) {
libs, err := ds.Library(ctx).GetAll()
if err != nil {
return nil, err
}
matcher := newLibraryMatcher(libs)
return &pathResolver{matcher: matcher}, nil
}
// resolvePath determines the absolute path and library path for a playlist entry.
// For absolute paths, it uses them directly.
// For relative paths, it resolves them relative to the playlist's folder location.
// Example: playlist at /music/playlists/test.m3u with line "../songs/abc.mp3"
//
// resolves to /music/songs/abc.mp3
func (r *pathResolver) resolvePath(line string, folder *model.Folder) pathResolution {
var absolutePath string
if folder != nil && !filepath.IsAbs(line) {
// Resolve relative path to absolute path based on playlist location
absolutePath = filepath.Clean(filepath.Join(folder.AbsolutePath(), line))
} else {
// Use absolute path directly after cleaning
absolutePath = filepath.Clean(line)
}
return r.findInLibraries(absolutePath)
}
// findInLibraries matches an absolute path against all known libraries and returns
// a pathResolution with the library information. Returns an invalid resolution if
// the path is not found in any library.
func (r *pathResolver) findInLibraries(absolutePath string) pathResolution {
libID, libPath := r.matcher.findLibraryForPath(absolutePath)
if libID == 0 {
return pathResolution{valid: false}
}
return pathResolution{
absolutePath: absolutePath,
libraryPath: libPath,
libraryID: libID,
valid: true,
}
}
// resolvePaths converts playlist file paths to library-qualified paths (format: "libraryID:relativePath").
// For relative paths, it resolves them to absolute paths first, then determines which
// library they belong to. This allows playlists to reference files across library boundaries.
func (s *playlists) resolvePaths(ctx context.Context, folder *model.Folder, lines []string) ([]string, error) {
resolver, err := newPathResolver(ctx, s.ds)
if err != nil {
return nil, err
}
results := make([]string, 0, len(lines))
for idx, line := range lines {
resolution := resolver.resolvePath(line, folder)
if !resolution.valid {
if libPath != "" {
if rel, err := filepath.Rel(libPath, filePath); err == nil {
res = append(res, rel)
} else {
log.Debug(ctx, "Error getting relative path", "playlist", pls.Name, "path", line, "libPath", libPath,
"filePath", filePath, err)
}
} else {
log.Warn(ctx, "Path in playlist not found in any library", "path", line, "line", idx)
continue
}
}
return slice.Map(res, filepath.ToSlash), nil
}
qualifiedPath, err := resolution.ToQualifiedString()
if err != nil {
log.Debug(ctx, "Error getting library-qualified path", "path", line,
"libPath", resolution.libraryPath, "filePath", resolution.absolutePath, err)
continue
}
results = append(results, qualifiedPath)
func (s *playlists) compileLibraryPaths(ctx context.Context) (*regexp.Regexp, error) {
libs, err := s.ds.Library(ctx).GetAll()
if err != nil {
return nil, err
}
return results, nil
// Create regex patterns for each library path
patterns := make([]string, len(libs))
for i, lib := range libs {
cleanPath := filepath.Clean(lib.Path)
escapedPath := regexp.QuoteMeta(cleanPath)
patterns[i] = fmt.Sprintf("^%s(?:/|$)", escapedPath)
}
// Combine all patterns into a single regex
combinedPattern := strings.Join(patterns, "|")
re, err := regexp.Compile(combinedPattern)
if err != nil {
return nil, fmt.Errorf("compiling library paths `%s`: %w", combinedPattern, err)
}
return re, nil
}
func (s *playlists) updatePlaylist(ctx context.Context, newPls *model.Playlist) error {
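compileLibraryPaths anchors each escaped library root with a trailing (?:/|$), so a shorter root such as /music cannot swallow paths under /music-classical. A self-contained sketch of the same matching rule, with made-up roots and paths:

package main

import (
	"fmt"
	"path/filepath"
	"regexp"
	"strings"
)

func main() {
	roots := []string{"/music", "/music-classical"}

	patterns := make([]string, len(roots))
	for i, r := range roots {
		patterns[i] = fmt.Sprintf("^%s(?:/|$)", regexp.QuoteMeta(filepath.Clean(r)))
	}
	re := regexp.MustCompile(strings.Join(patterns, "|"))

	fmt.Printf("%q\n", re.FindString("/music-classical/bach.mp3")) // "/music-classical/"
	fmt.Printf("%q\n", re.FindString("/music/rock.mp3"))           // "/music/"
	fmt.Printf("%q\n", re.FindString("/videos/movie.mp4"))         // "" (outside all libraries)
}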

View File

@@ -1,406 +0,0 @@
package core
import (
"context"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("libraryMatcher", func() {
var ds *tests.MockDataStore
var mockLibRepo *tests.MockLibraryRepo
ctx := context.Background()
BeforeEach(func() {
mockLibRepo = &tests.MockLibraryRepo{}
ds = &tests.MockDataStore{
MockedLibrary: mockLibRepo,
}
})
// Helper function to create a libraryMatcher from the mock datastore
createMatcher := func(ds model.DataStore) *libraryMatcher {
libs, err := ds.Library(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
return newLibraryMatcher(libs)
}
Describe("Longest library path matching", func() {
It("matches the longest library path when multiple libraries share a prefix", func() {
// Setup libraries with prefix conflicts
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: "/music"},
{ID: 2, Path: "/music-classical"},
{ID: 3, Path: "/music-classical/opera"},
})
matcher := createMatcher(ds)
// Test that longest path matches first and returns correct library ID
testCases := []struct {
path string
expectedLibID int
expectedLibPath string
}{
{"/music-classical/opera/track.mp3", 3, "/music-classical/opera"},
{"/music-classical/track.mp3", 2, "/music-classical"},
{"/music/track.mp3", 1, "/music"},
{"/music-classical/opera/subdir/file.mp3", 3, "/music-classical/opera"},
}
for _, tc := range testCases {
libID, libPath := matcher.findLibraryForPath(tc.path)
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d, but got %d", tc.path, tc.expectedLibID, libID)
Expect(libPath).To(Equal(tc.expectedLibPath), "Path %s should match library path %s, but got %s", tc.path, tc.expectedLibPath, libPath)
}
})
It("handles libraries with similar prefixes but different structures", func() {
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: "/home/user/music"},
{ID: 2, Path: "/home/user/music-backup"},
})
matcher := createMatcher(ds)
// Test that music-backup library is matched correctly
libID, libPath := matcher.findLibraryForPath("/home/user/music-backup/track.mp3")
Expect(libID).To(Equal(2))
Expect(libPath).To(Equal("/home/user/music-backup"))
// Test that music library is still matched correctly
libID, libPath = matcher.findLibraryForPath("/home/user/music/track.mp3")
Expect(libID).To(Equal(1))
Expect(libPath).To(Equal("/home/user/music"))
})
It("matches path that is exactly the library root", func() {
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: "/music"},
{ID: 2, Path: "/music-classical"},
})
matcher := createMatcher(ds)
// Exact library path should match
libID, libPath := matcher.findLibraryForPath("/music-classical")
Expect(libID).To(Equal(2))
Expect(libPath).To(Equal("/music-classical"))
})
It("handles complex nested library structures", func() {
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: "/media"},
{ID: 2, Path: "/media/audio"},
{ID: 3, Path: "/media/audio/classical"},
{ID: 4, Path: "/media/audio/classical/baroque"},
})
matcher := createMatcher(ds)
testCases := []struct {
path string
expectedLibID int
expectedLibPath string
}{
{"/media/audio/classical/baroque/bach/track.mp3", 4, "/media/audio/classical/baroque"},
{"/media/audio/classical/mozart/track.mp3", 3, "/media/audio/classical"},
{"/media/audio/rock/track.mp3", 2, "/media/audio"},
{"/media/video/movie.mp4", 1, "/media"},
}
for _, tc := range testCases {
libID, libPath := matcher.findLibraryForPath(tc.path)
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d", tc.path, tc.expectedLibID)
Expect(libPath).To(Equal(tc.expectedLibPath), "Path %s should match library path %s", tc.path, tc.expectedLibPath)
}
})
})
Describe("Edge cases", func() {
It("handles empty library list", func() {
mockLibRepo.SetData([]model.Library{})
matcher := createMatcher(ds)
Expect(matcher).ToNot(BeNil())
// Should not match anything
libID, libPath := matcher.findLibraryForPath("/music/track.mp3")
Expect(libID).To(Equal(0))
Expect(libPath).To(BeEmpty())
})
It("handles single library", func() {
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: "/music"},
})
matcher := createMatcher(ds)
libID, libPath := matcher.findLibraryForPath("/music/track.mp3")
Expect(libID).To(Equal(1))
Expect(libPath).To(Equal("/music"))
})
It("handles libraries with special characters in paths", func() {
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: "/music[test]"},
{ID: 2, Path: "/music(backup)"},
})
matcher := createMatcher(ds)
Expect(matcher).ToNot(BeNil())
// Special characters should match literally
libID, libPath := matcher.findLibraryForPath("/music[test]/track.mp3")
Expect(libID).To(Equal(1))
Expect(libPath).To(Equal("/music[test]"))
})
})
Describe("Path matching order", func() {
It("ensures longest paths match first", func() {
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: "/a"},
{ID: 2, Path: "/ab"},
{ID: 3, Path: "/abc"},
})
matcher := createMatcher(ds)
// Verify that longer paths match correctly (not cut off by shorter prefix)
testCases := []struct {
path string
expectedLibID int
}{
{"/abc/file.mp3", 3},
{"/ab/file.mp3", 2},
{"/a/file.mp3", 1},
}
for _, tc := range testCases {
libID, _ := matcher.findLibraryForPath(tc.path)
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d", tc.path, tc.expectedLibID)
}
})
})
})
var _ = Describe("pathResolver", func() {
var ds *tests.MockDataStore
var mockLibRepo *tests.MockLibraryRepo
var resolver *pathResolver
ctx := context.Background()
BeforeEach(func() {
mockLibRepo = &tests.MockLibraryRepo{}
ds = &tests.MockDataStore{
MockedLibrary: mockLibRepo,
}
// Setup test libraries
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: "/music"},
{ID: 2, Path: "/music-classical"},
{ID: 3, Path: "/podcasts"},
})
var err error
resolver, err = newPathResolver(ctx, ds)
Expect(err).ToNot(HaveOccurred())
})
Describe("resolvePath", func() {
It("resolves absolute paths", func() {
resolution := resolver.resolvePath("/music/artist/album/track.mp3", nil)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.libraryID).To(Equal(1))
Expect(resolution.libraryPath).To(Equal("/music"))
Expect(resolution.absolutePath).To(Equal("/music/artist/album/track.mp3"))
})
It("resolves relative paths when folder is provided", func() {
folder := &model.Folder{
Path: "playlists",
LibraryPath: "/music",
LibraryID: 1,
}
resolution := resolver.resolvePath("../artist/album/track.mp3", folder)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.libraryID).To(Equal(1))
Expect(resolution.absolutePath).To(Equal("/music/artist/album/track.mp3"))
})
It("returns invalid resolution for paths outside any library", func() {
resolution := resolver.resolvePath("/outside/library/track.mp3", nil)
Expect(resolution.valid).To(BeFalse())
})
})
Describe("resolvePath", func() {
Context("With absolute paths", func() {
It("resolves path within a library", func() {
resolution := resolver.resolvePath("/music/track.mp3", nil)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.libraryID).To(Equal(1))
Expect(resolution.libraryPath).To(Equal("/music"))
Expect(resolution.absolutePath).To(Equal("/music/track.mp3"))
})
It("resolves path to the longest matching library", func() {
resolution := resolver.resolvePath("/music-classical/track.mp3", nil)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.libraryID).To(Equal(2))
Expect(resolution.libraryPath).To(Equal("/music-classical"))
})
It("returns invalid resolution for path outside libraries", func() {
resolution := resolver.resolvePath("/videos/movie.mp4", nil)
Expect(resolution.valid).To(BeFalse())
})
It("cleans the path before matching", func() {
resolution := resolver.resolvePath("/music//artist/../artist/track.mp3", nil)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.absolutePath).To(Equal("/music/artist/track.mp3"))
})
})
Context("With relative paths", func() {
It("resolves relative path within same library", func() {
folder := &model.Folder{
Path: "playlists",
LibraryPath: "/music",
LibraryID: 1,
}
resolution := resolver.resolvePath("../songs/track.mp3", folder)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.libraryID).To(Equal(1))
Expect(resolution.absolutePath).To(Equal("/music/songs/track.mp3"))
})
It("resolves relative path to different library", func() {
folder := &model.Folder{
Path: "playlists",
LibraryPath: "/music",
LibraryID: 1,
}
// Path goes up and into a different library
resolution := resolver.resolvePath("../../podcasts/episode.mp3", folder)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.libraryID).To(Equal(3))
Expect(resolution.libraryPath).To(Equal("/podcasts"))
})
It("uses matcher to find correct library for resolved path", func() {
folder := &model.Folder{
Path: "playlists",
LibraryPath: "/music",
LibraryID: 1,
}
// This relative path resolves to music-classical library
resolution := resolver.resolvePath("../../music-classical/track.mp3", folder)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.libraryID).To(Equal(2))
Expect(resolution.libraryPath).To(Equal("/music-classical"))
})
It("returns invalid for relative paths escaping all libraries", func() {
folder := &model.Folder{
Path: "playlists",
LibraryPath: "/music",
LibraryID: 1,
}
resolution := resolver.resolvePath("../../../../etc/passwd", folder)
Expect(resolution.valid).To(BeFalse())
})
})
})
Describe("Cross-library resolution scenarios", func() {
It("handles playlist in library A referencing file in library B", func() {
// Playlist is in /music/playlists
folder := &model.Folder{
Path: "playlists",
LibraryPath: "/music",
LibraryID: 1,
}
// Relative path that goes to /podcasts library
resolution := resolver.resolvePath("../../podcasts/show/episode.mp3", folder)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.libraryID).To(Equal(3), "Should resolve to podcasts library")
Expect(resolution.libraryPath).To(Equal("/podcasts"))
})
It("prefers longer library paths when resolving", func() {
// Ensure /music-classical is matched instead of /music
resolution := resolver.resolvePath("/music-classical/baroque/track.mp3", nil)
Expect(resolution.valid).To(BeTrue())
Expect(resolution.libraryID).To(Equal(2), "Should match /music-classical, not /music")
})
})
})
var _ = Describe("pathResolution", func() {
Describe("ToQualifiedString", func() {
It("converts valid resolution to qualified string with forward slashes", func() {
resolution := pathResolution{
absolutePath: "/music/artist/album/track.mp3",
libraryPath: "/music",
libraryID: 1,
valid: true,
}
qualifiedStr, err := resolution.ToQualifiedString()
Expect(err).ToNot(HaveOccurred())
Expect(qualifiedStr).To(Equal("1:artist/album/track.mp3"))
})
It("handles Windows-style paths by converting to forward slashes", func() {
resolution := pathResolution{
absolutePath: "/music/artist/album/track.mp3",
libraryPath: "/music",
libraryID: 2,
valid: true,
}
qualifiedStr, err := resolution.ToQualifiedString()
Expect(err).ToNot(HaveOccurred())
// Should always use forward slashes regardless of OS
Expect(qualifiedStr).To(ContainSubstring("2:"))
Expect(qualifiedStr).ToNot(ContainSubstring("\\"))
})
It("returns error for invalid resolution", func() {
resolution := pathResolution{valid: false}
_, err := resolution.ToQualifiedString()
Expect(err).To(HaveOccurred())
})
})
})

View File

@@ -1,4 +1,4 @@
package core_test
package core
import (
"context"
@@ -9,7 +9,6 @@ import (
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/conf/configtest"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/criteria"
"github.com/navidrome/navidrome/model/request"
@@ -21,7 +20,7 @@ import (
var _ = Describe("Playlists", func() {
var ds *tests.MockDataStore
var ps core.Playlists
var ps Playlists
var mockPlsRepo mockedPlaylistRepo
var mockLibRepo *tests.MockLibraryRepo
ctx := context.Background()
@@ -34,16 +33,16 @@ var _ = Describe("Playlists", func() {
MockedLibrary: mockLibRepo,
}
ctx = request.WithUser(ctx, model.User{ID: "123"})
// Path should be libPath, but we want to match the root folder referenced in the m3u, which is `/`
mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/"}})
})
Describe("ImportFile", func() {
var folder *model.Folder
BeforeEach(func() {
ps = core.NewPlaylists(ds)
ps = NewPlaylists(ds)
ds.MockedMediaFile = &mockedMediaFileRepo{}
libPath, _ := os.Getwd()
// Set up library with the actual library path that matches the folder
mockLibRepo.SetData([]model.Library{{ID: 1, Path: libPath}})
folder = &model.Folder{
ID: "1",
LibraryID: 1,
@@ -113,224 +112,6 @@ var _ = Describe("Playlists", func() {
Expect(err.Error()).To(ContainSubstring("line 19, column 1: invalid character '\\n'"))
})
})
Describe("Cross-library relative paths", func() {
var tmpDir, plsDir, songsDir string
BeforeEach(func() {
// Create temp directory structure
tmpDir = GinkgoT().TempDir()
plsDir = tmpDir + "/playlists"
songsDir = tmpDir + "/songs"
Expect(os.Mkdir(plsDir, 0755)).To(Succeed())
Expect(os.Mkdir(songsDir, 0755)).To(Succeed())
// Setup two different libraries with paths matching our temp structure
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: songsDir},
{ID: 2, Path: plsDir},
})
// Create a mock media file repository that returns files for both libraries
// Note: The paths are relative to their respective library roots
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
data: []string{
"abc.mp3", // This is songs/abc.mp3 relative to songsDir
"def.mp3", // This is playlists/def.mp3 relative to plsDir
},
}
ps = core.NewPlaylists(ds)
})
It("handles relative paths that reference files in other libraries", func() {
// Create a temporary playlist file with relative path
plsContent := "#PLAYLIST:Cross Library Test\n../songs/abc.mp3\ndef.mp3"
plsFile := plsDir + "/test.m3u"
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
// Playlist is in the Playlists library folder
// Important: Path should be relative to LibraryPath, and Name is the folder name
plsFolder := &model.Folder{
ID: "2",
LibraryID: 2,
LibraryPath: plsDir,
Path: "",
Name: "",
}
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
Expect(err).ToNot(HaveOccurred())
Expect(pls.Tracks).To(HaveLen(2))
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3")) // From songsDir library
Expect(pls.Tracks[1].Path).To(Equal("def.mp3")) // From plsDir library
})
It("ignores paths that point outside all libraries", func() {
// Create a temporary playlist file with path outside libraries
plsContent := "#PLAYLIST:Outside Test\n../../outside.mp3\nabc.mp3"
plsFile := plsDir + "/test.m3u"
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
plsFolder := &model.Folder{
ID: "2",
LibraryID: 2,
LibraryPath: plsDir,
Path: "",
Name: "",
}
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
Expect(err).ToNot(HaveOccurred())
// Should only find abc.mp3, not outside.mp3
Expect(pls.Tracks).To(HaveLen(1))
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3"))
})
It("handles relative paths with multiple '../' components", func() {
// Create a nested structure: tmpDir/playlists/subfolder/test.m3u
subFolder := plsDir + "/subfolder"
Expect(os.Mkdir(subFolder, 0755)).To(Succeed())
// Create the media file in the subfolder directory
// The mock will return it as "def.mp3" relative to plsDir
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
data: []string{
"abc.mp3", // From songsDir library
"def.mp3", // From plsDir library root
},
}
// From subfolder, ../../songs/abc.mp3 should resolve to songs library
// ../def.mp3 should resolve to plsDir/def.mp3
plsContent := "#PLAYLIST:Nested Test\n../../songs/abc.mp3\n../def.mp3"
plsFile := subFolder + "/test.m3u"
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
// The folder: AbsolutePath = LibraryPath + Path + Name
// So for /playlists/subfolder: LibraryPath=/playlists, Path="", Name="subfolder"
plsFolder := &model.Folder{
ID: "2",
LibraryID: 2,
LibraryPath: plsDir,
Path: "", // Empty because subfolder is directly under library root
Name: "subfolder", // The folder name
}
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
Expect(err).ToNot(HaveOccurred())
Expect(pls.Tracks).To(HaveLen(2))
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3")) // From songsDir library
Expect(pls.Tracks[1].Path).To(Equal("def.mp3")) // From plsDir library root
})
It("correctly resolves libraries when one path is a prefix of another", func() {
// This tests the bug where /music would match before /music-classical
// Create temp directory structure with prefix conflict
tmpDir := GinkgoT().TempDir()
musicDir := tmpDir + "/music"
musicClassicalDir := tmpDir + "/music-classical"
Expect(os.Mkdir(musicDir, 0755)).To(Succeed())
Expect(os.Mkdir(musicClassicalDir, 0755)).To(Succeed())
// Setup two libraries where one is a prefix of the other
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: musicDir}, // /tmp/xxx/music
{ID: 2, Path: musicClassicalDir}, // /tmp/xxx/music-classical
})
// Mock will return tracks from both libraries
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
data: []string{
"rock.mp3", // From music library
"bach.mp3", // From music-classical library
},
}
// Create playlist in music library that references music-classical
plsContent := "#PLAYLIST:Cross Prefix Test\nrock.mp3\n../music-classical/bach.mp3"
plsFile := musicDir + "/test.m3u"
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
plsFolder := &model.Folder{
ID: "1",
LibraryID: 1,
LibraryPath: musicDir,
Path: "",
Name: "",
}
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
Expect(err).ToNot(HaveOccurred())
Expect(pls.Tracks).To(HaveLen(2))
Expect(pls.Tracks[0].Path).To(Equal("rock.mp3")) // From music library
Expect(pls.Tracks[1].Path).To(Equal("bach.mp3")) // From music-classical library (not music!)
})
It("correctly handles identical relative paths from different libraries", func() {
// This tests the bug where two libraries have files at the same relative path
// and only one appears in the playlist
tmpDir := GinkgoT().TempDir()
musicDir := tmpDir + "/music"
classicalDir := tmpDir + "/classical"
Expect(os.Mkdir(musicDir, 0755)).To(Succeed())
Expect(os.Mkdir(classicalDir, 0755)).To(Succeed())
Expect(os.MkdirAll(musicDir+"/album", 0755)).To(Succeed())
Expect(os.MkdirAll(classicalDir+"/album", 0755)).To(Succeed())
// Create placeholder files so paths resolve correctly
Expect(os.WriteFile(musicDir+"/album/track.mp3", []byte{}, 0600)).To(Succeed())
Expect(os.WriteFile(classicalDir+"/album/track.mp3", []byte{}, 0600)).To(Succeed())
// Both libraries have a file at "album/track.mp3"
mockLibRepo.SetData([]model.Library{
{ID: 1, Path: musicDir},
{ID: 2, Path: classicalDir},
})
// Mock returns files with same relative path but different IDs and library IDs
// Keys use the library-qualified format: "libraryID:path"
ds.MockedMediaFile = &mockedMediaFileRepo{
data: map[string]model.MediaFile{
"1:album/track.mp3": {ID: "music-track", Path: "album/track.mp3", LibraryID: 1, Title: "Rock Song"},
"2:album/track.mp3": {ID: "classical-track", Path: "album/track.mp3", LibraryID: 2, Title: "Classical Piece"},
},
}
// Recreate playlists service to pick up new mock
ps = core.NewPlaylists(ds)
// Create playlist in music library that references both tracks
plsContent := "#PLAYLIST:Same Path Test\nalbum/track.mp3\n../classical/album/track.mp3"
plsFile := musicDir + "/test.m3u"
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
plsFolder := &model.Folder{
ID: "1",
LibraryID: 1,
LibraryPath: musicDir,
Path: "",
Name: "",
}
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
Expect(err).ToNot(HaveOccurred())
// Should have BOTH tracks, not just one
Expect(pls.Tracks).To(HaveLen(2), "Playlist should contain both tracks with same relative path")
// Verify we got tracks from DIFFERENT libraries (the key fix!)
// Collect the library IDs
libIDs := make(map[int]bool)
for _, track := range pls.Tracks {
libIDs[track.LibraryID] = true
}
Expect(libIDs).To(HaveLen(2), "Tracks should come from two different libraries")
Expect(libIDs[1]).To(BeTrue(), "Should have track from library 1")
Expect(libIDs[2]).To(BeTrue(), "Should have track from library 2")
// Both tracks should have the same relative path
Expect(pls.Tracks[0].Path).To(Equal("album/track.mp3"))
Expect(pls.Tracks[1].Path).To(Equal("album/track.mp3"))
})
})
})
Describe("ImportM3U", func() {
@@ -338,7 +119,7 @@ var _ = Describe("Playlists", func() {
BeforeEach(func() {
repo = &mockedMediaFileFromListRepo{}
ds.MockedMediaFile = repo
ps = core.NewPlaylists(ds)
ps = NewPlaylists(ds)
mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/music"}, {ID: 2, Path: "/new"}})
ctx = request.WithUser(ctx, model.User{ID: "123"})
})
@@ -425,23 +206,53 @@ var _ = Describe("Playlists", func() {
Expect(pls.Tracks[0].Path).To(Equal("abc/tEsT1.Mp3"))
})
It("handles Unicode normalization when comparing paths (NFD vs NFC)", func() {
// Simulate macOS filesystem: stores paths in NFD (decomposed) form
// "è" (U+00E8) in NFC becomes "e" + "◌̀" (U+0065 + U+0300) in NFD
nfdPath := "artist/Mich" + string([]rune{'e', '\u0300'}) + "le/song.mp3" // NFD: e + combining grave
It("handles Unicode normalization when comparing paths", func() {
// Test case for Apple Music playlists that use NFC encoding vs macOS filesystem NFD
// The character "è" can be represented as NFC (single codepoint) or NFD (e + combining accent)
const pathWithAccents = "artist/Michèle Desrosiers/album/Noël.m4a"
// Simulate a database entry with NFD encoding (as stored by macOS filesystem)
nfdPath := norm.NFD.String(pathWithAccents)
repo.data = []string{nfdPath}
// Simulate Apple Music M3U: uses NFC (composed) form
nfcPath := "/music/artist/Mich\u00E8le/song.mp3" // NFC: single è character
m3u := nfcPath + "\n"
// Simulate an Apple Music M3U playlist entry with NFC encoding
nfcPath := norm.NFC.String("/music/" + pathWithAccents)
m3u := strings.Join([]string{
nfcPath,
}, "\n")
f := strings.NewReader(m3u)
pls, err := ps.ImportM3U(ctx, f)
Expect(err).ToNot(HaveOccurred())
Expect(pls.Tracks).To(HaveLen(1))
// Should match despite different Unicode normalization forms
Expect(pls.Tracks).To(HaveLen(1), "Should find the track despite Unicode normalization differences")
Expect(pls.Tracks[0].Path).To(Equal(nfdPath))
})
})
Describe("normalizePathForComparison", func() {
It("normalizes Unicode characters to NFC form and converts to lowercase", func() {
// Test with NFD (decomposed) input - as would come from macOS filesystem
nfdPath := norm.NFD.String("Michèle") // Explicitly convert to NFD form
normalized := normalizePathForComparison(nfdPath)
Expect(normalized).To(Equal("michèle"))
// Test with NFC (composed) input - as would come from Apple Music M3U
nfcPath := "Michèle" // This might be in NFC form
normalizedNfc := normalizePathForComparison(nfcPath)
// Ensure the two paths are not equal in their original forms
Expect(nfdPath).ToNot(Equal(nfcPath))
// Both should normalize to the same result
Expect(normalized).To(Equal(normalizedNfc))
})
It("handles paths with mixed case and Unicode characters", func() {
path := "Artist/Noël Coward/Album/Song.mp3"
normalized := normalizePathForComparison(path)
Expect(normalized).To(Equal("artist/noël coward/album/song.mp3"))
})
})
Describe("InPlaylistsPath", func() {
@@ -458,27 +269,27 @@ var _ = Describe("Playlists", func() {
It("returns true if PlaylistsPath is empty", func() {
conf.Server.PlaylistsPath = ""
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
Expect(InPlaylistsPath(folder)).To(BeTrue())
})
It("returns true if PlaylistsPath is any (**/**)", func() {
conf.Server.PlaylistsPath = "**/**"
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
Expect(InPlaylistsPath(folder)).To(BeTrue())
})
It("returns true if folder is in PlaylistsPath", func() {
conf.Server.PlaylistsPath = "other/**:playlists/**"
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
Expect(InPlaylistsPath(folder)).To(BeTrue())
})
It("returns false if folder is not in PlaylistsPath", func() {
conf.Server.PlaylistsPath = "other"
Expect(core.InPlaylistsPath(folder)).To(BeFalse())
Expect(InPlaylistsPath(folder)).To(BeFalse())
})
It("returns true if for a playlist in root of MusicFolder if PlaylistsPath is '.'", func() {
conf.Server.PlaylistsPath = "."
Expect(core.InPlaylistsPath(folder)).To(BeFalse())
Expect(InPlaylistsPath(folder)).To(BeFalse())
folder2 := model.Folder{
LibraryPath: "/music",
@@ -486,47 +297,22 @@ var _ = Describe("Playlists", func() {
Name: ".",
}
Expect(core.InPlaylistsPath(folder2)).To(BeTrue())
Expect(InPlaylistsPath(folder2)).To(BeTrue())
})
})
})
// mockedMediaFileRepo's FindByPaths method returns MediaFiles for the given paths.
// If data map is provided, looks up files by key; otherwise creates them from paths.
// mockedMediaFileRepo's FindByPaths method returns a list of MediaFiles with the same paths as the input
type mockedMediaFileRepo struct {
model.MediaFileRepository
data map[string]model.MediaFile
}
func (r *mockedMediaFileRepo) FindByPaths(paths []string) (model.MediaFiles, error) {
var mfs model.MediaFiles
// If data map provided, look up files
if r.data != nil {
for _, path := range paths {
if mf, ok := r.data[path]; ok {
mfs = append(mfs, mf)
}
}
return mfs, nil
}
// Otherwise, create MediaFiles from paths
for idx, path := range paths {
// Strip library qualifier if present (format: "libraryID:path")
actualPath := path
libraryID := 1
if parts := strings.SplitN(path, ":", 2); len(parts) == 2 {
if id, err := strconv.Atoi(parts[0]); err == nil {
libraryID = id
actualPath = parts[1]
}
}
mfs = append(mfs, model.MediaFile{
ID: strconv.Itoa(idx),
Path: actualPath,
LibraryID: libraryID,
ID: strconv.Itoa(idx),
Path: path,
})
}
return mfs, nil
@@ -538,38 +324,13 @@ type mockedMediaFileFromListRepo struct {
data []string
}
func (r *mockedMediaFileFromListRepo) FindByPaths(paths []string) (model.MediaFiles, error) {
func (r *mockedMediaFileFromListRepo) FindByPaths([]string) (model.MediaFiles, error) {
var mfs model.MediaFiles
for idx, dataPath := range r.data {
// Normalize the data path to NFD (simulates macOS filesystem storage)
normalizedDataPath := norm.NFD.String(dataPath)
for _, requestPath := range paths {
// Strip library qualifier if present (format: "libraryID:path")
actualPath := requestPath
libraryID := 1
if parts := strings.SplitN(requestPath, ":", 2); len(parts) == 2 {
if id, err := strconv.Atoi(parts[0]); err == nil {
libraryID = id
actualPath = parts[1]
}
}
// The request path should already be normalized to NFD by production code
// before calling FindByPaths (to match DB storage)
normalizedRequestPath := norm.NFD.String(actualPath)
// Case-insensitive comparison (like SQL's "collate nocase")
if strings.EqualFold(normalizedRequestPath, normalizedDataPath) {
mfs = append(mfs, model.MediaFile{
ID: strconv.Itoa(idx),
Path: dataPath, // Return original path from DB
LibraryID: libraryID,
})
break
}
}
for idx, path := range r.data {
mfs = append(mfs, model.MediaFile{
ID: strconv.Itoa(idx),
Path: path,
})
}
return mfs, nil
}
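The normalization tests above hinge on an accented character having two byte-level encodings (NFC vs NFD) that compare unequal as plain strings. A minimal standalone sketch of the comparison the mocks simulate, using golang.org/x/text/unicode/norm:

package main

import (
	"fmt"
	"strings"

	"golang.org/x/text/unicode/norm"
)

func main() {
	nfc := "Mich\u00e8le"       // composed form: single U+00E8
	nfd := norm.NFD.String(nfc) // decomposed form: 'e' followed by U+0300

	fmt.Println(nfc == nfd)                                   // false: different bytes
	fmt.Println(norm.NFC.String(nfd) == nfc)                  // true after normalizing
	fmt.Println(strings.EqualFold(norm.NFD.String(nfc), nfd)) // true: normalize, then compare case-insensitively
}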

View File

@@ -1,81 +0,0 @@
package publicurl
import (
"cmp"
"net/http"
"net/url"
"path"
"strconv"
"strings"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/core/auth"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
)
// ImageURL generates a public URL for artwork images.
// It creates a signed token for the artwork ID and builds a complete public URL.
func ImageURL(req *http.Request, artID model.ArtworkID, size int) string {
token, _ := auth.CreatePublicToken(map[string]any{"id": artID.String()})
uri := path.Join(consts.URLPathPublicImages, token)
params := url.Values{}
if size > 0 {
params.Add("size", strconv.Itoa(size))
}
return PublicURL(req, uri, params)
}
// PublicURL builds a full URL for public-facing resources.
// It uses ShareURL from config if available, otherwise falls back to extracting
// the scheme and host from the provided http.Request.
// If req is nil and ShareURL is not set, it defaults to http://localhost.
func PublicURL(req *http.Request, u string, params url.Values) string {
if conf.Server.ShareURL == "" {
return AbsoluteURL(req, u, params)
}
shareUrl, err := url.Parse(conf.Server.ShareURL)
if err != nil {
return AbsoluteURL(req, u, params)
}
buildUrl, err := url.Parse(u)
if err != nil {
return AbsoluteURL(req, u, params)
}
buildUrl.Scheme = shareUrl.Scheme
buildUrl.Host = shareUrl.Host
if len(params) > 0 {
buildUrl.RawQuery = params.Encode()
}
return buildUrl.String()
}
// AbsoluteURL builds an absolute URL from a relative path.
// It uses BaseHost/BaseScheme from config if available, otherwise extracts
// the scheme and host from the http.Request.
// If req is nil and BaseHost is not set, it defaults to http://localhost.
func AbsoluteURL(req *http.Request, u string, params url.Values) string {
buildUrl, err := url.Parse(u)
if err != nil {
log.Error(req.Context(), "Failed to parse URL path", "url", u, err)
return ""
}
if strings.HasPrefix(u, "/") {
buildUrl.Path = path.Join(conf.Server.BasePath, buildUrl.Path)
if conf.Server.BaseHost != "" {
buildUrl.Scheme = cmp.Or(conf.Server.BaseScheme, "http")
buildUrl.Host = conf.Server.BaseHost
} else if req != nil {
buildUrl.Scheme = req.URL.Scheme
buildUrl.Host = req.Host
} else {
buildUrl.Scheme = "http"
buildUrl.Host = "localhost"
}
}
if len(params) > 0 {
buildUrl.RawQuery = params.Encode()
}
return buildUrl.String()
}

View File

@@ -1,174 +0,0 @@
package publicurl_test
import (
"net/http"
"net/url"
"testing"
"github.com/go-chi/jwtauth/v5"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/conf/configtest"
"github.com/navidrome/navidrome/core/auth"
"github.com/navidrome/navidrome/core/publicurl"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
func TestPublicURL(t *testing.T) {
tests.Init(t, false)
log.SetLevel(log.LevelFatal)
RegisterFailHandler(Fail)
RunSpecs(t, "Public URL Suite")
}
var _ = Describe("Public URL Utilities", func() {
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
})
Describe("PublicURL", func() {
When("ShareURL is set", func() {
BeforeEach(func() {
conf.Server.ShareURL = "https://share.example.com"
})
It("uses ShareURL as the base", func() {
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
result := publicurl.PublicURL(r, "/path/to/resource", nil)
Expect(result).To(Equal("https://share.example.com/path/to/resource"))
})
It("includes query parameters", func() {
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
params := url.Values{"size": []string{"300"}, "format": []string{"png"}}
result := publicurl.PublicURL(r, "/image/123", params)
Expect(result).To(ContainSubstring("https://share.example.com/image/123"))
Expect(result).To(ContainSubstring("size=300"))
Expect(result).To(ContainSubstring("format=png"))
})
It("works without a request", func() {
result := publicurl.PublicURL(nil, "/path/to/resource", nil)
Expect(result).To(Equal("https://share.example.com/path/to/resource"))
})
})
When("ShareURL is not set", func() {
BeforeEach(func() {
conf.Server.ShareURL = ""
})
It("falls back to AbsoluteURL with request", func() {
r, _ := http.NewRequest("GET", "https://myserver.com/test", nil)
r.Host = "myserver.com"
result := publicurl.PublicURL(r, "/path/to/resource", nil)
Expect(result).To(Equal("https://myserver.com/path/to/resource"))
})
It("falls back to localhost without request", func() {
result := publicurl.PublicURL(nil, "/path/to/resource", nil)
Expect(result).To(Equal("http://localhost/path/to/resource"))
})
})
})
Describe("AbsoluteURL", func() {
When("BaseHost is set", func() {
BeforeEach(func() {
conf.Server.BaseHost = "configured.example.com"
conf.Server.BaseScheme = "https"
conf.Server.BasePath = ""
})
It("uses BaseHost and BaseScheme", func() {
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
result := publicurl.AbsoluteURL(r, "/path/to/resource", nil)
Expect(result).To(Equal("https://configured.example.com/path/to/resource"))
})
It("defaults to http scheme if BaseScheme is empty", func() {
conf.Server.BaseScheme = ""
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
result := publicurl.AbsoluteURL(r, "/path/to/resource", nil)
Expect(result).To(Equal("http://configured.example.com/path/to/resource"))
})
})
When("BaseHost is not set", func() {
BeforeEach(func() {
conf.Server.BaseHost = ""
conf.Server.BasePath = ""
})
It("extracts host from request", func() {
r, _ := http.NewRequest("GET", "https://request.example.com/test", nil)
r.Host = "request.example.com"
result := publicurl.AbsoluteURL(r, "/path/to/resource", nil)
Expect(result).To(Equal("https://request.example.com/path/to/resource"))
})
It("falls back to localhost without request", func() {
result := publicurl.AbsoluteURL(nil, "/path/to/resource", nil)
Expect(result).To(Equal("http://localhost/path/to/resource"))
})
})
When("BasePath is set", func() {
BeforeEach(func() {
conf.Server.BasePath = "/navidrome"
conf.Server.BaseHost = "example.com"
conf.Server.BaseScheme = "https"
})
It("prepends BasePath to the URL", func() {
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
result := publicurl.AbsoluteURL(r, "/path/to/resource", nil)
Expect(result).To(Equal("https://example.com/navidrome/path/to/resource"))
})
})
It("passes through absolute URLs unchanged", func() {
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
result := publicurl.AbsoluteURL(r, "https://other.example.com/path", nil)
Expect(result).To(Equal("https://other.example.com/path"))
})
It("includes query parameters", func() {
conf.Server.BaseHost = "example.com"
conf.Server.BaseScheme = "https"
r, _ := http.NewRequest("GET", "http://localhost/test", nil)
params := url.Values{"key": []string{"value"}}
result := publicurl.AbsoluteURL(r, "/path", params)
Expect(result).To(Equal("https://example.com/path?key=value"))
})
})
Describe("ImageURL", func() {
BeforeEach(func() {
conf.Server.ShareURL = "https://share.example.com"
// Initialize JWT auth for token generation
auth.TokenAuth = jwtauth.New("HS256", []byte("test secret"), nil)
})
It("generates a URL with the artwork token", func() {
artID := model.NewArtworkID(model.KindAlbumArtwork, "album-123", nil)
result := publicurl.ImageURL(nil, artID, 0)
Expect(result).To(HavePrefix("https://share.example.com/share/img/"))
})
It("includes size parameter when provided", func() {
artID := model.NewArtworkID(model.KindArtistArtwork, "artist-1", nil)
result := publicurl.ImageURL(nil, artID, 300)
Expect(result).To(ContainSubstring("size=300"))
})
It("omits size parameter when zero", func() {
artID := model.NewArtworkID(model.KindMediaFileArtwork, "track-1", nil)
result := publicurl.ImageURL(nil, artID, 0)
Expect(result).ToNot(ContainSubstring("size="))
})
})
})

View File

@@ -38,9 +38,9 @@ var _ = Describe("BufferedScrobbler", func() {
It("forwards NowPlaying calls", func() {
track := &model.MediaFile{ID: "123", Title: "Test Track"}
Expect(bs.NowPlaying(ctx, "user1", track, 0)).To(Succeed())
Expect(scr.GetNowPlayingCalled()).To(BeTrue())
Expect(scr.GetUserID()).To(Equal("user1"))
Expect(scr.GetTrack()).To(Equal(track))
Expect(scr.NowPlayingCalled).To(BeTrue())
Expect(scr.UserID).To(Equal("user1"))
Expect(scr.Track).To(Equal(track))
})
It("enqueues scrobbles to buffer", func() {
@@ -51,10 +51,9 @@ var _ = Describe("BufferedScrobbler", func() {
Expect(scr.ScrobbleCalled.Load()).To(BeFalse())
Expect(bs.Scrobble(ctx, "user1", scrobble)).To(Succeed())
Expect(buffer.Length()).To(Equal(int64(1)))
// Wait for the background goroutine to process the scrobble.
// We don't check buffer.Length() here because the background goroutine
// may dequeue the entry before we can observe it.
// Wait for the scrobble to be sent
Eventually(scr.ScrobbleCalled.Load).Should(BeTrue())
lastScrobble := scr.LastScrobble.Load()

View File

@@ -31,13 +31,6 @@ type Submission struct {
Timestamp time.Time
}
type nowPlayingEntry struct {
ctx context.Context
userId string
track *model.MediaFile
position int
}
type PlayTracker interface {
NowPlaying(ctx context.Context, playerId string, playerName string, trackId string, position int) error
GetNowPlaying(ctx context.Context) ([]NowPlayingInfo, error)
@@ -59,11 +52,6 @@ type playTracker struct {
pluginScrobblers map[string]Scrobbler
pluginLoader PluginLoader
mu sync.RWMutex
npQueue map[string]nowPlayingEntry
npMu sync.Mutex
npSignal chan struct{}
shutdown chan struct{}
workerDone chan struct{}
}
func GetPlayTracker(ds model.DataStore, broker events.Broker, pluginManager PluginLoader) PlayTracker {
@@ -83,10 +71,6 @@ func newPlayTracker(ds model.DataStore, broker events.Broker, pluginManager Plug
builtinScrobblers: make(map[string]Scrobbler),
pluginScrobblers: make(map[string]Scrobbler),
pluginLoader: pluginManager,
npQueue: make(map[string]nowPlayingEntry),
npSignal: make(chan struct{}, 1),
shutdown: make(chan struct{}),
workerDone: make(chan struct{}),
}
if conf.Server.EnableNowPlaying {
m.OnExpiration(func(_ string, _ NowPlayingInfo) {
@@ -106,16 +90,9 @@ func newPlayTracker(ds model.DataStore, broker events.Broker, pluginManager Plug
p.builtinScrobblers[name] = s
}
log.Debug("List of builtin scrobblers enabled", "names", enabled)
go p.nowPlayingWorker()
return p
}
// stopNowPlayingWorker stops the background worker. This is primarily for testing.
func (p *playTracker) stopNowPlayingWorker() {
close(p.shutdown)
<-p.workerDone // Wait for worker to finish
}
// pluginNamesMatchScrobblers returns true if the set of pluginNames matches the keys in pluginScrobblers
func pluginNamesMatchScrobblers(pluginNames []string, scrobblers map[string]Scrobbler) bool {
if len(pluginNames) != len(scrobblers) {
@@ -221,60 +198,11 @@ func (p *playTracker) NowPlaying(ctx context.Context, playerId string, playerNam
}
player, _ := request.PlayerFrom(ctx)
if player.ScrobbleEnabled {
p.enqueueNowPlaying(ctx, playerId, user.ID, mf, position)
p.dispatchNowPlaying(ctx, user.ID, mf, position)
}
return nil
}
func (p *playTracker) enqueueNowPlaying(ctx context.Context, playerId string, userId string, track *model.MediaFile, position int) {
p.npMu.Lock()
defer p.npMu.Unlock()
ctx = context.WithoutCancel(ctx) // Prevent cancellation from affecting background processing
p.npQueue[playerId] = nowPlayingEntry{
ctx: ctx,
userId: userId,
track: track,
position: position,
}
p.sendNowPlayingSignal()
}
func (p *playTracker) sendNowPlayingSignal() {
// Don't block if the previous signal was not read yet
select {
case p.npSignal <- struct{}{}:
default:
}
}
func (p *playTracker) nowPlayingWorker() {
defer close(p.workerDone)
for {
select {
case <-p.shutdown:
return
case <-time.After(time.Second):
case <-p.npSignal:
}
p.npMu.Lock()
if len(p.npQueue) == 0 {
p.npMu.Unlock()
continue
}
// Keep a copy of the entries to process and clear the queue
entries := p.npQueue
p.npQueue = make(map[string]nowPlayingEntry)
p.npMu.Unlock()
// Process entries without holding lock
for _, entry := range entries {
p.dispatchNowPlaying(entry.ctx, entry.userId, entry.track, entry.position)
}
}
}
func (p *playTracker) dispatchNowPlaying(ctx context.Context, userId string, t *model.MediaFile, position int) {
if t.Artist == consts.UnknownArtist {
log.Debug(ctx, "Ignoring external NowPlaying update for track with unknown artist", "track", t.Title, "artist", t.Artist)
@@ -348,14 +276,8 @@ func (p *playTracker) incPlay(ctx context.Context, track *model.MediaFile, times
}
for _, artist := range track.Participants[model.RoleArtist] {
err = tx.Artist(ctx).IncPlayCount(artist.ID, timestamp)
if err != nil {
return err
}
}
if conf.Server.EnableScrobbleHistory {
return tx.Scrobble(ctx).RecordScrobble(track.ID, timestamp)
}
return nil
return err
})
}

View File

@@ -24,26 +24,15 @@ import (
// Moved to top-level scope to avoid linter issues
type mockPluginLoader struct {
mu sync.RWMutex
names []string
scrobblers map[string]Scrobbler
}
func (m *mockPluginLoader) PluginNames(service string) []string {
m.mu.RLock()
defer m.mu.RUnlock()
return m.names
}
func (m *mockPluginLoader) SetNames(names []string) {
m.mu.Lock()
defer m.mu.Unlock()
m.names = names
}
func (m *mockPluginLoader) LoadScrobbler(name string) (Scrobbler, bool) {
m.mu.RLock()
defer m.mu.RUnlock()
s, ok := m.scrobblers[name]
return s, ok
}
@@ -57,24 +46,24 @@ var _ = Describe("PlayTracker", func() {
var album model.Album
var artist1 model.Artist
var artist2 model.Artist
var fake *fakeScrobbler
var fake fakeScrobbler
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
ctx = GinkgoT().Context()
ctx = context.Background()
ctx = request.WithUser(ctx, model.User{ID: "u-1"})
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
ds = &tests.MockDataStore{}
fake = &fakeScrobbler{Authorized: true}
fake = fakeScrobbler{Authorized: true}
Register("fake", func(model.DataStore) Scrobbler {
return fake
return &fake
})
Register("disabled", func(model.DataStore) Scrobbler {
return nil
})
eventBroker = &fakeEventBroker{}
tracker = newPlayTracker(ds, eventBroker, nil)
tracker.(*playTracker).builtinScrobblers["fake"] = fake // Bypass buffering for tests
tracker.(*playTracker).builtinScrobblers["fake"] = &fake // Bypass buffering for tests
track = model.MediaFile{
ID: "123",
@@ -97,11 +86,6 @@ var _ = Describe("PlayTracker", func() {
_ = ds.Album(ctx).(*tests.MockAlbumRepo).Put(&album)
})
AfterEach(func() {
// Stop the worker goroutine to prevent data races between tests
tracker.(*playTracker).stopNowPlayingWorker()
})
It("does not register disabled scrobblers", func() {
Expect(tracker.(*playTracker).builtinScrobblers).To(HaveKey("fake"))
Expect(tracker.(*playTracker).builtinScrobblers).ToNot(HaveKey("disabled"))
@@ -111,10 +95,10 @@ var _ = Describe("PlayTracker", func() {
It("sends track to agent", func() {
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
Expect(err).ToNot(HaveOccurred())
Eventually(func() bool { return fake.GetNowPlayingCalled() }).Should(BeTrue())
Expect(fake.GetUserID()).To(Equal("u-1"))
Expect(fake.GetTrack().ID).To(Equal("123"))
Expect(fake.GetTrack().Participants).To(Equal(track.Participants))
Expect(fake.NowPlayingCalled).To(BeTrue())
Expect(fake.UserID).To(Equal("u-1"))
Expect(fake.Track.ID).To(Equal("123"))
Expect(fake.Track.Participants).To(Equal(track.Participants))
})
It("does not send track to agent if user has not authorized", func() {
fake.Authorized = false
@@ -122,7 +106,7 @@ var _ = Describe("PlayTracker", func() {
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
Expect(err).ToNot(HaveOccurred())
Expect(fake.GetNowPlayingCalled()).To(BeFalse())
Expect(fake.NowPlayingCalled).To(BeFalse())
})
It("does not send track to agent if player is not enabled to send scrobbles", func() {
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: false})
@@ -130,7 +114,7 @@ var _ = Describe("PlayTracker", func() {
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
Expect(err).ToNot(HaveOccurred())
Expect(fake.GetNowPlayingCalled()).To(BeFalse())
Expect(fake.NowPlayingCalled).To(BeFalse())
})
It("does not send track to agent if artist is unknown", func() {
track.Artist = consts.UnknownArtist
@@ -138,7 +122,7 @@ var _ = Describe("PlayTracker", func() {
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
Expect(err).ToNot(HaveOccurred())
Expect(fake.GetNowPlayingCalled()).To(BeFalse())
Expect(fake.NowPlayingCalled).To(BeFalse())
})
It("stores position when greater than zero", func() {
@@ -146,12 +130,11 @@ var _ = Describe("PlayTracker", func() {
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", pos)
Expect(err).ToNot(HaveOccurred())
Eventually(func() int { return fake.GetPosition() }).Should(Equal(pos))
playing, err := tracker.GetNowPlaying(ctx)
Expect(err).ToNot(HaveOccurred())
Expect(playing).To(HaveLen(1))
Expect(playing[0].Position).To(Equal(pos))
Expect(fake.Position).To(Equal(pos))
})
It("sends event with count", func() {
@@ -170,17 +153,6 @@ var _ = Describe("PlayTracker", func() {
Expect(err).ToNot(HaveOccurred())
Expect(eventBroker.getEvents()).To(BeEmpty())
})
It("passes user to scrobbler via context (fix for issue #4787)", func() {
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "testuser"})
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
Expect(err).ToNot(HaveOccurred())
Eventually(func() bool { return fake.GetNowPlayingCalled() }).Should(BeTrue())
// Verify the username was passed through async dispatch via context
Eventually(func() string { return fake.GetUsername() }).Should(Equal("testuser"))
})
})
Describe("GetNowPlaying", func() {
@@ -188,9 +160,9 @@ var _ = Describe("PlayTracker", func() {
track2 := track
track2.ID = "456"
_ = ds.MediaFile(ctx).Put(&track2)
ctx = request.WithUser(GinkgoT().Context(), model.User{UserName: "user-1"})
ctx = request.WithUser(context.Background(), model.User{UserName: "user-1"})
_ = tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
ctx = request.WithUser(GinkgoT().Context(), model.User{UserName: "user-2"})
ctx = request.WithUser(context.Background(), model.User{UserName: "user-2"})
_ = tracker.NowPlaying(ctx, "player-2", "player-two", "456", 0)
playing, err := tracker.GetNowPlaying(ctx)
@@ -238,7 +210,7 @@ var _ = Describe("PlayTracker", func() {
Expect(err).ToNot(HaveOccurred())
Expect(fake.ScrobbleCalled.Load()).To(BeTrue())
Expect(fake.GetUserID()).To(Equal("u-1"))
Expect(fake.UserID).To(Equal("u-1"))
lastScrobble := fake.LastScrobble.Load()
Expect(lastScrobble.TimeStamp).To(BeTemporally("~", ts, 1*time.Second))
Expect(lastScrobble.ID).To(Equal("123"))
@@ -302,82 +274,49 @@ var _ = Describe("PlayTracker", func() {
Expect(artist1.PlayCount).To(Equal(int64(1)))
Expect(artist2.PlayCount).To(Equal(int64(1)))
})
Context("Scrobble History", func() {
It("records scrobble in repository", func() {
conf.Server.EnableScrobbleHistory = true
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
ts := time.Now()
err := tracker.Submit(ctx, []Submission{{TrackID: "123", Timestamp: ts}})
Expect(err).ToNot(HaveOccurred())
mockDS := ds.(*tests.MockDataStore)
mockScrobble := mockDS.Scrobble(ctx).(*tests.MockScrobbleRepo)
Expect(mockScrobble.RecordedScrobbles).To(HaveLen(1))
Expect(mockScrobble.RecordedScrobbles[0].MediaFileID).To(Equal("123"))
Expect(mockScrobble.RecordedScrobbles[0].UserID).To(Equal("u-1"))
Expect(mockScrobble.RecordedScrobbles[0].SubmissionTime).To(Equal(ts))
})
It("does not record scrobble when history is disabled", func() {
conf.Server.EnableScrobbleHistory = false
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
ts := time.Now()
err := tracker.Submit(ctx, []Submission{{TrackID: "123", Timestamp: ts}})
Expect(err).ToNot(HaveOccurred())
mockDS := ds.(*tests.MockDataStore)
mockScrobble := mockDS.Scrobble(ctx).(*tests.MockScrobbleRepo)
Expect(mockScrobble.RecordedScrobbles).To(HaveLen(0))
})
})
})
Describe("Plugin scrobbler logic", func() {
var pluginLoader *mockPluginLoader
var pluginFake *fakeScrobbler
var pluginFake fakeScrobbler
BeforeEach(func() {
pluginFake = &fakeScrobbler{Authorized: true}
pluginFake = fakeScrobbler{Authorized: true}
pluginLoader = &mockPluginLoader{
names: []string{"plugin1"},
scrobblers: map[string]Scrobbler{"plugin1": pluginFake},
scrobblers: map[string]Scrobbler{"plugin1": &pluginFake},
}
tracker = newPlayTracker(ds, events.GetBroker(), pluginLoader)
// Bypass buffering for both built-in and plugin scrobblers
tracker.(*playTracker).builtinScrobblers["fake"] = fake
tracker.(*playTracker).pluginScrobblers["plugin1"] = pluginFake
tracker.(*playTracker).builtinScrobblers["fake"] = &fake
tracker.(*playTracker).pluginScrobblers["plugin1"] = &pluginFake
})
It("registers and uses plugin scrobbler for NowPlaying", func() {
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
Expect(err).ToNot(HaveOccurred())
Eventually(func() bool { return pluginFake.GetNowPlayingCalled() }).Should(BeTrue())
Expect(pluginFake.NowPlayingCalled).To(BeTrue())
})
It("removes plugin scrobbler if not present anymore", func() {
// First call: plugin present
_ = tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
Eventually(func() bool { return pluginFake.GetNowPlayingCalled() }).Should(BeTrue())
pluginFake.nowPlayingCalled.Store(false)
Expect(pluginFake.NowPlayingCalled).To(BeTrue())
pluginFake.NowPlayingCalled = false
// Remove plugin
pluginLoader.SetNames([]string{})
pluginLoader.names = []string{}
_ = tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
// Should not be called since plugin was removed
Consistently(func() bool { return pluginFake.GetNowPlayingCalled() }).Should(BeFalse())
Expect(pluginFake.NowPlayingCalled).To(BeFalse())
})
It("calls both builtin and plugin scrobblers for NowPlaying", func() {
fake.nowPlayingCalled.Store(false)
pluginFake.nowPlayingCalled.Store(false)
fake.NowPlayingCalled = false
pluginFake.NowPlayingCalled = false
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
Expect(err).ToNot(HaveOccurred())
Eventually(func() bool { return fake.GetNowPlayingCalled() }).Should(BeTrue())
Eventually(func() bool { return pluginFake.GetNowPlayingCalled() }).Should(BeTrue())
Expect(fake.NowPlayingCalled).To(BeTrue())
Expect(pluginFake.NowPlayingCalled).To(BeTrue())
})
It("calls plugin scrobbler for Submit", func() {
@@ -395,7 +334,7 @@ var _ = Describe("PlayTracker", func() {
var mockedBS *mockBufferedScrobbler
BeforeEach(func() {
ctx = GinkgoT().Context()
ctx = context.Background()
ctx = request.WithUser(ctx, model.User{ID: "u-1"})
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
ds = &tests.MockDataStore{}
@@ -420,7 +359,7 @@ var _ = Describe("PlayTracker", func() {
It("calls Stop on scrobblers when removing them", func() {
// Change the plugin names to simulate a plugin being removed
mockPlugin.SetNames([]string{})
mockPlugin.names = []string{}
// Call refreshPluginScrobblers which should detect the removed plugin
pTracker.refreshPluginScrobblers()
@@ -436,69 +375,32 @@ var _ = Describe("PlayTracker", func() {
type fakeScrobbler struct {
Authorized bool
nowPlayingCalled atomic.Bool
NowPlayingCalled bool
ScrobbleCalled atomic.Bool
userID atomic.Pointer[string]
username atomic.Pointer[string]
track atomic.Pointer[model.MediaFile]
position atomic.Int32
UserID string
Track *model.MediaFile
Position int
LastScrobble atomic.Pointer[Scrobble]
Error error
}
func (f *fakeScrobbler) GetNowPlayingCalled() bool {
return f.nowPlayingCalled.Load()
}
func (f *fakeScrobbler) GetUserID() string {
if p := f.userID.Load(); p != nil {
return *p
}
return ""
}
func (f *fakeScrobbler) GetTrack() *model.MediaFile {
return f.track.Load()
}
func (f *fakeScrobbler) GetPosition() int {
return int(f.position.Load())
}
func (f *fakeScrobbler) GetUsername() string {
if p := f.username.Load(); p != nil {
return *p
}
return ""
}
func (f *fakeScrobbler) IsAuthorized(ctx context.Context, userId string) bool {
return f.Error == nil && f.Authorized
}
func (f *fakeScrobbler) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
f.nowPlayingCalled.Store(true)
f.NowPlayingCalled = true
if f.Error != nil {
return f.Error
}
f.userID.Store(&userId)
// Capture username from context (this is what plugin scrobblers do)
username, _ := request.UsernameFrom(ctx)
if username == "" {
if u, ok := request.UserFrom(ctx); ok {
username = u.UserName
}
}
if username != "" {
f.username.Store(&username)
}
f.track.Store(track)
f.position.Store(int32(position))
f.UserID = userId
f.Track = track
f.Position = position
return nil
}
func (f *fakeScrobbler) Scrobble(ctx context.Context, userId string, s Scrobble) error {
f.userID.Store(&userId)
f.UserID = userId
f.LastScrobble.Store(&s)
f.ScrobbleCalled.Store(true)
if f.Error != nil {

View File

@@ -18,7 +18,6 @@ var Set = wire.NewSet(
NewShare,
NewPlaylists,
NewLibrary,
NewMaintenance,
agents.GetAgents,
external.NewProvider,
wire.Bind(new(external.Agents), new(*agents.Agents)),

View File

@@ -45,12 +45,10 @@ func Db() *sql.DB {
if err != nil {
log.Fatal("Error opening database", err)
}
if conf.Server.DevOptimizeDB {
_, err = db.Exec("PRAGMA optimize=0x10002")
if err != nil {
log.Error("Error applying PRAGMA optimize", err)
return nil
}
_, err = db.Exec("PRAGMA optimize=0x10002")
if err != nil {
log.Error("Error applying PRAGMA optimize", err)
return nil
}
return db
})
@@ -101,7 +99,7 @@ func Init(ctx context.Context) func() {
log.Fatal(ctx, "Failed to apply new migrations", err)
}
if hasSchemaChanges && conf.Server.DevOptimizeDB {
if hasSchemaChanges {
log.Debug(ctx, "Applying PRAGMA optimize after schema changes")
_, err = db.ExecContext(ctx, "PRAGMA optimize")
if err != nil {
@@ -116,9 +114,6 @@ func Init(ctx context.Context) func() {
// Optimize runs PRAGMA optimize on each connection in the pool
func Optimize(ctx context.Context) {
if !conf.Server.DevOptimizeDB {
return
}
numConns := Db().Stats().OpenConnections
if numConns == 0 {
log.Debug(ctx, "No open connections to optimize")

View File

@@ -1,7 +0,0 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE annotation ADD COLUMN rated_at datetime;
-- +goose StatementEnd
-- +goose Down

View File

@@ -1,20 +0,0 @@
-- +goose Up
-- +goose StatementBegin
CREATE TABLE scrobbles(
media_file_id VARCHAR(255) NOT NULL
REFERENCES media_file(id)
ON DELETE CASCADE
ON UPDATE CASCADE,
user_id VARCHAR(255) NOT NULL
REFERENCES user(id)
ON DELETE CASCADE
ON UPDATE CASCADE,
submission_time INTEGER NOT NULL
);
CREATE INDEX scrobbles_date ON scrobbles (submission_time);
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DROP TABLE scrobbles;
-- +goose StatementEnd

View File

@@ -1,15 +0,0 @@
-- +goose Up
CREATE TABLE IF NOT EXISTS plugin (
id TEXT PRIMARY KEY,
path TEXT NOT NULL,
manifest TEXT NOT NULL,
config TEXT,
enabled INTEGER NOT NULL DEFAULT 0,
last_error TEXT,
sha256 TEXT NOT NULL,
created_at DATETIME NOT NULL,
updated_at DATETIME NOT NULL
);
-- +goose Down
DROP TABLE IF EXISTS plugin;

View File

@@ -7,7 +7,6 @@ import (
"strings"
"sync"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/consts"
)
@@ -22,13 +21,11 @@ func notice(tx *sql.Tx, msg string) {
// Call this in migrations that require a full rescan
func forceFullRescan(tx *sql.Tx) error {
// If a full scan is required, the query optimizer statistics are most likely outdated, so we run `analyze`.
if conf.Server.DevOptimizeDB {
_, err := tx.Exec(`ANALYZE;`)
if err != nil {
return err
}
_, err := tx.Exec(`ANALYZE;`)
if err != nil {
return err
}
_, err := tx.Exec(fmt.Sprintf(`
_, err = tx.Exec(fmt.Sprintf(`
INSERT OR REPLACE into property (id, value) values ('%s', '1');
`, consts.FullScanAfterMigrationFlagKey))
return err

go.mod
View File

@@ -1,9 +1,13 @@
module github.com/navidrome/navidrome
go 1.25
go 1.25.4
// Fork to fix https://github.com/navidrome/navidrome/issues/3254
replace github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 => github.com/deluan/tag v0.0.0-20241002021117-dfe5e6ea396d
replace (
// Fork to fix https://github.com/navidrome/navidrome/issues/3254
github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 => github.com/deluan/tag v0.0.0-20241002021117-dfe5e6ea396d
// Using version from main that fixes https://github.com/navidrome/navidrome/issues/4396
github.com/tetratelabs/wazero v1.9.0 => github.com/tetratelabs/wazero v0.0.0-20251106165119-514cdb337684
)
require (
github.com/Masterminds/squirrel v1.5.4
@@ -21,7 +25,6 @@ require (
github.com/djherbis/stream v1.4.0
github.com/djherbis/times v1.6.0
github.com/dustin/go-humanize v1.0.1
github.com/extism/go-sdk v1.7.1
github.com/fatih/structs v1.1.0
github.com/go-chi/chi/v5 v5.2.3
github.com/go-chi/cors v1.2.2
@@ -37,15 +40,15 @@ require (
github.com/jellydator/ttlcache/v3 v3.4.0
github.com/kardianos/service v1.2.4
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51
github.com/knqyf263/go-plugin v0.9.0
github.com/kr/pretty v0.3.1
github.com/lestrrat-go/jwx/v2 v2.1.6
github.com/maruel/natural v1.3.0
github.com/matoous/go-nanoid/v2 v2.1.0
github.com/mattn/go-sqlite3 v1.14.32
github.com/microcosm-cc/bluemonday v1.0.27
github.com/mileusna/useragent v1.3.5
github.com/onsi/ginkgo/v2 v2.27.3
github.com/onsi/gomega v1.38.3
github.com/onsi/ginkgo/v2 v2.27.2
github.com/onsi/gomega v1.38.2
github.com/pelletier/go-toml/v2 v2.2.4
github.com/pocketbase/dbx v1.11.0
github.com/pressly/goose/v3 v3.26.0
@@ -54,20 +57,21 @@ require (
github.com/robfig/cron/v3 v3.0.1
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.10.2
github.com/spf13/cobra v1.10.1
github.com/spf13/viper v1.21.0
github.com/stretchr/testify v1.11.1
github.com/tetratelabs/wazero v1.11.0
github.com/tetratelabs/wazero v1.9.0
github.com/unrolled/secure v1.17.0
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342
go.uber.org/goleak v1.3.0
golang.org/x/image v0.34.0
golang.org/x/net v0.48.0
golang.org/x/sync v0.19.0
golang.org/x/sys v0.39.0
golang.org/x/term v0.38.0
golang.org/x/text v0.32.0
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546
golang.org/x/image v0.32.0
golang.org/x/net v0.46.0
golang.org/x/sync v0.17.0
golang.org/x/sys v0.37.0
golang.org/x/text v0.30.0
golang.org/x/time v0.14.0
google.golang.org/protobuf v1.36.10
gopkg.in/yaml.v3 v3.0.1
)
@@ -82,20 +86,17 @@ require (
github.com/creack/pty v1.1.11 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
github.com/dylibso/observe-sdk/go v0.0.0-20240819160327-2d926c5d788a // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/gobwas/glob v0.2.3 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/goccy/go-yaml v1.18.0 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/pprof v0.0.0-20251213031049-b05bdaca462f // indirect
github.com/google/pprof v0.0.0-20251007162407-5df77e3f7d1d // indirect
github.com/google/subcommands v1.2.0 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/ianlancetaylor/demangle v0.0.0-20250417193237-f615e6bd150b // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/kr/text v0.2.0 // indirect
@@ -126,18 +127,14 @@ require (
github.com/spf13/pflag v1.0.10 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/tetratelabs/wabin v0.0.0-20230304001439-f6f874872834 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
go.opentelemetry.io/proto/otlp v1.3.1 // indirect
go.uber.org/multierr v1.11.0 // indirect
go.yaml.in/yaml/v2 v2.4.2 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/crypto v0.46.0 // indirect
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 // indirect
golang.org/x/mod v0.31.0 // indirect
golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc // indirect
golang.org/x/tools v0.40.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect
golang.org/x/crypto v0.43.0 // indirect
golang.org/x/mod v0.29.0 // indirect
golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 // indirect
golang.org/x/tools v0.38.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
)

go.sum
View File

@@ -55,10 +55,6 @@ github.com/djherbis/times v1.6.0 h1:w2ctJ92J8fBvWPxugmXIv7Nz7Q3iDMKNx9v5ocVH20c=
github.com/djherbis/times v1.6.0/go.mod h1:gOHeRAz2h+VJNZ5Gmc/o7iD9k4wW7NMVqieYCY99oc0=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/dylibso/observe-sdk/go v0.0.0-20240819160327-2d926c5d788a h1:UwSIFv5g5lIvbGgtf3tVwC7Ky9rmMFBp0RMs+6f6YqE=
github.com/dylibso/observe-sdk/go v0.0.0-20240819160327-2d926c5d788a/go.mod h1:C8DzXehI4zAbrdlbtOByKX6pfivJTBiV9Jjqv56Yd9Q=
github.com/extism/go-sdk v1.7.1 h1:lWJos6uY+tRFdlIHR+SJjwFDApY7OypS/2nMhiVQ9Sw=
github.com/extism/go-sdk v1.7.1/go.mod h1:IT+Xdg5AZM9hVtpFUA+uZCJMge/hbvshl8bwzLtFyKA=
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
@@ -91,8 +87,6 @@ github.com/go-viper/encoding/ini v0.1.1 h1:MVWY7B2XNw7lnOqHutGRc97bF3rP7omOdgjdM
github.com/go-viper/encoding/ini v0.1.1/go.mod h1:Pfi4M2V1eAGJVZ5q6FrkHPhtHED2YgLlXhvgMVrB+YQ=
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
@@ -105,8 +99,8 @@ github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc h1:hd+uUVsB1vdxohPneMrhGH2YfQuH5hRIK9u4/XCeUtw=
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc/go.mod h1:SL66SJVysrh7YbDCP9tH30b8a9o/N2HeiQNUm85EKhc=
github.com/google/pprof v0.0.0-20251213031049-b05bdaca462f h1:HU1RgM6NALf/KW9HEY6zry3ADbDKcmpQ+hJedoNGQYQ=
github.com/google/pprof v0.0.0-20251213031049-b05bdaca462f/go.mod h1:67FPmZWbr+KDT/VlpWtw6sO9XSjpJmLuHpoLmWiTGgY=
github.com/google/pprof v0.0.0-20251007162407-5df77e3f7d1d h1:KJIErDwbSHjnp/SGzE5ed8Aol7JsKiI5X7yWKAtzhM0=
github.com/google/pprof v0.0.0-20251007162407-5df77e3f7d1d/go.mod h1:I6V7YzU0XDpsHqbsyrghnFZLO1gwK6NPTNvmetQIk9U=
github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE=
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
@@ -124,8 +118,6 @@ github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/ianlancetaylor/demangle v0.0.0-20250417193237-f615e6bd150b h1:ogbOPx86mIhFy764gGkqnkFC8m5PJA7sPzlk9ppLVQA=
github.com/ianlancetaylor/demangle v0.0.0-20250417193237-f615e6bd150b/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jellydator/ttlcache/v3 v3.4.0 h1:YS4P125qQS0tNhtL6aeYkheEaB/m8HCqdMMP4mnWdTY=
@@ -142,6 +134,8 @@ github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zt
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/knqyf263/go-plugin v0.9.0 h1:CQs2+lOPIlkZVtcb835ZYDEoyyWJWLbSTWeCs0EwTwI=
github.com/knqyf263/go-plugin v0.9.0/go.mod h1:2z5lCO1/pez6qGo8CvCxSlBFSEat4MEp1DrnA+f7w8Q=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
@@ -168,8 +162,8 @@ github.com/lestrrat-go/jwx/v2 v2.1.6 h1:hxM1gfDILk/l5ylers6BX/Eq1m/pnxe9NBwW6lVf
github.com/lestrrat-go/jwx/v2 v2.1.6/go.mod h1:Y722kU5r/8mV7fYDifjug0r8FK8mZdw0K0GpJw/l8pU=
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
github.com/maruel/natural v1.3.0 h1:VsmCsBmEyrR46RomtgHs5hbKADGRVtliHTyCOLFBpsg=
github.com/maruel/natural v1.3.0/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo=
github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
github.com/matoous/go-nanoid/v2 v2.1.0 h1:P64+dmq21hhWdtvZfEAofnvJULaRR1Yib0+PnU669bE=
github.com/matoous/go-nanoid/v2 v2.1.0/go.mod h1:KlbGNQ+FhrUNIHUxZdL63t7tl4LaPkZNpUULS8H4uVM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
@@ -192,10 +186,10 @@ github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdh
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ogier/pflag v0.0.1 h1:RW6JSWSu/RkSatfcLtogGfFgpim5p7ARQ10ECk5O750=
github.com/ogier/pflag v0.0.1/go.mod h1:zkFki7tvTa0tafRvTBIZTvzYyAu6kQhPZFnshFFPE+g=
github.com/onsi/ginkgo/v2 v2.27.3 h1:ICsZJ8JoYafeXFFlFAG75a7CxMsJHwgKwtO+82SE9L8=
github.com/onsi/ginkgo/v2 v2.27.3/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
github.com/onsi/gomega v1.38.3 h1:eTX+W6dobAYfFeGC2PV6RwXRu/MyT+cQguijutvkpSM=
github.com/onsi/gomega v1.38.3/go.mod h1:ZCU1pkQcXDO5Sl9/VVEGlDyp+zm0m1cmeG5TOzLgdh4=
github.com/onsi/ginkgo/v2 v2.27.2 h1:LzwLj0b89qtIy6SSASkzlNvX6WktqurSHwkk2ipF/Ns=
github.com/onsi/ginkgo/v2 v2.27.2/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
github.com/onsi/gomega v1.38.2 h1:eZCjf2xjZAqe+LeWvKb5weQ+NcPwX84kqJ0cZNxok2A=
github.com/onsi/gomega v1.38.2/go.mod h1:W2MJcYxRGV63b418Ai34Ud0hEdTVXq9NW9+Sx6uXf3k=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
@@ -250,8 +244,8 @@ github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
@@ -271,10 +265,8 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tetratelabs/wabin v0.0.0-20230304001439-f6f874872834 h1:ZF+QBjOI+tILZjBaFj3HgFonKXUcwgJ4djLb6i42S3Q=
github.com/tetratelabs/wabin v0.0.0-20230304001439-f6f874872834/go.mod h1:m9ymHTgNSEjuxvw8E7WWe4Pl4hZQHXONY8wE6dMLaRk=
github.com/tetratelabs/wazero v1.11.0 h1:+gKemEuKCTevU4d7ZTzlsvgd1uaToIDtlQlmNbwqYhA=
github.com/tetratelabs/wazero v1.11.0/go.mod h1:eV28rsN8Q+xwjogd7f4/Pp4xFxO7uOGbLcD/LzB1wiU=
github.com/tetratelabs/wazero v0.0.0-20251106165119-514cdb337684 h1:ugT1JTRsK1Jhn95BWilCugyZ1Svsyxm9xSiflOa2e7E=
github.com/tetratelabs/wazero v0.0.0-20251106165119-514cdb337684/go.mod h1:DRm5twOQ5Gr1AoEdSi0CLjDQF1J9ZAuyqFIjl1KKfQU=
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
@@ -292,8 +284,6 @@ github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0=
go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
@@ -308,20 +298,20 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 h1:MDfG8Cvcqlt9XXrmEiD4epKn7VJHZO84hejP9Jmp0MM=
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 h1:mgKeJMpvi0yx/sU5GsxQ7p6s2wtOnGAHZWCHUM4KGzY=
golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:j/pmGrbnkbPtQfxEe5D0VQhZC6qKbfKifgD0oM7sR70=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.34.0 h1:33gCkyw9hmwbZJeZkct8XyR11yH889EQt/QH4VmXMn8=
golang.org/x/image v0.34.0/go.mod h1:2RNFBZRB+vnwwFil8GkMdRvrJOFd1AzdZI6vOY+eJVU=
golang.org/x/image v0.32.0 h1:6lZQWq75h7L5IWNk0r+SCpUJ6tUVd3v4ZHnbRKLkUDQ=
golang.org/x/image v0.32.0/go.mod h1:/R37rrQmKXtO6tYXAjtDLwQgFLHmhW+V6ayXlxzP2Pc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@@ -333,8 +323,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -342,8 +332,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180926160741-c2ed4eda69e7/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -360,11 +350,11 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc h1:bH6xUXay0AIFMElXG2rQ4uiE+7ncwtiOdPfYK1NK2XA=
golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ=
golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU=
golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8/go.mod h1:Pi4ztBfryZoJEkyFTI5/Ocsu2jXyDr6iSdgJiYE/uwE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@@ -373,8 +363,6 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q=
golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@@ -385,8 +373,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -396,12 +384,12 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ=
golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE=
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=

View File

@@ -11,7 +11,6 @@ import (
"runtime"
"sort"
"strings"
"sync"
"time"
"github.com/sirupsen/logrus"
@@ -29,8 +28,8 @@ var redacted = &Hook{
"(Secret:\")[\\w]*",
"(Spotify.*ID:\")[\\w]*",
"(PasswordEncryptionKey:[\\s]*\")[^\"]*",
"(UserHeader:[\\s]*\")[^\"]*",
"(TrustedSources:[\\s]*\")[^\"]*",
"(ReverseProxyUserHeader:[\\s]*\")[^\"]*",
"(ReverseProxyWhitelist:[\\s]*\")[^\"]*",
"(MetricsPath:[\\s]*\")[^\"]*",
"(DevAutoCreateAdminPassword:[\\s]*\")[^\"]*",
"(DevAutoLoginUsername:[\\s]*\")[^\"]*",
@@ -71,7 +70,6 @@ type levelPath struct {
var (
currentLevel Level
loggerMu sync.RWMutex
defaultLogger = logrus.New()
logSourceLine = false
rootPath string
@@ -80,19 +78,17 @@ var (
// SetLevel sets the global log level used by the simple logger.
func SetLevel(l Level) {
loggerMu.Lock()
currentLevel = l
defaultLogger.Level = logrus.TraceLevel
loggerMu.Unlock()
logrus.SetLevel(logrus.Level(l))
}
func SetLevelString(l string) {
level := ParseLogLevel(l)
level := levelFromString(l)
SetLevel(level)
}
func ParseLogLevel(l string) Level {
func levelFromString(l string) Level {
envLevel := strings.ToLower(l)
var level Level
switch envLevel {
@@ -114,11 +110,9 @@ func ParseLogLevel(l string) Level {
// SetLogLevels sets the log levels for specific paths in the codebase.
func SetLogLevels(levels map[string]string) {
loggerMu.Lock()
defer loggerMu.Unlock()
logLevels = nil
for k, v := range levels {
logLevels = append(logLevels, levelPath{path: k, level: ParseLogLevel(v)})
logLevels = append(logLevels, levelPath{path: k, level: levelFromString(v)})
}
sort.Slice(logLevels, func(i, j int) bool {
return logLevels[i].path > logLevels[j].path
@@ -131,8 +125,6 @@ func SetLogSourceLine(enabled bool) {
func SetRedacting(enabled bool) {
if enabled {
loggerMu.Lock()
defer loggerMu.Unlock()
defaultLogger.AddHook(redacted)
}
}
@@ -141,8 +133,6 @@ func SetOutput(w io.Writer) {
if runtime.GOOS == "windows" {
w = CRLFWriter(w)
}
loggerMu.Lock()
defer loggerMu.Unlock()
defaultLogger.SetOutput(w)
}
@@ -168,14 +158,10 @@ func NewContext(ctx context.Context, keyValuePairs ...interface{}) context.Conte
}
func SetDefaultLogger(l *logrus.Logger) {
loggerMu.Lock()
defer loggerMu.Unlock()
defaultLogger = l
}
func CurrentLevel() Level {
loggerMu.RLock()
defer loggerMu.RUnlock()
return currentLevel
}
@@ -185,31 +171,31 @@ func IsGreaterOrEqualTo(level Level) bool {
}
func Fatal(args ...interface{}) {
Log(LevelFatal, args...)
log(LevelFatal, args...)
os.Exit(1)
}
func Error(args ...interface{}) {
Log(LevelError, args...)
log(LevelError, args...)
}
func Warn(args ...interface{}) {
Log(LevelWarn, args...)
log(LevelWarn, args...)
}
func Info(args ...interface{}) {
Log(LevelInfo, args...)
log(LevelInfo, args...)
}
func Debug(args ...interface{}) {
Log(LevelDebug, args...)
log(LevelDebug, args...)
}
func Trace(args ...interface{}) {
Log(LevelTrace, args...)
log(LevelTrace, args...)
}
func Log(level Level, args ...interface{}) {
func log(level Level, args ...interface{}) {
if !shouldLog(level, 3) {
return
}
@@ -218,21 +204,14 @@ func Log(level Level, args ...interface{}) {
}
func Writer() io.Writer {
loggerMu.RLock()
defer loggerMu.RUnlock()
return defaultLogger.Writer()
}
func shouldLog(requiredLevel Level, skip int) bool {
loggerMu.RLock()
level := currentLevel
levels := logLevels
loggerMu.RUnlock()
if level >= requiredLevel {
if currentLevel >= requiredLevel {
return true
}
if len(levels) == 0 {
if len(logLevels) == 0 {
return false
}
@@ -242,7 +221,7 @@ func shouldLog(requiredLevel Level, skip int) bool {
}
file = strings.TrimPrefix(file, rootPath)
for _, lp := range levels {
for _, lp := range logLevels {
if strings.HasPrefix(file, lp.path) {
return lp.level >= requiredLevel
}
@@ -335,8 +314,6 @@ func extractLogger(ctx interface{}) (*logrus.Entry, error) {
func createNewLogger() *logrus.Entry {
//logrus.SetFormatter(&logrus.TextFormatter{ForceColors: true, DisableTimestamp: false, FullTimestamp: true})
//l.Formatter = &logrus.TextFormatter{ForceColors: true, DisableTimestamp: false, FullTimestamp: true}
loggerMu.RLock()
defer loggerMu.RUnlock()
logger := logrus.NewEntry(defaultLogger)
return logger
}
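The per-path overrides above are matched as prefixes of the caller's file path, and the slice is sorted in descending order so more specific paths win. A hedged usage sketch — the path keys and the buildDetail helper are illustrative only:

// Default level for everything, plus more verbose levels for specific subtrees.
log.SetLevelString("info")
log.SetLogLevels(map[string]string{
    "scanner":         "trace",
    "server/subsonic": "debug",
})

// Guard expensive diagnostics behind the effective level check.
if log.IsGreaterOrEqualTo(log.LevelDebug) {
    log.Debug("expensive diagnostic", "detail", buildDetail()) // buildDetail is hypothetical
}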

View File

@@ -6,7 +6,6 @@ type Annotations struct {
PlayCount int64 `structs:"play_count" json:"playCount,omitempty"`
PlayDate *time.Time `structs:"play_date" json:"playDate,omitempty" `
Rating int `structs:"rating" json:"rating,omitempty" `
RatedAt *time.Time `structs:"rated_at" json:"ratedAt,omitempty" `
Starred bool `structs:"starred" json:"starred,omitempty" `
StarredAt *time.Time `structs:"starred_at" json:"starredAt,omitempty"`
}

View File

@@ -44,7 +44,6 @@ var fieldMap = map[string]*mappedField{
"loved": {field: "COALESCE(annotation.starred, false)"},
"dateloved": {field: "annotation.starred_at"},
"lastplayed": {field: "annotation.play_date"},
"daterated": {field: "annotation.rated_at"},
"playcount": {field: "COALESCE(annotation.play_count, 0)"},
"rating": {field: "COALESCE(annotation.rating, 0)"},
"mbz_album_id": {field: "media_file.mbz_album_id"},

View File

@@ -38,12 +38,10 @@ type DataStore interface {
User(ctx context.Context) UserRepository
UserProps(ctx context.Context) UserPropsRepository
ScrobbleBuffer(ctx context.Context) ScrobbleBufferRepository
Scrobble(ctx context.Context) ScrobbleRepository
Plugin(ctx context.Context) PluginRepository
Resource(ctx context.Context, model interface{}) ResourceRepository
WithTx(block func(tx DataStore) error, scope ...string) error
WithTxImmediate(block func(tx DataStore) error, scope ...string) error
GC(ctx context.Context, libraryIDs ...int) error
GC(ctx context.Context) error
}

View File

@@ -85,7 +85,7 @@ type FolderRepository interface {
GetByPath(lib Library, path string) (*Folder, error)
GetAll(...QueryOptions) ([]Folder, error)
CountAll(...QueryOptions) (int64, error)
GetFolderUpdateInfo(lib Library, targetPaths ...string) (map[string]FolderUpdateInfo, error)
GetLastUpdates(lib Library) (map[string]FolderUpdateInfo, error)
Put(*Folder) error
MarkMissing(missing bool, ids ...string) error
GetTouchedWithPlaylists() (FolderCursor, error)

View File

@@ -23,7 +23,7 @@ func legacyTrackID(mf model.MediaFile, prependLibId bool) string {
}
func legacyAlbumID(mf model.MediaFile, md Metadata, prependLibId bool) string {
_, _, releaseDate := md.mapDates()
releaseDate := legacyReleaseDate(md)
albumPath := strings.ToLower(fmt.Sprintf("%s\\%s", legacyMapAlbumArtistName(md), legacyMapAlbumName(md)))
if !conf.Server.Scanner.GroupAlbumReleases {
if len(releaseDate) != 0 {
@@ -55,3 +55,9 @@ func legacyMapAlbumName(md Metadata) string {
consts.UnknownAlbum,
)
}
// Keep the TaggedLikePicard logic for backwards compatibility
func legacyReleaseDate(md Metadata) string {
_, _, releaseDate := md.mapDates()
return string(releaseDate)
}

View File

@@ -0,0 +1,30 @@
package metadata
import (
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("legacyReleaseDate", func() {
DescribeTable("legacyReleaseDate",
func(recordingDate, originalDate, releaseDate, expected string) {
md := New("", Info{
Tags: map[string][]string{
"DATE": {recordingDate},
"ORIGINALDATE": {originalDate},
"RELEASEDATE": {releaseDate},
},
})
result := legacyReleaseDate(md)
Expect(result).To(Equal(expected))
},
Entry("regular mapping", "2020-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
Entry("legacy mapping", "2020-05-15", "2019-02-10", "", "2020-05-15"),
Entry("legacy mapping, originalYear < year", "2018-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
Entry("legacy mapping, originalYear empty", "2020-05-15", "", "2021-01-01", "2021-01-01"),
Entry("legacy mapping, releaseYear", "2020-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
Entry("legacy mapping, same dates", "2020-05-15", "2020-05-15", "", "2020-05-15"),
)
})

View File

@@ -75,23 +75,6 @@ var _ = Describe("ToMediaFile", func() {
Expect(mf.OriginalYear).To(Equal(1966))
Expect(mf.ReleaseYear).To(Equal(2014))
})
DescribeTable("legacyReleaseDate (TaggedLikePicard old behavior)",
func(recordingDate, originalDate, releaseDate, expected string) {
mf := toMediaFile(model.RawTags{
"DATE": {recordingDate},
"ORIGINALDATE": {originalDate},
"RELEASEDATE": {releaseDate},
})
Expect(mf.ReleaseDate).To(Equal(expected))
},
Entry("regular mapping", "2020-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
Entry("legacy mapping", "2020-05-15", "2019-02-10", "", "2020-05-15"),
Entry("legacy mapping, originalYear < year", "2018-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
Entry("legacy mapping, originalYear empty", "2020-05-15", "", "2021-01-01", "2021-01-01"),
Entry("legacy mapping, releaseYear", "2020-05-15", "2019-02-10", "2021-01-01", "2021-01-01"),
Entry("legacy mapping, same dates", "2020-05-15", "2020-05-15", "", "2020-05-15"),
)
})
Describe("Lyrics", func() {

View File

@@ -1,26 +0,0 @@
package model
import "time"
type Plugin struct {
ID string `structs:"id" json:"id"`
Path string `structs:"path" json:"path"`
Manifest string `structs:"manifest" json:"manifest"`
Config string `structs:"config" json:"config,omitempty"`
Enabled bool `structs:"enabled" json:"enabled"`
LastError string `structs:"last_error" json:"lastError,omitempty"`
SHA256 string `structs:"sha256" json:"sha256"`
CreatedAt time.Time `structs:"created_at" json:"createdAt"`
UpdatedAt time.Time `structs:"updated_at" json:"updatedAt"`
}
type Plugins []Plugin
type PluginRepository interface {
ResourceRepository
CountAll(options ...QueryOptions) (int64, error)
Delete(id string) error
Get(id string) (*Plugin, error)
GetAll(options ...QueryOptions) (Plugins, error)
Put(p *Plugin) error
}

View File

@@ -1,81 +0,0 @@
package model
import (
"context"
"fmt"
"strconv"
"strings"
"time"
)
// ScanTarget represents a specific folder within a library to be scanned.
// NOTE: This struct is used as a map key, so it should only contain comparable types.
type ScanTarget struct {
LibraryID int
FolderPath string // Relative path within the library, or "" for entire library
}
func (st ScanTarget) String() string {
return fmt.Sprintf("%d:%s", st.LibraryID, st.FolderPath)
}
// ScannerStatus holds information about the current scan status
type ScannerStatus struct {
Scanning bool
LastScan time.Time
Count uint32
FolderCount uint32
LastError string
ScanType string
ElapsedTime time.Duration
}
type Scanner interface {
// ScanAll starts a scan of all libraries. This is a blocking operation.
ScanAll(ctx context.Context, fullScan bool) (warnings []string, err error)
// ScanFolders scans specific library/folder pairs, recursing into subdirectories.
// If targets is nil, it scans all libraries. This is a blocking operation.
ScanFolders(ctx context.Context, fullScan bool, targets []ScanTarget) (warnings []string, err error)
Status(context.Context) (*ScannerStatus, error)
}
// ParseTargets parses scan target strings into ScanTarget structs.
// Example: []string{"1:Music/Rock", "2:Classical"}
func ParseTargets(libFolders []string) ([]ScanTarget, error) {
targets := make([]ScanTarget, 0, len(libFolders))
for _, part := range libFolders {
part = strings.TrimSpace(part)
if part == "" {
continue
}
// Split by the first colon
colonIdx := strings.Index(part, ":")
if colonIdx == -1 {
return nil, fmt.Errorf("invalid target format: %q (expected libraryID:folderPath)", part)
}
libIDStr := part[:colonIdx]
folderPath := part[colonIdx+1:]
libID, err := strconv.Atoi(libIDStr)
if err != nil {
return nil, fmt.Errorf("invalid library ID %q: %w", libIDStr, err)
}
if libID <= 0 {
return nil, fmt.Errorf("invalid library ID %q", libIDStr)
}
targets = append(targets, ScanTarget{
LibraryID: libID,
FolderPath: folderPath,
})
}
if len(targets) == 0 {
return nil, fmt.Errorf("no valid targets found")
}
return targets, nil
}
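A short usage sketch for ParseTargets, e.g. feeding it values collected from a repeatable CLI flag (the surrounding code is illustrative):

// Values such as "1:Music/Rock" come from the command line; an empty folder
// path ("2:") selects the whole library.
targets, err := model.ParseTargets([]string{"1:Music/Rock", "2:"})
if err != nil {
    return fmt.Errorf("invalid scan target: %w", err)
}
for _, t := range targets {
    fmt.Printf("scanning library %d, folder %q\n", t.LibraryID, t.FolderPath)
}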

View File

@@ -1,89 +0,0 @@
package model_test
import (
"github.com/navidrome/navidrome/model"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("ParseTargets", func() {
It("parses multiple entries in slice", func() {
targets, err := model.ParseTargets([]string{"1:Music/Rock", "1:Music/Jazz", "2:Classical"})
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(3))
Expect(targets[0].LibraryID).To(Equal(1))
Expect(targets[0].FolderPath).To(Equal("Music/Rock"))
Expect(targets[1].LibraryID).To(Equal(1))
Expect(targets[1].FolderPath).To(Equal("Music/Jazz"))
Expect(targets[2].LibraryID).To(Equal(2))
Expect(targets[2].FolderPath).To(Equal("Classical"))
})
It("handles empty folder paths", func() {
targets, err := model.ParseTargets([]string{"1:", "2:"})
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
Expect(targets[0].FolderPath).To(Equal(""))
Expect(targets[1].FolderPath).To(Equal(""))
})
It("trims whitespace from entries", func() {
targets, err := model.ParseTargets([]string{" 1:Music/Rock", " 2:Classical "})
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
Expect(targets[0].LibraryID).To(Equal(1))
Expect(targets[0].FolderPath).To(Equal("Music/Rock"))
Expect(targets[1].LibraryID).To(Equal(2))
Expect(targets[1].FolderPath).To(Equal("Classical"))
})
It("skips empty strings", func() {
targets, err := model.ParseTargets([]string{"1:Music/Rock", "", "2:Classical"})
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
})
It("handles paths with colons", func() {
targets, err := model.ParseTargets([]string{"1:C:/Music/Rock", "2:/path:with:colons"})
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
Expect(targets[0].FolderPath).To(Equal("C:/Music/Rock"))
Expect(targets[1].FolderPath).To(Equal("/path:with:colons"))
})
It("returns error for invalid format without colon", func() {
_, err := model.ParseTargets([]string{"1Music/Rock"})
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("invalid target format"))
})
It("returns error for non-numeric library ID", func() {
_, err := model.ParseTargets([]string{"abc:Music/Rock"})
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("invalid library ID"))
})
It("returns error for negative library ID", func() {
_, err := model.ParseTargets([]string{"-1:Music/Rock"})
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("invalid library ID"))
})
It("returns error for zero library ID", func() {
_, err := model.ParseTargets([]string{"0:Music/Rock"})
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("invalid library ID"))
})
It("returns error for empty input", func() {
_, err := model.ParseTargets([]string{})
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("no valid targets found"))
})
It("returns error for all empty strings", func() {
_, err := model.ParseTargets([]string{"", " ", ""})
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("no valid targets found"))
})
})

View File

@@ -1,13 +0,0 @@
package model
import "time"
type Scrobble struct {
MediaFileID string
UserID string
SubmissionTime time.Time
}
type ScrobbleRepository interface {
RecordScrobble(mediaFileID string, submissionTime time.Time) error
}
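A minimal sketch of an implementation backed by the scrobbles table from the migration shown earlier, using plain database/sql; the struct, the Unix-timestamp encoding, and the way the user ID is resolved are assumptions, not Navidrome's actual repository code:

// sqlScrobbleRepo is an illustrative ScrobbleRepository implementation.
type sqlScrobbleRepo struct {
    db     *sql.DB
    userID string // the real repository would resolve this from the request context
}

func (r *sqlScrobbleRepo) RecordScrobble(mediaFileID string, submissionTime time.Time) error {
    // submission_time is an INTEGER column, so store a Unix timestamp.
    _, err := r.db.Exec(
        "INSERT INTO scrobbles (media_file_id, user_id, submission_time) VALUES (?, ?, ?)",
        mediaFileID, r.userID, submissionTime.Unix(),
    )
    return err
}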

View File

@@ -42,9 +42,7 @@ func (u User) HasLibraryAccess(libraryID int) bool {
type Users []User
type UserRepository interface {
ResourceRepository
CountAll(...QueryOptions) (int64, error)
Delete(id string) error
Get(id string) (*User, error)
Put(*User) error
UpdateLastLoginAt(id string) error

View File

@@ -106,7 +106,6 @@ func NewAlbumRepository(ctx context.Context, db dbx.Builder) model.AlbumReposito
"random": "random",
"recently_added": recentlyAddedSort(),
"starred_at": "starred, starred_at",
"rated_at": "rating, rated_at",
})
return r
}
@@ -338,12 +337,8 @@ on conflict (user_id, item_id, item_type) do update
return r.executeSQL(query)
}
func (r *albumRepository) purgeEmpty(libraryIDs ...int) error {
func (r *albumRepository) purgeEmpty() error {
del := Delete(r.tableName).Where("id not in (select distinct(album_id) from media_file)")
// If libraryIDs are specified, only purge albums from those libraries
if len(libraryIDs) > 0 {
del = del.Where(Eq{"library_id": libraryIDs})
}
c, err := r.executeSQL(del)
if err != nil {
return fmt.Errorf("purging empty albums: %w", err)

View File

@@ -512,70 +512,6 @@ var _ = Describe("AlbumRepository", func() {
// Clean up the test album created for this test
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
})
It("removes stale role associations when artist role changes", func() {
// Regression test for issue #4242: Composers displayed in albumartist list
// This happens when an artist's role changes (e.g., was both albumartist and composer,
// now only composer) and the old role association isn't properly removed.
// Create an artist that will have changing roles
artist := &model.Artist{
ID: "role-change-artist-1",
Name: "Role Change Artist",
OrderArtistName: "role change artist",
}
err := createArtistWithLibrary(artistRepo, artist, 1)
Expect(err).ToNot(HaveOccurred())
// Create album with artist as both albumartist and composer
album := &model.Album{
LibraryID: 1,
ID: "test-album-role-change",
Name: "Test Album Role Change",
AlbumArtistID: "role-change-artist-1",
AlbumArtist: "Role Change Artist",
Participants: model.Participants{
model.RoleAlbumArtist: {
{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
},
model.RoleComposer: {
{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
},
},
}
err = albumRepo.Put(album)
Expect(err).ToNot(HaveOccurred())
// Verify initial state: artist has both albumartist and composer roles
expected := []albumArtistRecord{
{ArtistID: "role-change-artist-1", Role: "albumartist", SubRole: ""},
{ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
}
verifyAlbumArtists(album.ID, expected)
// Now update album so artist is ONLY a composer (remove albumartist role)
album.Participants = model.Participants{
model.RoleComposer: {
{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
},
}
err = albumRepo.Put(album)
Expect(err).ToNot(HaveOccurred())
// Verify that the albumartist role was removed - only composer should remain
// This is the key test: before the fix, the albumartist role would remain
// causing composers to appear in the albumartist filter
expectedAfter := []albumArtistRecord{
{ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
}
verifyAlbumArtists(album.ID, expectedAfter)
// Clean up
_, _ = artistRepo.executeSQL(squirrel.Delete("artist").Where(squirrel.Eq{"id": artist.ID}))
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
})
})
})
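The regression test above boils down to a set difference between the stored role associations and the new participants. An illustrative sketch of that check, using plain maps instead of the model types:

package main

import "fmt"

// When an artist's roles change, any role present in the stored associations
// but absent from the new participants must be deleted; otherwise composers
// keep showing up in the albumartist filter (issue #4242).
func staleRoles(stored, current map[string]bool) []string {
    var stale []string
    for role := range stored {
        if !current[role] {
            stale = append(stale, role)
        }
    }
    return stale
}

func main() {
    stored := map[string]bool{"albumartist": true, "composer": true}
    current := map[string]bool{"composer": true}
    fmt.Println(staleRoles(stored, current)) // [albumartist]
}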

View File

@@ -141,7 +141,6 @@ func NewArtistRepository(ctx context.Context, db dbx.Builder) model.ArtistReposi
r.setSortMappings(map[string]string{
"name": "order_artist_name",
"starred_at": "starred, starred_at",
"rated_at": "rating, rated_at",
"song_count": "stats->>'total'->>'m'",
"album_count": "stats->>'total'->>'a'",
"size": "stats->>'total'->>'s'",

View File

@@ -4,10 +4,7 @@ import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"slices"
"strings"
"time"
. "github.com/Masterminds/squirrel"
@@ -94,84 +91,8 @@ func (r folderRepository) CountAll(opt ...model.QueryOptions) (int64, error) {
return r.count(query)
}
func (r folderRepository) GetFolderUpdateInfo(lib model.Library, targetPaths ...string) (map[string]model.FolderUpdateInfo, error) {
// If no specific paths, return all folders in the library
if len(targetPaths) == 0 {
return r.getFolderUpdateInfoAll(lib)
}
// Check if any path is root (return all folders)
for _, targetPath := range targetPaths {
if targetPath == "" || targetPath == "." {
return r.getFolderUpdateInfoAll(lib)
}
}
// Process paths in batches to avoid SQLite's expression tree depth limit (max 1000).
// Each path generates ~3 conditions, so batch size of 100 keeps us well under the limit.
const batchSize = 100
result := make(map[string]model.FolderUpdateInfo)
for batch := range slices.Chunk(targetPaths, batchSize) {
batchResult, err := r.getFolderUpdateInfoBatch(lib, batch)
if err != nil {
return nil, err
}
for id, info := range batchResult {
result[id] = info
}
}
return result, nil
}
// getFolderUpdateInfoAll returns update info for all non-missing folders in the library
func (r folderRepository) getFolderUpdateInfoAll(lib model.Library) (map[string]model.FolderUpdateInfo, error) {
where := And{
Eq{"library_id": lib.ID},
Eq{"missing": false},
}
return r.queryFolderUpdateInfo(where)
}
// getFolderUpdateInfoBatch returns update info for a batch of target paths and their descendants
func (r folderRepository) getFolderUpdateInfoBatch(lib model.Library, targetPaths []string) (map[string]model.FolderUpdateInfo, error) {
where := And{
Eq{"library_id": lib.ID},
Eq{"missing": false},
}
// Collect folder IDs for exact target folders and path conditions for descendants
folderIDs := make([]string, 0, len(targetPaths))
pathConditions := make(Or, 0, len(targetPaths)*2)
for _, targetPath := range targetPaths {
// Clean the path to normalize it. Paths stored in the folder table do not have leading/trailing slashes.
cleanPath := strings.TrimPrefix(targetPath, string(os.PathSeparator))
cleanPath = filepath.Clean(cleanPath)
// Include the target folder itself by ID
folderIDs = append(folderIDs, model.FolderID(lib, cleanPath))
// Include all descendants: folders whose path field equals or starts with the target path
// Note: Folder.Path is the directory path, so children have path = targetPath
pathConditions = append(pathConditions, Eq{"path": cleanPath})
pathConditions = append(pathConditions, Like{"path": cleanPath + "/%"})
}
// Combine conditions: exact folder IDs OR descendant path patterns
if len(folderIDs) > 0 {
where = append(where, Or{Eq{"id": folderIDs}, pathConditions})
} else if len(pathConditions) > 0 {
where = append(where, pathConditions)
}
return r.queryFolderUpdateInfo(where)
}
// queryFolderUpdateInfo executes the query and returns the result map
func (r folderRepository) queryFolderUpdateInfo(where And) (map[string]model.FolderUpdateInfo, error) {
sq := r.newSelect().Columns("id", "updated_at", "hash").Where(where)
func (r folderRepository) GetLastUpdates(lib model.Library) (map[string]model.FolderUpdateInfo, error) {
sq := r.newSelect().Columns("id", "updated_at", "hash").Where(Eq{"library_id": lib.ID, "missing": false})
var res []struct {
ID string
UpdatedAt time.Time
@@ -228,7 +149,7 @@ func (r folderRepository) GetTouchedWithPlaylists() (model.FolderCursor, error)
}, nil
}
func (r folderRepository) purgeEmpty(libraryIDs ...int) error {
func (r folderRepository) purgeEmpty() error {
sq := Delete(r.tableName).Where(And{
Eq{"num_audio_files": 0},
Eq{"num_playlists": 0},
@@ -236,10 +157,6 @@ func (r folderRepository) purgeEmpty(libraryIDs ...int) error {
ConcatExpr("id not in (select parent_id from folder)"),
ConcatExpr("id not in (select folder_id from media_file)"),
})
// If libraryIDs are specified, only purge folders from those libraries
if len(libraryIDs) > 0 {
sq = sq.Where(Eq{"library_id": libraryIDs})
}
c, err := r.executeSQL(sq)
if err != nil {
return fmt.Errorf("purging empty folders: %w", err)

View File

@@ -1,213 +0,0 @@
package persistence
import (
"context"
"fmt"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/pocketbase/dbx"
)
var _ = Describe("FolderRepository", func() {
var repo model.FolderRepository
var ctx context.Context
var conn *dbx.DB
var testLib, otherLib model.Library
BeforeEach(func() {
ctx = request.WithUser(log.NewContext(context.TODO()), model.User{ID: "userid"})
conn = GetDBXBuilder()
repo = newFolderRepository(ctx, conn)
// Use existing library ID 1 from test fixtures
libRepo := NewLibraryRepository(ctx, conn)
lib, err := libRepo.Get(1)
Expect(err).ToNot(HaveOccurred())
testLib = *lib
// Create a second library with its own folder to verify isolation
otherLib = model.Library{Name: "Other Library", Path: "/other/path"}
Expect(libRepo.Put(&otherLib)).To(Succeed())
})
AfterEach(func() {
// Clean up only test folders created by our tests (paths starting with "Test")
// This prevents interference with fixture data needed by other tests
_, _ = conn.NewQuery("DELETE FROM folder WHERE library_id = 1 AND path LIKE 'Test%'").Execute()
_, _ = conn.NewQuery(fmt.Sprintf("DELETE FROM library WHERE id = %d", otherLib.ID)).Execute()
})
Describe("GetFolderUpdateInfo", func() {
Context("with no target paths", func() {
It("returns all folders in the library", func() {
// Create test folders with unique names to avoid conflicts
folder1 := model.NewFolder(testLib, "TestGetLastUpdates/Folder1")
folder2 := model.NewFolder(testLib, "TestGetLastUpdates/Folder2")
err := repo.Put(folder1)
Expect(err).ToNot(HaveOccurred())
err = repo.Put(folder2)
Expect(err).ToNot(HaveOccurred())
otherFolder := model.NewFolder(otherLib, "TestOtherLib/Folder")
err = repo.Put(otherFolder)
Expect(err).ToNot(HaveOccurred())
// Query all folders (no target paths) - should only return folders from testLib
results, err := repo.GetFolderUpdateInfo(testLib)
Expect(err).ToNot(HaveOccurred())
// Should include folders from testLib
Expect(results).To(HaveKey(folder1.ID))
Expect(results).To(HaveKey(folder2.ID))
// Should NOT include folders from other library
Expect(results).ToNot(HaveKey(otherFolder.ID))
})
})
Context("with specific target paths", func() {
It("returns folder info for existing folders", func() {
// Create test folders with unique names
folder1 := model.NewFolder(testLib, "TestSpecific/Rock")
folder2 := model.NewFolder(testLib, "TestSpecific/Jazz")
folder3 := model.NewFolder(testLib, "TestSpecific/Classical")
err := repo.Put(folder1)
Expect(err).ToNot(HaveOccurred())
err = repo.Put(folder2)
Expect(err).ToNot(HaveOccurred())
err = repo.Put(folder3)
Expect(err).ToNot(HaveOccurred())
// Query specific paths
results, err := repo.GetFolderUpdateInfo(testLib, "TestSpecific/Rock", "TestSpecific/Classical")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(HaveLen(2))
// Verify folder IDs are in results
Expect(results).To(HaveKey(folder1.ID))
Expect(results).To(HaveKey(folder3.ID))
Expect(results).ToNot(HaveKey(folder2.ID))
// Verify update info is populated
Expect(results[folder1.ID].UpdatedAt).ToNot(BeZero())
Expect(results[folder1.ID].Hash).To(Equal(folder1.Hash))
})
It("includes all child folders when querying parent", func() {
// Create a parent folder with multiple children
parent := model.NewFolder(testLib, "TestParent/Music")
child1 := model.NewFolder(testLib, "TestParent/Music/Rock/Queen")
child2 := model.NewFolder(testLib, "TestParent/Music/Jazz")
otherParent := model.NewFolder(testLib, "TestParent2/Music/Jazz")
Expect(repo.Put(parent)).To(Succeed())
Expect(repo.Put(child1)).To(Succeed())
Expect(repo.Put(child2)).To(Succeed())
// Query the parent folder - should return parent and all children
results, err := repo.GetFolderUpdateInfo(testLib, "TestParent/Music")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(HaveLen(3))
Expect(results).To(HaveKey(parent.ID))
Expect(results).To(HaveKey(child1.ID))
Expect(results).To(HaveKey(child2.ID))
Expect(results).ToNot(HaveKey(otherParent.ID))
})
It("excludes children from other libraries", func() {
// Create parent in testLib
parent := model.NewFolder(testLib, "TestIsolation/Parent")
child := model.NewFolder(testLib, "TestIsolation/Parent/Child")
Expect(repo.Put(parent)).To(Succeed())
Expect(repo.Put(child)).To(Succeed())
// Create similar path in other library
otherParent := model.NewFolder(otherLib, "TestIsolation/Parent")
otherChild := model.NewFolder(otherLib, "TestIsolation/Parent/Child")
Expect(repo.Put(otherParent)).To(Succeed())
Expect(repo.Put(otherChild)).To(Succeed())
// Query should only return folders from testLib
results, err := repo.GetFolderUpdateInfo(testLib, "TestIsolation/Parent")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(HaveLen(2))
Expect(results).To(HaveKey(parent.ID))
Expect(results).To(HaveKey(child.ID))
Expect(results).ToNot(HaveKey(otherParent.ID))
Expect(results).ToNot(HaveKey(otherChild.ID))
})
It("excludes missing children when querying parent", func() {
// Create parent and children, mark one as missing
parent := model.NewFolder(testLib, "TestMissingChild/Parent")
child1 := model.NewFolder(testLib, "TestMissingChild/Parent/Child1")
child2 := model.NewFolder(testLib, "TestMissingChild/Parent/Child2")
child2.Missing = true
Expect(repo.Put(parent)).To(Succeed())
Expect(repo.Put(child1)).To(Succeed())
Expect(repo.Put(child2)).To(Succeed())
// Query parent - should only return parent and non-missing child
results, err := repo.GetFolderUpdateInfo(testLib, "TestMissingChild/Parent")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(HaveLen(2))
Expect(results).To(HaveKey(parent.ID))
Expect(results).To(HaveKey(child1.ID))
Expect(results).ToNot(HaveKey(child2.ID))
})
It("handles mix of existing and non-existing target paths", func() {
// Create folders for one path but not the other
existingParent := model.NewFolder(testLib, "TestMixed/Exists")
existingChild := model.NewFolder(testLib, "TestMixed/Exists/Child")
Expect(repo.Put(existingParent)).To(Succeed())
Expect(repo.Put(existingChild)).To(Succeed())
// Query both existing and non-existing paths
results, err := repo.GetFolderUpdateInfo(testLib, "TestMixed/Exists", "TestMixed/DoesNotExist")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(HaveLen(2))
Expect(results).To(HaveKey(existingParent.ID))
Expect(results).To(HaveKey(existingChild.ID))
})
It("handles empty folder path as root", func() {
// Test querying for root folder without creating it (fixtures should have one)
rootFolderID := model.FolderID(testLib, ".")
results, err := repo.GetFolderUpdateInfo(testLib, "")
Expect(err).ToNot(HaveOccurred())
// Should return the root folder if it exists
if len(results) > 0 {
Expect(results).To(HaveKey(rootFolderID))
}
})
It("returns empty map for non-existent folders", func() {
results, err := repo.GetFolderUpdateInfo(testLib, "NonExistent/Path")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(BeEmpty())
})
It("skips missing folders", func() {
// Create a folder and mark it as missing
folder := model.NewFolder(testLib, "TestMissing/Folder")
folder.Missing = true
err := repo.Put(folder)
Expect(err).ToNot(HaveOccurred())
results, err := repo.GetFolderUpdateInfo(testLib, "TestMissing/Folder")
Expect(err).ToNot(HaveOccurred())
Expect(results).To(BeEmpty())
})
})
})
})
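The tests above depend on the path handling in getFolderUpdateInfoBatch: stored folder paths carry no leading slash, the target folder is matched exactly, and descendants are matched with a prefix pattern. A small sketch of that normalization (Unix-style separators assumed):

package main

import (
    "fmt"
    "os"
    "path/filepath"
    "strings"
)

// descendantConditions mirrors the normalization done before building the SQL
// conditions: trim the leading separator, clean the path, then match the
// folder itself exactly and its descendants via a "path/%" LIKE pattern.
func descendantConditions(targetPath string) (exact, likePattern string) {
    clean := strings.TrimPrefix(targetPath, string(os.PathSeparator))
    clean = filepath.Clean(clean)
    return clean, clean + "/%"
}

func main() {
    exact, pattern := descendantConditions("/TestParent/Music/")
    fmt.Println(exact)   // TestParent/Music
    fmt.Println(pattern) // TestParent/Music/%
}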

View File

@@ -177,11 +177,7 @@ func (r *libraryRepository) ScanEnd(id int) error {
return err
}
// https://www.sqlite.org/pragma.html#pragma_optimize
// Use mask 0x10000 to check table sizes without running ANALYZE
// Running ANALYZE can cause query planner issues with expression-based collation indexes
if conf.Server.DevOptimizeDB {
_, err = r.executeSQL(Expr("PRAGMA optimize=0x10000;"))
}
_, err = r.executeSQL(Expr("PRAGMA optimize=0x10012;"))
return err
}
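The masked PRAGMA optimize above can be exercised directly through database/sql. A minimal sketch; the SQLite driver choice is an assumption, not something this diff prescribes:

package main

import (
    "database/sql"
    "log"

    _ "github.com/mattn/go-sqlite3" // driver choice is an assumption
)

// Issues the same masked optimize shown above: per the comment in the diff,
// mask 0x10000 checks table sizes without running ANALYZE.
func main() {
    db, err := sql.Open("sqlite3", ":memory:")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    if _, err := db.Exec("PRAGMA optimize=0x10000;"); err != nil {
        log.Fatal(err)
    }
    log.Println("PRAGMA optimize executed")
}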

View File

@@ -142,62 +142,4 @@ var _ = Describe("LibraryRepository", func() {
Expect(libAfter.TotalSize).To(Equal(sizeRes.Sum))
Expect(libAfter.TotalDuration).To(Equal(durationRes.Sum))
})
Describe("ScanBegin and ScanEnd", func() {
var lib *model.Library
BeforeEach(func() {
lib = &model.Library{
ID: 0,
Name: "Test Scan Library",
Path: "/music/test-scan",
}
err := repo.Put(lib)
Expect(err).ToNot(HaveOccurred())
})
DescribeTable("ScanBegin",
func(fullScan bool, expectedFullScanInProgress bool) {
err := repo.ScanBegin(lib.ID, fullScan)
Expect(err).ToNot(HaveOccurred())
updatedLib, err := repo.Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
Expect(updatedLib.LastScanStartedAt).ToNot(BeZero())
Expect(updatedLib.FullScanInProgress).To(Equal(expectedFullScanInProgress))
},
Entry("sets FullScanInProgress to true for full scan", true, true),
Entry("sets FullScanInProgress to false for quick scan", false, false),
)
Context("ScanEnd", func() {
BeforeEach(func() {
err := repo.ScanBegin(lib.ID, true)
Expect(err).ToNot(HaveOccurred())
})
It("sets LastScanAt and clears FullScanInProgress and LastScanStartedAt", func() {
err := repo.ScanEnd(lib.ID)
Expect(err).ToNot(HaveOccurred())
updatedLib, err := repo.Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
Expect(updatedLib.LastScanAt).ToNot(BeZero())
Expect(updatedLib.FullScanInProgress).To(BeFalse())
Expect(updatedLib.LastScanStartedAt).To(BeZero())
})
It("sets LastScanAt to be after LastScanStartedAt", func() {
libBefore, err := repo.Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
err = repo.ScanEnd(lib.ID)
Expect(err).ToNot(HaveOccurred())
libAfter, err := repo.Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
Expect(libAfter.LastScanAt).To(BeTemporally(">=", libBefore.LastScanStartedAt))
})
})
})
})

View File

@@ -4,8 +4,6 @@ import (
"context"
"fmt"
"slices"
"strconv"
"strings"
"sync"
"time"
@@ -86,7 +84,6 @@ func NewMediaFileRepository(ctx context.Context, db dbx.Builder) model.MediaFile
"created_at": "media_file.created_at",
"recently_added": mediaFileRecentlyAddedSort(),
"starred_at": "starred, starred_at",
"rated_at": "rating, rated_at",
})
return r
}
@@ -195,43 +192,12 @@ func (r *mediaFileRepository) GetCursor(options ...model.QueryOptions) (model.Me
}, nil
}
// FindByPaths finds media files by their paths.
// The paths can be library-qualified (format: "libraryID:path") or unqualified ("path").
// Library-qualified paths search within the specified library, while unqualified paths
// search across all libraries for backward compatibility.
func (r *mediaFileRepository) FindByPaths(paths []string) (model.MediaFiles, error) {
query := Or{}
for _, path := range paths {
parts := strings.SplitN(path, ":", 2)
if len(parts) == 2 {
// Library-qualified path: "libraryID:path"
libraryID, err := strconv.Atoi(parts[0])
if err != nil {
// Invalid format, skip
continue
}
relativePath := parts[1]
query = append(query, And{
Eq{"path collate nocase": relativePath},
Eq{"library_id": libraryID},
})
} else {
// Unqualified path: search across all libraries
query = append(query, Eq{"path collate nocase": path})
}
}
if len(query) == 0 {
return model.MediaFiles{}, nil
}
sel := r.newSelect().Columns("*").Where(query)
sel := r.newSelect().Columns("*").Where(Eq{"path collate nocase": paths})
var res dbMediaFiles
if err := r.queryAll(sel, &res); err != nil {
return nil, err
}
return res.toModels(), nil
}
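The library-qualified path format described in FindByPaths above ("libraryID:path") is easy to isolate into a tiny parser. A sketch; splitQualifiedPath is a hypothetical helper, not part of the repository:

package main

import (
    "fmt"
    "strconv"
    "strings"
)

// splitQualifiedPath returns the library ID and relative path for a
// "libraryID:path" value, or the original path with qualified=false when no
// valid library prefix is present (the backward-compatible case).
func splitQualifiedPath(p string) (libraryID int, path string, qualified bool) {
    parts := strings.SplitN(p, ":", 2)
    if len(parts) == 2 {
        if id, err := strconv.Atoi(parts[0]); err == nil {
            return id, parts[1], true
        }
    }
    return 0, p, false
}

func main() {
    for _, p := range []string{"3:Artist/Album/01.mp3", "Artist/Album/01.mp3"} {
        id, path, ok := splitQualifiedPath(p)
        fmt.Println(id, path, ok)
    }
}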

View File

@@ -89,14 +89,6 @@ func (s *SQLStore) ScrobbleBuffer(ctx context.Context) model.ScrobbleBufferRepos
return NewScrobbleBufferRepository(ctx, s.getDBXBuilder())
}
func (s *SQLStore) Scrobble(ctx context.Context) model.ScrobbleRepository {
return NewScrobbleRepository(ctx, s.getDBXBuilder())
}
func (s *SQLStore) Plugin(ctx context.Context) model.PluginRepository {
return NewPluginRepository(ctx, s.getDBXBuilder())
}
func (s *SQLStore) Resource(ctx context.Context, m interface{}) model.ResourceRepository {
switch m.(type) {
case model.User:
@@ -121,8 +113,6 @@ func (s *SQLStore) Resource(ctx context.Context, m interface{}) model.ResourceRe
return s.Share(ctx).(model.ResourceRepository)
case model.Tag:
return s.Tag(ctx).(model.ResourceRepository)
case model.Plugin:
return s.Plugin(ctx).(model.ResourceRepository)
}
log.Error("Resource not implemented", "model", reflect.TypeOf(m).Name())
return nil
@@ -167,7 +157,7 @@ func (s *SQLStore) WithTxImmediate(block func(tx model.DataStore) error, scope .
}, scope...)
}
func (s *SQLStore) GC(ctx context.Context, libraryIDs ...int) error {
func (s *SQLStore) GC(ctx context.Context) error {
trace := func(ctx context.Context, msg string, f func() error) func() error {
return func() error {
start := time.Now()
@@ -177,17 +167,11 @@ func (s *SQLStore) GC(ctx context.Context, libraryIDs ...int) error {
}
}
// If libraryIDs are provided, scope operations to those libraries where possible
scoped := len(libraryIDs) > 0
if scoped {
log.Debug(ctx, "GC: Running selective garbage collection", "libraryIDs", libraryIDs)
}
err := run.Sequentially(
trace(ctx, "purge empty albums", func() error { return s.Album(ctx).(*albumRepository).purgeEmpty(libraryIDs...) }),
trace(ctx, "purge empty albums", func() error { return s.Album(ctx).(*albumRepository).purgeEmpty() }),
trace(ctx, "purge empty artists", func() error { return s.Artist(ctx).(*artistRepository).purgeEmpty() }),
trace(ctx, "mark missing artists", func() error { return s.Artist(ctx).(*artistRepository).markMissing() }),
trace(ctx, "purge empty folders", func() error { return s.Folder(ctx).(*folderRepository).purgeEmpty(libraryIDs...) }),
trace(ctx, "purge empty folders", func() error { return s.Folder(ctx).(*folderRepository).purgeEmpty() }),
trace(ctx, "clean album annotations", func() error { return s.Album(ctx).(*albumRepository).cleanAnnotations() }),
trace(ctx, "clean artist annotations", func() error { return s.Artist(ctx).(*artistRepository).cleanAnnotations() }),
trace(ctx, "clean media file annotations", func() error { return s.MediaFile(ctx).(*mediaFileRepository).cleanAnnotations() }),

View File

@@ -264,11 +264,6 @@ func (r *playlistRepository) refreshSmartPlaylist(pls *model.Playlist) bool {
"annotation.item_id = media_file.id" +
" AND annotation.item_type = 'media_file'" +
" AND annotation.user_id = '" + usr.ID + "')")
// Only include media files from libraries the user has access to
sq = r.applyLibraryFilter(sq, "media_file")
// Apply the criteria rules
sq = r.addCriteria(sq, rules)
insSql := Insert("playlist_tracks").Columns("id", "playlist_id", "media_file_id").Select(sq)
_, err = r.executeSQL(insSql)
@@ -393,7 +388,6 @@ func (r *playlistRepository) loadTracks(sel SelectBuilder, id string) (model.Pla
"coalesce(play_count, 0) as play_count",
"play_date",
"coalesce(rating, 0) as rating",
"rated_at",
"f.*",
"playlist_tracks.*",
"library.path as library_path",

View File

@@ -1,6 +1,7 @@
package persistence
import (
"context"
"time"
"github.com/navidrome/navidrome/conf"
@@ -10,14 +11,13 @@ import (
"github.com/navidrome/navidrome/model/request"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/pocketbase/dbx"
)
var _ = Describe("PlaylistRepository", func() {
var repo model.PlaylistRepository
BeforeEach(func() {
ctx := log.NewContext(GinkgoT().Context())
ctx := log.NewContext(context.TODO())
ctx = request.WithUser(ctx, model.User{ID: "userid", UserName: "userid", IsAdmin: true})
repo = NewPlaylistRepository(ctx, GetDBXBuilder())
})
@@ -252,250 +252,4 @@ var _ = Describe("PlaylistRepository", func() {
Expect(tracks[3].MediaFileID).To(Equal("2001")) // Disc 2, Track 11
})
})
Describe("Smart Playlists with Tag Criteria", func() {
var mfRepo model.MediaFileRepository
var testPlaylistID string
var songWithGrouping, songWithoutGrouping model.MediaFile
BeforeEach(func() {
ctx := log.NewContext(GinkgoT().Context())
ctx = request.WithUser(ctx, model.User{ID: "userid", UserName: "userid", IsAdmin: true})
mfRepo = NewMediaFileRepository(ctx, GetDBXBuilder())
// Register 'grouping' as a valid tag for smart playlists
criteria.AddTagNames([]string{"grouping"})
// Create a song with the grouping tag
songWithGrouping = model.MediaFile{
ID: "test-grouping-1",
Title: "Song With Grouping",
Artist: "Test Artist",
ArtistID: "1",
Album: "Test Album",
AlbumID: "101",
Path: "/test/grouping/song1.mp3",
Tags: model.Tags{
"grouping": []string{"My Crate"},
},
Participants: model.Participants{},
LibraryID: 1,
Lyrics: "[]",
}
Expect(mfRepo.Put(&songWithGrouping)).To(Succeed())
// Create a song without the grouping tag
songWithoutGrouping = model.MediaFile{
ID: "test-grouping-2",
Title: "Song Without Grouping",
Artist: "Test Artist",
ArtistID: "1",
Album: "Test Album",
AlbumID: "101",
Path: "/test/grouping/song2.mp3",
Tags: model.Tags{},
Participants: model.Participants{},
LibraryID: 1,
Lyrics: "[]",
}
Expect(mfRepo.Put(&songWithoutGrouping)).To(Succeed())
})
AfterEach(func() {
if testPlaylistID != "" {
_ = repo.Delete(testPlaylistID)
testPlaylistID = ""
}
// Clean up test media files
_, _ = GetDBXBuilder().Delete("media_file", dbx.HashExp{"id": "test-grouping-1"}).Execute()
_, _ = GetDBXBuilder().Delete("media_file", dbx.HashExp{"id": "test-grouping-2"}).Execute()
})
It("matches tracks with a tag value using 'contains' with empty string (issue #4728 workaround)", func() {
By("creating a smart playlist that checks if grouping tag has any value")
// This is the workaround for issue #4728: using 'contains' with empty string
// generates SQL: value LIKE '%%' which matches any non-empty string
rules := &criteria.Criteria{
Expression: criteria.All{
criteria.Contains{"grouping": ""},
},
}
newPls := model.Playlist{Name: "Tracks with Grouping", OwnerID: "userid", Rules: rules}
Expect(repo.Put(&newPls)).To(Succeed())
testPlaylistID = newPls.ID
By("refreshing the smart playlist")
conf.Server.SmartPlaylistRefreshDelay = -1 * time.Second // Force refresh
pls, err := repo.GetWithTracks(newPls.ID, true, false)
Expect(err).ToNot(HaveOccurred())
By("verifying only the track with grouping tag is matched")
Expect(pls.Tracks).To(HaveLen(1))
Expect(pls.Tracks[0].MediaFileID).To(Equal(songWithGrouping.ID))
})
It("excludes tracks with a tag value using 'notContains' with empty string", func() {
By("creating a smart playlist that checks if grouping tag is NOT set")
rules := &criteria.Criteria{
Expression: criteria.All{
criteria.NotContains{"grouping": ""},
},
}
newPls := model.Playlist{Name: "Tracks without Grouping", OwnerID: "userid", Rules: rules}
Expect(repo.Put(&newPls)).To(Succeed())
testPlaylistID = newPls.ID
By("refreshing the smart playlist")
conf.Server.SmartPlaylistRefreshDelay = -1 * time.Second // Force refresh
pls, err := repo.GetWithTracks(newPls.ID, true, false)
Expect(err).ToNot(HaveOccurred())
By("verifying the track with grouping is NOT in the playlist")
for _, track := range pls.Tracks {
Expect(track.MediaFileID).ToNot(Equal(songWithGrouping.ID))
}
By("verifying the track without grouping IS in the playlist")
var foundWithoutGrouping bool
for _, track := range pls.Tracks {
if track.MediaFileID == songWithoutGrouping.ID {
foundWithoutGrouping = true
break
}
}
Expect(foundWithoutGrouping).To(BeTrue())
})
})
Describe("Smart Playlists Library Filtering", func() {
var mfRepo model.MediaFileRepository
var testPlaylistID string
var lib2ID int
var restrictedUserID string
var uniqueLibPath string
BeforeEach(func() {
db := GetDBXBuilder()
// Generate unique IDs for this test run
uniqueSuffix := time.Now().Format("20060102150405.000")
restrictedUserID = "restricted-user-" + uniqueSuffix
uniqueLibPath = "/music/lib2-" + uniqueSuffix
// Create a second library with unique name and path to avoid conflicts with other tests
_, err := db.DB().Exec("INSERT INTO library (name, path, created_at, updated_at) VALUES (?, ?, datetime('now'), datetime('now'))", "Library 2-"+uniqueSuffix, uniqueLibPath)
Expect(err).ToNot(HaveOccurred())
err = db.DB().QueryRow("SELECT last_insert_rowid()").Scan(&lib2ID)
Expect(err).ToNot(HaveOccurred())
// Create a restricted user with access only to library 1
_, err = db.DB().Exec("INSERT INTO user (id, user_name, name, is_admin, password, created_at, updated_at) VALUES (?, ?, 'Restricted User', false, 'pass', datetime('now'), datetime('now'))", restrictedUserID, restrictedUserID)
Expect(err).ToNot(HaveOccurred())
_, err = db.DB().Exec("INSERT INTO user_library (user_id, library_id) VALUES (?, 1)", restrictedUserID)
Expect(err).ToNot(HaveOccurred())
// Create test media files in each library
ctx := log.NewContext(GinkgoT().Context())
ctx = request.WithUser(ctx, model.User{ID: "userid", UserName: "userid", IsAdmin: true})
mfRepo = NewMediaFileRepository(ctx, db)
// Song in library 1 (accessible by restricted user)
songLib1 := model.MediaFile{
ID: "lib1-song",
Title: "Song in Lib1",
Artist: "Test Artist",
ArtistID: "1",
Album: "Test Album",
AlbumID: "101",
Path: "/music/lib1/song.mp3",
LibraryID: 1,
Participants: model.Participants{},
Tags: model.Tags{},
Lyrics: "[]",
}
Expect(mfRepo.Put(&songLib1)).To(Succeed())
// Song in library 2 (NOT accessible by restricted user)
songLib2 := model.MediaFile{
ID: "lib2-song",
Title: "Song in Lib2",
Artist: "Test Artist",
ArtistID: "1",
Album: "Test Album",
AlbumID: "101",
Path: uniqueLibPath + "/song.mp3",
LibraryID: lib2ID,
Participants: model.Participants{},
Tags: model.Tags{},
Lyrics: "[]",
}
Expect(mfRepo.Put(&songLib2)).To(Succeed())
})
AfterEach(func() {
db := GetDBXBuilder()
if testPlaylistID != "" {
_ = repo.Delete(testPlaylistID)
testPlaylistID = ""
}
// Clean up test data
_, _ = db.Delete("media_file", dbx.HashExp{"id": "lib1-song"}).Execute()
_, _ = db.Delete("media_file", dbx.HashExp{"id": "lib2-song"}).Execute()
_, _ = db.Delete("user_library", dbx.HashExp{"user_id": restrictedUserID}).Execute()
_, _ = db.Delete("user", dbx.HashExp{"id": restrictedUserID}).Execute()
_, _ = db.DB().Exec("DELETE FROM library WHERE id = ?", lib2ID)
})
It("should only include tracks from libraries the user has access to (issue #4738)", func() {
db := GetDBXBuilder()
ctx := log.NewContext(GinkgoT().Context())
// Create the smart playlist as the restricted user
restrictedUser := model.User{ID: restrictedUserID, UserName: restrictedUserID, IsAdmin: false}
ctx = request.WithUser(ctx, restrictedUser)
restrictedRepo := NewPlaylistRepository(ctx, db)
// Create a smart playlist that matches all songs
rules := &criteria.Criteria{
Expression: criteria.All{
criteria.Gt{"playCount": -1}, // Matches everything
},
}
newPls := model.Playlist{Name: "All Songs", OwnerID: restrictedUserID, Rules: rules}
Expect(restrictedRepo.Put(&newPls)).To(Succeed())
testPlaylistID = newPls.ID
By("refreshing the smart playlist")
conf.Server.SmartPlaylistRefreshDelay = -1 * time.Second // Force refresh
pls, err := restrictedRepo.GetWithTracks(newPls.ID, true, false)
Expect(err).ToNot(HaveOccurred())
By("verifying only the track from library 1 is in the playlist")
var foundLib1Song, foundLib2Song bool
for _, track := range pls.Tracks {
if track.MediaFileID == "lib1-song" {
foundLib1Song = true
}
if track.MediaFileID == "lib2-song" {
foundLib2Song = true
}
}
Expect(foundLib1Song).To(BeTrue(), "Song from library 1 should be in the playlist")
Expect(foundLib2Song).To(BeFalse(), "Song from library 2 should NOT be in the playlist")
By("verifying playlist_tracks table only contains the accessible track")
var playlistTracksCount int
err = db.DB().QueryRow("SELECT count(*) FROM playlist_tracks WHERE playlist_id = ?", newPls.ID).Scan(&playlistTracksCount)
Expect(err).ToNot(HaveOccurred())
// Count should only include tracks visible to the user (lib1-song)
// The count may include other test songs from library 1, but NOT lib2-song
var lib2TrackCount int
err = db.DB().QueryRow("SELECT count(*) FROM playlist_tracks WHERE playlist_id = ? AND media_file_id = 'lib2-song'", newPls.ID).Scan(&lib2TrackCount)
Expect(err).ToNot(HaveOccurred())
Expect(lib2TrackCount).To(Equal(0), "lib2-song should not be in playlist_tracks")
By("verifying SongCount matches visible tracks")
Expect(pls.SongCount).To(Equal(len(pls.Tracks)), "SongCount should match the number of visible tracks")
})
})
})
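The issue #4728 workaround exercised above relies on a "contains" rule with an empty string producing value LIKE '%%'. A sketch approximating that condition with plain squirrel; containsCondition and the tag.value column are illustrative, not the criteria package's real code:

package main

import (
    "fmt"

    sq "github.com/Masterminds/squirrel"
)

// containsCondition approximates what a "contains" rule compiles to: the value
// is wrapped in wildcards, so an empty value yields LIKE '%%', which matches
// any row that has the tag at all.
func containsCondition(field, value string) sq.Sqlizer {
    return sq.Like{field: "%" + value + "%"}
}

func main() {
    query, args, _ := containsCondition("tag.value", "").ToSql()
    fmt.Println(query) // tag.value LIKE ?
    fmt.Println(args)  // [%%]
}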

View File

@@ -97,7 +97,6 @@ func (r *playlistTrackRepository) Read(id string) (interface{}, error) {
"coalesce(rating, 0) as rating",
"starred_at",
"play_date",
"rated_at",
"f.*",
"playlist_tracks.*",
).

View File

@@ -1,153 +0,0 @@
package persistence
import (
"context"
"errors"
"time"
. "github.com/Masterminds/squirrel"
"github.com/deluan/rest"
"github.com/navidrome/navidrome/model"
"github.com/pocketbase/dbx"
)
type pluginRepository struct {
sqlRepository
}
func NewPluginRepository(ctx context.Context, db dbx.Builder) model.PluginRepository {
r := &pluginRepository{}
r.ctx = ctx
r.db = db
r.registerModel(&model.Plugin{}, map[string]filterFunc{
"id": idFilter("plugin"),
"enabled": booleanFilter,
})
return r
}
func (r *pluginRepository) isPermitted() bool {
user := loggedUser(r.ctx)
return user.IsAdmin
}
func (r *pluginRepository) CountAll(options ...model.QueryOptions) (int64, error) {
if !r.isPermitted() {
return 0, rest.ErrPermissionDenied
}
sql := r.newSelect()
return r.count(sql, options...)
}
func (r *pluginRepository) Delete(id string) error {
if !r.isPermitted() {
return rest.ErrPermissionDenied
}
return r.delete(Eq{"id": id})
}
func (r *pluginRepository) Get(id string) (*model.Plugin, error) {
if !r.isPermitted() {
return nil, rest.ErrPermissionDenied
}
sel := r.newSelect().Where(Eq{"id": id}).Columns("*")
res := model.Plugin{}
err := r.queryOne(sel, &res)
return &res, err
}
func (r *pluginRepository) GetAll(options ...model.QueryOptions) (model.Plugins, error) {
if !r.isPermitted() {
return nil, rest.ErrPermissionDenied
}
sel := r.newSelect(options...).Columns("*")
res := model.Plugins{}
err := r.queryAll(sel, &res)
return res, err
}
func (r *pluginRepository) Put(plugin *model.Plugin) error {
if !r.isPermitted() {
return rest.ErrPermissionDenied
}
plugin.UpdatedAt = time.Now()
if plugin.ID == "" {
return errors.New("plugin ID cannot be empty")
}
// Upsert using INSERT ... ON CONFLICT for atomic operation
_, err := r.db.NewQuery(`
INSERT INTO plugin (id, path, manifest, config, enabled, last_error, sha256, created_at, updated_at)
VALUES ({:id}, {:path}, {:manifest}, {:config}, {:enabled}, {:last_error}, {:sha256}, {:created_at}, {:updated_at})
ON CONFLICT(id) DO UPDATE SET
path = excluded.path,
manifest = excluded.manifest,
config = excluded.config,
enabled = excluded.enabled,
last_error = excluded.last_error,
sha256 = excluded.sha256,
updated_at = excluded.updated_at
`).Bind(dbx.Params{
"id": plugin.ID,
"path": plugin.Path,
"manifest": plugin.Manifest,
"config": plugin.Config,
"enabled": plugin.Enabled,
"last_error": plugin.LastError,
"sha256": plugin.SHA256,
"created_at": time.Now(),
"updated_at": plugin.UpdatedAt,
}).Execute()
return err
}
func (r *pluginRepository) Count(options ...rest.QueryOptions) (int64, error) {
return r.CountAll(r.parseRestOptions(r.ctx, options...))
}
func (r *pluginRepository) EntityName() string {
return "plugin"
}
func (r *pluginRepository) NewInstance() interface{} {
return &model.Plugin{}
}
func (r *pluginRepository) Read(id string) (interface{}, error) {
return r.Get(id)
}
func (r *pluginRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
return r.GetAll(r.parseRestOptions(r.ctx, options...))
}
func (r *pluginRepository) Save(entity interface{}) (string, error) {
p := entity.(*model.Plugin)
if !r.isPermitted() {
return "", rest.ErrPermissionDenied
}
err := r.Put(p)
if errors.Is(err, model.ErrNotFound) {
return "", rest.ErrNotFound
}
return p.ID, err
}
func (r *pluginRepository) Update(id string, entity interface{}, cols ...string) error {
p := entity.(*model.Plugin)
p.ID = id
if !r.isPermitted() {
return rest.ErrPermissionDenied
}
err := r.Put(p)
if errors.Is(err, model.ErrNotFound) {
return rest.ErrNotFound
}
return err
}
var _ model.PluginRepository = (*pluginRepository)(nil)
var _ rest.Repository = (*pluginRepository)(nil)
var _ rest.Persistable = (*pluginRepository)(nil)
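The removed pluginRepository.Put above uses dbx named parameters with an INSERT ... ON CONFLICT upsert. A minimal standalone sketch of that pattern; the sqlite driver and the trimmed-down table are assumptions made for the example:

package main

import (
    "log"
    "time"

    _ "github.com/mattn/go-sqlite3" // driver choice is an assumption
    "github.com/pocketbase/dbx"
)

// Named {:param} placeholders are bound from dbx.Params, and the
// ON CONFLICT(id) clause makes repeated writes for the same ID idempotent.
func main() {
    db, err := dbx.Open("sqlite3", ":memory:")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    if _, err := db.NewQuery(`CREATE TABLE plugin (id TEXT PRIMARY KEY, path TEXT, updated_at DATETIME)`).Execute(); err != nil {
        log.Fatal(err)
    }

    upsert := func(id, path string) error {
        _, err := db.NewQuery(`
            INSERT INTO plugin (id, path, updated_at)
            VALUES ({:id}, {:path}, {:updated_at})
            ON CONFLICT(id) DO UPDATE SET
                path = excluded.path,
                updated_at = excluded.updated_at`).
            Bind(dbx.Params{"id": id, "path": path, "updated_at": time.Now()}).
            Execute()
        return err
    }

    if err := upsert("test-plugin", "/plugins/v1.wasm"); err != nil {
        log.Fatal(err)
    }
    if err := upsert("test-plugin", "/plugins/v2.wasm"); err != nil { // updates the same row
        log.Fatal(err)
    }
}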

View File

@@ -1,227 +0,0 @@
package persistence
import (
"github.com/deluan/rest"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("PluginRepository", func() {
var repo model.PluginRepository
Describe("Admin User", func() {
BeforeEach(func() {
ctx := GinkgoT().Context()
ctx = request.WithUser(ctx, model.User{ID: "userid", UserName: "userid", IsAdmin: true})
repo = NewPluginRepository(ctx, GetDBXBuilder())
// Clean up any existing plugins
all, _ := repo.GetAll()
for _, p := range all {
_ = repo.Delete(p.ID)
}
})
AfterEach(func() {
// Clean up after tests
all, _ := repo.GetAll()
for _, p := range all {
_ = repo.Delete(p.ID)
}
})
Describe("CountAll", func() {
It("returns 0 when no plugins exist", func() {
Expect(repo.CountAll()).To(Equal(int64(0)))
})
It("returns the number of plugins in the DB", func() {
_ = repo.Put(&model.Plugin{ID: "test-plugin-1", Path: "/plugins/test1.wasm", Manifest: "{}", SHA256: "abc123"})
_ = repo.Put(&model.Plugin{ID: "test-plugin-2", Path: "/plugins/test2.wasm", Manifest: "{}", SHA256: "def456"})
Expect(repo.CountAll()).To(Equal(int64(2)))
})
})
Describe("Delete", func() {
It("deletes existing item", func() {
plugin := &model.Plugin{ID: "to-delete", Path: "/plugins/delete.wasm", Manifest: "{}", SHA256: "hash"}
_ = repo.Put(plugin)
err := repo.Delete(plugin.ID)
Expect(err).To(BeNil())
_, err = repo.Get(plugin.ID)
Expect(err).To(MatchError(model.ErrNotFound))
})
})
Describe("Get", func() {
It("returns an existing item", func() {
plugin := &model.Plugin{ID: "test-get", Path: "/plugins/test.wasm", Manifest: `{"name":"test"}`, SHA256: "hash123"}
_ = repo.Put(plugin)
res, err := repo.Get(plugin.ID)
Expect(err).To(BeNil())
Expect(res.ID).To(Equal(plugin.ID))
Expect(res.Path).To(Equal(plugin.Path))
Expect(res.Manifest).To(Equal(plugin.Manifest))
})
It("errors when missing", func() {
_, err := repo.Get("notanid")
Expect(err).To(MatchError(model.ErrNotFound))
})
})
Describe("GetAll", func() {
It("returns all items from the DB", func() {
_ = repo.Put(&model.Plugin{ID: "plugin-a", Path: "/plugins/a.wasm", Manifest: "{}", SHA256: "hash1"})
_ = repo.Put(&model.Plugin{ID: "plugin-b", Path: "/plugins/b.wasm", Manifest: "{}", SHA256: "hash2"})
all, err := repo.GetAll()
Expect(err).To(BeNil())
Expect(all).To(HaveLen(2))
})
It("supports pagination", func() {
_ = repo.Put(&model.Plugin{ID: "plugin-1", Path: "/plugins/1.wasm", Manifest: "{}", SHA256: "h1"})
_ = repo.Put(&model.Plugin{ID: "plugin-2", Path: "/plugins/2.wasm", Manifest: "{}", SHA256: "h2"})
_ = repo.Put(&model.Plugin{ID: "plugin-3", Path: "/plugins/3.wasm", Manifest: "{}", SHA256: "h3"})
page1, err := repo.GetAll(model.QueryOptions{Max: 2, Offset: 0, Sort: "id"})
Expect(err).To(BeNil())
Expect(page1).To(HaveLen(2))
page2, err := repo.GetAll(model.QueryOptions{Max: 2, Offset: 2, Sort: "id"})
Expect(err).To(BeNil())
Expect(page2).To(HaveLen(1))
})
})
Describe("Put", func() {
It("successfully creates a new plugin", func() {
plugin := &model.Plugin{
ID: "new-plugin",
Path: "/plugins/new.wasm",
Manifest: `{"name":"new","version":"1.0"}`,
Config: `{"setting":"value"}`,
SHA256: "sha256hash",
Enabled: false,
}
err := repo.Put(plugin)
Expect(err).To(BeNil())
saved, err := repo.Get(plugin.ID)
Expect(err).To(BeNil())
Expect(saved.Path).To(Equal(plugin.Path))
Expect(saved.Manifest).To(Equal(plugin.Manifest))
Expect(saved.Config).To(Equal(plugin.Config))
Expect(saved.Enabled).To(BeFalse())
Expect(saved.CreatedAt).NotTo(BeZero())
Expect(saved.UpdatedAt).NotTo(BeZero())
})
It("successfully updates an existing plugin", func() {
plugin := &model.Plugin{
ID: "update-plugin",
Path: "/plugins/update.wasm",
Manifest: `{"name":"test"}`,
SHA256: "original",
Enabled: false,
}
_ = repo.Put(plugin)
plugin.Enabled = true
plugin.Config = `{"new":"config"}`
plugin.SHA256 = "updated"
err := repo.Put(plugin)
Expect(err).To(BeNil())
saved, err := repo.Get(plugin.ID)
Expect(err).To(BeNil())
Expect(saved.Enabled).To(BeTrue())
Expect(saved.Config).To(Equal(`{"new":"config"}`))
Expect(saved.SHA256).To(Equal("updated"))
})
It("stores and retrieves last_error", func() {
plugin := &model.Plugin{
ID: "error-plugin",
Path: "/plugins/error.wasm",
Manifest: "{}",
SHA256: "hash",
LastError: "failed to load: missing export",
}
err := repo.Put(plugin)
Expect(err).To(BeNil())
saved, err := repo.Get(plugin.ID)
Expect(err).To(BeNil())
Expect(saved.LastError).To(Equal("failed to load: missing export"))
})
It("fails when ID is empty", func() {
plugin := &model.Plugin{
Path: "/plugins/noid.wasm",
Manifest: "{}",
SHA256: "hash",
}
err := repo.Put(plugin)
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("ID cannot be empty"))
})
})
})
Describe("Regular User", func() {
BeforeEach(func() {
ctx := GinkgoT().Context()
ctx = request.WithUser(ctx, model.User{ID: "userid", UserName: "userid", IsAdmin: false})
repo = NewPluginRepository(ctx, GetDBXBuilder())
})
Describe("CountAll", func() {
It("fails to count items", func() {
_, err := repo.CountAll()
Expect(err).To(Equal(rest.ErrPermissionDenied))
})
})
Describe("Delete", func() {
It("fails to delete items", func() {
err := repo.Delete("any-id")
Expect(err).To(Equal(rest.ErrPermissionDenied))
})
})
Describe("Get", func() {
It("fails to get items", func() {
_, err := repo.Get("any-id")
Expect(err).To(Equal(rest.ErrPermissionDenied))
})
})
Describe("GetAll", func() {
It("fails to get all items", func() {
_, err := repo.GetAll()
Expect(err).To(Equal(rest.ErrPermissionDenied))
})
})
Describe("Put", func() {
It("fails to create/update item", func() {
err := repo.Put(&model.Plugin{
ID: "user-create",
Path: "/plugins/create.wasm",
Manifest: "{}",
SHA256: "hash",
})
Expect(err).To(Equal(rest.ErrPermissionDenied))
})
})
})
})

View File

@@ -1,34 +0,0 @@
package persistence
import (
"context"
"time"
. "github.com/Masterminds/squirrel"
"github.com/navidrome/navidrome/model"
"github.com/pocketbase/dbx"
)
type scrobbleRepository struct {
sqlRepository
}
func NewScrobbleRepository(ctx context.Context, db dbx.Builder) model.ScrobbleRepository {
r := &scrobbleRepository{}
r.ctx = ctx
r.db = db
r.tableName = "scrobbles"
return r
}
func (r *scrobbleRepository) RecordScrobble(mediaFileID string, submissionTime time.Time) error {
userID := loggedUser(r.ctx).ID
values := map[string]interface{}{
"media_file_id": mediaFileID,
"user_id": userID,
"submission_time": submissionTime.Unix(),
}
insert := Insert(r.tableName).SetMap(values)
_, err := r.executeSQL(insert)
return err
}
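RecordScrobble above builds its insert from a map. A short sketch of the Insert().SetMap() call and the SQL squirrel produces for it (columns come out in sorted key order):

package main

import (
    "fmt"
    "time"

    sq "github.com/Masterminds/squirrel"
)

// The map keys become columns and the values become placeholders, with the
// submission time stored as a Unix timestamp.
func main() {
    values := map[string]interface{}{
        "media_file_id":   "some-file-id",
        "user_id":         "some-user-id",
        "submission_time": time.Now().Unix(),
    }
    query, args, _ := sq.Insert("scrobbles").SetMap(values).ToSql()
    fmt.Println(query) // INSERT INTO scrobbles (media_file_id,submission_time,user_id) VALUES (?,?,?)
    fmt.Println(args)
}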

View File

@@ -1,84 +0,0 @@
package persistence
import (
"context"
"time"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/id"
"github.com/navidrome/navidrome/model/request"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/pocketbase/dbx"
)
var _ = Describe("ScrobbleRepository", func() {
var repo model.ScrobbleRepository
var rawRepo sqlRepository
var ctx context.Context
var fileID string
var userID string
BeforeEach(func() {
fileID = id.NewRandom()
userID = id.NewRandom()
ctx = request.WithUser(log.NewContext(GinkgoT().Context()), model.User{ID: userID, UserName: "johndoe", IsAdmin: true})
db := GetDBXBuilder()
repo = NewScrobbleRepository(ctx, db)
rawRepo = sqlRepository{
ctx: ctx,
tableName: "scrobbles",
db: db,
}
})
AfterEach(func() {
_, _ = rawRepo.db.Delete("scrobbles", dbx.HashExp{"media_file_id": fileID}).Execute()
_, _ = rawRepo.db.Delete("media_file", dbx.HashExp{"id": fileID}).Execute()
_, _ = rawRepo.db.Delete("user", dbx.HashExp{"id": userID}).Execute()
})
Describe("RecordScrobble", func() {
It("records a scrobble event", func() {
submissionTime := time.Now().UTC()
// Insert User
_, err := rawRepo.db.Insert("user", dbx.Params{
"id": userID,
"user_name": "user",
"password": "pw",
"created_at": time.Now(),
"updated_at": time.Now(),
}).Execute()
Expect(err).ToNot(HaveOccurred())
// Insert MediaFile
_, err = rawRepo.db.Insert("media_file", dbx.Params{
"id": fileID,
"path": "path",
"created_at": time.Now(),
"updated_at": time.Now(),
}).Execute()
Expect(err).ToNot(HaveOccurred())
err = repo.RecordScrobble(fileID, submissionTime)
Expect(err).ToNot(HaveOccurred())
// Verify insertion
var scrobble struct {
MediaFileID string `db:"media_file_id"`
UserID string `db:"user_id"`
SubmissionTime int64 `db:"submission_time"`
}
err = rawRepo.db.Select("*").From("scrobbles").
Where(dbx.HashExp{"media_file_id": fileID, "user_id": userID}).
One(&scrobble)
Expect(err).ToNot(HaveOccurred())
Expect(scrobble.MediaFileID).To(Equal(fileID))
Expect(scrobble.UserID).To(Equal(userID))
Expect(scrobble.SubmissionTime).To(Equal(submissionTime.Unix()))
})
})
})

Some files were not shown because too many files have changed in this diff.