mirror of
https://github.com/navidrome/navidrome.git
synced 2026-01-01 03:18:13 -05:00
Compare commits
26 Commits
copilot/su
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
735c0d9103 | ||
|
|
fc9817552d | ||
|
|
0c1b65d3e6 | ||
|
|
47b448c64f | ||
|
|
834fa494e4 | ||
|
|
5d34640065 | ||
|
|
9ed309ac81 | ||
|
|
8c80be56da | ||
|
|
cde5992c46 | ||
|
|
017676c457 | ||
|
|
2d7b716834 | ||
|
|
c7ac0e4414 | ||
|
|
c9409d306a | ||
|
|
ebbe62bbbd | ||
|
|
42c85a18e2 | ||
|
|
7ccf44b8ed | ||
|
|
603cccde11 | ||
|
|
6ed6524752 | ||
|
|
a081569ed4 | ||
|
|
e923c02c6a | ||
|
|
51ca2dee65 | ||
|
|
6b961bd99d | ||
|
|
396eee48c6 | ||
|
|
cc3cca6077 | ||
|
|
f6ac99e081 | ||
|
|
a521c74a59 |
20
.github/workflows/pipeline.yml
vendored
20
.github/workflows/pipeline.yml
vendored
@@ -217,7 +217,7 @@ jobs:
|
||||
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
|
||||
|
||||
- name: Upload Binaries
|
||||
uses: actions/upload-artifact@v5
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: navidrome-${{ env.PLATFORM }}
|
||||
path: ./output
|
||||
@@ -248,7 +248,7 @@ jobs:
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@v5
|
||||
uses: actions/upload-artifact@v6
|
||||
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
|
||||
with:
|
||||
name: digests-${{ env.PLATFORM }}
|
||||
@@ -270,7 +270,7 @@ jobs:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v6
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
@@ -304,7 +304,7 @@ jobs:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v6
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
@@ -356,7 +356,7 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- uses: actions/download-artifact@v6
|
||||
- uses: actions/download-artifact@v7
|
||||
with:
|
||||
path: ./binaries
|
||||
pattern: navidrome-windows*
|
||||
@@ -375,7 +375,7 @@ jobs:
|
||||
du -h binaries/msi/*.msi
|
||||
|
||||
- name: Upload MSI files
|
||||
uses: actions/upload-artifact@v5
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: navidrome-windows-installers
|
||||
path: binaries/msi/*.msi
|
||||
@@ -393,7 +393,7 @@ jobs:
|
||||
fetch-depth: 0
|
||||
fetch-tags: true
|
||||
|
||||
- uses: actions/download-artifact@v6
|
||||
- uses: actions/download-artifact@v7
|
||||
with:
|
||||
path: ./binaries
|
||||
pattern: navidrome-*
|
||||
@@ -419,7 +419,7 @@ jobs:
|
||||
rm ./dist/*.tar.gz ./dist/*.zip
|
||||
|
||||
- name: Upload all-packages artifact
|
||||
uses: actions/upload-artifact@v5
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: packages
|
||||
path: dist/navidrome_0*
|
||||
@@ -442,13 +442,13 @@ jobs:
|
||||
item: ${{ fromJson(needs.release.outputs.package_list) }}
|
||||
steps:
|
||||
- name: Download all-packages artifact
|
||||
uses: actions/download-artifact@v6
|
||||
uses: actions/download-artifact@v7
|
||||
with:
|
||||
name: packages
|
||||
path: ./dist
|
||||
|
||||
- name: Upload all-packages artifact
|
||||
uses: actions/upload-artifact@v5
|
||||
uses: actions/upload-artifact@v6
|
||||
with:
|
||||
name: navidrome_linux_${{ matrix.item }}
|
||||
path: dist/navidrome_0*_linux_${{ matrix.item }}
|
||||
|
||||
2
.github/workflows/stale.yml
vendored
2
.github/workflows/stale.yml
vendored
@@ -12,7 +12,7 @@ jobs:
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dessant/lock-threads@v5
|
||||
- uses: dessant/lock-threads@v6
|
||||
with:
|
||||
process-only: 'issues, prs'
|
||||
issue-inactive-days: 120
|
||||
|
||||
2
.github/workflows/update-translations.yml
vendored
2
.github/workflows/update-translations.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
git status --porcelain
|
||||
git diff
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v8
|
||||
with:
|
||||
token: ${{ secrets.PAT }}
|
||||
author: "navidrome-bot <navidrome-bot@navidrome.org>"
|
||||
|
||||
10
Dockerfile
10
Dockerfile
@@ -2,10 +2,10 @@ FROM --platform=$BUILDPLATFORM ghcr.io/crazy-max/osxcross:14.5-debian AS osxcros
|
||||
|
||||
########################################################################################################################
|
||||
### Build xx (original image: tonistiigi/xx)
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS xx-build
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS xx-build
|
||||
|
||||
# v1.5.0
|
||||
ENV XX_VERSION=b4e4c451c778822e6742bfc9d9a91d7c7d885c8a
|
||||
# v1.9.0
|
||||
ENV XX_VERSION=a5592eab7a57895e8d385394ff12241bc65ecd50
|
||||
|
||||
RUN apk add -U --no-cache git
|
||||
RUN git clone https://github.com/tonistiigi/xx && \
|
||||
@@ -26,7 +26,7 @@ COPY --from=xx-build /out/ /usr/bin/
|
||||
|
||||
########################################################################################################################
|
||||
### Get TagLib
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.19 AS taglib-build
|
||||
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/alpine:3.20 AS taglib-build
|
||||
ARG TARGETPLATFORM
|
||||
ARG CROSS_TAGLIB_VERSION=2.1.1-1
|
||||
ENV CROSS_TAGLIB_RELEASES_URL=https://github.com/navidrome/cross-taglib/releases/download/v${CROSS_TAGLIB_VERSION}/
|
||||
@@ -122,7 +122,7 @@ COPY --from=build /out /
|
||||
|
||||
########################################################################################################################
|
||||
### Build Final Image
|
||||
FROM public.ecr.aws/docker/library/alpine:3.19 AS final
|
||||
FROM public.ecr.aws/docker/library/alpine:3.20 AS final
|
||||
LABEL maintainer="deluan@navidrome.org"
|
||||
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"
|
||||
|
||||
|
||||
2
Makefile
2
Makefile
@@ -16,7 +16,7 @@ DOCKER_TAG ?= deluan/navidrome:develop
|
||||
|
||||
# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
|
||||
CROSS_TAGLIB_VERSION ?= 2.1.1-1
|
||||
GOLANGCI_LINT_VERSION ?= v2.6.2
|
||||
GOLANGCI_LINT_VERSION ?= v2.7.2
|
||||
|
||||
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")
|
||||
|
||||
|
||||
46
cmd/scan.go
46
cmd/scan.go
@@ -1,9 +1,12 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"encoding/gob"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
@@ -19,12 +22,14 @@ var (
|
||||
fullScan bool
|
||||
subprocess bool
|
||||
targets []string
|
||||
targetFile string
|
||||
)
|
||||
|
||||
func init() {
|
||||
scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
|
||||
scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
|
||||
scanCmd.Flags().StringArrayVarP(&targets, "target", "t", []string{}, "list of libraryID:folderPath pairs, can be repeated (e.g., \"-t 1:Music/Rock -t 1:Music/Jazz -t 2:Classical\")")
|
||||
scanCmd.Flags().StringVar(&targetFile, "target-file", "", "path to file containing targets (one libraryID:folderPath per line)")
|
||||
rootCmd.AddCommand(scanCmd)
|
||||
}
|
||||
|
||||
@@ -71,10 +76,17 @@ func runScanner(ctx context.Context) {
|
||||
ds := persistence.New(sqlDB)
|
||||
pls := core.NewPlaylists(ds)
|
||||
|
||||
// Parse targets if provided
|
||||
// Parse targets from command line or file
|
||||
var scanTargets []model.ScanTarget
|
||||
if len(targets) > 0 {
|
||||
var err error
|
||||
var err error
|
||||
|
||||
if targetFile != "" {
|
||||
scanTargets, err = readTargetsFromFile(targetFile)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "Failed to read targets from file", err)
|
||||
}
|
||||
log.Info(ctx, "Scanning specific folders from file", "numTargets", len(scanTargets))
|
||||
} else if len(targets) > 0 {
|
||||
scanTargets, err = model.ParseTargets(targets)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "Failed to parse targets", err)
|
||||
@@ -94,3 +106,31 @@ func runScanner(ctx context.Context) {
|
||||
trackScanInteractively(ctx, progress)
|
||||
}
|
||||
}
|
||||
|
||||
// readTargetsFromFile reads scan targets from a file, one per line.
|
||||
// Each line should be in the format "libraryID:folderPath".
|
||||
// Empty lines and lines starting with # are ignored.
|
||||
func readTargetsFromFile(filePath string) ([]model.ScanTarget, error) {
|
||||
file, err := os.Open(filePath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open target file: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
var targetStrings []string
|
||||
scanner := bufio.NewScanner(file)
|
||||
for scanner.Scan() {
|
||||
line := strings.TrimSpace(scanner.Text())
|
||||
// Skip empty lines and comments
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
targetStrings = append(targetStrings, line)
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return nil, fmt.Errorf("failed to read target file: %w", err)
|
||||
}
|
||||
|
||||
return model.ParseTargets(targetStrings)
|
||||
}
|
||||
|
||||
89
cmd/scan_test.go
Normal file
89
cmd/scan_test.go
Normal file
@@ -0,0 +1,89 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("readTargetsFromFile", func() {
|
||||
var tempDir string
|
||||
|
||||
BeforeEach(func() {
|
||||
var err error
|
||||
tempDir, err = os.MkdirTemp("", "navidrome-test-")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
os.RemoveAll(tempDir)
|
||||
})
|
||||
|
||||
It("reads valid targets from file", func() {
|
||||
filePath := filepath.Join(tempDir, "targets.txt")
|
||||
content := "1:Music/Rock\n2:Music/Jazz\n3:Classical\n"
|
||||
err := os.WriteFile(filePath, []byte(content), 0600)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
targets, err := readTargetsFromFile(filePath)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(targets).To(HaveLen(3))
|
||||
Expect(targets[0]).To(Equal(model.ScanTarget{LibraryID: 1, FolderPath: "Music/Rock"}))
|
||||
Expect(targets[1]).To(Equal(model.ScanTarget{LibraryID: 2, FolderPath: "Music/Jazz"}))
|
||||
Expect(targets[2]).To(Equal(model.ScanTarget{LibraryID: 3, FolderPath: "Classical"}))
|
||||
})
|
||||
|
||||
It("skips empty lines", func() {
|
||||
filePath := filepath.Join(tempDir, "targets.txt")
|
||||
content := "1:Music/Rock\n\n2:Music/Jazz\n\n"
|
||||
err := os.WriteFile(filePath, []byte(content), 0600)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
targets, err := readTargetsFromFile(filePath)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(targets).To(HaveLen(2))
|
||||
})
|
||||
|
||||
It("trims whitespace", func() {
|
||||
filePath := filepath.Join(tempDir, "targets.txt")
|
||||
content := " 1:Music/Rock \n\t2:Music/Jazz\t\n"
|
||||
err := os.WriteFile(filePath, []byte(content), 0600)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
targets, err := readTargetsFromFile(filePath)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(targets).To(HaveLen(2))
|
||||
Expect(targets[0].FolderPath).To(Equal("Music/Rock"))
|
||||
Expect(targets[1].FolderPath).To(Equal("Music/Jazz"))
|
||||
})
|
||||
|
||||
It("returns error for non-existent file", func() {
|
||||
_, err := readTargetsFromFile("/nonexistent/file.txt")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("failed to open target file"))
|
||||
})
|
||||
|
||||
It("returns error for invalid target format", func() {
|
||||
filePath := filepath.Join(tempDir, "targets.txt")
|
||||
content := "invalid-format\n"
|
||||
err := os.WriteFile(filePath, []byte(content), 0600)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
_, err = readTargetsFromFile(filePath)
|
||||
Expect(err).To(HaveOccurred())
|
||||
})
|
||||
|
||||
It("handles mixed valid and empty lines", func() {
|
||||
filePath := filepath.Join(tempDir, "targets.txt")
|
||||
content := "\n1:Music/Rock\n\n\n2:Music/Jazz\n\n"
|
||||
err := os.WriteFile(filePath, []byte(content), 0600)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
targets, err := readTargetsFromFile(filePath)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(targets).To(HaveLen(2))
|
||||
})
|
||||
})
|
||||
@@ -90,7 +90,7 @@ type configOptions struct {
|
||||
ExtAuth extAuthOptions
|
||||
Plugins pluginsOptions
|
||||
PluginConfig map[string]map[string]string
|
||||
HTTPSecurityHeaders secureOptions `json:",omitzero"`
|
||||
HTTPHeaders httpHeaderOptions `json:",omitzero"`
|
||||
Prometheus prometheusOptions `json:",omitzero"`
|
||||
Scanner scannerOptions `json:",omitzero"`
|
||||
Jukebox jukeboxOptions `json:",omitzero"`
|
||||
@@ -188,8 +188,8 @@ type listenBrainzOptions struct {
|
||||
BaseURL string
|
||||
}
|
||||
|
||||
type secureOptions struct {
|
||||
CustomFrameOptionsValue string
|
||||
type httpHeaderOptions struct {
|
||||
FrameOptions string
|
||||
}
|
||||
|
||||
type prometheusOptions struct {
|
||||
@@ -257,6 +257,7 @@ func Load(noConfigDump bool) {
|
||||
// Map deprecated options to their new names for backwards compatibility
|
||||
mapDeprecatedOption("ReverseProxyWhitelist", "ExtAuth.TrustedSources")
|
||||
mapDeprecatedOption("ReverseProxyUserHeader", "ExtAuth.UserHeader")
|
||||
mapDeprecatedOption("HTTPSecurityHeaders.CustomFrameOptionsValue", "HTTPHeaders.FrameOptions")
|
||||
|
||||
err := viper.Unmarshal(&Server)
|
||||
if err != nil {
|
||||
@@ -344,6 +345,8 @@ func Load(noConfigDump bool) {
|
||||
// Log configuration source
|
||||
if Server.ConfigFile != "" {
|
||||
log.Info("Loaded configuration", "file", Server.ConfigFile)
|
||||
} else if hasNDEnvVars() {
|
||||
log.Info("No configuration file found. Loaded configuration only from environment variables")
|
||||
} else {
|
||||
log.Warn("No configuration file found. Using default values. To specify a config file, use the --configfile flag or set the ND_CONFIGFILE environment variable.")
|
||||
}
|
||||
@@ -365,10 +368,12 @@ func Load(noConfigDump bool) {
|
||||
log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
|
||||
Server.Scanner.Extractor = consts.DefaultScannerExtractor
|
||||
}
|
||||
logDeprecatedOptions("Scanner.GenreSeparators")
|
||||
logDeprecatedOptions("Scanner.GroupAlbumReleases")
|
||||
logDeprecatedOptions("DevEnableBufferedScrobble") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
|
||||
logDeprecatedOptions("ReverseProxyWhitelist", "ReverseProxyUserHeader")
|
||||
logDeprecatedOptions("Scanner.GenreSeparators", "")
|
||||
logDeprecatedOptions("Scanner.GroupAlbumReleases", "")
|
||||
logDeprecatedOptions("DevEnableBufferedScrobble", "") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored
|
||||
logDeprecatedOptions("ReverseProxyWhitelist", "ExtAuth.TrustedSources")
|
||||
logDeprecatedOptions("ReverseProxyUserHeader", "ExtAuth.UserHeader")
|
||||
logDeprecatedOptions("HTTPSecurityHeaders.CustomFrameOptionsValue", "HTTPHeaders.FrameOptions")
|
||||
|
||||
// Call init hooks
|
||||
for _, hook := range hooks {
|
||||
@@ -376,16 +381,22 @@ func Load(noConfigDump bool) {
|
||||
}
|
||||
}
|
||||
|
||||
func logDeprecatedOptions(options ...string) {
|
||||
for _, option := range options {
|
||||
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
|
||||
if os.Getenv(envVar) != "" {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
|
||||
}
|
||||
if viper.InConfig(option) {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
|
||||
func logDeprecatedOptions(oldName, newName string) {
|
||||
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(oldName, ".", "_"))
|
||||
newEnvVar := "ND_" + strings.ToUpper(strings.ReplaceAll(newName, ".", "_"))
|
||||
logWarning := func(oldName, newName string) {
|
||||
if newName != "" {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release. Please use the new '%s'", oldName, newName))
|
||||
} else {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", oldName))
|
||||
}
|
||||
}
|
||||
if os.Getenv(envVar) != "" {
|
||||
logWarning(envVar, newEnvVar)
|
||||
}
|
||||
if viper.InConfig(oldName) {
|
||||
logWarning(oldName, newName)
|
||||
}
|
||||
}
|
||||
|
||||
// mapDeprecatedOption is used to provide backwards compatibility for deprecated options. It should be called after
|
||||
@@ -502,6 +513,16 @@ func AddHook(hook func()) {
|
||||
hooks = append(hooks, hook)
|
||||
}
|
||||
|
||||
// hasNDEnvVars checks if any ND_ prefixed environment variables are set (excluding ND_CONFIGFILE)
|
||||
func hasNDEnvVars() bool {
|
||||
for _, env := range os.Environ() {
|
||||
if strings.HasPrefix(env, "ND_") && !strings.HasPrefix(env, "ND_CONFIGFILE=") {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func setViperDefaults() {
|
||||
viper.SetDefault("musicfolder", filepath.Join(".", "music"))
|
||||
viper.SetDefault("cachefolder", "")
|
||||
@@ -586,7 +607,7 @@ func setViperDefaults() {
|
||||
viper.SetDefault("subsonic.appendsubtitle", true)
|
||||
viper.SetDefault("subsonic.artistparticipations", false)
|
||||
viper.SetDefault("subsonic.defaultreportrealpath", false)
|
||||
viper.SetDefault("subsonic.legacyclients", "DSub")
|
||||
viper.SetDefault("subsonic.legacyclients", "DSub,SubMusic")
|
||||
viper.SetDefault("agents", "lastfm,spotify,deezer")
|
||||
viper.SetDefault("lastfm.enabled", true)
|
||||
viper.SetDefault("lastfm.language", "en")
|
||||
@@ -600,7 +621,7 @@ func setViperDefaults() {
|
||||
viper.SetDefault("listenbrainz.enabled", true)
|
||||
viper.SetDefault("listenbrainz.baseurl", "https://api.listenbrainz.org/1/")
|
||||
viper.SetDefault("enablescrobblehistory", true)
|
||||
viper.SetDefault("httpsecurityheaders.customframeoptionsvalue", "DENY")
|
||||
viper.SetDefault("httpheaders.frameoptions", "DENY")
|
||||
viper.SetDefault("backup.path", "")
|
||||
viper.SetDefault("backup.schedule", "")
|
||||
viper.SetDefault("backup.count", 0)
|
||||
|
||||
@@ -43,6 +43,7 @@ func newClient(hc httpDoer, language string) *client {
|
||||
func (c *client) searchArtists(ctx context.Context, name string, limit int) ([]Artist, error) {
|
||||
params := url.Values{}
|
||||
params.Add("q", name)
|
||||
params.Add("order", "RANKING")
|
||||
params.Add("limit", strconv.Itoa(limit))
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", apiBaseURL+"/search/artist", nil)
|
||||
if err != nil {
|
||||
|
||||
@@ -3,6 +3,7 @@ package deezer
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
@@ -82,10 +83,20 @@ func (s *deezerAgent) searchArtist(ctx context.Context, name string) (*Artist, e
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Trace(ctx, "Artists found", "count", len(artists), "searched_name", name)
|
||||
for i := range artists {
|
||||
log.Trace(ctx, fmt.Sprintf("Artists found #%d", i), "name", artists[i].Name, "id", artists[i].ID, "link", artists[i].Link)
|
||||
if i > 2 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If the first one has the same name, that's the one
|
||||
if !strings.EqualFold(artists[0].Name, name) {
|
||||
log.Trace(ctx, "Top artist do not match", "searched_name", name, "found_name", artists[0].Name)
|
||||
return nil, agents.ErrNotFound
|
||||
}
|
||||
log.Trace(ctx, "Found artist", "name", artists[0].Name, "id", artists[0].ID, "link", artists[0].Link)
|
||||
return &artists[0], err
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
@@ -9,7 +10,7 @@ import (
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -194,22 +195,35 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
|
||||
}
|
||||
filteredLines = append(filteredLines, line)
|
||||
}
|
||||
paths, err := s.normalizePaths(ctx, pls, folder, filteredLines)
|
||||
resolvedPaths, err := s.resolvePaths(ctx, folder, filteredLines)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Error normalizing paths in playlist", "playlist", pls.Name, err)
|
||||
log.Warn(ctx, "Error resolving paths in playlist", "playlist", pls.Name, err)
|
||||
continue
|
||||
}
|
||||
found, err := mediaFileRepository.FindByPaths(paths)
|
||||
|
||||
// Normalize to NFD for filesystem compatibility (macOS). Database stores paths in NFD.
|
||||
// See https://github.com/navidrome/navidrome/issues/4663
|
||||
resolvedPaths = slice.Map(resolvedPaths, func(path string) string {
|
||||
return strings.ToLower(norm.NFD.String(path))
|
||||
})
|
||||
|
||||
found, err := mediaFileRepository.FindByPaths(resolvedPaths)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Error reading files from DB", "playlist", pls.Name, err)
|
||||
continue
|
||||
}
|
||||
// Build lookup map with library-qualified keys, normalized for comparison
|
||||
existing := make(map[string]int, len(found))
|
||||
for idx := range found {
|
||||
existing[normalizePathForComparison(found[idx].Path)] = idx
|
||||
// Normalize to lowercase for case-insensitive comparison
|
||||
// Key format: "libraryID:path"
|
||||
key := fmt.Sprintf("%d:%s", found[idx].LibraryID, strings.ToLower(found[idx].Path))
|
||||
existing[key] = idx
|
||||
}
|
||||
for _, path := range paths {
|
||||
idx, ok := existing[normalizePathForComparison(path)]
|
||||
|
||||
// Find media files in the order of the resolved paths, to keep playlist order
|
||||
for _, path := range resolvedPaths {
|
||||
idx, ok := existing[path]
|
||||
if ok {
|
||||
mfs = append(mfs, found[idx])
|
||||
} else {
|
||||
@@ -226,69 +240,150 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
|
||||
return nil
|
||||
}
|
||||
|
||||
// normalizePathForComparison normalizes a file path to NFC form and converts to lowercase
|
||||
// for consistent comparison. This fixes Unicode normalization issues on macOS where
|
||||
// Apple Music creates playlists with NFC-encoded paths but the filesystem uses NFD.
|
||||
func normalizePathForComparison(path string) string {
|
||||
return strings.ToLower(norm.NFC.String(path))
|
||||
// pathResolution holds the result of resolving a playlist path to a library-relative path.
|
||||
type pathResolution struct {
|
||||
absolutePath string
|
||||
libraryPath string
|
||||
libraryID int
|
||||
valid bool
|
||||
}
|
||||
|
||||
// TODO This won't work for multiple libraries
|
||||
func (s *playlists) normalizePaths(ctx context.Context, pls *model.Playlist, folder *model.Folder, lines []string) ([]string, error) {
|
||||
libRegex, err := s.compileLibraryPaths(ctx)
|
||||
// ToQualifiedString converts the path resolution to a library-qualified string with forward slashes.
|
||||
// Format: "libraryID:relativePath" with forward slashes for path separators.
|
||||
func (r pathResolution) ToQualifiedString() (string, error) {
|
||||
if !r.valid {
|
||||
return "", fmt.Errorf("invalid path resolution")
|
||||
}
|
||||
relativePath, err := filepath.Rel(r.libraryPath, r.absolutePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return "", err
|
||||
}
|
||||
|
||||
res := make([]string, 0, len(lines))
|
||||
for idx, line := range lines {
|
||||
var libPath string
|
||||
var filePath string
|
||||
|
||||
if folder != nil && !filepath.IsAbs(line) {
|
||||
libPath = folder.LibraryPath
|
||||
filePath = filepath.Join(folder.AbsolutePath(), line)
|
||||
} else {
|
||||
cleanLine := filepath.Clean(line)
|
||||
if libPath = libRegex.FindString(cleanLine); libPath != "" {
|
||||
filePath = cleanLine
|
||||
}
|
||||
}
|
||||
|
||||
if libPath != "" {
|
||||
if rel, err := filepath.Rel(libPath, filePath); err == nil {
|
||||
res = append(res, rel)
|
||||
} else {
|
||||
log.Debug(ctx, "Error getting relative path", "playlist", pls.Name, "path", line, "libPath", libPath,
|
||||
"filePath", filePath, err)
|
||||
}
|
||||
} else {
|
||||
log.Warn(ctx, "Path in playlist not found in any library", "path", line, "line", idx)
|
||||
}
|
||||
}
|
||||
return slice.Map(res, filepath.ToSlash), nil
|
||||
// Convert path separators to forward slashes
|
||||
return fmt.Sprintf("%d:%s", r.libraryID, filepath.ToSlash(relativePath)), nil
|
||||
}
|
||||
|
||||
func (s *playlists) compileLibraryPaths(ctx context.Context) (*regexp.Regexp, error) {
|
||||
libs, err := s.ds.Library(ctx).GetAll()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// libraryMatcher holds sorted libraries with cleaned paths for efficient path matching.
|
||||
type libraryMatcher struct {
|
||||
libraries model.Libraries
|
||||
cleanedPaths []string
|
||||
}
|
||||
|
||||
// Create regex patterns for each library path
|
||||
patterns := make([]string, len(libs))
|
||||
// findLibraryForPath finds which library contains the given absolute path.
|
||||
// Returns library ID and path, or 0 and empty string if not found.
|
||||
func (lm *libraryMatcher) findLibraryForPath(absolutePath string) (int, string) {
|
||||
// Check sorted libraries (longest path first) to find the best match
|
||||
for i, cleanLibPath := range lm.cleanedPaths {
|
||||
// Check if absolutePath is under this library path
|
||||
if strings.HasPrefix(absolutePath, cleanLibPath) {
|
||||
// Ensure it's a proper path boundary (not just a prefix)
|
||||
if len(absolutePath) == len(cleanLibPath) || absolutePath[len(cleanLibPath)] == filepath.Separator {
|
||||
return lm.libraries[i].ID, cleanLibPath
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0, ""
|
||||
}
|
||||
|
||||
// newLibraryMatcher creates a libraryMatcher with libraries sorted by path length (longest first).
|
||||
// This ensures correct matching when library paths are prefixes of each other.
|
||||
// Example: /music-classical must be checked before /music
|
||||
// Otherwise, /music-classical/track.mp3 would match /music instead of /music-classical
|
||||
func newLibraryMatcher(libs model.Libraries) *libraryMatcher {
|
||||
// Sort libraries by path length (descending) to ensure longest paths match first.
|
||||
slices.SortFunc(libs, func(i, j model.Library) int {
|
||||
return cmp.Compare(len(j.Path), len(i.Path)) // Reverse order for descending
|
||||
})
|
||||
|
||||
// Pre-clean all library paths once for efficient matching
|
||||
cleanedPaths := make([]string, len(libs))
|
||||
for i, lib := range libs {
|
||||
cleanPath := filepath.Clean(lib.Path)
|
||||
escapedPath := regexp.QuoteMeta(cleanPath)
|
||||
patterns[i] = fmt.Sprintf("^%s(?:/|$)", escapedPath)
|
||||
cleanedPaths[i] = filepath.Clean(lib.Path)
|
||||
}
|
||||
// Combine all patterns into a single regex
|
||||
combinedPattern := strings.Join(patterns, "|")
|
||||
re, err := regexp.Compile(combinedPattern)
|
||||
return &libraryMatcher{
|
||||
libraries: libs,
|
||||
cleanedPaths: cleanedPaths,
|
||||
}
|
||||
}
|
||||
|
||||
// pathResolver handles path resolution logic for playlist imports.
|
||||
type pathResolver struct {
|
||||
matcher *libraryMatcher
|
||||
}
|
||||
|
||||
// newPathResolver creates a pathResolver with libraries loaded from the datastore.
|
||||
func newPathResolver(ctx context.Context, ds model.DataStore) (*pathResolver, error) {
|
||||
libs, err := ds.Library(ctx).GetAll()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("compiling library paths `%s`: %w", combinedPattern, err)
|
||||
return nil, err
|
||||
}
|
||||
return re, nil
|
||||
matcher := newLibraryMatcher(libs)
|
||||
return &pathResolver{matcher: matcher}, nil
|
||||
}
|
||||
|
||||
// resolvePath determines the absolute path and library path for a playlist entry.
|
||||
// For absolute paths, it uses them directly.
|
||||
// For relative paths, it resolves them relative to the playlist's folder location.
|
||||
// Example: playlist at /music/playlists/test.m3u with line "../songs/abc.mp3"
|
||||
//
|
||||
// resolves to /music/songs/abc.mp3
|
||||
func (r *pathResolver) resolvePath(line string, folder *model.Folder) pathResolution {
|
||||
var absolutePath string
|
||||
if folder != nil && !filepath.IsAbs(line) {
|
||||
// Resolve relative path to absolute path based on playlist location
|
||||
absolutePath = filepath.Clean(filepath.Join(folder.AbsolutePath(), line))
|
||||
} else {
|
||||
// Use absolute path directly after cleaning
|
||||
absolutePath = filepath.Clean(line)
|
||||
}
|
||||
|
||||
return r.findInLibraries(absolutePath)
|
||||
}
|
||||
|
||||
// findInLibraries matches an absolute path against all known libraries and returns
|
||||
// a pathResolution with the library information. Returns an invalid resolution if
|
||||
// the path is not found in any library.
|
||||
func (r *pathResolver) findInLibraries(absolutePath string) pathResolution {
|
||||
libID, libPath := r.matcher.findLibraryForPath(absolutePath)
|
||||
if libID == 0 {
|
||||
return pathResolution{valid: false}
|
||||
}
|
||||
return pathResolution{
|
||||
absolutePath: absolutePath,
|
||||
libraryPath: libPath,
|
||||
libraryID: libID,
|
||||
valid: true,
|
||||
}
|
||||
}
|
||||
|
||||
// resolvePaths converts playlist file paths to library-qualified paths (format: "libraryID:relativePath").
|
||||
// For relative paths, it resolves them to absolute paths first, then determines which
|
||||
// library they belong to. This allows playlists to reference files across library boundaries.
|
||||
func (s *playlists) resolvePaths(ctx context.Context, folder *model.Folder, lines []string) ([]string, error) {
|
||||
resolver, err := newPathResolver(ctx, s.ds)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
results := make([]string, 0, len(lines))
|
||||
for idx, line := range lines {
|
||||
resolution := resolver.resolvePath(line, folder)
|
||||
|
||||
if !resolution.valid {
|
||||
log.Warn(ctx, "Path in playlist not found in any library", "path", line, "line", idx)
|
||||
continue
|
||||
}
|
||||
|
||||
qualifiedPath, err := resolution.ToQualifiedString()
|
||||
if err != nil {
|
||||
log.Debug(ctx, "Error getting library-qualified path", "path", line,
|
||||
"libPath", resolution.libraryPath, "filePath", resolution.absolutePath, err)
|
||||
continue
|
||||
}
|
||||
|
||||
results = append(results, qualifiedPath)
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (s *playlists) updatePlaylist(ctx context.Context, newPls *model.Playlist) error {
|
||||
|
||||
406
core/playlists_internal_test.go
Normal file
406
core/playlists_internal_test.go
Normal file
@@ -0,0 +1,406 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("libraryMatcher", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var mockLibRepo *tests.MockLibraryRepo
|
||||
ctx := context.Background()
|
||||
|
||||
BeforeEach(func() {
|
||||
mockLibRepo = &tests.MockLibraryRepo{}
|
||||
ds = &tests.MockDataStore{
|
||||
MockedLibrary: mockLibRepo,
|
||||
}
|
||||
})
|
||||
|
||||
// Helper function to create a libraryMatcher from the mock datastore
|
||||
createMatcher := func(ds model.DataStore) *libraryMatcher {
|
||||
libs, err := ds.Library(ctx).GetAll()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
return newLibraryMatcher(libs)
|
||||
}
|
||||
|
||||
Describe("Longest library path matching", func() {
|
||||
It("matches the longest library path when multiple libraries share a prefix", func() {
|
||||
// Setup libraries with prefix conflicts
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music"},
|
||||
{ID: 2, Path: "/music-classical"},
|
||||
{ID: 3, Path: "/music-classical/opera"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
// Test that longest path matches first and returns correct library ID
|
||||
testCases := []struct {
|
||||
path string
|
||||
expectedLibID int
|
||||
expectedLibPath string
|
||||
}{
|
||||
{"/music-classical/opera/track.mp3", 3, "/music-classical/opera"},
|
||||
{"/music-classical/track.mp3", 2, "/music-classical"},
|
||||
{"/music/track.mp3", 1, "/music"},
|
||||
{"/music-classical/opera/subdir/file.mp3", 3, "/music-classical/opera"},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
libID, libPath := matcher.findLibraryForPath(tc.path)
|
||||
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d, but got %d", tc.path, tc.expectedLibID, libID)
|
||||
Expect(libPath).To(Equal(tc.expectedLibPath), "Path %s should match library path %s, but got %s", tc.path, tc.expectedLibPath, libPath)
|
||||
}
|
||||
})
|
||||
|
||||
It("handles libraries with similar prefixes but different structures", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/home/user/music"},
|
||||
{ID: 2, Path: "/home/user/music-backup"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
// Test that music-backup library is matched correctly
|
||||
libID, libPath := matcher.findLibraryForPath("/home/user/music-backup/track.mp3")
|
||||
Expect(libID).To(Equal(2))
|
||||
Expect(libPath).To(Equal("/home/user/music-backup"))
|
||||
|
||||
// Test that music library is still matched correctly
|
||||
libID, libPath = matcher.findLibraryForPath("/home/user/music/track.mp3")
|
||||
Expect(libID).To(Equal(1))
|
||||
Expect(libPath).To(Equal("/home/user/music"))
|
||||
})
|
||||
|
||||
It("matches path that is exactly the library root", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music"},
|
||||
{ID: 2, Path: "/music-classical"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
// Exact library path should match
|
||||
libID, libPath := matcher.findLibraryForPath("/music-classical")
|
||||
Expect(libID).To(Equal(2))
|
||||
Expect(libPath).To(Equal("/music-classical"))
|
||||
})
|
||||
|
||||
It("handles complex nested library structures", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/media"},
|
||||
{ID: 2, Path: "/media/audio"},
|
||||
{ID: 3, Path: "/media/audio/classical"},
|
||||
{ID: 4, Path: "/media/audio/classical/baroque"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
testCases := []struct {
|
||||
path string
|
||||
expectedLibID int
|
||||
expectedLibPath string
|
||||
}{
|
||||
{"/media/audio/classical/baroque/bach/track.mp3", 4, "/media/audio/classical/baroque"},
|
||||
{"/media/audio/classical/mozart/track.mp3", 3, "/media/audio/classical"},
|
||||
{"/media/audio/rock/track.mp3", 2, "/media/audio"},
|
||||
{"/media/video/movie.mp4", 1, "/media"},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
libID, libPath := matcher.findLibraryForPath(tc.path)
|
||||
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d", tc.path, tc.expectedLibID)
|
||||
Expect(libPath).To(Equal(tc.expectedLibPath), "Path %s should match library path %s", tc.path, tc.expectedLibPath)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Edge cases", func() {
|
||||
It("handles empty library list", func() {
|
||||
mockLibRepo.SetData([]model.Library{})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
Expect(matcher).ToNot(BeNil())
|
||||
|
||||
// Should not match anything
|
||||
libID, libPath := matcher.findLibraryForPath("/music/track.mp3")
|
||||
Expect(libID).To(Equal(0))
|
||||
Expect(libPath).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("handles single library", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
libID, libPath := matcher.findLibraryForPath("/music/track.mp3")
|
||||
Expect(libID).To(Equal(1))
|
||||
Expect(libPath).To(Equal("/music"))
|
||||
})
|
||||
|
||||
It("handles libraries with special characters in paths", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music[test]"},
|
||||
{ID: 2, Path: "/music(backup)"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
Expect(matcher).ToNot(BeNil())
|
||||
|
||||
// Special characters should match literally
|
||||
libID, libPath := matcher.findLibraryForPath("/music[test]/track.mp3")
|
||||
Expect(libID).To(Equal(1))
|
||||
Expect(libPath).To(Equal("/music[test]"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Path matching order", func() {
|
||||
It("ensures longest paths match first", func() {
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/a"},
|
||||
{ID: 2, Path: "/ab"},
|
||||
{ID: 3, Path: "/abc"},
|
||||
})
|
||||
|
||||
matcher := createMatcher(ds)
|
||||
|
||||
// Verify that longer paths match correctly (not cut off by shorter prefix)
|
||||
testCases := []struct {
|
||||
path string
|
||||
expectedLibID int
|
||||
}{
|
||||
{"/abc/file.mp3", 3},
|
||||
{"/ab/file.mp3", 2},
|
||||
{"/a/file.mp3", 1},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
libID, _ := matcher.findLibraryForPath(tc.path)
|
||||
Expect(libID).To(Equal(tc.expectedLibID), "Path %s should match library ID %d", tc.path, tc.expectedLibID)
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
var _ = Describe("pathResolver", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var mockLibRepo *tests.MockLibraryRepo
|
||||
var resolver *pathResolver
|
||||
ctx := context.Background()
|
||||
|
||||
BeforeEach(func() {
|
||||
mockLibRepo = &tests.MockLibraryRepo{}
|
||||
ds = &tests.MockDataStore{
|
||||
MockedLibrary: mockLibRepo,
|
||||
}
|
||||
|
||||
// Setup test libraries
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: "/music"},
|
||||
{ID: 2, Path: "/music-classical"},
|
||||
{ID: 3, Path: "/podcasts"},
|
||||
})
|
||||
|
||||
var err error
|
||||
resolver, err = newPathResolver(ctx, ds)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
|
||||
Describe("resolvePath", func() {
|
||||
It("resolves absolute paths", func() {
|
||||
resolution := resolver.resolvePath("/music/artist/album/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(1))
|
||||
Expect(resolution.libraryPath).To(Equal("/music"))
|
||||
Expect(resolution.absolutePath).To(Equal("/music/artist/album/track.mp3"))
|
||||
})
|
||||
|
||||
It("resolves relative paths when folder is provided", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
resolution := resolver.resolvePath("../artist/album/track.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(1))
|
||||
Expect(resolution.absolutePath).To(Equal("/music/artist/album/track.mp3"))
|
||||
})
|
||||
|
||||
It("returns invalid resolution for paths outside any library", func() {
|
||||
resolution := resolver.resolvePath("/outside/library/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeFalse())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("resolvePath", func() {
|
||||
Context("With absolute paths", func() {
|
||||
It("resolves path within a library", func() {
|
||||
resolution := resolver.resolvePath("/music/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(1))
|
||||
Expect(resolution.libraryPath).To(Equal("/music"))
|
||||
Expect(resolution.absolutePath).To(Equal("/music/track.mp3"))
|
||||
})
|
||||
|
||||
It("resolves path to the longest matching library", func() {
|
||||
resolution := resolver.resolvePath("/music-classical/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(2))
|
||||
Expect(resolution.libraryPath).To(Equal("/music-classical"))
|
||||
})
|
||||
|
||||
It("returns invalid resolution for path outside libraries", func() {
|
||||
resolution := resolver.resolvePath("/videos/movie.mp4", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeFalse())
|
||||
})
|
||||
|
||||
It("cleans the path before matching", func() {
|
||||
resolution := resolver.resolvePath("/music//artist/../artist/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.absolutePath).To(Equal("/music/artist/track.mp3"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("With relative paths", func() {
|
||||
It("resolves relative path within same library", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
resolution := resolver.resolvePath("../songs/track.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(1))
|
||||
Expect(resolution.absolutePath).To(Equal("/music/songs/track.mp3"))
|
||||
})
|
||||
|
||||
It("resolves relative path to different library", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
// Path goes up and into a different library
|
||||
resolution := resolver.resolvePath("../../podcasts/episode.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(3))
|
||||
Expect(resolution.libraryPath).To(Equal("/podcasts"))
|
||||
})
|
||||
|
||||
It("uses matcher to find correct library for resolved path", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
// This relative path resolves to music-classical library
|
||||
resolution := resolver.resolvePath("../../music-classical/track.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(2))
|
||||
Expect(resolution.libraryPath).To(Equal("/music-classical"))
|
||||
})
|
||||
|
||||
It("returns invalid for relative paths escaping all libraries", func() {
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
resolution := resolver.resolvePath("../../../../etc/passwd", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeFalse())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Cross-library resolution scenarios", func() {
|
||||
It("handles playlist in library A referencing file in library B", func() {
|
||||
// Playlist is in /music/playlists
|
||||
folder := &model.Folder{
|
||||
Path: "playlists",
|
||||
LibraryPath: "/music",
|
||||
LibraryID: 1,
|
||||
}
|
||||
|
||||
// Relative path that goes to /podcasts library
|
||||
resolution := resolver.resolvePath("../../podcasts/show/episode.mp3", folder)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(3), "Should resolve to podcasts library")
|
||||
Expect(resolution.libraryPath).To(Equal("/podcasts"))
|
||||
})
|
||||
|
||||
It("prefers longer library paths when resolving", func() {
|
||||
// Ensure /music-classical is matched instead of /music
|
||||
resolution := resolver.resolvePath("/music-classical/baroque/track.mp3", nil)
|
||||
|
||||
Expect(resolution.valid).To(BeTrue())
|
||||
Expect(resolution.libraryID).To(Equal(2), "Should match /music-classical, not /music")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
var _ = Describe("pathResolution", func() {
|
||||
Describe("ToQualifiedString", func() {
|
||||
It("converts valid resolution to qualified string with forward slashes", func() {
|
||||
resolution := pathResolution{
|
||||
absolutePath: "/music/artist/album/track.mp3",
|
||||
libraryPath: "/music",
|
||||
libraryID: 1,
|
||||
valid: true,
|
||||
}
|
||||
|
||||
qualifiedStr, err := resolution.ToQualifiedString()
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(qualifiedStr).To(Equal("1:artist/album/track.mp3"))
|
||||
})
|
||||
|
||||
It("handles Windows-style paths by converting to forward slashes", func() {
|
||||
resolution := pathResolution{
|
||||
absolutePath: "/music/artist/album/track.mp3",
|
||||
libraryPath: "/music",
|
||||
libraryID: 2,
|
||||
valid: true,
|
||||
}
|
||||
|
||||
qualifiedStr, err := resolution.ToQualifiedString()
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
// Should always use forward slashes regardless of OS
|
||||
Expect(qualifiedStr).To(ContainSubstring("2:"))
|
||||
Expect(qualifiedStr).ToNot(ContainSubstring("\\"))
|
||||
})
|
||||
|
||||
It("returns error for invalid resolution", func() {
|
||||
resolution := pathResolution{valid: false}
|
||||
|
||||
_, err := resolution.ToQualifiedString()
|
||||
|
||||
Expect(err).To(HaveOccurred())
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,4 +1,4 @@
|
||||
package core
|
||||
package core_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/criteria"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
@@ -20,7 +21,7 @@ import (
|
||||
|
||||
var _ = Describe("Playlists", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var ps Playlists
|
||||
var ps core.Playlists
|
||||
var mockPlsRepo mockedPlaylistRepo
|
||||
var mockLibRepo *tests.MockLibraryRepo
|
||||
ctx := context.Background()
|
||||
@@ -33,16 +34,16 @@ var _ = Describe("Playlists", func() {
|
||||
MockedLibrary: mockLibRepo,
|
||||
}
|
||||
ctx = request.WithUser(ctx, model.User{ID: "123"})
|
||||
// Path should be libPath, but we want to match the root folder referenced in the m3u, which is `/`
|
||||
mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/"}})
|
||||
})
|
||||
|
||||
Describe("ImportFile", func() {
|
||||
var folder *model.Folder
|
||||
BeforeEach(func() {
|
||||
ps = NewPlaylists(ds)
|
||||
ps = core.NewPlaylists(ds)
|
||||
ds.MockedMediaFile = &mockedMediaFileRepo{}
|
||||
libPath, _ := os.Getwd()
|
||||
// Set up library with the actual library path that matches the folder
|
||||
mockLibRepo.SetData([]model.Library{{ID: 1, Path: libPath}})
|
||||
folder = &model.Folder{
|
||||
ID: "1",
|
||||
LibraryID: 1,
|
||||
@@ -112,6 +113,224 @@ var _ = Describe("Playlists", func() {
|
||||
Expect(err.Error()).To(ContainSubstring("line 19, column 1: invalid character '\\n'"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Cross-library relative paths", func() {
|
||||
var tmpDir, plsDir, songsDir string
|
||||
|
||||
BeforeEach(func() {
|
||||
// Create temp directory structure
|
||||
tmpDir = GinkgoT().TempDir()
|
||||
plsDir = tmpDir + "/playlists"
|
||||
songsDir = tmpDir + "/songs"
|
||||
Expect(os.Mkdir(plsDir, 0755)).To(Succeed())
|
||||
Expect(os.Mkdir(songsDir, 0755)).To(Succeed())
|
||||
|
||||
// Setup two different libraries with paths matching our temp structure
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: songsDir},
|
||||
{ID: 2, Path: plsDir},
|
||||
})
|
||||
|
||||
// Create a mock media file repository that returns files for both libraries
|
||||
// Note: The paths are relative to their respective library roots
|
||||
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
|
||||
data: []string{
|
||||
"abc.mp3", // This is songs/abc.mp3 relative to songsDir
|
||||
"def.mp3", // This is playlists/def.mp3 relative to plsDir
|
||||
},
|
||||
}
|
||||
ps = core.NewPlaylists(ds)
|
||||
})
|
||||
|
||||
It("handles relative paths that reference files in other libraries", func() {
|
||||
// Create a temporary playlist file with relative path
|
||||
plsContent := "#PLAYLIST:Cross Library Test\n../songs/abc.mp3\ndef.mp3"
|
||||
plsFile := plsDir + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
// Playlist is in the Playlists library folder
|
||||
// Important: Path should be relative to LibraryPath, and Name is the folder name
|
||||
plsFolder := &model.Folder{
|
||||
ID: "2",
|
||||
LibraryID: 2,
|
||||
LibraryPath: plsDir,
|
||||
Path: "",
|
||||
Name: "",
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3")) // From songsDir library
|
||||
Expect(pls.Tracks[1].Path).To(Equal("def.mp3")) // From plsDir library
|
||||
})
|
||||
|
||||
It("ignores paths that point outside all libraries", func() {
|
||||
// Create a temporary playlist file with path outside libraries
|
||||
plsContent := "#PLAYLIST:Outside Test\n../../outside.mp3\nabc.mp3"
|
||||
plsFile := plsDir + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
plsFolder := &model.Folder{
|
||||
ID: "2",
|
||||
LibraryID: 2,
|
||||
LibraryPath: plsDir,
|
||||
Path: "",
|
||||
Name: "",
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
// Should only find abc.mp3, not outside.mp3
|
||||
Expect(pls.Tracks).To(HaveLen(1))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3"))
|
||||
})
|
||||
|
||||
It("handles relative paths with multiple '../' components", func() {
|
||||
// Create a nested structure: tmpDir/playlists/subfolder/test.m3u
|
||||
subFolder := plsDir + "/subfolder"
|
||||
Expect(os.Mkdir(subFolder, 0755)).To(Succeed())
|
||||
|
||||
// Create the media file in the subfolder directory
|
||||
// The mock will return it as "def.mp3" relative to plsDir
|
||||
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
|
||||
data: []string{
|
||||
"abc.mp3", // From songsDir library
|
||||
"def.mp3", // From plsDir library root
|
||||
},
|
||||
}
|
||||
|
||||
// From subfolder, ../../songs/abc.mp3 should resolve to songs library
|
||||
// ../def.mp3 should resolve to plsDir/def.mp3
|
||||
plsContent := "#PLAYLIST:Nested Test\n../../songs/abc.mp3\n../def.mp3"
|
||||
plsFile := subFolder + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
// The folder: AbsolutePath = LibraryPath + Path + Name
|
||||
// So for /playlists/subfolder: LibraryPath=/playlists, Path="", Name="subfolder"
|
||||
plsFolder := &model.Folder{
|
||||
ID: "2",
|
||||
LibraryID: 2,
|
||||
LibraryPath: plsDir,
|
||||
Path: "", // Empty because subfolder is directly under library root
|
||||
Name: "subfolder", // The folder name
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("abc.mp3")) // From songsDir library
|
||||
Expect(pls.Tracks[1].Path).To(Equal("def.mp3")) // From plsDir library root
|
||||
})
|
||||
|
||||
It("correctly resolves libraries when one path is a prefix of another", func() {
|
||||
// This tests the bug where /music would match before /music-classical
|
||||
// Create temp directory structure with prefix conflict
|
||||
tmpDir := GinkgoT().TempDir()
|
||||
musicDir := tmpDir + "/music"
|
||||
musicClassicalDir := tmpDir + "/music-classical"
|
||||
Expect(os.Mkdir(musicDir, 0755)).To(Succeed())
|
||||
Expect(os.Mkdir(musicClassicalDir, 0755)).To(Succeed())
|
||||
|
||||
// Setup two libraries where one is a prefix of the other
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: musicDir}, // /tmp/xxx/music
|
||||
{ID: 2, Path: musicClassicalDir}, // /tmp/xxx/music-classical
|
||||
})
|
||||
|
||||
// Mock will return tracks from both libraries
|
||||
ds.MockedMediaFile = &mockedMediaFileFromListRepo{
|
||||
data: []string{
|
||||
"rock.mp3", // From music library
|
||||
"bach.mp3", // From music-classical library
|
||||
},
|
||||
}
|
||||
|
||||
// Create playlist in music library that references music-classical
|
||||
plsContent := "#PLAYLIST:Cross Prefix Test\nrock.mp3\n../music-classical/bach.mp3"
|
||||
plsFile := musicDir + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
plsFolder := &model.Folder{
|
||||
ID: "1",
|
||||
LibraryID: 1,
|
||||
LibraryPath: musicDir,
|
||||
Path: "",
|
||||
Name: "",
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("rock.mp3")) // From music library
|
||||
Expect(pls.Tracks[1].Path).To(Equal("bach.mp3")) // From music-classical library (not music!)
|
||||
})
|
||||
|
||||
It("correctly handles identical relative paths from different libraries", func() {
|
||||
// This tests the bug where two libraries have files at the same relative path
|
||||
// and only one appears in the playlist
|
||||
tmpDir := GinkgoT().TempDir()
|
||||
musicDir := tmpDir + "/music"
|
||||
classicalDir := tmpDir + "/classical"
|
||||
Expect(os.Mkdir(musicDir, 0755)).To(Succeed())
|
||||
Expect(os.Mkdir(classicalDir, 0755)).To(Succeed())
|
||||
Expect(os.MkdirAll(musicDir+"/album", 0755)).To(Succeed())
|
||||
Expect(os.MkdirAll(classicalDir+"/album", 0755)).To(Succeed())
|
||||
// Create placeholder files so paths resolve correctly
|
||||
Expect(os.WriteFile(musicDir+"/album/track.mp3", []byte{}, 0600)).To(Succeed())
|
||||
Expect(os.WriteFile(classicalDir+"/album/track.mp3", []byte{}, 0600)).To(Succeed())
|
||||
|
||||
// Both libraries have a file at "album/track.mp3"
|
||||
mockLibRepo.SetData([]model.Library{
|
||||
{ID: 1, Path: musicDir},
|
||||
{ID: 2, Path: classicalDir},
|
||||
})
|
||||
|
||||
// Mock returns files with same relative path but different IDs and library IDs
|
||||
// Keys use the library-qualified format: "libraryID:path"
|
||||
ds.MockedMediaFile = &mockedMediaFileRepo{
|
||||
data: map[string]model.MediaFile{
|
||||
"1:album/track.mp3": {ID: "music-track", Path: "album/track.mp3", LibraryID: 1, Title: "Rock Song"},
|
||||
"2:album/track.mp3": {ID: "classical-track", Path: "album/track.mp3", LibraryID: 2, Title: "Classical Piece"},
|
||||
},
|
||||
}
|
||||
// Recreate playlists service to pick up new mock
|
||||
ps = core.NewPlaylists(ds)
|
||||
|
||||
// Create playlist in music library that references both tracks
|
||||
plsContent := "#PLAYLIST:Same Path Test\nalbum/track.mp3\n../classical/album/track.mp3"
|
||||
plsFile := musicDir + "/test.m3u"
|
||||
Expect(os.WriteFile(plsFile, []byte(plsContent), 0600)).To(Succeed())
|
||||
|
||||
plsFolder := &model.Folder{
|
||||
ID: "1",
|
||||
LibraryID: 1,
|
||||
LibraryPath: musicDir,
|
||||
Path: "",
|
||||
Name: "",
|
||||
}
|
||||
|
||||
pls, err := ps.ImportFile(ctx, plsFolder, "test.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Should have BOTH tracks, not just one
|
||||
Expect(pls.Tracks).To(HaveLen(2), "Playlist should contain both tracks with same relative path")
|
||||
|
||||
// Verify we got tracks from DIFFERENT libraries (the key fix!)
|
||||
// Collect the library IDs
|
||||
libIDs := make(map[int]bool)
|
||||
for _, track := range pls.Tracks {
|
||||
libIDs[track.LibraryID] = true
|
||||
}
|
||||
Expect(libIDs).To(HaveLen(2), "Tracks should come from two different libraries")
|
||||
Expect(libIDs[1]).To(BeTrue(), "Should have track from library 1")
|
||||
Expect(libIDs[2]).To(BeTrue(), "Should have track from library 2")
|
||||
|
||||
// Both tracks should have the same relative path
|
||||
Expect(pls.Tracks[0].Path).To(Equal("album/track.mp3"))
|
||||
Expect(pls.Tracks[1].Path).To(Equal("album/track.mp3"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("ImportM3U", func() {
|
||||
@@ -119,7 +338,7 @@ var _ = Describe("Playlists", func() {
|
||||
BeforeEach(func() {
|
||||
repo = &mockedMediaFileFromListRepo{}
|
||||
ds.MockedMediaFile = repo
|
||||
ps = NewPlaylists(ds)
|
||||
ps = core.NewPlaylists(ds)
|
||||
mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/music"}, {ID: 2, Path: "/new"}})
|
||||
ctx = request.WithUser(ctx, model.User{ID: "123"})
|
||||
})
|
||||
@@ -206,53 +425,23 @@ var _ = Describe("Playlists", func() {
|
||||
Expect(pls.Tracks[0].Path).To(Equal("abc/tEsT1.Mp3"))
|
||||
})
|
||||
|
||||
It("handles Unicode normalization when comparing paths", func() {
|
||||
// Test case for Apple Music playlists that use NFC encoding vs macOS filesystem NFD
|
||||
// The character "è" can be represented as NFC (single codepoint) or NFD (e + combining accent)
|
||||
|
||||
const pathWithAccents = "artist/Michèle Desrosiers/album/Noël.m4a"
|
||||
|
||||
// Simulate a database entry with NFD encoding (as stored by macOS filesystem)
|
||||
nfdPath := norm.NFD.String(pathWithAccents)
|
||||
It("handles Unicode normalization when comparing paths (NFD vs NFC)", func() {
|
||||
// Simulate macOS filesystem: stores paths in NFD (decomposed) form
|
||||
// "è" (U+00E8) in NFC becomes "e" + "◌̀" (U+0065 + U+0300) in NFD
|
||||
nfdPath := "artist/Mich" + string([]rune{'e', '\u0300'}) + "le/song.mp3" // NFD: e + combining grave
|
||||
repo.data = []string{nfdPath}
|
||||
|
||||
// Simulate an Apple Music M3U playlist entry with NFC encoding
|
||||
nfcPath := norm.NFC.String("/music/" + pathWithAccents)
|
||||
m3u := strings.Join([]string{
|
||||
nfcPath,
|
||||
}, "\n")
|
||||
// Simulate Apple Music M3U: uses NFC (composed) form
|
||||
nfcPath := "/music/artist/Mich\u00E8le/song.mp3" // NFC: single è character
|
||||
m3u := nfcPath + "\n"
|
||||
f := strings.NewReader(m3u)
|
||||
|
||||
pls, err := ps.ImportM3U(ctx, f)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(1), "Should find the track despite Unicode normalization differences")
|
||||
Expect(pls.Tracks).To(HaveLen(1))
|
||||
// Should match despite different Unicode normalization forms
|
||||
Expect(pls.Tracks[0].Path).To(Equal(nfdPath))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("normalizePathForComparison", func() {
|
||||
It("normalizes Unicode characters to NFC form and converts to lowercase", func() {
|
||||
// Test with NFD (decomposed) input - as would come from macOS filesystem
|
||||
nfdPath := norm.NFD.String("Michèle") // Explicitly convert to NFD form
|
||||
normalized := normalizePathForComparison(nfdPath)
|
||||
Expect(normalized).To(Equal("michèle"))
|
||||
|
||||
// Test with NFC (composed) input - as would come from Apple Music M3U
|
||||
nfcPath := "Michèle" // This might be in NFC form
|
||||
normalizedNfc := normalizePathForComparison(nfcPath)
|
||||
|
||||
// Ensure the two paths are not equal in their original forms
|
||||
Expect(nfdPath).ToNot(Equal(nfcPath))
|
||||
|
||||
// Both should normalize to the same result
|
||||
Expect(normalized).To(Equal(normalizedNfc))
|
||||
})
|
||||
|
||||
It("handles paths with mixed case and Unicode characters", func() {
|
||||
path := "Artist/Noël Coward/Album/Song.mp3"
|
||||
normalized := normalizePathForComparison(path)
|
||||
Expect(normalized).To(Equal("artist/noël coward/album/song.mp3"))
|
||||
})
|
||||
})
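// Minimal sketch (an assumption, not the repository's actual implementation) of the helper
// exercised above: convert to NFC, then lowercase, so the NFD form written by the macOS
// filesystem and the NFC form found in Apple Music M3U files compare equal. Assumes the
// golang.org/x/text/unicode/norm and strings imports already used in these tests.
func normalizePathForComparison(path string) string {
	return strings.ToLower(norm.NFC.String(path))
}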
|
||||
|
||||
Describe("InPlaylistsPath", func() {
|
||||
@@ -269,27 +458,27 @@ var _ = Describe("Playlists", func() {
|
||||
|
||||
It("returns true if PlaylistsPath is empty", func() {
|
||||
conf.Server.PlaylistsPath = ""
|
||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns true if PlaylistsPath is any (**/**)", func() {
|
||||
conf.Server.PlaylistsPath = "**/**"
|
||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns true if folder is in PlaylistsPath", func() {
|
||||
conf.Server.PlaylistsPath = "other/**:playlists/**"
|
||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns false if folder is not in PlaylistsPath", func() {
|
||||
conf.Server.PlaylistsPath = "other"
|
||||
Expect(InPlaylistsPath(folder)).To(BeFalse())
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeFalse())
|
||||
})
|
||||
|
||||
It("returns true if for a playlist in root of MusicFolder if PlaylistsPath is '.'", func() {
|
||||
conf.Server.PlaylistsPath = "."
|
||||
Expect(InPlaylistsPath(folder)).To(BeFalse())
|
||||
Expect(core.InPlaylistsPath(folder)).To(BeFalse())
|
||||
|
||||
folder2 := model.Folder{
|
||||
LibraryPath: "/music",
|
||||
@@ -297,22 +486,47 @@ var _ = Describe("Playlists", func() {
|
||||
Name: ".",
|
||||
}
|
||||
|
||||
Expect(InPlaylistsPath(folder2)).To(BeTrue())
|
||||
Expect(core.InPlaylistsPath(folder2)).To(BeTrue())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// mockedMediaFileRepo's FindByPaths method returns a list of MediaFiles with the same paths as the input
// mockedMediaFileRepo's FindByPaths method returns MediaFiles for the given paths.
// If data map is provided, looks up files by key; otherwise creates them from paths.
type mockedMediaFileRepo struct {
model.MediaFileRepository
data map[string]model.MediaFile
}

func (r *mockedMediaFileRepo) FindByPaths(paths []string) (model.MediaFiles, error) {
var mfs model.MediaFiles

// If data map provided, look up files
if r.data != nil {
for _, path := range paths {
if mf, ok := r.data[path]; ok {
mfs = append(mfs, mf)
}
}
return mfs, nil
}

// Otherwise, create MediaFiles from paths
for idx, path := range paths {
// Strip library qualifier if present (format: "libraryID:path")
actualPath := path
libraryID := 1
if parts := strings.SplitN(path, ":", 2); len(parts) == 2 {
if id, err := strconv.Atoi(parts[0]); err == nil {
libraryID = id
actualPath = parts[1]
}
}

mfs = append(mfs, model.MediaFile{
ID: strconv.Itoa(idx),
Path: path,
ID:        strconv.Itoa(idx),
Path:      actualPath,
LibraryID: libraryID,
})
}
return mfs, nil
|
||||
@@ -324,13 +538,38 @@ type mockedMediaFileFromListRepo struct {
data []string
}

func (r *mockedMediaFileFromListRepo) FindByPaths([]string) (model.MediaFiles, error) {
func (r *mockedMediaFileFromListRepo) FindByPaths(paths []string) (model.MediaFiles, error) {
var mfs model.MediaFiles
for idx, path := range r.data {
mfs = append(mfs, model.MediaFile{
ID: strconv.Itoa(idx),
Path: path,
})

for idx, dataPath := range r.data {
// Normalize the data path to NFD (simulates macOS filesystem storage)
normalizedDataPath := norm.NFD.String(dataPath)

for _, requestPath := range paths {
// Strip library qualifier if present (format: "libraryID:path")
actualPath := requestPath
libraryID := 1
if parts := strings.SplitN(requestPath, ":", 2); len(parts) == 2 {
if id, err := strconv.Atoi(parts[0]); err == nil {
libraryID = id
actualPath = parts[1]
}
}

// The request path should already be normalized to NFD by production code
// before calling FindByPaths (to match DB storage)
normalizedRequestPath := norm.NFD.String(actualPath)

// Case-insensitive comparison (like SQL's "collate nocase")
if strings.EqualFold(normalizedRequestPath, normalizedDataPath) {
mfs = append(mfs, model.MediaFile{
ID: strconv.Itoa(idx),
Path: dataPath, // Return original path from DB
LibraryID: libraryID,
})
break
}
}
}
return mfs, nil
}
|
||||
|
||||
@@ -32,6 +32,7 @@ type Submission struct {
}

type nowPlayingEntry struct {
ctx context.Context
userId string
track *model.MediaFile
position int
@@ -220,15 +221,17 @@ func (p *playTracker) NowPlaying(ctx context.Context, playerId string, playerNam
}
player, _ := request.PlayerFrom(ctx)
if player.ScrobbleEnabled {
p.enqueueNowPlaying(playerId, user.ID, mf, position)
p.enqueueNowPlaying(ctx, playerId, user.ID, mf, position)
}
return nil
}

func (p *playTracker) enqueueNowPlaying(playerId string, userId string, track *model.MediaFile, position int) {
func (p *playTracker) enqueueNowPlaying(ctx context.Context, playerId string, userId string, track *model.MediaFile, position int) {
p.npMu.Lock()
defer p.npMu.Unlock()
ctx = context.WithoutCancel(ctx) // Prevent cancellation from affecting background processing
p.npQueue[playerId] = nowPlayingEntry{
ctx: ctx,
userId: userId,
track: track,
position: position,
@@ -267,7 +270,7 @@ func (p *playTracker) nowPlayingWorker() {

// Process entries without holding lock
for _, entry := range entries {
p.dispatchNowPlaying(context.Background(), entry.userId, entry.track, entry.position)
p.dispatchNowPlaying(entry.ctx, entry.userId, entry.track, entry.position)
}
}
}
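// Illustration only (not part of the diff): context.WithoutCancel keeps request-scoped
// values, such as the authenticated user, while dropping the request's cancellation and
// deadline. That is why the queued entry's context can still resolve the username when
// the worker dispatches it after the HTTP request has completed. Function name is assumed;
// request is this repo's request package.
func usernameAfterDetach(reqCtx context.Context) string {
	bg := context.WithoutCancel(reqCtx) // values survive; cancellation does not
	if u, ok := request.UserFrom(bg); ok {
		return u.UserName
	}
	return ""
}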
|
||||
|
||||
@@ -170,6 +170,17 @@ var _ = Describe("PlayTracker", func() {
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(eventBroker.getEvents()).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("passes user to scrobbler via context (fix for issue #4787)", func() {
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "testuser"})
|
||||
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
|
||||
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123", 0)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Eventually(func() bool { return fake.GetNowPlayingCalled() }).Should(BeTrue())
|
||||
// Verify the username was passed through async dispatch via context
|
||||
Eventually(func() string { return fake.GetUsername() }).Should(Equal("testuser"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("GetNowPlaying", func() {
|
||||
@@ -234,23 +245,6 @@ var _ = Describe("PlayTracker", func() {
|
||||
Expect(lastScrobble.Participants).To(Equal(track.Participants))
|
||||
})
|
||||
|
||||
It("records scrobble in repository", func() {
|
||||
conf.Server.EnableScrobbleHistory = true
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
|
||||
ts := time.Now()
|
||||
|
||||
err := tracker.Submit(ctx, []Submission{{TrackID: "123", Timestamp: ts}})
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
mockDS := ds.(*tests.MockDataStore)
|
||||
mockScrobble := mockDS.Scrobble(ctx).(*tests.MockScrobbleRepo)
|
||||
Expect(mockScrobble.RecordedScrobbles).To(HaveLen(1))
|
||||
Expect(mockScrobble.RecordedScrobbles[0].MediaFileID).To(Equal("123"))
|
||||
Expect(mockScrobble.RecordedScrobbles[0].UserID).To(Equal("u-1"))
|
||||
Expect(mockScrobble.RecordedScrobbles[0].SubmissionTime).To(Equal(ts))
|
||||
})
|
||||
|
||||
It("increments play counts in the DB", func() {
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
|
||||
ts := time.Now()
|
||||
@@ -308,6 +302,38 @@ var _ = Describe("PlayTracker", func() {
|
||||
Expect(artist1.PlayCount).To(Equal(int64(1)))
|
||||
Expect(artist2.PlayCount).To(Equal(int64(1)))
|
||||
})
|
||||
|
||||
Context("Scrobble History", func() {
|
||||
It("records scrobble in repository", func() {
|
||||
conf.Server.EnableScrobbleHistory = true
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
|
||||
ts := time.Now()
|
||||
|
||||
err := tracker.Submit(ctx, []Submission{{TrackID: "123", Timestamp: ts}})
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
mockDS := ds.(*tests.MockDataStore)
|
||||
mockScrobble := mockDS.Scrobble(ctx).(*tests.MockScrobbleRepo)
|
||||
Expect(mockScrobble.RecordedScrobbles).To(HaveLen(1))
|
||||
Expect(mockScrobble.RecordedScrobbles[0].MediaFileID).To(Equal("123"))
|
||||
Expect(mockScrobble.RecordedScrobbles[0].UserID).To(Equal("u-1"))
|
||||
Expect(mockScrobble.RecordedScrobbles[0].SubmissionTime).To(Equal(ts))
|
||||
})
|
||||
|
||||
It("does not record scrobble when history is disabled", func() {
|
||||
conf.Server.EnableScrobbleHistory = false
|
||||
ctx = request.WithUser(ctx, model.User{ID: "u-1", UserName: "user-1"})
|
||||
ts := time.Now()
|
||||
|
||||
err := tracker.Submit(ctx, []Submission{{TrackID: "123", Timestamp: ts}})
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
mockDS := ds.(*tests.MockDataStore)
|
||||
mockScrobble := mockDS.Scrobble(ctx).(*tests.MockScrobbleRepo)
|
||||
Expect(mockScrobble.RecordedScrobbles).To(HaveLen(0))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Plugin scrobbler logic", func() {
|
||||
@@ -413,6 +439,7 @@ type fakeScrobbler struct {
|
||||
nowPlayingCalled atomic.Bool
|
||||
ScrobbleCalled atomic.Bool
|
||||
userID atomic.Pointer[string]
|
||||
username atomic.Pointer[string]
|
||||
track atomic.Pointer[model.MediaFile]
|
||||
position atomic.Int32
|
||||
LastScrobble atomic.Pointer[Scrobble]
|
||||
@@ -438,6 +465,13 @@ func (f *fakeScrobbler) GetPosition() int {
|
||||
return int(f.position.Load())
|
||||
}
|
||||
|
||||
func (f *fakeScrobbler) GetUsername() string {
|
||||
if p := f.username.Load(); p != nil {
|
||||
return *p
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (f *fakeScrobbler) IsAuthorized(ctx context.Context, userId string) bool {
|
||||
return f.Error == nil && f.Authorized
|
||||
}
|
||||
@@ -448,6 +482,16 @@ func (f *fakeScrobbler) NowPlaying(ctx context.Context, userId string, track *mo
|
||||
return f.Error
|
||||
}
|
||||
f.userID.Store(&userId)
|
||||
// Capture username from context (this is what plugin scrobblers do)
|
||||
username, _ := request.UsernameFrom(ctx)
|
||||
if username == "" {
|
||||
if u, ok := request.UserFrom(ctx); ok {
|
||||
username = u.UserName
|
||||
}
|
||||
}
|
||||
if username != "" {
|
||||
f.username.Store(&username)
|
||||
}
|
||||
f.track.Store(track)
|
||||
f.position.Store(int32(position))
|
||||
return nil
|
||||
|
||||
go.mod
@@ -39,13 +39,13 @@ require (
|
||||
github.com/knqyf263/go-plugin v0.9.0
|
||||
github.com/kr/pretty v0.3.1
|
||||
github.com/lestrrat-go/jwx/v2 v2.1.6
|
||||
github.com/maruel/natural v1.2.1
|
||||
github.com/maruel/natural v1.3.0
|
||||
github.com/matoous/go-nanoid/v2 v2.1.0
|
||||
github.com/mattn/go-sqlite3 v1.14.32
|
||||
github.com/microcosm-cc/bluemonday v1.0.27
|
||||
github.com/mileusna/useragent v1.3.5
|
||||
github.com/onsi/ginkgo/v2 v2.27.2
|
||||
github.com/onsi/gomega v1.38.2
|
||||
github.com/onsi/ginkgo/v2 v2.27.3
|
||||
github.com/onsi/gomega v1.38.3
|
||||
github.com/pelletier/go-toml/v2 v2.2.4
|
||||
github.com/pocketbase/dbx v1.11.0
|
||||
github.com/pressly/goose/v3 v3.26.0
|
||||
@@ -54,22 +54,21 @@ require (
|
||||
github.com/robfig/cron/v3 v3.0.1
|
||||
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
|
||||
github.com/sirupsen/logrus v1.9.3
|
||||
github.com/spf13/cobra v1.10.1
|
||||
github.com/spf13/cobra v1.10.2
|
||||
github.com/spf13/viper v1.21.0
|
||||
github.com/stretchr/testify v1.11.1
|
||||
github.com/tetratelabs/wazero v1.10.1
|
||||
github.com/tetratelabs/wazero v1.11.0
|
||||
github.com/unrolled/secure v1.17.0
|
||||
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342
|
||||
go.uber.org/goleak v1.3.0
|
||||
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
|
||||
golang.org/x/image v0.33.0
|
||||
golang.org/x/net v0.47.0
|
||||
golang.org/x/sync v0.18.0
|
||||
golang.org/x/sys v0.38.0
|
||||
golang.org/x/term v0.37.0
|
||||
golang.org/x/text v0.31.0
|
||||
golang.org/x/image v0.34.0
|
||||
golang.org/x/net v0.48.0
|
||||
golang.org/x/sync v0.19.0
|
||||
golang.org/x/sys v0.39.0
|
||||
golang.org/x/term v0.38.0
|
||||
golang.org/x/text v0.32.0
|
||||
golang.org/x/time v0.14.0
|
||||
google.golang.org/protobuf v1.36.10
|
||||
google.golang.org/protobuf v1.36.11
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
@@ -91,7 +90,7 @@ require (
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/goccy/go-yaml v1.18.0 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8 // indirect
|
||||
github.com/google/pprof v0.0.0-20251213031049-b05bdaca462f // indirect
|
||||
github.com/google/subcommands v1.2.0 // indirect
|
||||
github.com/gorilla/css v1.0.1 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
@@ -129,10 +128,11 @@ require (
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.2 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/crypto v0.45.0 // indirect
|
||||
golang.org/x/mod v0.30.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 // indirect
|
||||
golang.org/x/tools v0.39.0 // indirect
|
||||
golang.org/x/crypto v0.46.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 // indirect
|
||||
golang.org/x/mod v0.31.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc // indirect
|
||||
golang.org/x/tools v0.40.0 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
|
||||
)
|
||||
|
||||
go.sum
@@ -99,8 +99,8 @@ github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc h1:hd+uUVsB1vdxohPneMrhGH2YfQuH5hRIK9u4/XCeUtw=
|
||||
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc/go.mod h1:SL66SJVysrh7YbDCP9tH30b8a9o/N2HeiQNUm85EKhc=
|
||||
github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8 h1:3DsUAV+VNEQa2CUVLxCY3f87278uWfIDhJnbdvDjvmE=
|
||||
github.com/google/pprof v0.0.0-20251114195745-4902fdda35c8/go.mod h1:I6V7YzU0XDpsHqbsyrghnFZLO1gwK6NPTNvmetQIk9U=
|
||||
github.com/google/pprof v0.0.0-20251213031049-b05bdaca462f h1:HU1RgM6NALf/KW9HEY6zry3ADbDKcmpQ+hJedoNGQYQ=
|
||||
github.com/google/pprof v0.0.0-20251213031049-b05bdaca462f/go.mod h1:67FPmZWbr+KDT/VlpWtw6sO9XSjpJmLuHpoLmWiTGgY=
|
||||
github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE=
|
||||
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
@@ -162,8 +162,8 @@ github.com/lestrrat-go/jwx/v2 v2.1.6 h1:hxM1gfDILk/l5ylers6BX/Eq1m/pnxe9NBwW6lVf
|
||||
github.com/lestrrat-go/jwx/v2 v2.1.6/go.mod h1:Y722kU5r/8mV7fYDifjug0r8FK8mZdw0K0GpJw/l8pU=
|
||||
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
|
||||
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
|
||||
github.com/maruel/natural v1.2.1 h1:G/y4pwtTA07lbQsMefvsmEO0VN0NfqpxprxXDM4R/4o=
|
||||
github.com/maruel/natural v1.2.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
|
||||
github.com/maruel/natural v1.3.0 h1:VsmCsBmEyrR46RomtgHs5hbKADGRVtliHTyCOLFBpsg=
|
||||
github.com/maruel/natural v1.3.0/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
|
||||
github.com/matoous/go-nanoid/v2 v2.1.0 h1:P64+dmq21hhWdtvZfEAofnvJULaRR1Yib0+PnU669bE=
|
||||
github.com/matoous/go-nanoid/v2 v2.1.0/go.mod h1:KlbGNQ+FhrUNIHUxZdL63t7tl4LaPkZNpUULS8H4uVM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
@@ -186,10 +186,10 @@ github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdh
|
||||
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/ogier/pflag v0.0.1 h1:RW6JSWSu/RkSatfcLtogGfFgpim5p7ARQ10ECk5O750=
|
||||
github.com/ogier/pflag v0.0.1/go.mod h1:zkFki7tvTa0tafRvTBIZTvzYyAu6kQhPZFnshFFPE+g=
|
||||
github.com/onsi/ginkgo/v2 v2.27.2 h1:LzwLj0b89qtIy6SSASkzlNvX6WktqurSHwkk2ipF/Ns=
|
||||
github.com/onsi/ginkgo/v2 v2.27.2/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
|
||||
github.com/onsi/gomega v1.38.2 h1:eZCjf2xjZAqe+LeWvKb5weQ+NcPwX84kqJ0cZNxok2A=
|
||||
github.com/onsi/gomega v1.38.2/go.mod h1:W2MJcYxRGV63b418Ai34Ud0hEdTVXq9NW9+Sx6uXf3k=
|
||||
github.com/onsi/ginkgo/v2 v2.27.3 h1:ICsZJ8JoYafeXFFlFAG75a7CxMsJHwgKwtO+82SE9L8=
|
||||
github.com/onsi/ginkgo/v2 v2.27.3/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
|
||||
github.com/onsi/gomega v1.38.3 h1:eTX+W6dobAYfFeGC2PV6RwXRu/MyT+cQguijutvkpSM=
|
||||
github.com/onsi/gomega v1.38.3/go.mod h1:ZCU1pkQcXDO5Sl9/VVEGlDyp+zm0m1cmeG5TOzLgdh4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
@@ -244,8 +244,8 @@ github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
|
||||
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
|
||||
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
|
||||
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
|
||||
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
|
||||
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
|
||||
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
|
||||
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
|
||||
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
|
||||
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
@@ -265,8 +265,8 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
|
||||
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
|
||||
github.com/tetratelabs/wazero v1.10.1 h1:2DugeJf6VVk58KTPszlNfeeN8AhhpwcZqkJj2wwFuH8=
|
||||
github.com/tetratelabs/wazero v1.10.1/go.mod h1:DRm5twOQ5Gr1AoEdSi0CLjDQF1J9ZAuyqFIjl1KKfQU=
|
||||
github.com/tetratelabs/wazero v1.11.0 h1:+gKemEuKCTevU4d7ZTzlsvgd1uaToIDtlQlmNbwqYhA=
|
||||
github.com/tetratelabs/wazero v1.11.0/go.mod h1:eV28rsN8Q+xwjogd7f4/Pp4xFxO7uOGbLcD/LzB1wiU=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
@@ -298,20 +298,20 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
|
||||
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
|
||||
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6 h1:zfMcR1Cs4KNuomFFgGefv5N0czO2XZpUbxGUy8i8ug0=
|
||||
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
|
||||
golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
|
||||
golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
|
||||
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 h1:MDfG8Cvcqlt9XXrmEiD4epKn7VJHZO84hejP9Jmp0MM=
|
||||
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9/go.mod h1:EPRbTFwzwjXj9NpYyyrvenVh9Y+GFeEvMNh7Xuz7xgU=
|
||||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/image v0.33.0 h1:LXRZRnv1+zGd5XBUVRFmYEphyyKJjQjCRiOuAP3sZfQ=
|
||||
golang.org/x/image v0.33.0/go.mod h1:DD3OsTYT9chzuzTQt+zMcOlBHgfoKQb1gry8p76Y1sc=
|
||||
golang.org/x/image v0.34.0 h1:33gCkyw9hmwbZJeZkct8XyR11yH889EQt/QH4VmXMn8=
|
||||
golang.org/x/image v0.34.0/go.mod h1:2RNFBZRB+vnwwFil8GkMdRvrJOFd1AzdZI6vOY+eJVU=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
|
||||
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
|
||||
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
||||
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
@@ -323,8 +323,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
|
||||
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
|
||||
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
||||
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@@ -332,8 +332,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
|
||||
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180926160741-c2ed4eda69e7/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -350,11 +350,11 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
|
||||
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
|
||||
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 h1:E2/AqCUMZGgd73TQkxUMcMla25GB9i/5HOdLr+uH7Vo=
|
||||
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ=
|
||||
golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc h1:bH6xUXay0AIFMElXG2rQ4uiE+7ncwtiOdPfYK1NK2XA=
|
||||
golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
@@ -363,8 +363,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
|
||||
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254=
|
||||
golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q=
|
||||
golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
@@ -375,8 +375,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
|
||||
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
|
||||
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
|
||||
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
|
||||
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
@@ -386,12 +386,12 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
|
||||
golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
|
||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE=
|
||||
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
||||
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
|
||||
@@ -512,6 +512,70 @@ var _ = Describe("AlbumRepository", func() {
|
||||
// Clean up the test album created for this test
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
|
||||
})
|
||||
|
||||
It("removes stale role associations when artist role changes", func() {
|
||||
// Regression test for issue #4242: Composers displayed in albumartist list
|
||||
// This happens when an artist's role changes (e.g., was both albumartist and composer,
|
||||
// now only composer) and the old role association isn't properly removed.
|
||||
|
||||
// Create an artist that will have changing roles
|
||||
artist := &model.Artist{
|
||||
ID: "role-change-artist-1",
|
||||
Name: "Role Change Artist",
|
||||
OrderArtistName: "role change artist",
|
||||
}
|
||||
err := createArtistWithLibrary(artistRepo, artist, 1)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Create album with artist as both albumartist and composer
|
||||
album := &model.Album{
|
||||
LibraryID: 1,
|
||||
ID: "test-album-role-change",
|
||||
Name: "Test Album Role Change",
|
||||
AlbumArtistID: "role-change-artist-1",
|
||||
AlbumArtist: "Role Change Artist",
|
||||
Participants: model.Participants{
|
||||
model.RoleAlbumArtist: {
|
||||
{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
|
||||
},
|
||||
model.RoleComposer: {
|
||||
{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err = albumRepo.Put(album)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Verify initial state: artist has both albumartist and composer roles
|
||||
expected := []albumArtistRecord{
|
||||
{ArtistID: "role-change-artist-1", Role: "albumartist", SubRole: ""},
|
||||
{ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
|
||||
}
|
||||
verifyAlbumArtists(album.ID, expected)
|
||||
|
||||
// Now update album so artist is ONLY a composer (remove albumartist role)
|
||||
album.Participants = model.Participants{
|
||||
model.RoleComposer: {
|
||||
{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
|
||||
},
|
||||
}
|
||||
|
||||
err = albumRepo.Put(album)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Verify that the albumartist role was removed - only composer should remain
|
||||
// This is the key test: before the fix, the albumartist role would remain
|
||||
// causing composers to appear in the albumartist filter
|
||||
expectedAfter := []albumArtistRecord{
|
||||
{ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
|
||||
}
|
||||
verifyAlbumArtists(album.ID, expectedAfter)
|
||||
|
||||
// Clean up
|
||||
_, _ = artistRepo.executeSQL(squirrel.Delete("artist").Where(squirrel.Eq{"id": artist.ID}))
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -95,45 +95,82 @@ func (r folderRepository) CountAll(opt ...model.QueryOptions) (int64, error) {
|
||||
}
|
||||
|
||||
func (r folderRepository) GetFolderUpdateInfo(lib model.Library, targetPaths ...string) (map[string]model.FolderUpdateInfo, error) {
|
||||
// If no specific paths, return all folders in the library
|
||||
if len(targetPaths) == 0 {
|
||||
return r.getFolderUpdateInfoAll(lib)
|
||||
}
|
||||
|
||||
// Check if any path is root (return all folders)
|
||||
for _, targetPath := range targetPaths {
|
||||
if targetPath == "" || targetPath == "." {
|
||||
return r.getFolderUpdateInfoAll(lib)
|
||||
}
|
||||
}
|
||||
|
||||
// Process paths in batches to avoid SQLite's expression tree depth limit (max 1000).
|
||||
// Each path generates ~3 conditions, so batch size of 100 keeps us well under the limit.
|
||||
const batchSize = 100
|
||||
result := make(map[string]model.FolderUpdateInfo)
|
||||
|
||||
for batch := range slices.Chunk(targetPaths, batchSize) {
|
||||
batchResult, err := r.getFolderUpdateInfoBatch(lib, batch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for id, info := range batchResult {
|
||||
result[id] = info
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// getFolderUpdateInfoAll returns update info for all non-missing folders in the library
|
||||
func (r folderRepository) getFolderUpdateInfoAll(lib model.Library) (map[string]model.FolderUpdateInfo, error) {
|
||||
where := And{
|
||||
Eq{"library_id": lib.ID},
|
||||
Eq{"missing": false},
|
||||
}
|
||||
return r.queryFolderUpdateInfo(where)
|
||||
}
|
||||
|
||||
// getFolderUpdateInfoBatch returns update info for a batch of target paths and their descendants
|
||||
func (r folderRepository) getFolderUpdateInfoBatch(lib model.Library, targetPaths []string) (map[string]model.FolderUpdateInfo, error) {
|
||||
where := And{
|
||||
Eq{"library_id": lib.ID},
|
||||
Eq{"missing": false},
|
||||
}
|
||||
|
||||
// If specific paths are requested, include those folders and all their descendants
|
||||
if len(targetPaths) > 0 {
|
||||
// Collect folder IDs for exact target folders and path conditions for descendants
|
||||
folderIDs := make([]string, 0, len(targetPaths))
|
||||
pathConditions := make(Or, 0, len(targetPaths)*2)
|
||||
// Collect folder IDs for exact target folders and path conditions for descendants
|
||||
folderIDs := make([]string, 0, len(targetPaths))
|
||||
pathConditions := make(Or, 0, len(targetPaths)*2)
|
||||
|
||||
for _, targetPath := range targetPaths {
|
||||
if targetPath == "" || targetPath == "." {
|
||||
// Root path - include everything in this library
|
||||
pathConditions = Or{}
|
||||
folderIDs = nil
|
||||
break
|
||||
}
|
||||
// Clean the path to normalize it. Paths stored in the folder table do not have leading/trailing slashes.
|
||||
cleanPath := strings.TrimPrefix(targetPath, string(os.PathSeparator))
|
||||
cleanPath = filepath.Clean(cleanPath)
|
||||
for _, targetPath := range targetPaths {
|
||||
// Clean the path to normalize it. Paths stored in the folder table do not have leading/trailing slashes.
|
||||
cleanPath := strings.TrimPrefix(targetPath, string(os.PathSeparator))
|
||||
cleanPath = filepath.Clean(cleanPath)
|
||||
|
||||
// Include the target folder itself by ID
|
||||
folderIDs = append(folderIDs, model.FolderID(lib, cleanPath))
|
||||
// Include the target folder itself by ID
|
||||
folderIDs = append(folderIDs, model.FolderID(lib, cleanPath))
|
||||
|
||||
// Include all descendants: folders whose path field equals or starts with the target path
|
||||
// Note: Folder.Path is the directory path, so children have path = targetPath
|
||||
pathConditions = append(pathConditions, Eq{"path": cleanPath})
|
||||
pathConditions = append(pathConditions, Like{"path": cleanPath + "/%"})
|
||||
}
|
||||
|
||||
// Combine conditions: exact folder IDs OR descendant path patterns
|
||||
if len(folderIDs) > 0 {
|
||||
where = append(where, Or{Eq{"id": folderIDs}, pathConditions})
|
||||
} else if len(pathConditions) > 0 {
|
||||
where = append(where, pathConditions)
|
||||
}
|
||||
// Include all descendants: folders whose path field equals or starts with the target path
|
||||
// Note: Folder.Path is the directory path, so children have path = targetPath
|
||||
pathConditions = append(pathConditions, Eq{"path": cleanPath})
|
||||
pathConditions = append(pathConditions, Like{"path": cleanPath + "/%"})
|
||||
}
|
||||
|
||||
// Combine conditions: exact folder IDs OR descendant path patterns
|
||||
if len(folderIDs) > 0 {
|
||||
where = append(where, Or{Eq{"id": folderIDs}, pathConditions})
|
||||
} else if len(pathConditions) > 0 {
|
||||
where = append(where, pathConditions)
|
||||
}
|
||||
|
||||
return r.queryFolderUpdateInfo(where)
|
||||
}
|
||||
|
||||
// queryFolderUpdateInfo executes the query and returns the result map
|
||||
func (r folderRepository) queryFolderUpdateInfo(where And) (map[string]model.FolderUpdateInfo, error) {
|
||||
sq := r.newSelect().Columns("id", "updated_at", "hash").Where(where)
|
||||
var res []struct {
|
||||
ID string
|
||||
|
||||
@@ -4,6 +4,8 @@ import (
"context"
"fmt"
"slices"
"strconv"
"strings"
"sync"
"time"

@@ -193,12 +195,43 @@ func (r *mediaFileRepository) GetCursor(options ...model.QueryOptions) (model.Me
}, nil
}

// FindByPaths finds media files by their paths.
// The paths can be library-qualified (format: "libraryID:path") or unqualified ("path").
// Library-qualified paths search within the specified library, while unqualified paths
// search across all libraries for backward compatibility.
func (r *mediaFileRepository) FindByPaths(paths []string) (model.MediaFiles, error) {
sel := r.newSelect().Columns("*").Where(Eq{"path collate nocase": paths})
query := Or{}

for _, path := range paths {
parts := strings.SplitN(path, ":", 2)
if len(parts) == 2 {
// Library-qualified path: "libraryID:path"
libraryID, err := strconv.Atoi(parts[0])
if err != nil {
// Invalid format, skip
continue
}
relativePath := parts[1]
query = append(query, And{
Eq{"path collate nocase": relativePath},
Eq{"library_id": libraryID},
})
} else {
// Unqualified path: search across all libraries
query = append(query, Eq{"path collate nocase": path})
}
}

if len(query) == 0 {
return model.MediaFiles{}, nil
}

sel := r.newSelect().Columns("*").Where(query)
var res dbMediaFiles
if err := r.queryAll(sel, &res); err != nil {
return nil, err
}

return res.toModels(), nil
}
|
||||
|
||||
@@ -67,12 +67,18 @@ var _ = Describe("ScrobbleRepository", func() {
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Verify insertion
|
||||
var count int
|
||||
err = rawRepo.db.Select("count(*)").From("scrobbles").
|
||||
var scrobble struct {
|
||||
MediaFileID string `db:"media_file_id"`
|
||||
UserID string `db:"user_id"`
|
||||
SubmissionTime int64 `db:"submission_time"`
|
||||
}
|
||||
err = rawRepo.db.Select("*").From("scrobbles").
|
||||
Where(dbx.HashExp{"media_file_id": fileID, "user_id": userID}).
|
||||
Row(&count)
|
||||
One(&scrobble)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(count).To(Equal(1))
|
||||
Expect(scrobble.MediaFileID).To(Equal(fileID))
|
||||
Expect(scrobble.UserID).To(Equal(userID))
|
||||
Expect(scrobble.SubmissionTime).To(Equal(submissionTime.Unix()))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -51,8 +51,10 @@ func unmarshalParticipants(data string) (model.Participants, error) {
}

func (r sqlRepository) updateParticipants(itemID string, participants model.Participants) error {
ids := participants.AllIDs()
sqd := Delete(r.tableName + "_artists").Where(And{Eq{r.tableName + "_id": itemID}, NotEq{"artist_id": ids}})
// Delete all existing participant entries for this item.
// This ensures stale role associations are removed when an artist's role changes
// (e.g., an artist was both albumartist and composer, but is now only composer).
sqd := Delete(r.tableName + "_artists").Where(Eq{r.tableName + "_id": itemID})
_, err := r.executeSQL(sqd)
if err != nil {
return err
|
||||
|
||||
File diff suppressed because it is too large
@@ -302,6 +302,8 @@
|
||||
},
|
||||
"actions": {
|
||||
"scan": "Arakatu liburutegia",
|
||||
"quickScan": "Araketa bizkorra",
|
||||
"fullScan": "Araketa sakona",
|
||||
"manageUsers": "Kudeatu erabiltzaileen sarbidea",
|
||||
"viewDetails": "Ikusi xehetasunak"
|
||||
},
|
||||
@@ -310,6 +312,9 @@
|
||||
"updated": "Liburutegia ondo eguneratu da",
|
||||
"deleted": "Liburutegia ondo ezabatu da",
|
||||
"scanStarted": "Liburutegiaren araketa hasi da",
|
||||
"quickScanStarted": "Araketa bizkorra hasi da",
|
||||
"fullScanStarted": "Araketa sakona hasi da",
|
||||
"scanError": "Errorea araketa abiaraztean. Aztertu erregistroak",
|
||||
"scanCompleted": "Liburutegiaren araketa amaitu da"
|
||||
},
|
||||
"validation": {
|
||||
@@ -459,7 +464,7 @@
|
||||
"bad_item": "Elementu okerra",
|
||||
"item_doesnt_exist": "Elementua ez dago",
|
||||
"http_error": "Errorea zerbitzariarekin komunikatzerakoan",
|
||||
"data_provider_error": "Errorea datuen hornitzailean. Berrikusi kontsola xehetasun gehiagorako.",
|
||||
"data_provider_error": "Errorea datuen hornitzailean. Aztertu kontsola xehetasun gehiagorako.",
|
||||
"i18n_error": "Ezin izan dira zehaztutako hizkuntzaren itzulpenak kargatu",
|
||||
"canceled": "Ekintza bertan behera utzi da",
|
||||
"logged_out": "Saioa amaitu da, konektatu berriro.",
|
||||
@@ -600,8 +605,9 @@
|
||||
"activity": {
|
||||
"title": "Ekintzak",
|
||||
"totalScanned": "Arakatutako karpeta guztiak",
|
||||
"quickScan": "Arakatze azkarra",
|
||||
"quickScan": "Arakatze bizkorra",
|
||||
"fullScan": "Arakatze sakona",
|
||||
"selectiveScan": "Arakatze selektiboa",
|
||||
"serverUptime": "Zerbitzariak piztuta daraman denbora",
|
||||
"serverDown": "LINEAZ KANPO",
|
||||
"scanType": "Mota",
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
"mood": "Tunnelma",
|
||||
"participants": "Lisäosallistujat",
|
||||
"tags": "Lisätunnisteet",
|
||||
"mappedTags": "Mäpättyt tunnisteet",
|
||||
"mappedTags": "Mäpätyt tunnisteet",
|
||||
"rawTags": "Raakatunnisteet",
|
||||
"bitDepth": "Bittisyvyys",
|
||||
"sampleRate": "Näytteenottotaajuus",
|
||||
@@ -324,7 +324,7 @@
|
||||
"pathInvalid": "Virheellinen kirjaston polku"
|
||||
},
|
||||
"messages": {
|
||||
"deleteConfirm": "Oletko varma, että haluat poistaa tämän kirjaston? Tämä poistaa kaikki liittyvät tiedot ja käyttäjien pääsyn.",
|
||||
"deleteConfirm": "Haluatko varmasti poistaa tämän kirjaston? Kaikki siihen liittyvät tiedot ja käyttäjien pääsy poistetaan.",
|
||||
"scanInProgress": "Skannaus käynnissä...",
|
||||
"noLibrariesAssigned": "Tälle käyttäjälle ei ole määritetty kirjastoja"
|
||||
}
|
||||
@@ -341,7 +341,7 @@
|
||||
"username": "Käyttäjänimi",
|
||||
"password": "Salasana",
|
||||
"sign_in": "Kirjaudu",
|
||||
"sign_in_error": "Autentikointi epäonnistui. Yritä uudelleen",
|
||||
"sign_in_error": "Kirjautuminen epäonnistui. Yritä uudelleen",
|
||||
"logout": "Kirjaudu ulos",
|
||||
"insightsCollectionNote": "Navidrome kerää anonyymejä käyttötietoja auttaakseen parantamaan\nprojektia. Paina [tästä] saadaksesi lisätietoa\nja halutessasi kieltäytyä"
|
||||
},
|
||||
@@ -351,7 +351,7 @@
|
||||
"required": "Pakollinen",
|
||||
"minLength": "Pitää vähintään olla %{min} merkkiä",
|
||||
"maxLength": "Saa olla enintään %{max} merkkiä",
|
||||
"minValue": "pitää olla vähintään %{min}",
|
||||
"minValue": "Pitää olla vähintään %{min}",
|
||||
"maxValue": "Saa olla enentään %{max}",
|
||||
"number": "Pitää olla numero",
|
||||
"email": "Pitää olla oikea sähköpostiosoite",
|
||||
@@ -445,7 +445,7 @@
|
||||
},
|
||||
"navigation": {
|
||||
"no_results": "Ei tuloksia",
|
||||
"no_more_results": "Sivunumero %{page} on rajojen ulkopuolella. Kokeile edellinen sivu.",
|
||||
"no_more_results": "Sivunumeroa %{page} ei löydy. Yritä edellistä sivua.",
|
||||
"page_out_of_boundaries": "Sivunumero %{page} on rajojen ulkopuolella",
|
||||
"page_out_from_end": "Viimeinen sivu, ei voi edetä",
|
||||
"page_out_from_begin": "Ensimmäinen sivu, ei voi palata",
|
||||
@@ -527,7 +527,7 @@
|
||||
"desktop_notifications": "Työpöytäilmoitukset",
|
||||
"lastfmScrobbling": "Kuuntelutottumuksen lähetys Last.fm-palveluun",
|
||||
"listenBrainzScrobbling": "Kuuntelutottumuksen lähetys ListenBrainz-palveluun",
|
||||
"replaygain": "RepleyGain -tila",
|
||||
"replaygain": "ReplayGain -tila",
|
||||
"preAmp": "ReplayGain esivahvistus (dB)",
|
||||
"gain": {
|
||||
"none": "Pois käytöstä",
|
||||
@@ -559,7 +559,7 @@
|
||||
"previousTrackText": "Edellinen kappale",
|
||||
"reloadText": "Päivitä",
|
||||
"volumeText": "Äänenvoimakkuus",
|
||||
"toggleLyricText": "Toggle lyric",
|
||||
"toggleLyricText": "Näytä/piilota sanat",
|
||||
"toggleMiniModeText": "Minimoi",
|
||||
"destroyText": "Poista",
|
||||
"downloadText": "Lataa",
|
||||
@@ -618,7 +618,7 @@
|
||||
"show_help": "Näytä tämä apuvalikko",
|
||||
"toggle_menu": "Menuvalikko päälle ja pois",
|
||||
"toggle_play": "Toista / Tauko",
|
||||
"prev_song": "Esellinen kappale",
|
||||
"prev_song": "Edellinen kappale",
|
||||
"next_song": "Seuraava kappale",
|
||||
"vol_up": "Kovemmalle",
|
||||
"vol_down": "Hiljemmalle",
|
||||
|
||||
@@ -14,6 +14,12 @@ import (
|
||||
"github.com/navidrome/navidrome/model"
|
||||
)
|
||||
|
||||
const (
|
||||
// argLengthThreshold is the threshold for switching from command-line args to file-based target passing.
|
||||
// Set conservatively at 24KB to support Windows (~32KB limit) with margin for env vars.
|
||||
argLengthThreshold = 24 * 1024
|
||||
)
|
||||
|
||||
// scannerExternal is a scanner that runs an external process to do the scanning. It is used to avoid
|
||||
// memory leaks or retention in the main process, as the scanner can consume a lot of memory. The
|
||||
// external process will be spawned with the same executable as the current process, and will run
|
||||
@@ -45,10 +51,14 @@ func (s *scannerExternal) scan(ctx context.Context, fullScan bool, targets []mod
|
||||
|
||||
// Add targets if provided
|
||||
if len(targets) > 0 {
|
||||
for _, target := range targets {
|
||||
args = append(args, "-t", target.String())
|
||||
targetArgs, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
|
||||
if err != nil {
|
||||
progress <- &ProgressInfo{Error: err.Error()}
|
||||
return
|
||||
}
|
||||
log.Debug(ctx, "Spawning external scanner process with targets", "fullScan", fullScan, "path", exe, "targets", targets)
|
||||
defer cleanup()
|
||||
log.Debug(ctx, "Spawning external scanner process with target file", "fullScan", fullScan, "path", exe, "numTargets", len(targets))
|
||||
args = append(args, targetArgs...)
|
||||
} else {
|
||||
log.Debug(ctx, "Spawning external scanner process", "fullScan", fullScan, "path", exe)
|
||||
}
|
||||
@@ -98,4 +108,62 @@ func (s *scannerExternal) wait(cmd *exec.Cmd, out *io.PipeWriter) {
|
||||
_ = out.Close()
|
||||
}
|
||||
|
||||
// targetArguments builds command-line arguments for the given scan targets.
|
||||
// If the estimated argument length exceeds a threshold, it writes the targets to a temp file
|
||||
// and returns the --target-file argument instead.
|
||||
// Returns the arguments, a cleanup function to remove any temp file created, and an error if any.
|
||||
func targetArguments(ctx context.Context, targets []model.ScanTarget, lengthThreshold int) ([]string, func(), error) {
|
||||
var args []string
|
||||
|
||||
// Estimate argument length to decide whether to use file-based approach
|
||||
argLength := estimateArgLength(targets)
|
||||
|
||||
if argLength > lengthThreshold {
|
||||
// Write targets to temp file and pass via --target-file
|
||||
targetFile, err := writeTargetsToFile(targets)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to write targets to file: %w", err)
|
||||
}
|
||||
args = append(args, "--target-file", targetFile)
|
||||
return args, func() {
|
||||
os.Remove(targetFile) // Clean up temp file
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Use command-line arguments for small target lists
|
||||
for _, target := range targets {
|
||||
args = append(args, "-t", target.String())
|
||||
}
|
||||
return args, func() {}, nil
|
||||
}
|
||||
|
||||
// estimateArgLength estimates the total length of command-line arguments for the given targets.
|
||||
func estimateArgLength(targets []model.ScanTarget) int {
|
||||
length := 0
|
||||
for _, target := range targets {
|
||||
// Each target adds: "-t " + target string + space
|
||||
length += 3 + len(target.String()) + 1
|
||||
}
|
||||
return length
|
||||
}
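// Worked example (illustrative): for the single target "1:Music", the estimate is
// len("-t ") + len("1:Music") + 1 = 3 + 7 + 1 = 11 bytes, which is the value the
// "exactly at threshold" edge-case test below relies on.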
|
||||
|
||||
// writeTargetsToFile writes the targets to a temporary file, one per line.
|
||||
// Returns the path to the temp file, which the caller should clean up.
|
||||
func writeTargetsToFile(targets []model.ScanTarget) (string, error) {
|
||||
tmpFile, err := os.CreateTemp("", "navidrome-scan-targets-*.txt")
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to create temp file: %w", err)
|
||||
}
|
||||
defer tmpFile.Close()
|
||||
|
||||
for _, target := range targets {
|
||||
if _, err := fmt.Fprintln(tmpFile, target.String()); err != nil {
|
||||
os.Remove(tmpFile.Name())
|
||||
return "", fmt.Errorf("failed to write to temp file: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return tmpFile.Name(), nil
|
||||
}
|
||||
|
||||
var _ scanner = (*scannerExternal)(nil)
|
||||
|
||||
scanner/external_test.go (new file, 160 lines)
@@ -0,0 +1,160 @@
package scanner

import (
"context"
"os"
"strings"

"github.com/navidrome/navidrome/model"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)

var _ = Describe("targetArguments", func() {
var ctx context.Context

BeforeEach(func() {
ctx = GinkgoT().Context()
})

Context("with small target list", func() {
It("returns command-line arguments for single target", func() {
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music/Rock"},
}

args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(Equal([]string{"-t", "1:Music/Rock"}))
})

It("returns command-line arguments for multiple targets", func() {
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music/Rock"},
{LibraryID: 2, FolderPath: "Music/Jazz"},
{LibraryID: 3, FolderPath: "Classical"},
}

args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(Equal([]string{
"-t", "1:Music/Rock",
"-t", "2:Music/Jazz",
"-t", "3:Classical",
}))
})

It("handles targets with special characters", func() {
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music/Rock & Roll"},
{LibraryID: 2, FolderPath: "Music/Jazz (Modern)"},
}

args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(Equal([]string{
"-t", "1:Music/Rock & Roll",
"-t", "2:Music/Jazz (Modern)",
}))
})
})

Context("with large target list exceeding threshold", func() {
It("returns --target-file argument when exceeding threshold", func() {
// Create enough targets to exceed the threshold
var targets []model.ScanTarget
for i := 1; i <= 600; i++ {
targets = append(targets, model.ScanTarget{
LibraryID: 1,
FolderPath: "Music/VeryLongFolderPathToSimulateRealScenario/SubFolder",
})
}

args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(HaveLen(2))
Expect(args[0]).To(Equal("--target-file"))

// Verify the file exists and has correct format
filePath := args[1]
Expect(filePath).To(ContainSubstring("navidrome-scan-targets-"))
Expect(filePath).To(HaveSuffix(".txt"))

// Verify file actually exists
_, err = os.Stat(filePath)
Expect(err).ToNot(HaveOccurred())
})

It("creates temp file with correct format", func() {
// Use custom threshold to easily exceed it
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music/Rock"},
{LibraryID: 2, FolderPath: "Music/Jazz"},
{LibraryID: 3, FolderPath: "Classical"},
}

// Set threshold very low to force file usage
args, cleanup, err := targetArguments(ctx, targets, 10)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args[0]).To(Equal("--target-file"))

// Verify file exists with correct format
filePath := args[1]
Expect(filePath).To(ContainSubstring("navidrome-scan-targets-"))
Expect(filePath).To(HaveSuffix(".txt"))

// Verify file content
content, err := os.ReadFile(filePath)
Expect(err).ToNot(HaveOccurred())
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
Expect(lines).To(HaveLen(3))
Expect(lines[0]).To(Equal("1:Music/Rock"))
Expect(lines[1]).To(Equal("2:Music/Jazz"))
Expect(lines[2]).To(Equal("3:Classical"))
})
})

Context("edge cases", func() {
It("handles empty target list", func() {
var targets []model.ScanTarget

args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(BeEmpty())
})

It("uses command-line args when exactly at threshold", func() {
// Create targets that are exactly at threshold
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music"},
}

// Estimate length should be 11 bytes
estimatedLength := estimateArgLength(targets)

args, cleanup, err := targetArguments(ctx, targets, estimatedLength)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(Equal([]string{"-t", "1:Music"}))
})

It("uses file when one byte over threshold", func() {
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music"},
}

// Set threshold just below the estimated length
estimatedLength := estimateArgLength(targets)
args, cleanup, err := targetArguments(ctx, targets, estimatedLength-1)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args[0]).To(Equal("--target-file"))
})
})
})
@@ -76,6 +76,12 @@ func newScanJob(ctx context.Context, ds model.DataStore, cw artwork.CacheWarmer,
log.Error(ctx, "Error getting fs for library", "library", lib.Name, "path", lib.Path, err)
return nil, fmt.Errorf("getting fs for library: %w", err)
}

// Ensure FullScanInProgress reflects the current scan request.
// This is important when resuming an interrupted quick scan as a full scan:
// the DB may have FullScanInProgress=false, but we need it true for isOutdated() to work correctly.
lib.FullScanInProgress = lib.FullScanInProgress || fullScan

return &scanJob{
lib: lib,
fs: fsys,
@@ -675,6 +675,155 @@ var _ = Describe("Scanner", Ordered, func() {
})
})

Describe("Interrupted scan resumption", func() {
var fsys storagetest.FakeFS
var help func(...map[string]any) *fstest.MapFile

BeforeEach(func() {
help = template(_t{"albumartist": "The Beatles", "album": "Help!", "year": 1965})
fsys = createFS(fstest.MapFS{
"The Beatles/Help!/01 - Help!.mp3": help(track(1, "Help!")),
"The Beatles/Help!/02 - The Night Before.mp3": help(track(2, "The Night Before")),
})
})

simulateInterruptedScan := func(fullScan bool) {
// Call ScanBegin to properly set LastScanStartedAt and FullScanInProgress
// This simulates what would happen if a scan was interrupted (ScanBegin called but ScanEnd not)
Expect(ds.Library(ctx).ScanBegin(lib.ID, fullScan)).To(Succeed())

// Verify the update was persisted
reloaded, err := ds.Library(ctx).Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
Expect(reloaded.LastScanStartedAt).ToNot(BeZero())
Expect(reloaded.FullScanInProgress).To(Equal(fullScan))
}

Context("when a quick scan is interrupted and resumed with a full scan request", func() {
BeforeEach(func() {
// First, complete a full scan to populate the database
Expect(runScanner(ctx, true)).To(Succeed())

// Verify files were imported
mfs, err := ds.MediaFile(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(2))

// Now simulate an interrupted quick scan
// (LastScanStartedAt is set, FullScanInProgress is false)
simulateInterruptedScan(false)
})

It("should rescan all folders when resumed as full scan", func() {
// Update a tag without changing the folder hash by preserving the original modtime.
// In a quick scan, this wouldn't be detected because the folder hash hasn't changed.
// But in a full scan, all files should be re-read regardless of hash.
origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "updated comment"}, origModTime)

// Resume with a full scan - this should process all folders
// even though folder hashes haven't changed
Expect(runScanner(ctx, true)).To(Succeed())

// Verify the comment was updated (which means the folder was processed and file re-imported)
mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
Filters: squirrel.Eq{"title": "Help!"},
})
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(1))
Expect(mfs[0].Comment).To(Equal("updated comment"))
})
})

Context("when a full scan is interrupted and resumed with a quick scan request", func() {
BeforeEach(func() {
// First, complete a full scan to populate the database
Expect(runScanner(ctx, true)).To(Succeed())

// Verify files were imported
mfs, err := ds.MediaFile(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(2))

// Now simulate an interrupted full scan
// (LastScanStartedAt is set, FullScanInProgress is true)
simulateInterruptedScan(true)
})

It("should continue as full scan even when quick scan is requested", func() {
// Update a tag without changing the folder hash by preserving the original modtime.
origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "full scan comment"}, origModTime)

// Request a quick scan - but because a full scan was in progress,
// it should continue as a full scan
Expect(runScanner(ctx, false)).To(Succeed())

// Verify the comment was updated (folder was processed despite unchanged hash)
mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
Filters: squirrel.Eq{"title": "Help!"},
})
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(1))
Expect(mfs[0].Comment).To(Equal("full scan comment"))
})
})

Context("when no scan was in progress", func() {
BeforeEach(func() {
// First, complete a full scan to populate the database
Expect(runScanner(ctx, true)).To(Succeed())

// Verify files were imported
mfs, err := ds.MediaFile(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(2))

// Library should have LastScanStartedAt cleared after successful scan
updatedLib, err := ds.Library(ctx).Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
Expect(updatedLib.LastScanStartedAt).To(BeZero())
Expect(updatedLib.FullScanInProgress).To(BeFalse())
})

It("should respect the full scan flag for new scans", func() {
// Update a tag without changing the folder hash by preserving the original modtime.
origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "new full scan"}, origModTime)

// Start a new full scan
Expect(runScanner(ctx, true)).To(Succeed())

// Verify the comment was updated
mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
Filters: squirrel.Eq{"title": "Help!"},
})
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(1))
Expect(mfs[0].Comment).To(Equal("new full scan"))
})

It("should not rescan unchanged folders during quick scan", func() {
// Update a tag without changing the folder hash by preserving the original modtime.
// This simulates editing tags in a file (e.g., with a tag editor) without modifying its timestamp.
// In a quick scan, this should NOT be detected because the folder hash remains unchanged.
origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "should not appear"}, origModTime)

// Do a quick scan - unchanged folders should be skipped
Expect(runScanner(ctx, false)).To(Succeed())

// Verify the comment was NOT updated (folder was skipped)
mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
Filters: squirrel.Eq{"title": "Help!"},
})
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(1))
Expect(mfs[0].Comment).To(BeEmpty())
})
})
})

Describe("RefreshStats", func() {
var refreshStatsCalls []bool
var fsys storagetest.FakeFS
@@ -107,7 +107,7 @@ func secureMiddleware() func(http.Handler) http.Handler {
FrameDeny: true,
ReferrerPolicy: "same-origin",
PermissionsPolicy: "autoplay=(), camera=(), microphone=(), usb=()",
CustomFrameOptionsValue: conf.Server.HTTPSecurityHeaders.CustomFrameOptionsValue,
CustomFrameOptionsValue: conf.Server.HTTPHeaders.FrameOptions,
//ContentSecurityPolicy: "script-src 'self' 'unsafe-inline'",
})
return sec.Handler
@@ -118,11 +118,7 @@ func (api *Router) routes() http.Handler {
hr(r, "getAlbumList2", api.GetAlbumList2)
h(r, "getStarred", api.GetStarred)
h(r, "getStarred2", api.GetStarred2)
if conf.Server.EnableNowPlaying {
h(r, "getNowPlaying", api.GetNowPlaying)
} else {
h501(r, "getNowPlaying")
}
h(r, "getNowPlaying", api.GetNowPlaying)
h(r, "getRandomSongs", api.GetRandomSongs)
h(r, "getSongsByGenre", api.GetSongsByGenre)
})
@@ -2,11 +2,13 @@ package subsonic

import (
"net/http"
"strings"

"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/server/subsonic/responses"
"github.com/navidrome/navidrome/utils/req"
"github.com/navidrome/navidrome/utils/slice"
)
@@ -35,7 +37,13 @@ func (api *Router) GetUser(r *http.Request) (*responses.Subsonic, error) {
if !ok {
return nil, newError(responses.ErrorGeneric, "Internal error")
}

username, err := req.Params(r).String("username")
if err != nil {
return nil, err
}
if !strings.EqualFold(username, loggedUser.UserName) {
return nil, newError(responses.ErrorAuthorizationFail)
}
response := newResponse()
user := buildUserResponse(loggedUser)
response.User = &user
@@ -1,7 +1,7 @@
package subsonic

import (
"context"
"errors"
"net/http/httptest"

"github.com/navidrome/navidrome/conf"
@@ -43,8 +43,8 @@ var _ = Describe("Users", func() {
}

// Create request with user in context
req := httptest.NewRequest("GET", "/rest/getUser", nil)
ctx := request.WithUser(context.Background(), testUser)
req := httptest.NewRequest("GET", "/rest/getUser?username=testuser", nil)
ctx := request.WithUser(GinkgoT().Context(), testUser)
req = req.WithContext(ctx)

userResponse, err1 := router.GetUser(req)
@@ -116,4 +116,60 @@ var _ = Describe("Users", func() {
Expect(response.Folder).To(ContainElements(int32(1), int32(2), int32(5)))
})
})

Describe("GetUser authorization", func() {
It("should allow user to request their own information", func() {
req := httptest.NewRequest("GET", "/rest/getUser?username=testuser", nil)
ctx := request.WithUser(GinkgoT().Context(), testUser)
req = req.WithContext(ctx)

response, err := router.GetUser(req)

Expect(err).ToNot(HaveOccurred())
Expect(response).ToNot(BeNil())
Expect(response.User).ToNot(BeNil())
Expect(response.User.Username).To(Equal("testuser"))
})

It("should deny user from requesting another user's information", func() {
req := httptest.NewRequest("GET", "/rest/getUser?username=anotheruser", nil)
ctx := request.WithUser(GinkgoT().Context(), testUser)
req = req.WithContext(ctx)

response, err := router.GetUser(req)

Expect(err).To(HaveOccurred())
Expect(response).To(BeNil())

var subErr subError
ok := errors.As(err, &subErr)
Expect(ok).To(BeTrue())
Expect(subErr.code).To(Equal(responses.ErrorAuthorizationFail))
})

It("should return error when username parameter is missing", func() {
req := httptest.NewRequest("GET", "/rest/getUser", nil)
ctx := request.WithUser(GinkgoT().Context(), testUser)
req = req.WithContext(ctx)

response, err := router.GetUser(req)

Expect(err).To(MatchError("missing parameter: 'username'"))
Expect(response).To(BeNil())
})

It("should return error when user context is missing", func() {
req := httptest.NewRequest("GET", "/rest/getUser?username=testuser", nil)

response, err := router.GetUser(req)

Expect(err).To(HaveOccurred())
Expect(response).To(BeNil())

var subErr subError
ok := errors.As(err, &subErr)
Expect(ok).To(BeTrue())
Expect(subErr.code).To(Equal(responses.ErrorGeneric))
})
})
})
@@ -1,12 +1,15 @@
import React from 'react'
import { TopToolbar, ExportButton } from 'react-admin'
import { TopToolbar, ExportButton, useListContext } from 'react-admin'
import DeleteMissingFilesButton from './DeleteMissingFilesButton.jsx'

const MissingListActions = (props) => (
<TopToolbar {...props}>
<ExportButton />
<DeleteMissingFilesButton deleteAll />
</TopToolbar>
)
const MissingListActions = (props) => {
const { total } = useListContext()
return (
<TopToolbar {...props}>
<ExportButton maxResults={total} />
<DeleteMissingFilesButton deleteAll />
</TopToolbar>
)
}

export default MissingListActions
@@ -1,4 +1,10 @@
const stylesheet = `
.react-jinke-music-player-main .music-player-panel svg {
color: #eee
}
.react-jinke-music-player-main .music-player-panel button:disabled svg {
opacity: 0.3
}
.react-jinke-music-player-main svg:active, .react-jinke-music-player-main svg:hover {
color: #D60017
}
@@ -27,7 +33,6 @@ const stylesheet = `
.react-jinke-music-player-main .audio-item.playing .player-singer {
color: #ff4e6b !important
}
.react-jinke-music-player-main .lyric-btn,
.react-jinke-music-player-main .lyric-btn-active svg{
color: #ff4e6b !important
}
@@ -194,7 +194,12 @@ export default {
},
RaDeleteWithConfirmButton: {
deleteButton: {
color: 'unset',
color: '#fff',
},
},
RaBulkDeleteWithUndoButton: {
deleteButton: {
color: '#fff',
},
},
RaPaginationActions: {
@@ -2,11 +2,15 @@ package number

import (
"strconv"

"golang.org/x/exp/constraints"
)

func ParseInt[T constraints.Integer](s string) T {
// Integer is a constraint that permits any integer type.
type Integer interface {
~int | ~int8 | ~int16 | ~int32 | ~int64 |
~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr
}

func ParseInt[T Integer](s string) T {
r, _ := strconv.ParseInt(s, 10, 64)
return T(r)
}
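The change above swaps the golang.org/x/exp/constraints dependency for a local Integer constraint. A quick, illustrative usage sketch (parse errors are deliberately ignored by ParseInt, so invalid input yields the zero value):

fmt.Println(number.ParseInt[int32]("320"))   // 320
fmt.Println(number.ParseInt[uint16]("8080")) // 8080
fmt.Println(number.ParseInt[int]("oops"))    // 0 (error is discarded)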
@@ -5,12 +5,12 @@ import (
"encoding/binary"
"math/big"

"golang.org/x/exp/constraints"
"github.com/navidrome/navidrome/utils/number"
)

// Int64N returns a random int64 between 0 and max.
// This is a reimplementation of math/rand/v2.Int64N using a cryptographically secure random number generator.
func Int64N[T constraints.Integer](max T) int64 {
func Int64N[T number.Integer](max T) int64 {
rnd, _ := rand.Int(rand.Reader, big.NewInt(int64(max)))
return rnd.Int64()
}
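With the constraint swap above, Int64N behaves as before: it draws from crypto/rand and returns a value in [0, max), mirroring math/rand/v2.Int64N. Illustrative usage, assuming the package is imported as random:

n := random.Int64N(100)      // any integer type satisfies number.Integer
d := random.Int64N(uint8(6)) // result is in [0, 6)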
@@ -6,9 +6,8 @@ import (
"cmp"
"io"
"iter"
"maps"
"slices"

"golang.org/x/exp/maps"
)

func Map[T any, R any](t []T, mapFunc func(T) R) []R {
@@ -49,11 +48,9 @@ func CompactByFrequency[T comparable](list []T) []T {
counters[item]++
}

sorted := maps.Keys(counters)
slices.SortFunc(sorted, func(i, j T) int {
return slices.SortedFunc(maps.Keys(counters), func(i, j T) int {
return cmp.Compare(counters[j], counters[i])
})
return sorted
}

func MostFrequent[T comparable](list []T) T {
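The refactor above drops golang.org/x/exp/maps in favor of the standard library: maps.Keys now yields an iterator, and slices.SortedFunc collects and sorts it in one step, ordered by descending count. An illustrative call (tie order is unspecified, so the example avoids ties):

// Unique items, most frequent first.
slice.CompactByFrequency([]string{"rock", "jazz", "rock", "pop", "rock", "jazz"})
// => []string{"rock", "jazz", "pop"}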