Compare commits


12 Commits

Author SHA1 Message Date
Deluan Quintão
5c3568f758 fix(ui): make playlist name sorting case-insensitive (#4845)
* fix: make playlist name sorting case-insensitive

Add the NOCASE collation to the playlist.name column to ensure case-insensitive sorting, matching the behavior of other tables such as radio and user. This fixes the issue where playlist names starting with an uppercase letter would sort before lowercase names, regardless of alphabetical order.

Because SQLite's ALTER TABLE cannot change an existing column's collation, the migration recreates the playlist table with the proper collation and recreates all associated indexes. Corresponding collation tests are added to verify that the fix persists through future migrations.

* fix: add default sorting to playlist names

Signed-off-by: Deluan <deluan@navidrome.org>

---------

Signed-off-by: Deluan <deluan@navidrome.org>
2026-01-05 19:05:11 -05:00
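
A minimal sketch of the sorting behavior this commit fixes, using the mattn/go-sqlite3 driver already listed in go.mod; only the name column from the migration further down is reproduced, and the playlist names are invented for illustration:

package main

import (
    "database/sql"
    "fmt"

    _ "github.com/mattn/go-sqlite3"
)

func main() {
    db, _ := sql.Open("sqlite3", ":memory:")
    defer db.Close()

    // With SQLite's default BINARY collation, every uppercase letter sorts before
    // every lowercase one, so "Zebra" would come out ahead of "apple pie".
    // Declaring the column with "collate NOCASE" makes ORDER BY case-insensitive.
    db.Exec(`create table playlist (name varchar(255) collate NOCASE default '' not null)`)
    db.Exec(`insert into playlist (name) values ('apple pie'), ('Zebra'), ('Morning Mix')`)

    rows, _ := db.Query(`select name from playlist order by name`)
    defer rows.Close()
    for rows.Next() {
        var name string
        rows.Scan(&name)
        fmt.Println(name) // prints: apple pie, Morning Mix, Zebra
    }
}
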
Deluan
735c0d9103 chore(deps): remove direct dependency on golang.org/x/exp
Signed-off-by: Deluan <deluan@navidrome.org>
2025-12-31 17:03:44 -05:00
Deluan
fc9817552d fix(subsonic): make getUser?username comparison case-insensitive
Signed-off-by: Deluan <deluan@navidrome.org>
2025-12-19 17:56:40 -05:00
Xabi
0c1b65d3e6 fix(ui): update Basque translation (#4815)
Added missing strings and a fix or two
2025-12-19 08:32:13 -05:00
Deluan
47b448c64f chore(deps): update action versions in pipeline configuration
Signed-off-by: Deluan <deluan@navidrome.org>
2025-12-19 08:30:18 -05:00
Deluan
834fa494e4 chore(deps): update golangci-lint to v2.7.2
Signed-off-by: Deluan <deluan@navidrome.org>
2025-12-19 08:25:51 -05:00
Deluan
5d34640065 chore(deps): update dependencies for maruel/natural to v1.3.0 and tetratelabs/wazero to v1.11.0
Signed-off-by: Deluan <deluan@navidrome.org>
2025-12-19 08:24:45 -05:00
Deluan
9ed309ac81 feat(scanner): implement file-based target passing for large target lists
Signed-off-by: Deluan <deluan@navidrome.org>
2025-12-16 16:08:32 -05:00
Deluan
8c80be56da fix(scanner): ensure FullScanInProgress reflects current scan request during interrupted scans
Signed-off-by: Deluan <deluan@navidrome.org>
2025-12-16 12:16:00 -05:00
Deluan
cde5992c46 fix(scanner): execute GetFolderUpdateInfo in batches to avoid "Expression tree is too large (maximum depth 1000)"
Signed-off-by: Deluan <deluan@navidrome.org>
2025-12-16 11:37:13 -05:00
Deluan
017676c457 fix(ui): export all missing files instead of first 1000
Fixes #4721
2025-12-16 06:43:02 -05:00
Deluan
2d7b716834 fix(scanner): remove stale role associations when artist role changes. Fix #4242
Signed-off-by: Deluan <deluan@navidrome.org>
2025-12-16 06:38:50 -05:00
24 changed files with 805 additions and 77 deletions

View File

@@ -217,7 +217,7 @@ jobs:
CROSS_TAGLIB_VERSION=${{ env.CROSS_TAGLIB_VERSION }}
- name: Upload Binaries
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v6
with:
name: navidrome-${{ env.PLATFORM }}
path: ./output
@@ -248,7 +248,7 @@ jobs:
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v6
if: env.IS_LINUX == 'true' && env.IS_DOCKER_PUSH_CONFIGURED == 'true' && env.IS_ARMV5 == 'false'
with:
name: digests-${{ env.PLATFORM }}
@@ -270,7 +270,7 @@ jobs:
- uses: actions/checkout@v6
- name: Download digests
uses: actions/download-artifact@v6
uses: actions/download-artifact@v7
with:
path: /tmp/digests
pattern: digests-*
@@ -304,7 +304,7 @@ jobs:
- uses: actions/checkout@v6
- name: Download digests
uses: actions/download-artifact@v6
uses: actions/download-artifact@v7
with:
path: /tmp/digests
pattern: digests-*
@@ -356,7 +356,7 @@ jobs:
steps:
- uses: actions/checkout@v6
- uses: actions/download-artifact@v6
- uses: actions/download-artifact@v7
with:
path: ./binaries
pattern: navidrome-windows*
@@ -375,7 +375,7 @@ jobs:
du -h binaries/msi/*.msi
- name: Upload MSI files
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v6
with:
name: navidrome-windows-installers
path: binaries/msi/*.msi
@@ -393,7 +393,7 @@ jobs:
fetch-depth: 0
fetch-tags: true
- uses: actions/download-artifact@v6
- uses: actions/download-artifact@v7
with:
path: ./binaries
pattern: navidrome-*
@@ -419,7 +419,7 @@ jobs:
rm ./dist/*.tar.gz ./dist/*.zip
- name: Upload all-packages artifact
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v6
with:
name: packages
path: dist/navidrome_0*
@@ -442,13 +442,13 @@ jobs:
item: ${{ fromJson(needs.release.outputs.package_list) }}
steps:
- name: Download all-packages artifact
uses: actions/download-artifact@v6
uses: actions/download-artifact@v7
with:
name: packages
path: ./dist
- name: Upload all-packages artifact
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v6
with:
name: navidrome_linux_${{ matrix.item }}
path: dist/navidrome_0*_linux_${{ matrix.item }}

View File

@@ -12,7 +12,7 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5
- uses: dessant/lock-threads@v6
with:
process-only: 'issues, prs'
issue-inactive-days: 120

View File

@@ -24,7 +24,7 @@ jobs:
git status --porcelain
git diff
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v8
with:
token: ${{ secrets.PAT }}
author: "navidrome-bot <navidrome-bot@navidrome.org>"

View File

@@ -16,7 +16,7 @@ DOCKER_TAG ?= deluan/navidrome:develop
# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
CROSS_TAGLIB_VERSION ?= 2.1.1-1
GOLANGCI_LINT_VERSION ?= v2.6.2
GOLANGCI_LINT_VERSION ?= v2.7.2
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")

View File

@@ -1,9 +1,12 @@
package cmd
import (
"bufio"
"context"
"encoding/gob"
"fmt"
"os"
"strings"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/db"
@@ -19,12 +22,14 @@ var (
fullScan bool
subprocess bool
targets []string
targetFile string
)
func init() {
scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
scanCmd.Flags().StringArrayVarP(&targets, "target", "t", []string{}, "list of libraryID:folderPath pairs, can be repeated (e.g., \"-t 1:Music/Rock -t 1:Music/Jazz -t 2:Classical\")")
scanCmd.Flags().StringVar(&targetFile, "target-file", "", "path to file containing targets (one libraryID:folderPath per line)")
rootCmd.AddCommand(scanCmd)
}
@@ -71,10 +76,17 @@ func runScanner(ctx context.Context) {
ds := persistence.New(sqlDB)
pls := core.NewPlaylists(ds)
// Parse targets if provided
// Parse targets from command line or file
var scanTargets []model.ScanTarget
if len(targets) > 0 {
var err error
var err error
if targetFile != "" {
scanTargets, err = readTargetsFromFile(targetFile)
if err != nil {
log.Fatal(ctx, "Failed to read targets from file", err)
}
log.Info(ctx, "Scanning specific folders from file", "numTargets", len(scanTargets))
} else if len(targets) > 0 {
scanTargets, err = model.ParseTargets(targets)
if err != nil {
log.Fatal(ctx, "Failed to parse targets", err)
@@ -94,3 +106,31 @@ func runScanner(ctx context.Context) {
trackScanInteractively(ctx, progress)
}
}
// readTargetsFromFile reads scan targets from a file, one per line.
// Each line should be in the format "libraryID:folderPath".
// Empty lines are ignored.
func readTargetsFromFile(filePath string) ([]model.ScanTarget, error) {
file, err := os.Open(filePath)
if err != nil {
return nil, fmt.Errorf("failed to open target file: %w", err)
}
defer file.Close()
var targetStrings []string
scanner := bufio.NewScanner(file)
for scanner.Scan() {
line := strings.TrimSpace(scanner.Text())
// Skip empty lines
if line == "" {
continue
}
targetStrings = append(targetStrings, line)
}
if err := scanner.Err(); err != nil {
return nil, fmt.Errorf("failed to read target file: %w", err)
}
return model.ParseTargets(targetStrings)
}
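
For illustration, a hypothetical way to drive the new flag: save the targets below (library IDs and paths are invented) to a file such as /tmp/scan-targets.txt,

1:Music/Rock
1:Music/Jazz
2:Classical

and then, assuming the usual binary and subcommand names, run:

navidrome scan --target-file /tmp/scan-targets.txt
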

cmd/scan_test.go (new file, 89 added lines)
View File

@@ -0,0 +1,89 @@
package cmd
import (
"os"
"path/filepath"
"github.com/navidrome/navidrome/model"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("readTargetsFromFile", func() {
var tempDir string
BeforeEach(func() {
var err error
tempDir, err = os.MkdirTemp("", "navidrome-test-")
Expect(err).ToNot(HaveOccurred())
})
AfterEach(func() {
os.RemoveAll(tempDir)
})
It("reads valid targets from file", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "1:Music/Rock\n2:Music/Jazz\n3:Classical\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(3))
Expect(targets[0]).To(Equal(model.ScanTarget{LibraryID: 1, FolderPath: "Music/Rock"}))
Expect(targets[1]).To(Equal(model.ScanTarget{LibraryID: 2, FolderPath: "Music/Jazz"}))
Expect(targets[2]).To(Equal(model.ScanTarget{LibraryID: 3, FolderPath: "Classical"}))
})
It("skips empty lines", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "1:Music/Rock\n\n2:Music/Jazz\n\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
})
It("trims whitespace", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := " 1:Music/Rock \n\t2:Music/Jazz\t\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
Expect(targets[0].FolderPath).To(Equal("Music/Rock"))
Expect(targets[1].FolderPath).To(Equal("Music/Jazz"))
})
It("returns error for non-existent file", func() {
_, err := readTargetsFromFile("/nonexistent/file.txt")
Expect(err).To(HaveOccurred())
Expect(err.Error()).To(ContainSubstring("failed to open target file"))
})
It("returns error for invalid target format", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "invalid-format\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
_, err = readTargetsFromFile(filePath)
Expect(err).To(HaveOccurred())
})
It("handles mixed valid and empty lines", func() {
filePath := filepath.Join(tempDir, "targets.txt")
content := "\n1:Music/Rock\n\n\n2:Music/Jazz\n\n"
err := os.WriteFile(filePath, []byte(content), 0600)
Expect(err).ToNot(HaveOccurred())
targets, err := readTargetsFromFile(filePath)
Expect(err).ToNot(HaveOccurred())
Expect(targets).To(HaveLen(2))
})
})

View File

@@ -0,0 +1,99 @@
-- +goose Up
-- Fix case-insensitive sorting for playlist names
create table playlist_dg_tmp
(
id varchar(255) not null
primary key,
name varchar(255) collate NOCASE default '' not null,
comment varchar(255) default '' not null,
duration real default 0 not null,
song_count integer default 0 not null,
public bool default FALSE not null,
created_at datetime,
updated_at datetime,
path string default '' not null,
sync bool default false not null,
size integer default 0 not null,
rules varchar,
evaluated_at datetime,
owner_id varchar(255) not null
constraint playlist_user_user_id_fk
references user
on update cascade on delete cascade
);
insert into playlist_dg_tmp(id, name, comment, duration, song_count, public, created_at, updated_at, path, sync, size,
rules, evaluated_at, owner_id)
select id, name, comment, duration, song_count, public, created_at, updated_at, path, sync, size, rules, evaluated_at,
owner_id
from playlist;
drop table playlist;
alter table playlist_dg_tmp
rename to playlist;
create index playlist_name
on playlist (name);
create index playlist_created_at
on playlist (created_at);
create index playlist_updated_at
on playlist (updated_at);
create index playlist_evaluated_at
on playlist (evaluated_at);
create index playlist_size
on playlist (size);
-- +goose Down
-- Note: Downgrade loses the collation but preserves data
create table playlist_dg_tmp
(
id varchar(255) not null
primary key,
name varchar(255) default '' not null,
comment varchar(255) default '' not null,
duration real default 0 not null,
song_count integer default 0 not null,
public bool default FALSE not null,
created_at datetime,
updated_at datetime,
path string default '' not null,
sync bool default false not null,
size integer default 0 not null,
rules varchar,
evaluated_at datetime,
owner_id varchar(255) not null
constraint playlist_user_user_id_fk
references user
on update cascade on delete cascade
);
insert into playlist_dg_tmp(id, name, comment, duration, song_count, public, created_at, updated_at, path, sync, size,
rules, evaluated_at, owner_id)
select id, name, comment, duration, song_count, public, created_at, updated_at, path, sync, size, rules, evaluated_at,
owner_id
from playlist;
drop table playlist;
alter table playlist_dg_tmp
rename to playlist;
create index playlist_name
on playlist (name);
create index playlist_created_at
on playlist (created_at);
create index playlist_updated_at
on playlist (updated_at);
create index playlist_evaluated_at
on playlist (evaluated_at);
create index playlist_size
on playlist (size);

go.mod (6 changed lines)
View File

@@ -39,7 +39,7 @@ require (
github.com/knqyf263/go-plugin v0.9.0
github.com/kr/pretty v0.3.1
github.com/lestrrat-go/jwx/v2 v2.1.6
github.com/maruel/natural v1.2.1
github.com/maruel/natural v1.3.0
github.com/matoous/go-nanoid/v2 v2.1.0
github.com/mattn/go-sqlite3 v1.14.32
github.com/microcosm-cc/bluemonday v1.0.27
@@ -57,11 +57,10 @@ require (
github.com/spf13/cobra v1.10.2
github.com/spf13/viper v1.21.0
github.com/stretchr/testify v1.11.1
github.com/tetratelabs/wazero v1.10.1
github.com/tetratelabs/wazero v1.11.0
github.com/unrolled/secure v1.17.0
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342
go.uber.org/goleak v1.3.0
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9
golang.org/x/image v0.34.0
golang.org/x/net v0.48.0
golang.org/x/sync v0.19.0
@@ -130,6 +129,7 @@ require (
go.yaml.in/yaml/v2 v2.4.2 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/crypto v0.46.0 // indirect
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 // indirect
golang.org/x/mod v0.31.0 // indirect
golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc // indirect
golang.org/x/tools v0.40.0 // indirect

go.sum (8 changed lines)
View File

@@ -162,8 +162,8 @@ github.com/lestrrat-go/jwx/v2 v2.1.6 h1:hxM1gfDILk/l5ylers6BX/Eq1m/pnxe9NBwW6lVf
github.com/lestrrat-go/jwx/v2 v2.1.6/go.mod h1:Y722kU5r/8mV7fYDifjug0r8FK8mZdw0K0GpJw/l8pU=
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
github.com/maruel/natural v1.2.1 h1:G/y4pwtTA07lbQsMefvsmEO0VN0NfqpxprxXDM4R/4o=
github.com/maruel/natural v1.2.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
github.com/maruel/natural v1.3.0 h1:VsmCsBmEyrR46RomtgHs5hbKADGRVtliHTyCOLFBpsg=
github.com/maruel/natural v1.3.0/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
github.com/matoous/go-nanoid/v2 v2.1.0 h1:P64+dmq21hhWdtvZfEAofnvJULaRR1Yib0+PnU669bE=
github.com/matoous/go-nanoid/v2 v2.1.0/go.mod h1:KlbGNQ+FhrUNIHUxZdL63t7tl4LaPkZNpUULS8H4uVM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
@@ -265,8 +265,8 @@ github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tetratelabs/wazero v1.10.1 h1:2DugeJf6VVk58KTPszlNfeeN8AhhpwcZqkJj2wwFuH8=
github.com/tetratelabs/wazero v1.10.1/go.mod h1:DRm5twOQ5Gr1AoEdSi0CLjDQF1J9ZAuyqFIjl1KKfQU=
github.com/tetratelabs/wazero v1.11.0 h1:+gKemEuKCTevU4d7ZTzlsvgd1uaToIDtlQlmNbwqYhA=
github.com/tetratelabs/wazero v1.11.0/go.mod h1:eV28rsN8Q+xwjogd7f4/Pp4xFxO7uOGbLcD/LzB1wiU=
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=

View File

@@ -512,6 +512,70 @@ var _ = Describe("AlbumRepository", func() {
// Clean up the test album created for this test
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
})
It("removes stale role associations when artist role changes", func() {
// Regression test for issue #4242: Composers displayed in albumartist list
// This happens when an artist's role changes (e.g., was both albumartist and composer,
// now only composer) and the old role association isn't properly removed.
// Create an artist that will have changing roles
artist := &model.Artist{
ID: "role-change-artist-1",
Name: "Role Change Artist",
OrderArtistName: "role change artist",
}
err := createArtistWithLibrary(artistRepo, artist, 1)
Expect(err).ToNot(HaveOccurred())
// Create album with artist as both albumartist and composer
album := &model.Album{
LibraryID: 1,
ID: "test-album-role-change",
Name: "Test Album Role Change",
AlbumArtistID: "role-change-artist-1",
AlbumArtist: "Role Change Artist",
Participants: model.Participants{
model.RoleAlbumArtist: {
{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
},
model.RoleComposer: {
{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
},
},
}
err = albumRepo.Put(album)
Expect(err).ToNot(HaveOccurred())
// Verify initial state: artist has both albumartist and composer roles
expected := []albumArtistRecord{
{ArtistID: "role-change-artist-1", Role: "albumartist", SubRole: ""},
{ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
}
verifyAlbumArtists(album.ID, expected)
// Now update album so artist is ONLY a composer (remove albumartist role)
album.Participants = model.Participants{
model.RoleComposer: {
{Artist: model.Artist{ID: "role-change-artist-1", Name: "Role Change Artist"}},
},
}
err = albumRepo.Put(album)
Expect(err).ToNot(HaveOccurred())
// Verify that the albumartist role was removed - only composer should remain
// This is the key test: before the fix, the albumartist role would remain
// causing composers to appear in the albumartist filter
expectedAfter := []albumArtistRecord{
{ArtistID: "role-change-artist-1", Role: "composer", SubRole: ""},
}
verifyAlbumArtists(album.ID, expectedAfter)
// Clean up
_, _ = artistRepo.executeSQL(squirrel.Delete("artist").Where(squirrel.Eq{"id": artist.ID}))
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": album.ID}))
})
})
})

View File

@@ -32,6 +32,7 @@ var _ = Describe("Collation", func() {
Entry("media_file.sort_title", "media_file", "sort_title"),
Entry("media_file.sort_album_name", "media_file", "sort_album_name"),
Entry("media_file.sort_artist_name", "media_file", "sort_artist_name"),
Entry("playlist.name", "playlist", "name"),
Entry("radio.name", "radio", "name"),
Entry("user.name", "user", "name"),
)
@@ -53,6 +54,7 @@ var _ = Describe("Collation", func() {
Entry("media_file.sort_album_name", "media_file", "coalesce(nullif(sort_album_name,''),order_album_name) collate nocase"),
Entry("media_file.sort_artist_name", "media_file", "coalesce(nullif(sort_artist_name,''),order_artist_name) collate nocase"),
Entry("media_file.path", "media_file", "path collate nocase"),
Entry("playlist.name", "playlist", "name collate nocase"),
Entry("radio.name", "radio", "name collate nocase"),
Entry("user.user_name", "user", "user_name collate nocase"),
)

View File

@@ -95,45 +95,82 @@ func (r folderRepository) CountAll(opt ...model.QueryOptions) (int64, error) {
}
func (r folderRepository) GetFolderUpdateInfo(lib model.Library, targetPaths ...string) (map[string]model.FolderUpdateInfo, error) {
// If no specific paths, return all folders in the library
if len(targetPaths) == 0 {
return r.getFolderUpdateInfoAll(lib)
}
// Check if any path is root (return all folders)
for _, targetPath := range targetPaths {
if targetPath == "" || targetPath == "." {
return r.getFolderUpdateInfoAll(lib)
}
}
// Process paths in batches to avoid SQLite's expression tree depth limit (max 1000).
// Each path generates ~3 conditions, so batch size of 100 keeps us well under the limit.
const batchSize = 100
result := make(map[string]model.FolderUpdateInfo)
for batch := range slices.Chunk(targetPaths, batchSize) {
batchResult, err := r.getFolderUpdateInfoBatch(lib, batch)
if err != nil {
return nil, err
}
for id, info := range batchResult {
result[id] = info
}
}
return result, nil
}
// getFolderUpdateInfoAll returns update info for all non-missing folders in the library
func (r folderRepository) getFolderUpdateInfoAll(lib model.Library) (map[string]model.FolderUpdateInfo, error) {
where := And{
Eq{"library_id": lib.ID},
Eq{"missing": false},
}
return r.queryFolderUpdateInfo(where)
}
// getFolderUpdateInfoBatch returns update info for a batch of target paths and their descendants
func (r folderRepository) getFolderUpdateInfoBatch(lib model.Library, targetPaths []string) (map[string]model.FolderUpdateInfo, error) {
where := And{
Eq{"library_id": lib.ID},
Eq{"missing": false},
}
// If specific paths are requested, include those folders and all their descendants
if len(targetPaths) > 0 {
// Collect folder IDs for exact target folders and path conditions for descendants
folderIDs := make([]string, 0, len(targetPaths))
pathConditions := make(Or, 0, len(targetPaths)*2)
// Collect folder IDs for exact target folders and path conditions for descendants
folderIDs := make([]string, 0, len(targetPaths))
pathConditions := make(Or, 0, len(targetPaths)*2)
for _, targetPath := range targetPaths {
if targetPath == "" || targetPath == "." {
// Root path - include everything in this library
pathConditions = Or{}
folderIDs = nil
break
}
// Clean the path to normalize it. Paths stored in the folder table do not have leading/trailing slashes.
cleanPath := strings.TrimPrefix(targetPath, string(os.PathSeparator))
cleanPath = filepath.Clean(cleanPath)
for _, targetPath := range targetPaths {
// Clean the path to normalize it. Paths stored in the folder table do not have leading/trailing slashes.
cleanPath := strings.TrimPrefix(targetPath, string(os.PathSeparator))
cleanPath = filepath.Clean(cleanPath)
// Include the target folder itself by ID
folderIDs = append(folderIDs, model.FolderID(lib, cleanPath))
// Include the target folder itself by ID
folderIDs = append(folderIDs, model.FolderID(lib, cleanPath))
// Include all descendants: folders whose path field equals or starts with the target path
// Note: Folder.Path is the directory path, so children have path = targetPath
pathConditions = append(pathConditions, Eq{"path": cleanPath})
pathConditions = append(pathConditions, Like{"path": cleanPath + "/%"})
}
// Combine conditions: exact folder IDs OR descendant path patterns
if len(folderIDs) > 0 {
where = append(where, Or{Eq{"id": folderIDs}, pathConditions})
} else if len(pathConditions) > 0 {
where = append(where, pathConditions)
}
// Include all descendants: folders whose path field equals or starts with the target path
// Note: Folder.Path is the directory path, so children have path = targetPath
pathConditions = append(pathConditions, Eq{"path": cleanPath})
pathConditions = append(pathConditions, Like{"path": cleanPath + "/%"})
}
// Combine conditions: exact folder IDs OR descendant path patterns
if len(folderIDs) > 0 {
where = append(where, Or{Eq{"id": folderIDs}, pathConditions})
} else if len(pathConditions) > 0 {
where = append(where, pathConditions)
}
return r.queryFolderUpdateInfo(where)
}
// queryFolderUpdateInfo executes the query and returns the result map
func (r folderRepository) queryFolderUpdateInfo(where And) (map[string]model.FolderUpdateInfo, error) {
sq := r.newSelect().Columns("id", "updated_at", "hash").Where(where)
var res []struct {
ID string
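
A minimal sketch of the batching pattern introduced above (slices.Chunk is part of the standard library as of Go 1.23); the paths are invented and the batch size is the same constant used in the diff:

package main

import (
    "fmt"
    "slices"
)

func main() {
    // Stand-in for a large targetPaths list.
    paths := make([]string, 250)
    for i := range paths {
        paths[i] = fmt.Sprintf("Music/Folder%03d", i)
    }

    // Each path contributes roughly three conditions to the WHERE clause
    // (folder ID, path equality, path LIKE), so batches of 100 stay far below
    // SQLite's expression tree depth limit of 1000.
    const batchSize = 100
    for batch := range slices.Chunk(paths, batchSize) {
        fmt.Println("would run one query for", len(batch), "paths")
    }
    // Prints batches of 100, 100 and 50.
}
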

View File

@@ -51,8 +51,10 @@ func unmarshalParticipants(data string) (model.Participants, error) {
}
func (r sqlRepository) updateParticipants(itemID string, participants model.Participants) error {
ids := participants.AllIDs()
sqd := Delete(r.tableName + "_artists").Where(And{Eq{r.tableName + "_id": itemID}, NotEq{"artist_id": ids}})
// Delete all existing participant entries for this item.
// This ensures stale role associations are removed when an artist's role changes
// (e.g., an artist was both albumartist and composer, but is now only composer).
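// Why a full delete: with the previous NotEq filter, an artist that stayed on the item but
// lost a role kept its stale row, because its ID was still present in ids; deleting every
// row for the item and re-inserting the current participants removes that edge case.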
sqd := Delete(r.tableName + "_artists").Where(Eq{r.tableName + "_id": itemID})
_, err := r.executeSQL(sqd)
if err != nil {
return err

View File

@@ -302,6 +302,8 @@
},
"actions": {
"scan": "Arakatu liburutegia",
"quickScan": "Araketa bizkorra",
"fullScan": "Araketa sakona",
"manageUsers": "Kudeatu erabiltzaileen sarbidea",
"viewDetails": "Ikusi xehetasunak"
},
@@ -310,6 +312,9 @@
"updated": "Liburutegia ondo eguneratu da",
"deleted": "Liburutegia ondo ezabatu da",
"scanStarted": "Liburutegiaren araketa hasi da",
"quickScanStarted": "Araketa bizkorra hasi da",
"fullScanStarted": "Araketa sakona hasi da",
"scanError": "Errorea araketa abiaraztean. Aztertu erregistroak",
"scanCompleted": "Liburutegiaren araketa amaitu da"
},
"validation": {
@@ -459,7 +464,7 @@
"bad_item": "Elementu okerra",
"item_doesnt_exist": "Elementua ez dago",
"http_error": "Errorea zerbitzariarekin komunikatzerakoan",
"data_provider_error": "Errorea datuen hornitzailean. Berrikusi kontsola xehetasun gehiagorako.",
"data_provider_error": "Errorea datuen hornitzailean. Aztertu kontsola xehetasun gehiagorako.",
"i18n_error": "Ezin izan dira zehaztutako hizkuntzaren itzulpenak kargatu",
"canceled": "Ekintza bertan behera utzi da",
"logged_out": "Saioa amaitu da, konektatu berriro.",
@@ -600,8 +605,9 @@
"activity": {
"title": "Ekintzak",
"totalScanned": "Arakatutako karpeta guztiak",
"quickScan": "Arakatze azkarra",
"quickScan": "Arakatze bizkorra",
"fullScan": "Arakatze sakona",
"selectiveScan": "Arakatze selektiboa",
"serverUptime": "Zerbitzariak piztuta daraman denbora",
"serverDown": "LINEAZ KANPO",
"scanType": "Mota",

View File

@@ -14,6 +14,12 @@ import (
"github.com/navidrome/navidrome/model"
)
const (
// argLengthThreshold is the threshold for switching from command-line args to file-based target passing.
// Set conservatively at 24KB to support Windows (~32KB limit) with margin for env vars.
argLengthThreshold = 24 * 1024
)
// scannerExternal is a scanner that runs an external process to do the scanning. It is used to avoid
// memory leaks or retention in the main process, as the scanner can consume a lot of memory. The
// external process will be spawned with the same executable as the current process, and will run
@@ -45,10 +51,14 @@ func (s *scannerExternal) scan(ctx context.Context, fullScan bool, targets []mod
// Add targets if provided
if len(targets) > 0 {
for _, target := range targets {
args = append(args, "-t", target.String())
targetArgs, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
if err != nil {
progress <- &ProgressInfo{Error: err.Error()}
return
}
log.Debug(ctx, "Spawning external scanner process with targets", "fullScan", fullScan, "path", exe, "targets", targets)
defer cleanup()
log.Debug(ctx, "Spawning external scanner process with target file", "fullScan", fullScan, "path", exe, "numTargets", len(targets))
args = append(args, targetArgs...)
} else {
log.Debug(ctx, "Spawning external scanner process", "fullScan", fullScan, "path", exe)
}
@@ -98,4 +108,62 @@ func (s *scannerExternal) wait(cmd *exec.Cmd, out *io.PipeWriter) {
_ = out.Close()
}
// targetArguments builds command-line arguments for the given scan targets.
// If the estimated argument length exceeds a threshold, it writes the targets to a temp file
// and returns the --target-file argument instead.
// Returns the arguments, a cleanup function to remove any temp file created, and an error if any.
func targetArguments(ctx context.Context, targets []model.ScanTarget, lengthThreshold int) ([]string, func(), error) {
var args []string
// Estimate argument length to decide whether to use file-based approach
argLength := estimateArgLength(targets)
if argLength > lengthThreshold {
// Write targets to temp file and pass via --target-file
targetFile, err := writeTargetsToFile(targets)
if err != nil {
return nil, nil, fmt.Errorf("failed to write targets to file: %w", err)
}
args = append(args, "--target-file", targetFile)
return args, func() {
os.Remove(targetFile) // Clean up temp file
}, nil
}
// Use command-line arguments for small target lists
for _, target := range targets {
args = append(args, "-t", target.String())
}
return args, func() {}, nil
}
// estimateArgLength estimates the total length of command-line arguments for the given targets.
func estimateArgLength(targets []model.ScanTarget) int {
length := 0
for _, target := range targets {
// Each target adds: "-t " + target string + space
length += 3 + len(target.String()) + 1
}
return length
}
// writeTargetsToFile writes the targets to a temporary file, one per line.
// Returns the path to the temp file, which the caller should clean up.
func writeTargetsToFile(targets []model.ScanTarget) (string, error) {
tmpFile, err := os.CreateTemp("", "navidrome-scan-targets-*.txt")
if err != nil {
return "", fmt.Errorf("failed to create temp file: %w", err)
}
defer tmpFile.Close()
for _, target := range targets {
if _, err := fmt.Fprintln(tmpFile, target.String()); err != nil {
os.Remove(tmpFile.Name())
return "", fmt.Errorf("failed to write to temp file: %w", err)
}
}
return tmpFile.Name(), nil
}
var _ scanner = (*scannerExternal)(nil)
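
A worked example of the threshold above, using the figures from estimateArgLength and the 600-target case exercised in the test file that follows: each target costs len("-t ") + len("libraryID:folderPath") + 1 bytes, so 600 targets with the ~58-character strings used in that test come to roughly 600 × 62 ≈ 37 KB, well over the 24 KB (24 × 1024 bytes) threshold, which is what switches the scanner to the --target-file path.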

scanner/external_test.go (new file, 160 added lines)
View File

@@ -0,0 +1,160 @@
package scanner
import (
"context"
"os"
"strings"
"github.com/navidrome/navidrome/model"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
var _ = Describe("targetArguments", func() {
var ctx context.Context
BeforeEach(func() {
ctx = GinkgoT().Context()
})
Context("with small target list", func() {
It("returns command-line arguments for single target", func() {
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music/Rock"},
}
args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(Equal([]string{"-t", "1:Music/Rock"}))
})
It("returns command-line arguments for multiple targets", func() {
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music/Rock"},
{LibraryID: 2, FolderPath: "Music/Jazz"},
{LibraryID: 3, FolderPath: "Classical"},
}
args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(Equal([]string{
"-t", "1:Music/Rock",
"-t", "2:Music/Jazz",
"-t", "3:Classical",
}))
})
It("handles targets with special characters", func() {
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music/Rock & Roll"},
{LibraryID: 2, FolderPath: "Music/Jazz (Modern)"},
}
args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(Equal([]string{
"-t", "1:Music/Rock & Roll",
"-t", "2:Music/Jazz (Modern)",
}))
})
})
Context("with large target list exceeding threshold", func() {
It("returns --target-file argument when exceeding threshold", func() {
// Create enough targets to exceed the threshold
var targets []model.ScanTarget
for i := 1; i <= 600; i++ {
targets = append(targets, model.ScanTarget{
LibraryID: 1,
FolderPath: "Music/VeryLongFolderPathToSimulateRealScenario/SubFolder",
})
}
args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(HaveLen(2))
Expect(args[0]).To(Equal("--target-file"))
// Verify the file exists and has correct format
filePath := args[1]
Expect(filePath).To(ContainSubstring("navidrome-scan-targets-"))
Expect(filePath).To(HaveSuffix(".txt"))
// Verify file actually exists
_, err = os.Stat(filePath)
Expect(err).ToNot(HaveOccurred())
})
It("creates temp file with correct format", func() {
// Use custom threshold to easily exceed it
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music/Rock"},
{LibraryID: 2, FolderPath: "Music/Jazz"},
{LibraryID: 3, FolderPath: "Classical"},
}
// Set threshold very low to force file usage
args, cleanup, err := targetArguments(ctx, targets, 10)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args[0]).To(Equal("--target-file"))
// Verify file exists with correct format
filePath := args[1]
Expect(filePath).To(ContainSubstring("navidrome-scan-targets-"))
Expect(filePath).To(HaveSuffix(".txt"))
// Verify file content
content, err := os.ReadFile(filePath)
Expect(err).ToNot(HaveOccurred())
lines := strings.Split(strings.TrimSpace(string(content)), "\n")
Expect(lines).To(HaveLen(3))
Expect(lines[0]).To(Equal("1:Music/Rock"))
Expect(lines[1]).To(Equal("2:Music/Jazz"))
Expect(lines[2]).To(Equal("3:Classical"))
})
})
Context("edge cases", func() {
It("handles empty target list", func() {
var targets []model.ScanTarget
args, cleanup, err := targetArguments(ctx, targets, argLengthThreshold)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(BeEmpty())
})
It("uses command-line args when exactly at threshold", func() {
// Create targets that are exactly at threshold
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music"},
}
// Estimated length for this single target is 11 bytes (3 + 7 + 1)
estimatedLength := estimateArgLength(targets)
args, cleanup, err := targetArguments(ctx, targets, estimatedLength)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args).To(Equal([]string{"-t", "1:Music"}))
})
It("uses file when one byte over threshold", func() {
targets := []model.ScanTarget{
{LibraryID: 1, FolderPath: "Music"},
}
// Set threshold just below the estimated length
estimatedLength := estimateArgLength(targets)
args, cleanup, err := targetArguments(ctx, targets, estimatedLength-1)
Expect(err).ToNot(HaveOccurred())
defer cleanup()
Expect(args[0]).To(Equal("--target-file"))
})
})
})

View File

@@ -76,6 +76,12 @@ func newScanJob(ctx context.Context, ds model.DataStore, cw artwork.CacheWarmer,
log.Error(ctx, "Error getting fs for library", "library", lib.Name, "path", lib.Path, err)
return nil, fmt.Errorf("getting fs for library: %w", err)
}
// Ensure FullScanInProgress reflects the current scan request.
// This is important when resuming an interrupted quick scan as a full scan:
// the DB may have FullScanInProgress=false, but we need it true for isOutdated() to work correctly.
lib.FullScanInProgress = lib.FullScanInProgress || fullScan
return &scanJob{
lib: lib,
fs: fsys,

View File

@@ -675,6 +675,155 @@ var _ = Describe("Scanner", Ordered, func() {
})
})
Describe("Interrupted scan resumption", func() {
var fsys storagetest.FakeFS
var help func(...map[string]any) *fstest.MapFile
BeforeEach(func() {
help = template(_t{"albumartist": "The Beatles", "album": "Help!", "year": 1965})
fsys = createFS(fstest.MapFS{
"The Beatles/Help!/01 - Help!.mp3": help(track(1, "Help!")),
"The Beatles/Help!/02 - The Night Before.mp3": help(track(2, "The Night Before")),
})
})
simulateInterruptedScan := func(fullScan bool) {
// Call ScanBegin to properly set LastScanStartedAt and FullScanInProgress
// This simulates what would happen if a scan was interrupted (ScanBegin called but ScanEnd not)
Expect(ds.Library(ctx).ScanBegin(lib.ID, fullScan)).To(Succeed())
// Verify the update was persisted
reloaded, err := ds.Library(ctx).Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
Expect(reloaded.LastScanStartedAt).ToNot(BeZero())
Expect(reloaded.FullScanInProgress).To(Equal(fullScan))
}
Context("when a quick scan is interrupted and resumed with a full scan request", func() {
BeforeEach(func() {
// First, complete a full scan to populate the database
Expect(runScanner(ctx, true)).To(Succeed())
// Verify files were imported
mfs, err := ds.MediaFile(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(2))
// Now simulate an interrupted quick scan
// (LastScanStartedAt is set, FullScanInProgress is false)
simulateInterruptedScan(false)
})
It("should rescan all folders when resumed as full scan", func() {
// Update a tag without changing the folder hash by preserving the original modtime.
// In a quick scan, this wouldn't be detected because the folder hash hasn't changed.
// But in a full scan, all files should be re-read regardless of hash.
origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "updated comment"}, origModTime)
// Resume with a full scan - this should process all folders
// even though folder hashes haven't changed
Expect(runScanner(ctx, true)).To(Succeed())
// Verify the comment was updated (which means the folder was processed and file re-imported)
mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
Filters: squirrel.Eq{"title": "Help!"},
})
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(1))
Expect(mfs[0].Comment).To(Equal("updated comment"))
})
})
Context("when a full scan is interrupted and resumed with a quick scan request", func() {
BeforeEach(func() {
// First, complete a full scan to populate the database
Expect(runScanner(ctx, true)).To(Succeed())
// Verify files were imported
mfs, err := ds.MediaFile(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(2))
// Now simulate an interrupted full scan
// (LastScanStartedAt is set, FullScanInProgress is true)
simulateInterruptedScan(true)
})
It("should continue as full scan even when quick scan is requested", func() {
// Update a tag without changing the folder hash by preserving the original modtime.
origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "full scan comment"}, origModTime)
// Request a quick scan - but because a full scan was in progress,
// it should continue as a full scan
Expect(runScanner(ctx, false)).To(Succeed())
// Verify the comment was updated (folder was processed despite unchanged hash)
mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
Filters: squirrel.Eq{"title": "Help!"},
})
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(1))
Expect(mfs[0].Comment).To(Equal("full scan comment"))
})
})
Context("when no scan was in progress", func() {
BeforeEach(func() {
// First, complete a full scan to populate the database
Expect(runScanner(ctx, true)).To(Succeed())
// Verify files were imported
mfs, err := ds.MediaFile(ctx).GetAll()
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(2))
// Library should have LastScanStartedAt cleared after successful scan
updatedLib, err := ds.Library(ctx).Get(lib.ID)
Expect(err).ToNot(HaveOccurred())
Expect(updatedLib.LastScanStartedAt).To(BeZero())
Expect(updatedLib.FullScanInProgress).To(BeFalse())
})
It("should respect the full scan flag for new scans", func() {
// Update a tag without changing the folder hash by preserving the original modtime.
origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "new full scan"}, origModTime)
// Start a new full scan
Expect(runScanner(ctx, true)).To(Succeed())
// Verify the comment was updated
mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
Filters: squirrel.Eq{"title": "Help!"},
})
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(1))
Expect(mfs[0].Comment).To(Equal("new full scan"))
})
It("should not rescan unchanged folders during quick scan", func() {
// Update a tag without changing the folder hash by preserving the original modtime.
// This simulates editing tags in a file (e.g., with a tag editor) without modifying its timestamp.
// In a quick scan, this should NOT be detected because the folder hash remains unchanged.
origModTime := fsys.MapFS["The Beatles/Help!/01 - Help!.mp3"].ModTime
fsys.UpdateTags("The Beatles/Help!/01 - Help!.mp3", _t{"comment": "should not appear"}, origModTime)
// Do a quick scan - unchanged folders should be skipped
Expect(runScanner(ctx, false)).To(Succeed())
// Verify the comment was NOT updated (folder was skipped)
mfs, err := ds.MediaFile(ctx).GetAll(model.QueryOptions{
Filters: squirrel.Eq{"title": "Help!"},
})
Expect(err).ToNot(HaveOccurred())
Expect(mfs).To(HaveLen(1))
Expect(mfs[0].Comment).To(BeEmpty())
})
})
})
Describe("RefreshStats", func() {
var refreshStatsCalls []bool
var fsys storagetest.FakeFS

View File

@@ -2,6 +2,7 @@ package subsonic
import (
"net/http"
"strings"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/model"
@@ -40,7 +41,7 @@ func (api *Router) GetUser(r *http.Request) (*responses.Subsonic, error) {
if err != nil {
return nil, err
}
if username != loggedUser.UserName {
if !strings.EqualFold(username, loggedUser.UserName) {
return nil, newError(responses.ErrorAuthorizationFail)
}
response := newResponse()

View File

@@ -1,12 +1,15 @@
import React from 'react'
import { TopToolbar, ExportButton } from 'react-admin'
import { TopToolbar, ExportButton, useListContext } from 'react-admin'
import DeleteMissingFilesButton from './DeleteMissingFilesButton.jsx'
const MissingListActions = (props) => (
<TopToolbar {...props}>
<ExportButton />
<DeleteMissingFilesButton deleteAll />
</TopToolbar>
)
const MissingListActions = (props) => {
const { total } = useListContext()
return (
<TopToolbar {...props}>
<ExportButton maxResults={total} />
<DeleteMissingFilesButton deleteAll />
</TopToolbar>
)
}
export default MissingListActions
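
The total pulled from useListContext matters because react-admin's ExportButton otherwise stops at its default maxResults of 1,000 records; passing the list total is what turns this into the "export all missing files instead of first 1000" behavior described in commit 017676c457 (fixes #4721).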

View File

@@ -170,6 +170,7 @@ const PlaylistList = (props) => {
<List
{...props}
exporter={false}
sort={{ field: 'name', order: 'ASC' }}
filters={<PlaylistFilter />}
actions={<PlaylistListActions />}
bulkActionButtons={!isXsmall && <PlaylistListBulkActions />}

View File

@@ -2,11 +2,15 @@ package number
import (
"strconv"
"golang.org/x/exp/constraints"
)
func ParseInt[T constraints.Integer](s string) T {
// Integer is a constraint that permits any integer type.
type Integer interface {
~int | ~int8 | ~int16 | ~int32 | ~int64 |
~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr
}
func ParseInt[T Integer](s string) T {
r, _ := strconv.ParseInt(s, 10, 64)
return T(r)
}
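
A hypothetical use of the new constraint; the Integer interface and ParseInt are copied from the diff above, while TrackNumber is an invented type included only to show why the tilde terms matter:

package main

import (
    "fmt"
    "strconv"
)

// Copied from the diff above so the sketch is self-contained.
type Integer interface {
    ~int | ~int8 | ~int16 | ~int32 | ~int64 |
        ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr
}

func ParseInt[T Integer](s string) T {
    r, _ := strconv.ParseInt(s, 10, 64)
    return T(r)
}

// TrackNumber is an invented defined type with underlying type int; it satisfies
// Integer only because the constraint uses the tilde terms (~int, ~uint, ...).
type TrackNumber int

func main() {
    fmt.Println(ParseInt[TrackNumber]("42") + 1) // 43
}
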

View File

@@ -5,12 +5,12 @@ import (
"encoding/binary"
"math/big"
"golang.org/x/exp/constraints"
"github.com/navidrome/navidrome/utils/number"
)
// Int64N returns a random int64 between 0 and max.
// This is a reimplementation of math/rand/v2.Int64N using a cryptographically secure random number generator.
func Int64N[T constraints.Integer](max T) int64 {
func Int64N[T number.Integer](max T) int64 {
rnd, _ := rand.Int(rand.Reader, big.NewInt(int64(max)))
return rnd.Int64()
}

View File

@@ -6,9 +6,8 @@ import (
"cmp"
"io"
"iter"
"maps"
"slices"
"golang.org/x/exp/maps"
)
func Map[T any, R any](t []T, mapFunc func(T) R) []R {
@@ -49,11 +48,9 @@ func CompactByFrequency[T comparable](list []T) []T {
counters[item]++
}
sorted := maps.Keys(counters)
slices.SortFunc(sorted, func(i, j T) int {
return slices.SortedFunc(maps.Keys(counters), func(i, j T) int {
return cmp.Compare(counters[j], counters[i])
})
return sorted
}
func MostFrequent[T comparable](list []T) T {
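
A small self-contained illustration of the standard-library pattern adopted above: maps.Keys now returns an iterator rather than a slice, so slices.SortedFunc consumes it directly and returns the sorted result in one step. The word counts are invented:

package main

import (
    "cmp"
    "fmt"
    "maps"
    "slices"
)

func main() {
    counters := map[string]int{"rock": 3, "jazz": 5, "pop": 1}

    // maps.Keys yields an iter.Seq[string]; slices.SortedFunc consumes it and
    // returns a new slice sorted by descending count, matching the ordering
    // CompactByFrequency produces.
    sorted := slices.SortedFunc(maps.Keys(counters), func(i, j string) int {
        return cmp.Compare(counters[j], counters[i])
    })
    fmt.Println(sorted) // [jazz rock pop]
}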