Mirror of https://github.com/navidrome/navidrome.git (synced 2026-01-20 12:48:13 -05:00)

Compare commits: fix-playli...plugins-js (21 Commits)
| SHA1 |
|---|
| a50c17d7db |
| 43ca0d0c7b |
| acf7408517 |
| a0f7b95c65 |
| f984030813 |
| fae58bb390 |
| f1e75c40dc |
| 66474fc9f4 |
| fd620413b8 |
| 4ec6e7c56e |
| 03120bac32 |
| 0473c50b49 |
| 2de2484bca |
| 64e165aaef |
| 8e96dd0784 |
| 9bd91d2c04 |
| c5447a637a |
| b9247ba34e |
| 510acde3db |
| 13be8e6dfb |
| 9ab0c2dc67 |
3  .gitignore  vendored
@@ -35,4 +35,5 @@ AGENTS.md
*.test
*.wasm
*.ndp
openspec/
go.work*
274  adapters/gotaglib/end_to_end_test.go  Normal file
@@ -0,0 +1,274 @@
package gotaglib

import (
    "io/fs"
    "os"
    "time"

    "github.com/djherbis/times"
    "github.com/navidrome/navidrome/model"
    "github.com/navidrome/navidrome/model/metadata"
    "github.com/navidrome/navidrome/utils/gg"
    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"
)

type testFileInfo struct {
    fs.FileInfo
}

func (t testFileInfo) BirthTime() time.Time {
    if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
        return ts.BirthTime()
    }
    return t.FileInfo.ModTime()
}

var _ = Describe("Extractor", func() {
    toP := func(name, sortName, mbid string) model.Participant {
        return model.Participant{
            Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
        }
    }

    roles := []struct {
        model.Role
        model.ParticipantList
    }{
        {model.RoleComposer, model.ParticipantList{
            toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
            toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
        }},
        {model.RoleLyricist, model.ParticipantList{
            toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
            toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
        }},
        {model.RoleArranger, model.ParticipantList{
            toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
            toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
        }},
        {model.RoleConductor, model.ParticipantList{
            toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
            toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
        }},
        {model.RoleDirector, model.ParticipantList{
            toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
            toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
        }},
        {model.RoleEngineer, model.ParticipantList{
            toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
            toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
        }},
        {model.RoleProducer, model.ParticipantList{
            toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
            toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
        }},
        {model.RoleRemixer, model.ParticipantList{
            toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
            toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
        }},
        {model.RoleDJMixer, model.ParticipantList{
            toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
            toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
        }},
        {model.RoleMixer, model.ParticipantList{
            toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
            toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
        }},
    }

    var e *extractor

    parseTestFile := func(path string) *model.MediaFile {
        mds, err := e.Parse(path)
        Expect(err).ToNot(HaveOccurred())

        info, ok := mds[path]
        Expect(ok).To(BeTrue())

        fileInfo, err := os.Stat(path)
        Expect(err).ToNot(HaveOccurred())
        info.FileInfo = testFileInfo{FileInfo: fileInfo}

        metadata := metadata.New(path, info)
        mf := metadata.ToMediaFile(1, "folderID")
        return &mf
    }

    BeforeEach(func() {
        e = &extractor{fs: os.DirFS(".")}
    })

    Describe("ReplayGain", func() {
        DescribeTable("test replaygain end-to-end", func(file string, trackGain, trackPeak, albumGain, albumPeak *float64) {
            mf := parseTestFile("tests/fixtures/" + file)

            Expect(mf.RGTrackGain).To(Equal(trackGain))
            Expect(mf.RGTrackPeak).To(Equal(trackPeak))
            Expect(mf.RGAlbumGain).To(Equal(albumGain))
            Expect(mf.RGAlbumPeak).To(Equal(albumPeak))
        },
            Entry("mp3 with no replaygain", "no_replaygain.mp3", nil, nil, nil, nil),
            Entry("mp3 with no zero replaygain", "zero_replaygain.mp3", gg.P(0.0), gg.P(1.0), gg.P(0.0), gg.P(1.0)),
        )
    })

    Describe("lyrics", func() {
        makeLyrics := func(code, secondLine string) model.Lyrics {
            return model.Lyrics{
                DisplayArtist: "",
                DisplayTitle:  "",
                Lang:          code,
                Line: []model.Line{
                    {Start: gg.P(int64(0)), Value: "This is"},
                    {Start: gg.P(int64(2500)), Value: secondLine},
                },
                Offset: nil,
                Synced: true,
            }
        }

        It("should fetch both synced and unsynced lyrics in mixed flac", func() {
            mf := parseTestFile("tests/fixtures/mixed-lyrics.flac")

            lyrics, err := mf.StructuredLyrics()
            Expect(err).ToNot(HaveOccurred())
            Expect(lyrics).To(HaveLen(2))

            Expect(lyrics[0].Synced).To(BeTrue())
            Expect(lyrics[1].Synced).To(BeFalse())
        })

        It("should handle mp3 with uslt and sylt", func() {
            mf := parseTestFile("tests/fixtures/test.mp3")

            lyrics, err := mf.StructuredLyrics()
            Expect(err).ToNot(HaveOccurred())
            Expect(lyrics).To(HaveLen(4))

            engSylt := makeLyrics("eng", "English SYLT")
            engUslt := makeLyrics("eng", "English")
            unsSylt := makeLyrics("xxx", "unspecified SYLT")
            unsUslt := makeLyrics("xxx", "unspecified")

            Expect(lyrics).To(ConsistOf(engSylt, engUslt, unsSylt, unsUslt))
        })

        DescribeTable("format-specific lyrics", func(file string, isId3 bool) {
            mf := parseTestFile("tests/fixtures/" + file)

            lyrics, err := mf.StructuredLyrics()
            Expect(err).To(Not(HaveOccurred()))
            Expect(lyrics).To(HaveLen(2))

            unspec := makeLyrics("xxx", "unspecified")
            eng := makeLyrics("xxx", "English")

            if isId3 {
                eng.Lang = "eng"
            }

            Expect(lyrics).To(Or(
                Equal(model.LyricList{unspec, eng}),
                Equal(model.LyricList{eng, unspec})))
        },
            Entry("flac", "test.flac", false),
            Entry("m4a", "test.m4a", false),
            Entry("ogg", "test.ogg", false),
            Entry("wma", "test.wma", false),
            Entry("wv", "test.wv", false),
            Entry("wav", "test.wav", true),
            Entry("aiff", "test.aiff", true),
        )
    })

    Describe("Participants", func() {
        DescribeTable("test tags consistent across formats", func(format string) {
            mf := parseTestFile("tests/fixtures/test." + format)

            for _, data := range roles {
                role := data.Role
                artists := data.ParticipantList

                actual := mf.Participants[role]
                Expect(actual).To(HaveLen(len(artists)))

                for i := range artists {
                    actualArtist := actual[i]
                    expectedArtist := artists[i]

                    Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
                    Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
                    Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
                }
            }

            if format != "m4a" {
                performers := mf.Participants[model.RolePerformer]
                Expect(performers).To(HaveLen(8))

                rules := map[string][]string{
                    "pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
                    "pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
                    "pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
                    "pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
                    "pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
                }

                for name, rule := range rules {
                    mbid := rule[0]
                    for i := 1; i < len(rule); i++ {
                        found := false

                        for _, mapped := range performers {
                            if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
                                found = true
                                break
                            }
                        }

                        Expect(found).To(BeTrue(), "Could not find matching artist")
                    }
                }
            }
        },
            Entry("FLAC format", "flac"),
            Entry("M4a format", "m4a"),
            Entry("OGG format", "ogg"),
            Entry("WV format", "wv"),

            Entry("MP3 format", "mp3"),
            Entry("WAV format", "wav"),
            Entry("AIFF format", "aiff"),
        )

        It("should parse wma", func() {
            mf := parseTestFile("tests/fixtures/test.wma")

            for _, data := range roles {
                role := data.Role
                artists := data.ParticipantList
                actual := mf.Participants[role]

                // WMA has no Arranger role
                if role == model.RoleArranger {
                    Expect(actual).To(HaveLen(0))
                    continue
                }

                Expect(actual).To(HaveLen(len(artists)), role.String())

                // For some bizarre reason, the order is inverted. We also don't get
                // sort names or MBIDs
                for i := range artists {
                    idx := len(artists) - 1 - i

                    actualArtist := actual[i]
                    expectedArtist := artists[idx]

                    Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
                }
            }
        })
    })
})
263  adapters/gotaglib/gotaglib.go  Normal file
@@ -0,0 +1,263 @@
// Package gotaglib provides an alternative metadata extractor using go-taglib,
// a pure Go (WASM-based) implementation of TagLib.
//
// This extractor aims for parity with the CGO-based taglib extractor. It uses
// TagLib's PropertyMap interface for standard tags. The File handle API provides
// efficient access to format-specific tags (ID3v2 frames, MP4 atoms, ASF attributes)
// through a single file open operation.
//
// This extractor is registered under the name "gotaglib". It only works with a filesystem
// (fs.FS) and does not support direct local file paths. Files returned by the filesystem
// must implement io.ReadSeeker for go-taglib to read them.
package gotaglib

import (
    "errors"
    "io"
    "io/fs"
    "strings"
    "time"

    "github.com/navidrome/navidrome/core/storage/local"
    "github.com/navidrome/navidrome/model/metadata"
    "go.senan.xyz/taglib"
)

type extractor struct {
    fs fs.FS
}

func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
    results := make(map[string]metadata.Info)
    for _, path := range files {
        props, err := e.extractMetadata(path)
        if err != nil {
            continue
        }
        results[path] = *props
    }
    return results, nil
}

func (e extractor) Version() string {
    return "go-taglib (TagLib 2.1.1 WASM)"
}

func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
    f, close, err := e.openFile(filePath)
    if err != nil {
        return nil, err
    }
    defer close()

    // Get all tags and properties in one go
    allTags := f.AllTags()
    props := f.Properties()

    // Map properties to AudioProperties
    ap := metadata.AudioProperties{
        Duration:   props.Length.Round(time.Millisecond * 10),
        BitRate:    int(props.Bitrate),
        Channels:   int(props.Channels),
        SampleRate: int(props.SampleRate),
        BitDepth:   int(props.BitsPerSample),
    }

    // Convert normalized tags to lowercase keys (go-taglib returns UPPERCASE keys)
    normalizedTags := make(map[string][]string, len(allTags.Tags))
    for key, values := range allTags.Tags {
        lowerKey := strings.ToLower(key)
        normalizedTags[lowerKey] = values
    }

    // Process format-specific raw tags
    processRawTags(allTags, normalizedTags)

    // Parse track/disc totals from "N/Total" format
    parseTuple(normalizedTags, "track")
    parseTuple(normalizedTags, "disc")

    // Adjust some ID3 tags
    parseLyrics(normalizedTags)
    parseTIPL(normalizedTags)
    delete(normalizedTags, "tmcl") // TMCL is already parsed by TagLib

    // Determine if file has embedded picture
    hasPicture := len(props.Images) > 0

    return &metadata.Info{
        Tags:            normalizedTags,
        AudioProperties: ap,
        HasPicture:      hasPicture,
    }, nil
}

// openFile opens the file at filePath using the extractor's filesystem.
// It returns a TagLib File handle and a cleanup function to close resources.
func (e extractor) openFile(filePath string) (*taglib.File, func(), error) {
    // Open the file from the filesystem
    file, err := e.fs.Open(filePath)
    if err != nil {
        return nil, nil, err
    }
    rs, isSeekable := file.(io.ReadSeeker)
    if !isSeekable {
        file.Close()
        return nil, nil, errors.New("file is not seekable")
    }
    f, err := taglib.OpenStream(rs, taglib.WithReadStyle(taglib.ReadStyleFast))
    if err != nil {
        file.Close()
        return nil, nil, err
    }
    closeFunc := func() {
        f.Close()
        file.Close()
    }
    return f, closeFunc, nil
}

// parseTuple parses track/disc numbers in "N/Total" format and separates them.
// For example, tracknumber="2/10" becomes tracknumber="2" and tracktotal="10".
func parseTuple(tags map[string][]string, prop string) {
    tagName := prop + "number"
    tagTotal := prop + "total"
    if value, ok := tags[tagName]; ok && len(value) > 0 {
        parts := strings.Split(value[0], "/")
        tags[tagName] = []string{parts[0]}
        if len(parts) == 2 {
            tags[tagTotal] = []string{parts[1]}
        }
    }
}

// parseLyrics ensures lyrics tags have a language code.
// If lyrics exist without a language code, they are moved to "lyrics:xxx".
func parseLyrics(tags map[string][]string) {
    lyrics := tags["lyrics"]
    if len(lyrics) > 0 {
        tags["lyrics:xxx"] = lyrics
        delete(tags, "lyrics")
    }
}

// processRawTags processes format-specific raw tags based on the detected file format.
// This handles ID3v2 frames (MP3/WAV/AIFF), MP4 atoms, and ASF attributes.
func processRawTags(allTags taglib.AllTags, normalizedTags map[string][]string) {
    switch allTags.Format {
    case taglib.FormatMPEG, taglib.FormatWAV, taglib.FormatAIFF:
        parseID3v2Frames(allTags.Raw, normalizedTags)
    case taglib.FormatMP4:
        parseMP4Atoms(allTags.Raw, normalizedTags)
    case taglib.FormatASF:
        parseASFAttributes(allTags.Raw, normalizedTags)
    }
}

// parseID3v2Frames processes ID3v2 raw frames to extract USLT/SYLT with language codes.
// This extracts language-specific lyrics that the standard Tags() doesn't provide.
func parseID3v2Frames(rawFrames map[string][]string, tags map[string][]string) {
    // Process frames that have language-specific data
    for key, values := range rawFrames {
        lowerKey := strings.ToLower(key)

        // Handle USLT:xxx and SYLT:xxx (lyrics with language codes)
        if strings.HasPrefix(lowerKey, "uslt:") || strings.HasPrefix(lowerKey, "sylt:") {
            parts := strings.SplitN(lowerKey, ":", 2)
            if len(parts) == 2 && parts[1] != "" {
                lang := parts[1]
                lyricsKey := "lyrics:" + lang
                tags[lyricsKey] = append(tags[lyricsKey], values...)
            }
        }
    }

    // If we found any language-specific lyrics from ID3v2 frames, remove the generic lyrics
    for key := range tags {
        if strings.HasPrefix(key, "lyrics:") && key != "lyrics" {
            delete(tags, "lyrics")
            break
        }
    }
}

const iTunesKeyPrefix = "----:com.apple.iTunes:"

// parseMP4Atoms processes MP4 raw atoms to get iTunes-specific tags.
func parseMP4Atoms(rawAtoms map[string][]string, tags map[string][]string) {
    // Process all atoms and add them to tags
    for key, values := range rawAtoms {
        // Strip iTunes prefix and convert to lowercase
        normalizedKey := strings.TrimPrefix(key, iTunesKeyPrefix)
        normalizedKey = strings.ToLower(normalizedKey)

        // Only add if the tag doesn't already exist (avoid duplication with PropertyMap)
        if _, exists := tags[normalizedKey]; !exists {
            tags[normalizedKey] = values
        }
    }
}

// parseASFAttributes processes ASF raw attributes to get WMA-specific tags.
func parseASFAttributes(rawAttrs map[string][]string, tags map[string][]string) {
    // Process all attributes and add them to tags
    for key, values := range rawAttrs {
        normalizedKey := strings.ToLower(key)

        // Only add if the tag doesn't already exist (avoid duplication with PropertyMap)
        if _, exists := tags[normalizedKey]; !exists {
            tags[normalizedKey] = values
        }
    }
}

// These are the only roles we support, based on Picard's tag map:
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
var tiplMapping = map[string]string{
    "arranger": "arranger",
    "engineer": "engineer",
    "producer": "producer",
    "mix":      "mixer",
    "DJ-mix":   "djmixer",
}

// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
//
//	"arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
//
// and breaks it down into a map of roles and names, e.g.:
//
//	{"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
func parseTIPL(tags map[string][]string) {
    tipl := tags["tipl"]
    if len(tipl) == 0 {
        return
    }
    addRole := func(currentRole string, currentValue []string) {
        if currentRole != "" && len(currentValue) > 0 {
            role := tiplMapping[currentRole]
            tags[role] = append(tags[role], strings.Join(currentValue, " "))
        }
    }
    var currentRole string
    var currentValue []string
    for _, part := range strings.Split(tipl[0], " ") {
        if _, ok := tiplMapping[part]; ok {
            addRole(currentRole, currentValue)
            currentRole = part
            currentValue = nil
            continue
        }
        currentValue = append(currentValue, part)
    }
    addRole(currentRole, currentValue)
    delete(tags, "tipl")
}

var _ local.Extractor = (*extractor)(nil)

func init() {
    local.RegisterExtractor("taglib", func(fsys fs.FS, baseDir string) local.Extractor {
        return &extractor{fsys}
    })
}
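A minimal sketch of what the tag post-processing above does to a raw, lowercased tag map. It assumes placement in a _test.go file of this package (so the unexported helpers are reachable); the tag values are illustrative:

```go
package gotaglib

import "fmt"

// Example_tagPostProcessing shows how parseTuple, parseLyrics and parseTIPL
// rewrite a normalized tag map before it is handed to the metadata package.
func Example_tagPostProcessing() {
	tags := map[string][]string{
		"tracknumber": {"2/10"},
		"lyrics":      {"[00:00.00]Hello"},
		"tipl":        {"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"},
	}

	parseTuple(tags, "track") // "2/10" -> tracknumber="2", tracktotal="10"
	parseLyrics(tags)         // lyrics without a language code move to "lyrics:xxx"
	parseTIPL(tags)           // TIPL roles become individual tags (engineer, producer, ...)

	fmt.Println(tags["tracknumber"], tags["tracktotal"])
	fmt.Println(tags["lyrics:xxx"])
	fmt.Println(tags["engineer"], tags["producer"])
	// Output:
	// [2] [10]
	// [[00:00.00]Hello]
	// [Pat Stapley Chris Blair] [Eric Woolfson]
}
```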
17  adapters/gotaglib/gotaglib_suite_test.go  Normal file
@@ -0,0 +1,17 @@
package gotaglib

import (
    "testing"

    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/tests"
    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"
)

func TestGoTagLib(t *testing.T) {
    tests.Init(t, true)
    log.SetLevel(log.LevelFatal)
    RegisterFailHandler(Fail)
    RunSpecs(t, "GoTagLib Suite")
}
302  adapters/gotaglib/gotaglib_test.go  Normal file
@@ -0,0 +1,302 @@
package gotaglib

import (
    "io/fs"
    "os"
    "strings"

    "github.com/navidrome/navidrome/utils"
    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"
)

var _ = Describe("Extractor", func() {
    var e *extractor

    BeforeEach(func() {
        e = &extractor{fs: os.DirFS(".")}
    })

    Describe("Parse", func() {
        It("correctly parses metadata from all files in folder", func() {
            mds, err := e.Parse(
                "tests/fixtures/test.mp3",
                "tests/fixtures/test.ogg",
            )
            Expect(err).NotTo(HaveOccurred())
            Expect(mds).To(HaveLen(2))

            // Test MP3
            m := mds["tests/fixtures/test.mp3"]
            Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
            Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
            Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
            Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))

            Expect(m.HasPicture).To(BeTrue())
            Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
            Expect(m.AudioProperties.BitRate).To(Equal(192))
            Expect(m.AudioProperties.Channels).To(Equal(2))
            Expect(m.AudioProperties.SampleRate).To(Equal(44100))

            Expect(m.Tags).To(Or(
                HaveKeyWithValue("compilation", []string{"1"}),
                HaveKeyWithValue("tcmp", []string{"1"})),
            )
            Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
            Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
            Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
            Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
            Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
            Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
            Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
            Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
            Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
            Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
            Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
            Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))

            Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
            Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))

            Expect(m.Tags).ToNot(HaveKey("lyrics"))
            Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
                "[00:00.00]This is\n[00:02.50]English SYLT\n",
                "[00:00.00]This is\n[00:02.50]English",
            }), HaveKeyWithValue("lyrics:eng", []string{
                "[00:00.00]This is\n[00:02.50]English",
                "[00:00.00]This is\n[00:02.50]English SYLT\n",
            })))
            Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
                "[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
                "[00:00.00]This is\n[00:02.50]unspecified",
            }), HaveKeyWithValue("lyrics:xxx", []string{
                "[00:00.00]This is\n[00:02.50]unspecified",
                "[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
            })))

            // Test OGG
            m = mds["tests/fixtures/test.ogg"]
            Expect(err).To(BeNil())
            Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))

            // TagLib 1.12 returns 18, previous versions return 39.
            // See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
            Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
            Expect(m.AudioProperties.Channels).To(BeElementOf(2))
            Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
            Expect(m.HasPicture).To(BeTrue())
        })

        DescribeTable("Format-Specific tests",
            func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool, image bool) {
                file = "tests/fixtures/" + file
                mds, err := e.Parse(file)
                Expect(err).NotTo(HaveOccurred())
                Expect(mds).To(HaveLen(1))

                m := mds[file]

                Expect(m.HasPicture).To(Equal(image))
                Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
                Expect(m.AudioProperties.Channels).To(Equal(channels))
                Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
                Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))

                Expect(m.Tags).To(Or(
                    HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
                    HaveKeyWithValue("----:com.apple.itunes:replaygain_album_gain", []string{albumGain}),
                ))

                Expect(m.Tags).To(Or(
                    HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
                    HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
                ))
                Expect(m.Tags).To(Or(
                    HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
                    HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
                ))
                Expect(m.Tags).To(Or(
                    HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
                    HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
                ))

                Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
                Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
                Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
                Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
                Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
                Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))

                Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
                Expect(m.Tags).To(Or(
                    HaveKeyWithValue("tracknumber", []string{"3"}),
                    HaveKeyWithValue("tracknumber", []string{"3/10"}),
                ))
                if !strings.HasSuffix(file, "test.wma") {
                    // TODO Not sure why this is not working for WMA
                    Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
                }
                Expect(m.Tags).To(Or(
                    HaveKeyWithValue("discnumber", []string{"1"}),
                    HaveKeyWithValue("discnumber", []string{"1/2"}),
                ))
                Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))

                // WMA does not have a "compilation" tag, but "wm/iscompilation"
                Expect(m.Tags).To(Or(
                    HaveKeyWithValue("compilation", []string{"1"}),
                    HaveKeyWithValue("wm/iscompilation", []string{"1"})),
                )

                if id3Lyrics {
                    Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
                        "[00:00.00]This is\n[00:02.50]English",
                    }))
                    Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
                        "[00:00.00]This is\n[00:02.50]unspecified",
                    }))
                } else {
                    Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
                        "[00:00.00]This is\n[00:02.50]unspecified",
                        "[00:00.00]This is\n[00:02.50]English",
                    }))
                }

                Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
            },

            // ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
            Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false, true),

            Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
            Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
            Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),

            // ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
            // Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
            Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),

            // ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
            Entry("correctly parses wv (wavpak) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false, true),

            // ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
            Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true, true),

            // ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
            Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true, true),
        )

        // Skip these tests when running as root
        Context("Access Forbidden", func() {
            var accessForbiddenFile string
            var RegularUserContext = XContext
            var isRegularUser = os.Getuid() != 0
            if isRegularUser {
                RegularUserContext = Context
            }

            // Only run permission tests if we are not root
            RegularUserContext("when run without root privileges", func() {
                BeforeEach(func() {
                    // Use root fs for absolute paths in temp directory
                    e = &extractor{fs: os.DirFS("/")}
                    accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")

                    f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
                    Expect(err).ToNot(HaveOccurred())

                    DeferCleanup(func() {
                        Expect(f.Close()).To(Succeed())
                        Expect(os.Remove(accessForbiddenFile)).To(Succeed())
                    })
                })

                It("correctly handle unreadable file due to insufficient read permission", func() {
                    // Strip leading slash for DirFS rooted at "/"
                    _, err := e.extractMetadata(accessForbiddenFile[1:])
                    Expect(err).To(MatchError(os.ErrPermission))
                })

                It("skips the file if it cannot be read", func() {
                    // Get current working directory to construct paths relative to root
                    cwd, err := os.Getwd()
                    Expect(err).ToNot(HaveOccurred())
                    // Strip leading slash for DirFS rooted at "/"
                    files := []string{
                        cwd[1:] + "/tests/fixtures/test.mp3",
                        cwd[1:] + "/tests/fixtures/test.ogg",
                        accessForbiddenFile[1:],
                    }
                    mds, err := e.Parse(files...)
                    Expect(err).NotTo(HaveOccurred())
                    Expect(mds).To(HaveLen(2))
                    Expect(mds).ToNot(HaveKey(accessForbiddenFile[1:]))
                })
            })
        })

    })

    Describe("Error Checking", func() {
        It("returns a generic ErrPath if file does not exist", func() {
            testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
            _, err := e.extractMetadata(testFilePath)
            Expect(err).To(MatchError(fs.ErrNotExist))
        })
        It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
            // File has an empty TDAT frame
            md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
            Expect(err).ToNot(HaveOccurred())
            Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
        })
    })

    Describe("parseTIPL", func() {
        var tags map[string][]string

        BeforeEach(func() {
            tags = make(map[string][]string)
        })

        Context("when the TIPL string is populated", func() {
            It("correctly parses roles and names", func() {
                tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
                parseTIPL(tags)
                Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
                Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
                Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
            })

            It("handles multiple names for a single role", func() {
                tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
                parseTIPL(tags)
                Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
                Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
            })

            It("discards roles without names", func() {
                tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
                parseTIPL(tags)
                Expect(tags).ToNot(HaveKey("producer"))
                Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
            })
        })

        Context("when the TIPL string is empty", func() {
            It("does nothing", func() {
                tags["tipl"] = []string{""}
                parseTIPL(tags)
                Expect(tags).To(BeEmpty())
            })
        })

        Context("when the TIPL is not present", func() {
            It("does nothing", func() {
                parseTIPL(tags)
                Expect(tags).To(BeEmpty())
            })
        })
    })

})
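One detail worth making concrete from the "Access Forbidden" tests above: io/fs paths are always unrooted, so an extractor built on os.DirFS("/") has to be given absolute paths with the leading slash stripped. A minimal sketch (the path is illustrative):

```go
package main

import (
	"fmt"
	"io/fs"
	"strings"
)

func main() {
	abs := "/tests/fixtures/test.mp3"   // illustrative absolute path
	rel := strings.TrimPrefix(abs, "/") // what the tests pass to the extractor

	fmt.Println(fs.ValidPath(abs)) // false: rooted paths are not valid fs.FS paths
	fmt.Println(fs.ValidPath(rel)) // true
}
```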
@@ -151,11 +151,7 @@ var _ = Describe("Extractor", func() {
unsSylt := makeLyrics("xxx", "unspecified SYLT")
unsUslt := makeLyrics("xxx", "unspecified")

// Why is the order inconsistent between runs? Nobody knows
Expect(lyrics).To(Or(
Equal(model.LyricList{engSylt, engUslt, unsSylt, unsUslt}),
Equal(model.LyricList{unsSylt, unsUslt, engSylt, engUslt}),
))
Expect(lyrics).To(ConsistOf(engSylt, engUslt, unsSylt, unsUslt))
})

DescribeTable("format-specific lyrics", func(file string, isId3 bool) {

@@ -168,7 +168,7 @@ func parseTIPL(tags map[string][]string) {
var _ local.Extractor = (*extractor)(nil)

func init() {
local.RegisterExtractor("taglib", func(_ fs.FS, baseDir string) local.Extractor {
local.RegisterExtractor("legacy-taglib", func(_ fs.FS, baseDir string) local.Extractor {
// ignores fs, as taglib extractor only works with local files
return &extractor{baseDir}
})
@@ -80,12 +80,11 @@ var _ = Describe("Extractor", func() {
Expect(err).To(BeNil())
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))

// TabLib 1.12 returns 18, previous versions return 39.
// TagLib 1.12 returns 18, previous versions return 39.
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 19, 39, 40, 43, 49))
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
Expect(m.HasPicture).To(BeTrue())
})

@@ -106,7 +105,7 @@ var _ = Describe("Extractor", func() {

Expect(m.Tags).To(Or(
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{albumGain}),
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_gain", []string{albumGain}),
))

Expect(m.Tags).To(Or(
@@ -24,6 +24,7 @@ import (

// Import adapters to register them
_ "github.com/navidrome/navidrome/adapters/deezer"
_ "github.com/navidrome/navidrome/adapters/gotaglib"
_ "github.com/navidrome/navidrome/adapters/lastfm"
_ "github.com/navidrome/navidrome/adapters/listenbrainz"
_ "github.com/navidrome/navidrome/adapters/spotify"

@@ -33,6 +33,7 @@ import (

import (
_ "github.com/navidrome/navidrome/adapters/deezer"
_ "github.com/navidrome/navidrome/adapters/gotaglib"
_ "github.com/navidrome/navidrome/adapters/lastfm"
_ "github.com/navidrome/navidrome/adapters/listenbrainz"
_ "github.com/navidrome/navidrome/adapters/spotify"
@@ -126,6 +126,7 @@ type configOptions struct {
DevExternalScanner bool
DevScannerThreads uint
DevSelectiveWatcher bool
DevLegacyEmbedImage bool
DevInsightsInitialDelay time.Duration
DevEnablePlayerInsights bool
DevEnablePluginsInsights bool

@@ -152,7 +153,9 @@ type subsonicOptions struct {
AppendSubtitle bool
ArtistParticipations bool
DefaultReportRealPath bool
EnableAverageRating bool
LegacyClients string
MinimalClients string
}

type TagConf struct {

@@ -365,10 +368,6 @@ func Load(noConfigDump bool) {
disableExternalServices()
}

if Server.Scanner.Extractor != consts.DefaultScannerExtractor {
log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
Server.Scanner.Extractor = consts.DefaultScannerExtractor
}
logDeprecatedOptions("Scanner.GenreSeparators", "")
logDeprecatedOptions("Scanner.GroupAlbumReleases", "")
logDeprecatedOptions("DevEnableBufferedScrobble", "") // Deprecated: Buffered scrobbling is now always enabled and this option is ignored

@@ -608,6 +607,7 @@ func setViperDefaults() {
viper.SetDefault("subsonic.appendsubtitle", true)
viper.SetDefault("subsonic.artistparticipations", false)
viper.SetDefault("subsonic.defaultreportrealpath", false)
viper.SetDefault("subsonic.enableaveragerating", true)
viper.SetDefault("subsonic.legacyclients", "DSub,SubMusic")
viper.SetDefault("agents", "lastfm,spotify,deezer")
viper.SetDefault("lastfm.enabled", true)
@@ -16,12 +16,14 @@ import (
"time"

"github.com/dhowden/tag"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/core/external"
"github.com/navidrome/navidrome/core/ffmpeg"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/resources"
"go.senan.xyz/taglib"
)

func selectImageReader(ctx context.Context, artID model.ArtworkID, extractFuncs ...sourceFunc) (io.ReadCloser, string, error) {

@@ -84,6 +86,13 @@ var picTypeRegexes = []*regexp.Regexp{
}

func fromTag(ctx context.Context, path string) sourceFunc {
if conf.Server.DevLegacyEmbedImage {
return fromTagLegacy(ctx, path)
}
return fromTagGoTaglib(ctx, path)
}

func fromTagLegacy(ctx context.Context, path string) sourceFunc {
return func() (io.ReadCloser, string, error) {
if path == "" {
return nil, "", nil

@@ -128,6 +137,44 @@ func fromTag(ctx context.Context, path string) sourceFunc {
}
}

func fromTagGoTaglib(ctx context.Context, path string) sourceFunc {
return func() (io.ReadCloser, string, error) {
if path == "" {
return nil, "", nil
}
f, err := taglib.OpenReadOnly(path, taglib.WithReadStyle(taglib.ReadStyleFast))
if err != nil {
return nil, "", err
}
defer f.Close()

images := f.Properties().Images
if len(images) == 0 {
return nil, "", fmt.Errorf("no embedded image found in %s", path)
}

imageIndex := findBestImageIndex(ctx, images, path)
data, err := f.Image(imageIndex)
if err != nil || len(data) == 0 {
return nil, "", fmt.Errorf("could not load embedded image from %s", path)
}
return io.NopCloser(bytes.NewReader(data)), path, nil
}
}

func findBestImageIndex(ctx context.Context, images []taglib.ImageDesc, path string) int {
for _, regex := range picTypeRegexes {
for i, img := range images {
if regex.MatchString(img.Type) {
log.Trace(ctx, "Found embedded image", "type", img.Type, "path", path)
return i
}
}
}
log.Trace(ctx, "Could not find a front image. Getting the first one", "type", images[0].Type, "path", path)
return 0
}

func fromFFmpegTag(ctx context.Context, ffmpeg ffmpeg.FFmpeg, path string) sourceFunc {
return func() (io.ReadCloser, string, error) {
if path == "" {
@@ -9,7 +9,7 @@ import (
"sync"

"github.com/deluan/rest"
_ "github.com/navidrome/navidrome/adapters/taglib" // Register taglib extractor
_ "github.com/navidrome/navidrome/adapters/gotaglib" // Register taglib extractor
"github.com/navidrome/navidrome/conf/configtest"
"github.com/navidrome/navidrome/core"
_ "github.com/navidrome/navidrome/core/storage/local" // Register local storage

@@ -265,6 +265,10 @@ func (c *insightsCollector) collect(ctx context.Context) []byte {
if err != nil {
log.Trace(ctx, "Error reading active users count", err)
}
data.Library.FileSuffixes, err = c.ds.MediaFile(ctx).CountBySuffix()
if err != nil {
log.Trace(ctx, "Error reading file suffixes count", err)
}

// Check for smart playlists
data.Config.HasSmartPlaylists, err = c.hasSmartPlaylists(ctx)

@@ -40,6 +40,7 @@ type Data struct {
Libraries int64 `json:"libraries"`
ActiveUsers int64 `json:"activeUsers"`
ActivePlayers map[string]int64 `json:"activePlayers,omitempty"`
FileSuffixes map[string]int64 `json:"fileSuffixes,omitempty"`
} `json:"library"`
Config struct {
LogLevel string `json:"logLevel,omitempty"`
@@ -168,6 +168,11 @@ func (s *playlists) parseNSP(_ context.Context, pls *model.Playlist, reader io.R
if nsp.Comment != "" {
pls.Comment = nsp.Comment
}
if nsp.Public != nil {
pls.Public = *nsp.Public
} else {
pls.Public = conf.Server.DefaultPlaylistPublicVisibility
}
return nil
}

@@ -201,49 +206,33 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
continue
}

// SQLite comparisons do not perform Unicode normalization, and filesystem normalization
// differs across platforms (macOS often yields NFD, while Linux/Windows typically use NFC).
// Generate lookup candidates for both forms so playlist entries match DB paths regardless
// of the original normalization. See https://github.com/navidrome/navidrome/issues/4884
lookupCandidates := make([]string, 0, len(resolvedPaths)*2)
seen := make(map[string]struct{}, len(resolvedPaths)*2)
for _, path := range resolvedPaths {
nfc := strings.ToLower(norm.NFC.String(path))
if _, ok := seen[nfc]; !ok {
seen[nfc] = struct{}{}
lookupCandidates = append(lookupCandidates, nfc)
}
nfd := strings.ToLower(norm.NFD.String(path))
if _, ok := seen[nfd]; !ok {
seen[nfd] = struct{}{}
lookupCandidates = append(lookupCandidates, nfd)
}
}
// Normalize to NFD for filesystem compatibility (macOS). Database stores paths in NFD.
// See https://github.com/navidrome/navidrome/issues/4663
resolvedPaths = slice.Map(resolvedPaths, func(path string) string {
return strings.ToLower(norm.NFD.String(path))
})

found, err := mediaFileRepository.FindByPaths(lookupCandidates)
found, err := mediaFileRepository.FindByPaths(resolvedPaths)
if err != nil {
log.Warn(ctx, "Error reading files from DB", "playlist", pls.Name, err)
continue
}

// Build lookup map with library-qualified keys, normalized for comparison.
// Canonicalize to NFC so NFD/NFC become comparable.
// Build lookup map with library-qualified keys, normalized for comparison
existing := make(map[string]int, len(found))
for idx := range found {
key := fmt.Sprintf("%d:%s", found[idx].LibraryID, strings.ToLower(norm.NFC.String(found[idx].Path)))
// Normalize to lowercase for case-insensitive comparison
// Key format: "libraryID:path"
key := fmt.Sprintf("%d:%s", found[idx].LibraryID, strings.ToLower(found[idx].Path))
existing[key] = idx
}

// Find media files in the order of the resolved paths, to keep playlist order
for _, path := range resolvedPaths {
key := strings.ToLower(norm.NFC.String(path))
idx, ok := existing[key]
idx, ok := existing[path]
if ok {
mfs = append(mfs, found[idx])
} else {
// Prefer logging a composed representation when possible to avoid confusing output
// with decomposed combining marks.
log.Warn(ctx, "Path in playlist not found", "playlist", pls.Name, "path", norm.NFC.String(path))
log.Warn(ctx, "Path in playlist not found", "playlist", pls.Name, "path", path)
}
}
}
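The lookupCandidates side of the hunk above generates both the NFC and NFD forms of every resolved path, because SQLite compares raw bytes and performs no Unicode normalization. A minimal, self-contained sketch of that candidate generation (a standalone program; the path is illustrative):

```go
package main

import (
	"fmt"
	"strings"

	"golang.org/x/text/unicode/norm"
)

// lookupCandidates emits the lowercased NFC and NFD forms of each path,
// deduplicated, so a byte-exact DB lookup can match whichever form was stored.
func lookupCandidates(paths []string) []string {
	seen := make(map[string]struct{}, len(paths)*2)
	candidates := make([]string, 0, len(paths)*2)
	for _, p := range paths {
		for _, form := range []norm.Form{norm.NFC, norm.NFD} {
			c := strings.ToLower(form.String(p))
			if _, ok := seen[c]; !ok {
				seen[c] = struct{}{}
				candidates = append(candidates, c)
			}
		}
	}
	return candidates
}

func main() {
	// "ó" as a single code point (NFC) and as "o" plus a combining acute (NFD)
	// are different byte strings, so two candidates are produced.
	c := lookupCandidates([]string{"Zespół/Piosenka o miłości.mp3"})
	fmt.Println(len(c)) // 2
	for _, s := range c {
		fmt.Printf("%q (%d bytes)\n", s, len(s))
	}
}
```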
@@ -405,20 +394,7 @@ func (s *playlists) resolvePaths(ctx context.Context, folder *model.Folder, line
func (s *playlists) updatePlaylist(ctx context.Context, newPls *model.Playlist) error {
owner, _ := request.UserFrom(ctx)

// Try to find existing playlist by path. Since filesystem normalization differs across
// platforms (macOS uses NFD, Linux/Windows use NFC), we try both forms to match
// playlists that may have been imported on a different platform.
pls, err := s.ds.Playlist(ctx).FindByPath(newPls.Path)
if errors.Is(err, model.ErrNotFound) {
// Try alternate normalization form
altPath := norm.NFD.String(newPls.Path)
if altPath == newPls.Path {
altPath = norm.NFC.String(newPls.Path)
}
if altPath != newPls.Path {
pls, err = s.ds.Playlist(ctx).FindByPath(altPath)
}
}
if err != nil && !errors.Is(err, model.ErrNotFound) {
return err
}

@@ -438,7 +414,10 @@ func (s *playlists) updatePlaylist(ctx context.Context, newPls *model.Playlist)
} else {
log.Info(ctx, "Adding synced playlist", "playlist", newPls.Name, "path", newPls.Path, "owner", owner.UserName)
newPls.OwnerID = owner.ID
newPls.Public = conf.Server.DefaultPlaylistPublicVisibility
// For NSP files, Public may already be set from the file; for M3U, use server default
if !newPls.IsSmartPlaylist() {
newPls.Public = conf.Server.DefaultPlaylistPublicVisibility
}
}
return s.ds.Playlist(ctx).Put(newPls)
}

@@ -502,6 +481,7 @@ type nspFile struct {
criteria.Criteria
Name string `json:"name"`
Comment string `json:"comment"`
Public *bool `json:"public"`
}

func (i *nspFile) UnmarshalJSON(data []byte) error {

@@ -512,5 +492,8 @@ func (i *nspFile) UnmarshalJSON(data []byte) error {
}
i.Name, _ = m["name"].(string)
i.Comment, _ = m["comment"].(string)
if public, ok := m["public"].(bool); ok {
i.Public = &public
}
return json.Unmarshal(data, &i.Criteria)
}
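The Public *bool field on nspFile above relies on a standard encoding/json pattern: a pointer distinguishes a field that is absent from one that is explicitly false. A minimal sketch (the trimmed-down struct and JSON inputs are illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
)

type nsp struct {
	Public *bool `json:"public"`
}

func main() {
	var absent, explicitFalse, explicitTrue nsp
	_ = json.Unmarshal([]byte(`{}`), &absent)                   // field missing -> nil
	_ = json.Unmarshal([]byte(`{"public":false}`), &explicitFalse) // present -> pointer to false
	_ = json.Unmarshal([]byte(`{"public":true}`), &explicitTrue)   // present -> pointer to true

	fmt.Println(absent.Public == nil, *explicitFalse.Public, *explicitTrue.Public)
	// Output: true false true
}
```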
@@ -112,56 +112,28 @@ var _ = Describe("Playlists", func() {
_, err := ps.ImportFile(ctx, folder, "invalid_json.nsp")
Expect(err.Error()).To(ContainSubstring("line 19, column 1: invalid character '\\n'"))
})
})

DescribeTable("Playlist filename Unicode normalization (regression fix-playlist-filename-normalization)",
func(storedForm, filesystemForm string) {
// Use Polish characters that decompose: ó (U+00F3) -> o + combining acute (U+006F + U+0301)
plsNameNFC := "Piosenki_Polskie_zółć" // NFC form (composed)
plsNameNFD := norm.NFD.String(plsNameNFC)
Expect(plsNameNFD).ToNot(Equal(plsNameNFC)) // Verify they differ

nameByForm := map[string]string{"NFC": plsNameNFC, "NFD": plsNameNFD}
storedName := nameByForm[storedForm]
filesystemName := nameByForm[filesystemForm]

tmpDir := GinkgoT().TempDir()
mockLibRepo.SetData([]model.Library{{ID: 1, Path: tmpDir}})
ds.MockedMediaFile = &mockedMediaFileFromListRepo{data: []string{}}
ps = core.NewPlaylists(ds)

// Create the playlist file on disk with the filesystem's normalization form
plsFile := tmpDir + "/" + filesystemName + ".m3u"
Expect(os.WriteFile(plsFile, []byte("#PLAYLIST:Test\n"), 0600)).To(Succeed())

// Pre-populate mock repo with the stored normalization form
storedPath := tmpDir + "/" + storedName + ".m3u"
existingPls := &model.Playlist{
ID: "existing-id",
Name: "Existing Playlist",
Path: storedPath,
Sync: true,
}
mockPlsRepo.data = map[string]*model.Playlist{storedPath: existingPls}

// Import using the filesystem's normalization form
plsFolder := &model.Folder{
ID: "1",
LibraryID: 1,
LibraryPath: tmpDir,
Path: "",
Name: "",
}
pls, err := ps.ImportFile(ctx, plsFolder, filesystemName+".m3u")
It("parses NSP with public: true and creates public playlist", func() {
pls, err := ps.ImportFile(ctx, folder, "public_playlist.nsp")
Expect(err).ToNot(HaveOccurred())
Expect(pls.Name).To(Equal("Public Playlist"))
Expect(pls.Public).To(BeTrue())
})
It("parses NSP with public: false and creates private playlist", func() {
pls, err := ps.ImportFile(ctx, folder, "private_playlist.nsp")
Expect(err).ToNot(HaveOccurred())
Expect(pls.Name).To(Equal("Private Playlist"))
Expect(pls.Public).To(BeFalse())
})
It("uses server default when public field is absent", func() {
DeferCleanup(configtest.SetupConfig())
conf.Server.DefaultPlaylistPublicVisibility = true

// Should update existing playlist, not create new one
Expect(pls.ID).To(Equal("existing-id"))
Expect(pls.Name).To(Equal("Existing Playlist"))
},
Entry("finds NFD-stored playlist when filesystem provides NFC path", "NFD", "NFC"),
Entry("finds NFC-stored playlist when filesystem provides NFD path", "NFC", "NFD"),
)
pls, err := ps.ImportFile(ctx, folder, "recently_played.nsp")
Expect(err).ToNot(HaveOccurred())
Expect(pls.Name).To(Equal("Recently Played"))
Expect(pls.Public).To(BeTrue()) // Should be true since server default is true
})
})

Describe("Cross-library relative paths", func() {
var tmpDir, plsDir, songsDir string

@@ -474,63 +446,22 @@ var _ = Describe("Playlists", func() {
Expect(pls.Tracks[0].Path).To(Equal("abc/tEsT1.Mp3"))
})

// Unicode normalization tests: NFC (composed) vs NFD (decomposed) forms
// macOS stores paths in NFD, Linux/Windows use NFC. Playlists may use either form.
DescribeTable("matches paths across Unicode NFC/NFD normalization",
func(description, pathNFC string, dbForm, playlistForm norm.Form) {
pathNFD := norm.NFD.String(pathNFC)
Expect(pathNFD).ToNot(Equal(pathNFC), "test path should have decomposable characters")
It("handles Unicode normalization when comparing paths (NFD vs NFC)", func() {
// Simulate macOS filesystem: stores paths in NFD (decomposed) form
// "è" (U+00E8) in NFC becomes "e" + "◌̀" (U+0065 + U+0300) in NFD
nfdPath := "artist/Mich" + string([]rune{'e', '\u0300'}) + "le/song.mp3" // NFD: e + combining grave
repo.data = []string{nfdPath}

// Set up DB with specified normalization form
var dbPath string
if dbForm == norm.NFC {
dbPath = pathNFC
} else {
dbPath = pathNFD
}
repo.data = []string{dbPath}

// Set up playlist with specified normalization form
var playlistPath string
if playlistForm == norm.NFC {
playlistPath = pathNFC
} else {
playlistPath = pathNFD
}
m3u := "/music/" + playlistPath + "\n"
f := strings.NewReader(m3u)

pls, err := ps.ImportM3U(ctx, f)
Expect(err).ToNot(HaveOccurred())
Expect(pls.Tracks).To(HaveLen(1))
Expect(pls.Tracks[0].Path).To(Equal(dbPath))
},
// French: è (U+00E8) decomposes to e + combining grave (U+0065 + U+0300)
Entry("French diacritics - DB:NFD, playlist:NFC",
"macOS DB with Apple Music playlist",
"artist/Michèle/song.mp3", norm.NFD, norm.NFC),

// Japanese Katakana: ド (U+30C9) decomposes to ト (U+30C8) + combining dakuten (U+3099)
Entry("Japanese Katakana with dakuten - DB:NFC, playlist:NFC (#4884)",
"Linux/Windows DB with NFC playlist",
"artist/\u30a2\u30a4\u30c9\u30eb/\u30c9\u30ea\u30fc\u30e0\u30bd\u30f3\u30b0.mp3", norm.NFC, norm.NFC),
Entry("Japanese Katakana with dakuten - DB:NFD, playlist:NFC (#4884)",
"macOS DB with NFC playlist",
"artist/\u30a2\u30a4\u30c9\u30eb/\u30c9\u30ea\u30fc\u30e0\u30bd\u30f3\u30b0.mp3", norm.NFD, norm.NFC),

// Cyrillic: й (U+0439) decomposes to и (U+0438) + combining breve (U+0306)
Entry("Cyrillic characters - DB:NFD, playlist:NFC (#4791)",
"macOS DB with NFC playlist",
"Жуки/Батарейка/01 - Разлюбила.mp3", norm.NFD, norm.NFC),

// Polish: ó (U+00F3) decomposes to o + combining acute (U+0301)
Entry("Polish diacritics - DB:NFD, playlist:NFC (#4663)",
"macOS DB with NFC playlist",
"Zespół/Człowiek/Piosenka o miłości.mp3", norm.NFD, norm.NFC),
Entry("Polish diacritics - DB:NFC, playlist:NFD",
"Linux/Windows DB with macOS-exported playlist",
"Zespół/Człowiek/Piosenka o miłości.mp3", norm.NFC, norm.NFD),
)
// Simulate Apple Music M3U: uses NFC (composed) form
nfcPath := "/music/artist/Mich\u00E8le/song.mp3" // NFC: single è character
m3u := nfcPath + "\n"
f := strings.NewReader(m3u)
pls, err := ps.ImportM3U(ctx, f)
Expect(err).ToNot(HaveOccurred())
Expect(pls.Tracks).To(HaveLen(1))
// Should match despite different Unicode normalization forms
Expect(pls.Tracks[0].Path).To(Equal(nfdPath))
})

})
@@ -632,6 +563,9 @@ func (r *mockedMediaFileFromListRepo) FindByPaths(paths []string) (model.MediaFi
var mfs model.MediaFiles

for idx, dataPath := range r.data {
// Normalize the data path to NFD (simulates macOS filesystem storage)
normalizedDataPath := norm.NFD.String(dataPath)

for _, requestPath := range paths {
// Strip library qualifier if present (format: "libraryID:path")
actualPath := requestPath
@@ -643,9 +577,12 @@ func (r *mockedMediaFileFromListRepo) FindByPaths(paths []string) (model.MediaFi
}
}

// Case-insensitive comparison (like SQL's "collate nocase"), but with no
// implicit Unicode normalization (SQLite does not normalize NFC/NFD).
if strings.EqualFold(actualPath, dataPath) {
// The request path should already be normalized to NFD by production code
// before calling FindByPaths (to match DB storage)
normalizedRequestPath := norm.NFD.String(actualPath)

// Case-insensitive comparison (like SQL's "collate nocase")
if strings.EqualFold(normalizedRequestPath, normalizedDataPath) {
mfs = append(mfs, model.MediaFile{
ID: strconv.Itoa(idx),
Path: dataPath, // Return original path from DB
@@ -660,16 +597,10 @@ func (r *mockedMediaFileFromListRepo) FindByPaths(paths []string) (model.MediaFi

type mockedPlaylistRepo struct {
last *model.Playlist
data map[string]*model.Playlist // keyed by path
model.PlaylistRepository
}

func (r *mockedPlaylistRepo) FindByPath(path string) (*model.Playlist, error) {
if r.data != nil {
if pls, ok := r.data[path]; ok {
return pls, nil
}
}
func (r *mockedPlaylistRepo) FindByPath(string) (*model.Playlist, error) {
return nil, model.ErrNotFound
}
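The comments in this mock spell out the matching contract: SQLite compares paths case-insensitively but never normalizes Unicode, so the importer has to normalize playlist paths to the form the DB stores (NFD on macOS) before lookup. A minimal sketch of that comparison, assuming NFD-normalized storage and using illustrative names rather than the production helpers:

```go
package main

import (
	"fmt"
	"strings"

	"golang.org/x/text/unicode/norm"
)

// matchPath reports whether a playlist entry refers to a stored path, ignoring
// case and NFC/NFD differences by normalizing both sides to NFD first.
func matchPath(playlistPath, dbPath string) bool {
	return strings.EqualFold(norm.NFD.String(playlistPath), norm.NFD.String(dbPath))
}

func main() {
	nfc := "/music/artist/Mich\u00E8le/song.mp3"  // è as one code point (NFC, e.g. Apple Music export)
	nfd := "/music/artist/Miche\u0300le/song.mp3" // e + combining grave (NFD, e.g. macOS filesystem)
	fmt.Println(matchPath(nfc, nfd))              // true
}
```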
23
db/migrations/20260117201522_add_avg_rating_column.sql
Normal file
@@ -0,0 +1,23 @@
-- +goose Up
ALTER TABLE album ADD COLUMN average_rating REAL NOT NULL DEFAULT 0;
ALTER TABLE media_file ADD COLUMN average_rating REAL NOT NULL DEFAULT 0;
ALTER TABLE artist ADD COLUMN average_rating REAL NOT NULL DEFAULT 0;

-- Populate average_rating from existing ratings
UPDATE album SET average_rating = coalesce(
(SELECT round(avg(rating), 2) FROM annotation WHERE item_id = album.id AND item_type = 'album' AND rating > 0),
0
);
UPDATE media_file SET average_rating = coalesce(
(SELECT round(avg(rating), 2) FROM annotation WHERE item_id = media_file.id AND item_type = 'media_file' AND rating > 0),
0
);
UPDATE artist SET average_rating = coalesce(
(SELECT round(avg(rating), 2) FROM annotation WHERE item_id = artist.id AND item_type = 'artist' AND rating > 0),
0
);

-- +goose Down
ALTER TABLE artist DROP COLUMN average_rating;
ALTER TABLE media_file DROP COLUMN average_rating;
ALTER TABLE album DROP COLUMN average_rating;
15
go.mod
15
go.mod
@@ -2,8 +2,13 @@ module github.com/navidrome/navidrome
|
||||
|
||||
go 1.25
|
||||
|
||||
// Fork to fix https://github.com/navidrome/navidrome/issues/3254
|
||||
replace github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 => github.com/deluan/tag v0.0.0-20241002021117-dfe5e6ea396d
|
||||
replace (
|
||||
// Fork to fix https://github.com/navidrome/navidrome/issues/3254
|
||||
github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 => github.com/deluan/tag v0.0.0-20241002021117-dfe5e6ea396d
|
||||
|
||||
// Fork to implement raw tags support
|
||||
go.senan.xyz/taglib => github.com/deluan/go-taglib v0.0.0-20260119020817-8753c7531798
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/Masterminds/squirrel v1.5.4
|
||||
@@ -53,13 +58,15 @@ require (
|
||||
github.com/rjeczalik/notify v0.9.3
|
||||
github.com/robfig/cron/v3 v3.0.1
|
||||
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
|
||||
github.com/sirupsen/logrus v1.9.3
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2
|
||||
github.com/sirupsen/logrus v1.9.4
|
||||
github.com/spf13/cobra v1.10.2
|
||||
github.com/spf13/viper v1.21.0
|
||||
github.com/stretchr/testify v1.11.1
|
||||
github.com/tetratelabs/wazero v1.11.0
|
||||
github.com/unrolled/secure v1.17.0
|
||||
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342
|
||||
go.senan.xyz/taglib v0.11.1
|
||||
go.uber.org/goleak v1.3.0
|
||||
golang.org/x/image v0.35.0
|
||||
golang.org/x/net v0.49.0
|
||||
@@ -91,7 +98,7 @@ require (
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/goccy/go-yaml v1.19.2 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/pprof v0.0.0-20260111202518-71be6bfdd440 // indirect
|
||||
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83 // indirect
|
||||
github.com/google/subcommands v1.2.0 // indirect
|
||||
github.com/gorilla/css v1.0.1 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
|
||||
16
go.sum
16
go.sum
@@ -36,6 +36,8 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc=
|
||||
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40=
|
||||
github.com/deluan/go-taglib v0.0.0-20260119020817-8753c7531798 h1:q4fvcIK/LxElpyQILCejG6WPYjVb2F/4P93+k017ANk=
|
||||
github.com/deluan/go-taglib v0.0.0-20260119020817-8753c7531798/go.mod h1:sKDN0U4qXDlq6LFK+aOAkDH4Me5nDV1V/A4B+B69xBA=
|
||||
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf h1:tb246l2Zmpt/GpF9EcHCKTtwzrd0HGfEmoODFA/qnk4=
|
||||
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf/go.mod h1:tSgDythFsl0QgS/PFWfIZqcJKnkADWneY80jaVRlqK8=
|
||||
github.com/deluan/sanitize v0.0.0-20241120162836-fdfd8fdfaa55 h1:wSCnggTs2f2ji6nFwQmfwgINcmSMj0xF0oHnoyRSPe4=
|
||||
@@ -54,6 +56,8 @@ github.com/djherbis/stream v1.4.0 h1:aVD46WZUiq5kJk55yxJAyw6Kuera6kmC3i2vEQyW/AE
|
||||
github.com/djherbis/stream v1.4.0/go.mod h1:cqjC1ZRq3FFwkGmUtHwcldbnW8f0Q4YuVsGW1eAFtOk=
|
||||
github.com/djherbis/times v1.6.0 h1:w2ctJ92J8fBvWPxugmXIv7Nz7Q3iDMKNx9v5ocVH20c=
|
||||
github.com/djherbis/times v1.6.0/go.mod h1:gOHeRAz2h+VJNZ5Gmc/o7iD9k4wW7NMVqieYCY99oc0=
|
||||
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
|
||||
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/dylibso/observe-sdk/go v0.0.0-20240828172851-9145d8ad07e1 h1:idfl8M8rPW93NehFw5H1qqH8yG158t5POr+LX9avbJY=
|
||||
@@ -106,8 +110,8 @@ github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc h1:hd+uUVsB1vdxohPneMrhGH2YfQuH5hRIK9u4/XCeUtw=
|
||||
github.com/google/go-pipeline v0.0.0-20230411140531-6cbedfc1d3fc/go.mod h1:SL66SJVysrh7YbDCP9tH30b8a9o/N2HeiQNUm85EKhc=
|
||||
github.com/google/pprof v0.0.0-20260111202518-71be6bfdd440 h1:oKBqR+eQXiIM7X8K1JEg9aoTEePLq/c6Awe484abOuA=
|
||||
github.com/google/pprof v0.0.0-20260111202518-71be6bfdd440/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI=
|
||||
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83 h1:z2ogiKUYzX5Is6zr/vP9vJGqPwcdqsWjOt+V8J7+bTc=
|
||||
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI=
|
||||
github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE=
|
||||
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
@@ -234,13 +238,15 @@ github.com/sagikazarmark/locafero v0.12.0 h1:/NQhBAkUb4+fH1jivKHWusDYFjMOOKU88ee
|
||||
github.com/sagikazarmark/locafero v0.12.0/go.mod h1:sZh36u/YSZ918v0Io+U9ogLYQJ9tLLBmM4eneO6WwsI=
|
||||
github.com/sanity-io/litter v1.5.8 h1:uM/2lKrWdGbRXDrIq08Lh9XtVYoeGtcQxk9rtQ7+rYg=
|
||||
github.com/sanity-io/litter v1.5.8/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U=
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 h1:KRzFb2m7YtdldCEkzs6KqmJw4nqEVZGK7IN2kJkjTuQ=
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU=
|
||||
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
|
||||
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
|
||||
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w=
|
||||
github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
|
||||
@@ -269,7 +275,6 @@ github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRci
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
@@ -358,7 +363,6 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
|
||||
@@ -3,12 +3,13 @@ package model
import "time"

type Annotations struct {
PlayCount int64 `structs:"play_count" json:"playCount,omitempty"`
PlayDate *time.Time `structs:"play_date" json:"playDate,omitempty" `
Rating int `structs:"rating" json:"rating,omitempty" `
RatedAt *time.Time `structs:"rated_at" json:"ratedAt,omitempty" `
Starred bool `structs:"starred" json:"starred,omitempty" `
StarredAt *time.Time `structs:"starred_at" json:"starredAt,omitempty"`
PlayCount int64 `structs:"play_count" json:"playCount,omitempty"`
PlayDate *time.Time `structs:"play_date" json:"playDate,omitempty" `
Rating int `structs:"rating" json:"rating,omitempty" `
RatedAt *time.Time `structs:"rated_at" json:"ratedAt,omitempty" `
Starred bool `structs:"starred" json:"starred,omitempty" `
StarredAt *time.Time `structs:"starred_at" json:"starredAt,omitempty"`
AverageRating float64 `structs:"average_rating" json:"averageRating,omitempty"`
}

type AnnotatedRepository interface {

@@ -353,6 +353,7 @@ type MediaFileCursor iter.Seq2[MediaFile, error]

type MediaFileRepository interface {
CountAll(options ...QueryOptions) (int64, error)
CountBySuffix(options ...QueryOptions) (map[string]int64, error)
Exists(id string) (bool, error)
Put(m *MediaFile) error
Get(id string) (*MediaFile, error)
@@ -126,6 +126,89 @@ var _ = Describe("AlbumRepository", func() {
|
||||
)
|
||||
})
|
||||
|
||||
Describe("Album.AverageRating", func() {
|
||||
It("returns 0 when no ratings exist", func() {
|
||||
newID := id.NewRandom()
|
||||
Expect(albumRepo.Put(&model.Album{LibraryID: 1, ID: newID, Name: "no ratings album"})).To(Succeed())
|
||||
|
||||
album, err := albumRepo.Get(newID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(album.AverageRating).To(Equal(0.0))
|
||||
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": newID}))
|
||||
})
|
||||
|
||||
It("returns the user's rating as average when only one user rated", func() {
|
||||
newID := id.NewRandom()
|
||||
Expect(albumRepo.Put(&model.Album{LibraryID: 1, ID: newID, Name: "single rating album"})).To(Succeed())
|
||||
Expect(albumRepo.SetRating(4, newID)).To(Succeed())
|
||||
|
||||
album, err := albumRepo.Get(newID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(album.AverageRating).To(Equal(4.0))
|
||||
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("annotation").Where(squirrel.Eq{"item_id": newID}))
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": newID}))
|
||||
})
|
||||
|
||||
It("calculates average across multiple users", func() {
|
||||
newID := id.NewRandom()
|
||||
Expect(albumRepo.Put(&model.Album{LibraryID: 1, ID: newID, Name: "multi rating album"})).To(Succeed())
|
||||
|
||||
Expect(albumRepo.SetRating(4, newID)).To(Succeed())
|
||||
|
||||
user2Ctx := request.WithUser(GinkgoT().Context(), regularUser)
|
||||
user2Repo := NewAlbumRepository(user2Ctx, GetDBXBuilder()).(*albumRepository)
|
||||
Expect(user2Repo.SetRating(5, newID)).To(Succeed())
|
||||
|
||||
album, err := albumRepo.Get(newID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(album.AverageRating).To(Equal(4.5))
|
||||
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("annotation").Where(squirrel.Eq{"item_id": newID}))
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": newID}))
|
||||
})
|
||||
|
||||
It("excludes zero ratings from average calculation", func() {
|
||||
newID := id.NewRandom()
|
||||
Expect(albumRepo.Put(&model.Album{LibraryID: 1, ID: newID, Name: "zero rating excluded album"})).To(Succeed())
|
||||
Expect(albumRepo.SetRating(3, newID)).To(Succeed())
|
||||
|
||||
user2Ctx := request.WithUser(GinkgoT().Context(), regularUser)
|
||||
user2Repo := NewAlbumRepository(user2Ctx, GetDBXBuilder()).(*albumRepository)
|
||||
Expect(user2Repo.SetRating(0, newID)).To(Succeed())
|
||||
|
||||
album, err := albumRepo.Get(newID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(album.AverageRating).To(Equal(3.0))
|
||||
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("annotation").Where(squirrel.Eq{"item_id": newID}))
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": newID}))
|
||||
})
|
||||
|
||||
It("rounds to 2 decimal places", func() {
|
||||
newID := id.NewRandom()
|
||||
Expect(albumRepo.Put(&model.Album{LibraryID: 1, ID: newID, Name: "rounding test album"})).To(Succeed())
|
||||
|
||||
Expect(albumRepo.SetRating(5, newID)).To(Succeed())
|
||||
|
||||
user2Ctx := request.WithUser(GinkgoT().Context(), regularUser)
|
||||
user2Repo := NewAlbumRepository(user2Ctx, GetDBXBuilder()).(*albumRepository)
|
||||
Expect(user2Repo.SetRating(4, newID)).To(Succeed())
|
||||
|
||||
user3Ctx := request.WithUser(GinkgoT().Context(), thirdUser)
|
||||
user3Repo := NewAlbumRepository(user3Ctx, GetDBXBuilder()).(*albumRepository)
|
||||
Expect(user3Repo.SetRating(4, newID)).To(Succeed())
|
||||
|
||||
album, err := albumRepo.Get(newID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(album.AverageRating).To(Equal(4.33)) // (5 + 4 + 4) / 3 = 4.333...
|
||||
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("annotation").Where(squirrel.Eq{"item_id": newID}))
|
||||
_, _ = albumRepo.executeSQL(squirrel.Delete("album").Where(squirrel.Eq{"id": newID}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("dbAlbum mapping", func() {
|
||||
var (
|
||||
a model.Album
|
||||
|
||||
@@ -124,6 +124,25 @@ func (r *mediaFileRepository) CountAll(options ...model.QueryOptions) (int64, er
return r.count(query, options...)
}

func (r *mediaFileRepository) CountBySuffix(options ...model.QueryOptions) (map[string]int64, error) {
sel := r.newSelect(options...).
Columns("lower(suffix) as suffix", "count(*) as count").
GroupBy("lower(suffix)")
var res []struct {
Suffix string
Count int64
}
err := r.queryAll(sel, &res)
if err != nil {
return nil, err
}
counts := make(map[string]int64, len(res))
for _, c := range res {
counts[c.Suffix] = c.Count
}
return counts, nil
}

func (r *mediaFileRepository) Exists(id string) (bool, error) {
return r.exists(Eq{"media_file.id": id})
}
@@ -41,6 +41,44 @@ var _ = Describe("MediaRepository", func() {
|
||||
Expect(mr.CountAll()).To(Equal(int64(10)))
|
||||
})
|
||||
|
||||
Describe("CountBySuffix", func() {
|
||||
var mp3File, flacFile1, flacFile2, flacUpperFile model.MediaFile
|
||||
|
||||
BeforeEach(func() {
|
||||
mp3File = model.MediaFile{ID: "suffix-mp3", LibraryID: 1, Suffix: "mp3", Path: "/test/file.mp3"}
|
||||
flacFile1 = model.MediaFile{ID: "suffix-flac1", LibraryID: 1, Suffix: "flac", Path: "/test/file1.flac"}
|
||||
flacFile2 = model.MediaFile{ID: "suffix-flac2", LibraryID: 1, Suffix: "flac", Path: "/test/file2.flac"}
|
||||
flacUpperFile = model.MediaFile{ID: "suffix-FLAC", LibraryID: 1, Suffix: "FLAC", Path: "/test/file.FLAC"}
|
||||
|
||||
Expect(mr.Put(&mp3File)).To(Succeed())
|
||||
Expect(mr.Put(&flacFile1)).To(Succeed())
|
||||
Expect(mr.Put(&flacFile2)).To(Succeed())
|
||||
Expect(mr.Put(&flacUpperFile)).To(Succeed())
|
||||
})
|
||||
|
||||
AfterEach(func() {
|
||||
_ = mr.Delete(mp3File.ID)
|
||||
_ = mr.Delete(flacFile1.ID)
|
||||
_ = mr.Delete(flacFile2.ID)
|
||||
_ = mr.Delete(flacUpperFile.ID)
|
||||
})
|
||||
|
||||
It("counts media files grouped by suffix with lowercase normalization", func() {
|
||||
counts, err := mr.CountBySuffix()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Should have lowercase keys only
|
||||
Expect(counts).To(HaveKey("mp3"))
|
||||
Expect(counts).To(HaveKey("flac"))
|
||||
Expect(counts).ToNot(HaveKey("FLAC"))
|
||||
|
||||
// mp3: 1 file
|
||||
Expect(counts["mp3"]).To(Equal(int64(1)))
|
||||
// flac: 3 files (2 lowercase + 1 uppercase normalized)
|
||||
Expect(counts["flac"]).To(Equal(int64(3)))
|
||||
})
|
||||
})
|
||||
|
||||
It("returns songs ordered by lyrics with a specific title/artist", func() {
|
||||
// attempt to mimic filters.SongsByArtistTitleWithLyricsFirst, except we want all items
|
||||
results, err := mr.GetAll(model.QueryOptions{
|
||||
@@ -119,6 +157,74 @@ var _ = Describe("MediaRepository", func() {
|
||||
Expect(mf.PlayCount).To(Equal(int64(1)))
|
||||
})
|
||||
|
||||
Describe("AverageRating", func() {
|
||||
var raw *mediaFileRepository
|
||||
|
||||
BeforeEach(func() {
|
||||
raw = mr.(*mediaFileRepository)
|
||||
})
|
||||
|
||||
It("returns 0 when no ratings exist", func() {
|
||||
newID := id.NewRandom()
|
||||
Expect(mr.Put(&model.MediaFile{LibraryID: 1, ID: newID, Path: "/test/no-rating.mp3"})).To(Succeed())
|
||||
|
||||
mf, err := mr.Get(newID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(mf.AverageRating).To(Equal(0.0))
|
||||
|
||||
_, _ = raw.executeSQL(squirrel.Delete("media_file").Where(squirrel.Eq{"id": newID}))
|
||||
})
|
||||
|
||||
It("returns the user's rating as average when only one user rated", func() {
|
||||
newID := id.NewRandom()
|
||||
Expect(mr.Put(&model.MediaFile{LibraryID: 1, ID: newID, Path: "/test/single-rating.mp3"})).To(Succeed())
|
||||
Expect(mr.SetRating(5, newID)).To(Succeed())
|
||||
|
||||
mf, err := mr.Get(newID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(mf.AverageRating).To(Equal(5.0))
|
||||
|
||||
_, _ = raw.executeSQL(squirrel.Delete("annotation").Where(squirrel.Eq{"item_id": newID}))
|
||||
_, _ = raw.executeSQL(squirrel.Delete("media_file").Where(squirrel.Eq{"id": newID}))
|
||||
})
|
||||
|
||||
It("calculates average across multiple users", func() {
|
||||
newID := id.NewRandom()
|
||||
Expect(mr.Put(&model.MediaFile{LibraryID: 1, ID: newID, Path: "/test/multi-rating.mp3"})).To(Succeed())
|
||||
|
||||
Expect(mr.SetRating(3, newID)).To(Succeed())
|
||||
|
||||
user2Ctx := request.WithUser(GinkgoT().Context(), regularUser)
|
||||
user2Repo := NewMediaFileRepository(user2Ctx, GetDBXBuilder())
|
||||
Expect(user2Repo.SetRating(5, newID)).To(Succeed())
|
||||
|
||||
mf, err := mr.Get(newID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(mf.AverageRating).To(Equal(4.0))
|
||||
|
||||
_, _ = raw.executeSQL(squirrel.Delete("annotation").Where(squirrel.Eq{"item_id": newID}))
|
||||
_, _ = raw.executeSQL(squirrel.Delete("media_file").Where(squirrel.Eq{"id": newID}))
|
||||
})
|
||||
|
||||
It("excludes zero ratings from average calculation", func() {
|
||||
newID := id.NewRandom()
|
||||
Expect(mr.Put(&model.MediaFile{LibraryID: 1, ID: newID, Path: "/test/zero-excluded.mp3"})).To(Succeed())
|
||||
|
||||
Expect(mr.SetRating(4, newID)).To(Succeed())
|
||||
|
||||
user2Ctx := request.WithUser(GinkgoT().Context(), regularUser)
|
||||
user2Repo := NewMediaFileRepository(user2Ctx, GetDBXBuilder())
|
||||
Expect(user2Repo.SetRating(0, newID)).To(Succeed())
|
||||
|
||||
mf, err := mr.Get(newID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(mf.AverageRating).To(Equal(4.0))
|
||||
|
||||
_, _ = raw.executeSQL(squirrel.Delete("annotation").Where(squirrel.Eq{"item_id": newID}))
|
||||
_, _ = raw.executeSQL(squirrel.Delete("media_file").Where(squirrel.Eq{"id": newID}))
|
||||
})
|
||||
})
|
||||
|
||||
It("preserves play date if and only if provided date is older", func() {
|
||||
id := "incplay.playdate"
|
||||
Expect(mr.Put(&model.MediaFile{LibraryID: 1, ID: id})).To(BeNil())
|
||||
|
||||
@@ -130,7 +130,8 @@ var (
|
||||
var (
|
||||
adminUser = model.User{ID: "userid", UserName: "userid", Name: "admin", Email: "admin@email.com", IsAdmin: true}
|
||||
regularUser = model.User{ID: "2222", UserName: "regular-user", Name: "Regular User", Email: "regular@example.com"}
|
||||
testUsers = model.Users{adminUser, regularUser}
|
||||
thirdUser = model.User{ID: "3333", UserName: "third-user", Name: "Third User", Email: "third@example.com"}
|
||||
testUsers = model.Users{adminUser, regularUser, thirdUser}
|
||||
)
|
||||
|
||||
func p(path string) string {
|
||||
|
||||
@@ -17,7 +17,7 @@ const annotationTable = "annotation"
func (r sqlRepository) withAnnotation(query SelectBuilder, idField string) SelectBuilder {
userID := loggedUser(r.ctx).ID
if userID == invalidUserId {
return query
return query.Columns(fmt.Sprintf("%s.average_rating", r.tableName))
}
query = query.
LeftJoin("annotation on ("+
@@ -38,6 +38,8 @@ func (r sqlRepository) withAnnotation(query SelectBuilder, idField string) Selec
query = query.Columns("coalesce(play_count, 0) as play_count")
}

query = query.Columns(fmt.Sprintf("%s.average_rating", r.tableName))

return query
}

@@ -79,7 +81,22 @@ func (r sqlRepository) SetStar(starred bool, ids ...string) error {

func (r sqlRepository) SetRating(rating int, itemID string) error {
ratedAt := time.Now()
return r.annUpsert(map[string]interface{}{"rating": rating, "rated_at": ratedAt}, itemID)
err := r.annUpsert(map[string]interface{}{"rating": rating, "rated_at": ratedAt}, itemID)
if err != nil {
return err
}
return r.updateAvgRating(itemID)
}

func (r sqlRepository) updateAvgRating(itemID string) error {
upd := Update(r.tableName).
Where(Eq{"id": itemID}).
Set("average_rating", Expr(
"coalesce((select round(avg(rating), 2) from annotation where item_id = ? and item_type = ? and rating > 0), 0)",
itemID, r.tableName,
))
_, err := r.executeSQL(upd)
return err
}

func (r sqlRepository) IncPlayCount(itemID string, ts time.Time) error {
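For reference, the value updateAvgRating and the migration denormalize is just the mean of the non-zero ratings, rounded to two decimals. A plain-Go sketch of the same rule (illustrative only, not repository code):

```go
package main

import (
	"fmt"
	"math"
)

// averageRating mirrors the SQL expression: ignore zero ratings, average the
// rest, round to two decimals, and fall back to 0 when nothing qualifies.
func averageRating(ratings []int) float64 {
	var sum, n int
	for _, r := range ratings {
		if r > 0 {
			sum += r
			n++
		}
	}
	if n == 0 {
		return 0
	}
	return math.Round(float64(sum)/float64(n)*100) / 100
}

func main() {
	fmt.Println(averageRating([]int{5, 4, 4})) // 4.33, as asserted in the album test
	fmt.Println(averageRating([]int{3, 0}))    // 3, zero ratings are excluded
}
```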
@@ -4,10 +4,10 @@ go 1.25
|
||||
|
||||
require (
|
||||
github.com/extism/go-pdk v1.1.3
|
||||
github.com/onsi/ginkgo/v2 v2.27.3
|
||||
github.com/onsi/gomega v1.38.3
|
||||
github.com/xeipuuv/gojsonschema v1.2.0
|
||||
golang.org/x/tools v0.40.0
|
||||
github.com/onsi/ginkgo/v2 v2.27.5
|
||||
github.com/onsi/gomega v1.39.0
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2
|
||||
golang.org/x/tools v0.41.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
@@ -16,13 +16,11 @@ require (
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/pprof v0.0.0-20250403155104-27863c87afa6 // indirect
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
||||
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/mod v0.31.0 // indirect
|
||||
golang.org/x/net v0.48.0 // indirect
|
||||
golang.org/x/mod v0.32.0 // indirect
|
||||
golang.org/x/net v0.49.0 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.39.0 // indirect
|
||||
golang.org/x/text v0.32.0 // indirect
|
||||
golang.org/x/sys v0.40.0 // indirect
|
||||
golang.org/x/text v0.33.0 // indirect
|
||||
)
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0=
|
||||
github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
|
||||
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/extism/go-pdk v1.1.3 h1:hfViMPWrqjN6u67cIYRALZTZLk/enSPpNKa+rZ9X2SQ=
|
||||
github.com/extism/go-pdk v1.1.3/go.mod h1:Gz+LIU/YCKnKXhgge8yo5Yu1F/lbv7KtKFkiCSzW/P4=
|
||||
github.com/gkampitakis/ciinfo v0.3.2 h1:JcuOPk8ZU7nZQjdUhctuhQofk7BGHuIy0c9Ez8BNhXs=
|
||||
@@ -19,8 +20,8 @@ github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
|
||||
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/pprof v0.0.0-20250403155104-27863c87afa6 h1:BHT72Gu3keYf3ZEu2J0b1vyeLSOYI8bm5wbJM/8yDe8=
|
||||
github.com/google/pprof v0.0.0-20250403155104-27863c87afa6/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83 h1:z2ogiKUYzX5Is6zr/vP9vJGqPwcdqsWjOt+V8J7+bTc=
|
||||
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI=
|
||||
github.com/joshdk/go-junit v1.0.0 h1:S86cUKIdwBHWwA6xCmFlf3RTLfVXYQfvanM5Uh+K6GE=
|
||||
github.com/joshdk/go-junit v1.0.0/go.mod h1:TiiV0PqkaNfFXjEiyjWM3XXrhVyCa1K4Zfga6W52ung=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
@@ -31,16 +32,16 @@ github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo=
|
||||
github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg=
|
||||
github.com/mfridman/tparse v0.18.0 h1:wh6dzOKaIwkUGyKgOntDW4liXSo37qg5AXbIhkMV3vE=
|
||||
github.com/mfridman/tparse v0.18.0/go.mod h1:gEvqZTuCgEhPbYk/2lS3Kcxg1GmTxxU7kTC8DvP0i/A=
|
||||
github.com/onsi/ginkgo/v2 v2.27.3 h1:ICsZJ8JoYafeXFFlFAG75a7CxMsJHwgKwtO+82SE9L8=
|
||||
github.com/onsi/ginkgo/v2 v2.27.3/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
|
||||
github.com/onsi/gomega v1.38.3 h1:eTX+W6dobAYfFeGC2PV6RwXRu/MyT+cQguijutvkpSM=
|
||||
github.com/onsi/gomega v1.38.3/go.mod h1:ZCU1pkQcXDO5Sl9/VVEGlDyp+zm0m1cmeG5TOzLgdh4=
|
||||
github.com/onsi/ginkgo/v2 v2.27.5 h1:ZeVgZMx2PDMdJm/+w5fE/OyG6ILo1Y3e+QX4zSR0zTE=
|
||||
github.com/onsi/ginkgo/v2 v2.27.5/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
|
||||
github.com/onsi/gomega v1.39.0 h1:y2ROC3hKFmQZJNFeGAMeHZKkjBL65mIZcvrLQBF9k6Q=
|
||||
github.com/onsi/gomega v1.39.0/go.mod h1:ZCU1pkQcXDO5Sl9/VVEGlDyp+zm0m1cmeG5TOzLgdh4=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
|
||||
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 h1:KRzFb2m7YtdldCEkzs6KqmJw4nqEVZGK7IN2kJkjTuQ=
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
@@ -51,26 +52,20 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
|
||||
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
|
||||
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
|
||||
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
|
||||
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/mod v0.31.0 h1:HaW9xtz0+kOcWKwli0ZXy79Ix+UW/vOfmWI5QVd2tgI=
|
||||
golang.org/x/mod v0.31.0/go.mod h1:43JraMp9cGx1Rx3AqioxrbrhNsLl2l/iNAvuBkrezpg=
|
||||
golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU=
|
||||
golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY=
|
||||
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
|
||||
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
|
||||
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
|
||||
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
|
||||
golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU=
|
||||
golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY=
|
||||
golang.org/x/tools v0.40.0 h1:yLkxfA+Qnul4cs9QA3KnlFu0lVmd8JJfoq+E41uSutA=
|
||||
golang.org/x/tools v0.40.0/go.mod h1:Ik/tzLRlbscWpqqMRjyWYDisX8bG13FrdXp3o4Sr9lc=
|
||||
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
|
||||
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A=
|
||||
google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
|
||||
@@ -3,10 +3,11 @@ package internal
|
||||
import (
|
||||
_ "embed"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/xeipuuv/gojsonschema"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
@@ -25,27 +26,61 @@ func ValidateXTPSchema(generatedSchema []byte) error {
|
||||
return fmt.Errorf("failed to parse generated schema as YAML: %w", err)
|
||||
}
|
||||
|
||||
// Convert to JSON for the validator
|
||||
jsonBytes, err := json.Marshal(schemaDoc)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert schema to JSON: %w", err)
|
||||
// Parse the XTP schema JSON
|
||||
var xtpSchema any
|
||||
if err := json.Unmarshal([]byte(xtpSchemaJSON), &xtpSchema); err != nil {
|
||||
return fmt.Errorf("failed to parse XTP schema: %w", err)
|
||||
}
|
||||
|
||||
schemaLoader := gojsonschema.NewStringLoader(xtpSchemaJSON)
|
||||
documentLoader := gojsonschema.NewBytesLoader(jsonBytes)
|
||||
|
||||
result, err := gojsonschema.Validate(schemaLoader, documentLoader)
|
||||
if err != nil {
|
||||
return fmt.Errorf("schema validation failed: %w", err)
|
||||
// Compile the XTP schema
|
||||
compiler := jsonschema.NewCompiler()
|
||||
if err := compiler.AddResource("xtp-schema.json", xtpSchema); err != nil {
|
||||
return fmt.Errorf("failed to add XTP schema resource: %w", err)
|
||||
}
|
||||
|
||||
if !result.Valid() {
|
||||
var errs []string
|
||||
for _, desc := range result.Errors() {
|
||||
errs = append(errs, fmt.Sprintf("- %s", desc))
|
||||
}
|
||||
return fmt.Errorf("schema validation errors:\n%s", strings.Join(errs, "\n"))
|
||||
schema, err := compiler.Compile("xtp-schema.json")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to compile XTP schema: %w", err)
|
||||
}
|
||||
|
||||
// Validate the generated schema against XTP schema
|
||||
if err := schema.Validate(schemaDoc); err != nil {
|
||||
return fmt.Errorf("schema validation errors:\n%s", formatValidationErrors(err))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// formatValidationErrors formats jsonschema validation errors into readable strings.
|
||||
func formatValidationErrors(err error) string {
|
||||
var validationErr *jsonschema.ValidationError
|
||||
if !errors.As(err, &validationErr) {
|
||||
return fmt.Sprintf("- %s", err.Error())
|
||||
}
|
||||
|
||||
var errs []string
|
||||
collectValidationErrors(validationErr, &errs)
|
||||
|
||||
if len(errs) == 0 {
|
||||
return fmt.Sprintf("- %s", validationErr.Error())
|
||||
}
|
||||
return strings.Join(errs, "\n")
|
||||
}
|
||||
|
||||
// collectValidationErrors recursively collects leaf validation errors.
|
||||
func collectValidationErrors(err *jsonschema.ValidationError, errs *[]string) {
|
||||
if len(err.Causes) > 0 {
|
||||
for _, cause := range err.Causes {
|
||||
collectValidationErrors(cause, errs)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Leaf error - format with location if available
|
||||
msg := err.Error()
|
||||
if len(err.InstanceLocation) > 0 {
|
||||
location := strings.Join(err.InstanceLocation, "/")
|
||||
msg = fmt.Sprintf("%s: %s", location, msg)
|
||||
}
|
||||
*errs = append(*errs, fmt.Sprintf("- %s", msg))
|
||||
}
|
||||
|
||||
129
plugins/config_validation.go
Normal file
129
plugins/config_validation.go
Normal file
@@ -0,0 +1,129 @@
|
||||
package plugins
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
)
|
||||
|
||||
// ConfigValidationError represents a validation error with field path and message.
|
||||
type ConfigValidationError struct {
|
||||
Field string `json:"field"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
// ConfigValidationErrors is a collection of validation errors.
|
||||
type ConfigValidationErrors struct {
|
||||
Errors []ConfigValidationError `json:"errors"`
|
||||
}
|
||||
|
||||
func (e *ConfigValidationErrors) Error() string {
|
||||
if len(e.Errors) == 0 {
|
||||
return "validation failed"
|
||||
}
|
||||
var msgs []string
|
||||
for _, err := range e.Errors {
|
||||
if err.Field != "" {
|
||||
msgs = append(msgs, fmt.Sprintf("%s: %s", err.Field, err.Message))
|
||||
} else {
|
||||
msgs = append(msgs, err.Message)
|
||||
}
|
||||
}
|
||||
return strings.Join(msgs, "; ")
|
||||
}
|
||||
|
||||
// ValidateConfig validates a config JSON string against a plugin's config schema.
|
||||
// If the manifest has no config schema, it returns an error indicating the plugin
|
||||
// has no configurable options.
|
||||
// Returns nil if validation passes, ConfigValidationErrors if validation fails.
|
||||
func ValidateConfig(manifest *Manifest, configJSON string) error {
|
||||
// If no config schema defined, plugin has no configurable options
|
||||
if !manifest.HasConfigSchema() {
|
||||
return fmt.Errorf("plugin has no configurable options")
|
||||
}
|
||||
|
||||
// Parse the config JSON (empty string treated as empty object)
|
||||
var configData any
|
||||
if configJSON == "" {
|
||||
configData = map[string]any{}
|
||||
} else {
|
||||
if err := json.Unmarshal([]byte(configJSON), &configData); err != nil {
|
||||
return &ConfigValidationErrors{
|
||||
Errors: []ConfigValidationError{{
|
||||
Message: fmt.Sprintf("invalid JSON: %v", err),
|
||||
}},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Compile the schema
|
||||
compiler := jsonschema.NewCompiler()
|
||||
if err := compiler.AddResource("schema.json", manifest.Config.Schema); err != nil {
|
||||
return fmt.Errorf("adding schema resource: %w", err)
|
||||
}
|
||||
|
||||
schema, err := compiler.Compile("schema.json")
|
||||
if err != nil {
|
||||
return fmt.Errorf("compiling schema: %w", err)
|
||||
}
|
||||
|
||||
// Validate config against schema
|
||||
if err := schema.Validate(configData); err != nil {
|
||||
return convertValidationError(err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// convertValidationError converts jsonschema validation errors to our format.
|
||||
func convertValidationError(err error) *ConfigValidationErrors {
|
||||
var validationErr *jsonschema.ValidationError
|
||||
if !errors.As(err, &validationErr) {
|
||||
return &ConfigValidationErrors{
|
||||
Errors: []ConfigValidationError{{
|
||||
Message: err.Error(),
|
||||
}},
|
||||
}
|
||||
}
|
||||
|
||||
var configErrors []ConfigValidationError
|
||||
collectErrors(validationErr, &configErrors)
|
||||
|
||||
if len(configErrors) == 0 {
|
||||
configErrors = append(configErrors, ConfigValidationError{
|
||||
Message: validationErr.Error(),
|
||||
})
|
||||
}
|
||||
|
||||
return &ConfigValidationErrors{Errors: configErrors}
|
||||
}
|
||||
|
||||
// collectErrors recursively collects validation errors from the error tree.
|
||||
func collectErrors(err *jsonschema.ValidationError, errors *[]ConfigValidationError) {
|
||||
// If there are child errors, collect from them
|
||||
if len(err.Causes) > 0 {
|
||||
for _, cause := range err.Causes {
|
||||
collectErrors(cause, errors)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Leaf error - add it
|
||||
field := ""
|
||||
if len(err.InstanceLocation) > 0 {
|
||||
field = strings.Join(err.InstanceLocation, "/")
|
||||
}
|
||||
|
||||
*errors = append(*errors, ConfigValidationError{
|
||||
Field: field,
|
||||
Message: err.Error(),
|
||||
})
|
||||
}
|
||||
|
||||
// HasConfigSchema returns true if the manifest defines a config schema.
|
||||
func (m *Manifest) HasConfigSchema() bool {
|
||||
return m.Config != nil && m.Config.Schema != nil
|
||||
}
|
||||
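To show the intended call pattern for this new validator, here is a small hypothetical caller; the import path, the manifest literal, and the printed output are assumptions for illustration, and only the ValidateConfig/ConfigValidationErrors API comes from the code above:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/navidrome/navidrome/plugins"
)

func main() {
	// Hypothetical manifest with a single required field; real manifests are
	// loaded from the plugin's manifest.json.
	manifest := &plugins.Manifest{
		Name:    "example",
		Author:  "example",
		Version: "1.0.0",
		Config: &plugins.ConfigDefinition{
			Schema: map[string]any{
				"type": "object",
				"properties": map[string]any{
					"apiKey": map[string]any{"type": "string", "minLength": float64(1)},
				},
				"required": []any{"apiKey"},
			},
		},
	}

	if err := plugins.ValidateConfig(manifest, `{}`); err != nil {
		var verr *plugins.ConfigValidationErrors
		if errors.As(err, &verr) {
			for _, e := range verr.Errors {
				// Each entry carries the instance location (Field) and a message.
				fmt.Printf("field=%q message=%q\n", e.Field, e.Message)
			}
		}
	}
}
```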
186
plugins/config_validation_test.go
Normal file
186
plugins/config_validation_test.go
Normal file
@@ -0,0 +1,186 @@
|
||||
//go:build !windows
|
||||
|
||||
package plugins
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Config Validation", func() {
|
||||
Describe("ValidateConfig", func() {
|
||||
Context("when manifest has no config schema", func() {
|
||||
It("returns an error", func() {
|
||||
manifest := &Manifest{
|
||||
Name: "test",
|
||||
Author: "test",
|
||||
Version: "1.0.0",
|
||||
}
|
||||
err := ValidateConfig(manifest, `{"key": "value"}`)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("no configurable options"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when manifest has config schema", func() {
|
||||
var manifest *Manifest
|
||||
|
||||
BeforeEach(func() {
|
||||
manifest = &Manifest{
|
||||
Name: "test",
|
||||
Author: "test",
|
||||
Version: "1.0.0",
|
||||
Config: &ConfigDefinition{
|
||||
Schema: map[string]any{
|
||||
"type": "object",
|
||||
"properties": map[string]any{
|
||||
"apiKey": map[string]any{
|
||||
"type": "string",
|
||||
"description": "API key for the service",
|
||||
"minLength": float64(1),
|
||||
},
|
||||
"timeout": map[string]any{
|
||||
"type": "integer",
|
||||
"minimum": float64(1),
|
||||
"maximum": float64(300),
|
||||
},
|
||||
"enabled": map[string]any{
|
||||
"type": "boolean",
|
||||
},
|
||||
},
|
||||
"required": []any{"apiKey"},
|
||||
},
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
It("accepts valid config", func() {
|
||||
err := ValidateConfig(manifest, `{"apiKey": "secret123", "timeout": 30}`)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("rejects empty config when required fields are missing", func() {
|
||||
err := ValidateConfig(manifest, "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("apiKey"))
|
||||
|
||||
err = ValidateConfig(manifest, "{}")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("apiKey"))
|
||||
})
|
||||
|
||||
It("rejects config missing required field", func() {
|
||||
err := ValidateConfig(manifest, `{"timeout": 30}`)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("apiKey"))
|
||||
})
|
||||
|
||||
It("rejects config with wrong type", func() {
|
||||
err := ValidateConfig(manifest, `{"apiKey": "secret", "timeout": "not a number"}`)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("timeout"))
|
||||
})
|
||||
|
||||
It("rejects config with value out of range", func() {
|
||||
err := ValidateConfig(manifest, `{"apiKey": "secret", "timeout": 500}`)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("timeout"))
|
||||
})
|
||||
|
||||
It("rejects config with empty required string", func() {
|
||||
err := ValidateConfig(manifest, `{"apiKey": ""}`)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("apiKey"))
|
||||
})
|
||||
|
||||
It("rejects invalid JSON", func() {
|
||||
err := ValidateConfig(manifest, `{invalid json}`)
|
||||
Expect(err).To(HaveOccurred())
|
||||
var validationErr *ConfigValidationErrors
|
||||
Expect(errors.As(err, &validationErr)).To(BeTrue())
|
||||
Expect(validationErr.Errors[0].Message).To(ContainSubstring("invalid JSON"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("with enum values", func() {
|
||||
It("accepts valid enum value", func() {
|
||||
manifest := &Manifest{
|
||||
Name: "test",
|
||||
Author: "test",
|
||||
Version: "1.0.0",
|
||||
Config: &ConfigDefinition{
|
||||
Schema: map[string]any{
|
||||
"type": "object",
|
||||
"properties": map[string]any{
|
||||
"logLevel": map[string]any{
|
||||
"type": "string",
|
||||
"enum": []any{"debug", "info", "warn", "error"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
err := ValidateConfig(manifest, `{"logLevel": "info"}`)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("rejects invalid enum value", func() {
|
||||
manifest := &Manifest{
|
||||
Name: "test",
|
||||
Author: "test",
|
||||
Version: "1.0.0",
|
||||
Config: &ConfigDefinition{
|
||||
Schema: map[string]any{
|
||||
"type": "object",
|
||||
"properties": map[string]any{
|
||||
"logLevel": map[string]any{
|
||||
"type": "string",
|
||||
"enum": []any{"debug", "info", "warn", "error"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
err := ValidateConfig(manifest, `{"logLevel": "verbose"}`)
|
||||
Expect(err).To(HaveOccurred())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("HasConfigSchema", func() {
|
||||
It("returns false when config is nil", func() {
|
||||
manifest := &Manifest{
|
||||
Name: "test",
|
||||
Author: "test",
|
||||
Version: "1.0.0",
|
||||
}
|
||||
Expect(manifest.HasConfigSchema()).To(BeFalse())
|
||||
})
|
||||
|
||||
It("returns false when schema is nil", func() {
|
||||
manifest := &Manifest{
|
||||
Name: "test",
|
||||
Author: "test",
|
||||
Version: "1.0.0",
|
||||
Config: &ConfigDefinition{},
|
||||
}
|
||||
Expect(manifest.HasConfigSchema()).To(BeFalse())
|
||||
})
|
||||
|
||||
It("returns true when schema is present", func() {
|
||||
manifest := &Manifest{
|
||||
Name: "test",
|
||||
Author: "test",
|
||||
Version: "1.0.0",
|
||||
Config: &ConfigDefinition{
|
||||
Schema: map[string]any{
|
||||
"type": "object",
|
||||
},
|
||||
},
|
||||
}
|
||||
Expect(manifest.HasConfigSchema()).To(BeTrue())
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -25,6 +25,14 @@ const (
|
||||
|
||||
// ID for the reconnection schedule
|
||||
reconnectScheduleID = "crypto-ticker-reconnect"
|
||||
|
||||
// Config keys (must match manifest.json schema property names)
|
||||
symbolsKey = "symbols"
|
||||
reconnectDelayKey = "reconnectDelay"
|
||||
logPricesKey = "logPrices"
|
||||
|
||||
// Default values
|
||||
defaultReconnectDelay = 5
|
||||
)
|
||||
|
||||
// CoinbaseSubscription message structure
|
||||
@@ -74,36 +82,67 @@ var (
|
||||
func (p *cryptoTickerPlugin) OnInit() error {
|
||||
pdk.Log(pdk.LogInfo, "Crypto Ticker Plugin initializing...")
|
||||
|
||||
// Get ticker configuration
|
||||
tickerConfig, ok := pdk.GetConfig("tickers")
|
||||
if !ok || tickerConfig == "" {
|
||||
tickerConfig = "BTC,ETH" // Default tickers
|
||||
}
|
||||
|
||||
tickers := parseTickerSymbols(tickerConfig)
|
||||
pdk.Log(pdk.LogInfo, fmt.Sprintf("Configured tickers: %v", tickers))
|
||||
// Get ticker configuration from JSON schema config
|
||||
symbols := getSymbols()
|
||||
pdk.Log(pdk.LogInfo, fmt.Sprintf("Configured symbols: %v", symbols))
|
||||
|
||||
// Connect to WebSocket
|
||||
// Errors won't fail init - reconnect logic will handle it
|
||||
return connectAndSubscribe(tickers)
|
||||
return connectAndSubscribe(symbols)
|
||||
}
|
||||
|
||||
// parseTickerSymbols parses a comma-separated list of ticker symbols
|
||||
func parseTickerSymbols(tickerConfig string) []string {
|
||||
parts := strings.Split(tickerConfig, ",")
|
||||
tickers := make([]string, 0, len(parts))
|
||||
for _, ticker := range parts {
|
||||
ticker = strings.TrimSpace(ticker)
|
||||
if ticker == "" {
|
||||
continue
|
||||
}
|
||||
// Add -USD suffix if not present
|
||||
if !strings.Contains(ticker, "-") {
|
||||
ticker = ticker + "-USD"
|
||||
}
|
||||
tickers = append(tickers, ticker)
|
||||
// getSymbols reads the symbols array from config
|
||||
func getSymbols() []string {
|
||||
defaultSymbols := []string{"BTC-USD"}
|
||||
symbolsJSON, ok := pdk.GetConfig(symbolsKey)
|
||||
if !ok || symbolsJSON == "" {
|
||||
return defaultSymbols
|
||||
}
|
||||
return tickers
|
||||
|
||||
var symbols []string
|
||||
if err := json.Unmarshal([]byte(symbolsJSON), &symbols); err != nil {
|
||||
pdk.Log(pdk.LogWarn, fmt.Sprintf("failed to parse symbols config: %v, using defaults", err))
|
||||
return defaultSymbols
|
||||
}
|
||||
|
||||
if len(symbols) == 0 {
|
||||
return defaultSymbols
|
||||
}
|
||||
|
||||
// Normalize symbols - add -USD suffix if not present
|
||||
for i, s := range symbols {
|
||||
s = strings.TrimSpace(s)
|
||||
if !strings.Contains(s, "-") {
|
||||
symbols[i] = s + "-USD"
|
||||
} else {
|
||||
symbols[i] = s
|
||||
}
|
||||
}
|
||||
|
||||
return symbols
|
||||
}
|
||||
|
||||
// getReconnectDelay reads the reconnect delay from config
|
||||
func getReconnectDelay() int32 {
|
||||
delayStr, ok := pdk.GetConfig(reconnectDelayKey)
|
||||
if !ok || delayStr == "" {
|
||||
return defaultReconnectDelay
|
||||
}
|
||||
|
||||
var delay int
|
||||
if _, err := fmt.Sscanf(delayStr, "%d", &delay); err != nil || delay < 1 {
|
||||
return defaultReconnectDelay
|
||||
}
|
||||
return int32(delay)
|
||||
}
|
||||
|
||||
// shouldLogPrices reads the logPrices setting from config
|
||||
func shouldLogPrices() bool {
|
||||
logStr, ok := pdk.GetConfig(logPricesKey)
|
||||
if !ok || logStr == "" {
|
||||
return false
|
||||
}
|
||||
return logStr == "true"
|
||||
}
|
||||
|
||||
// connectAndSubscribe connects to Coinbase WebSocket and subscribes to tickers
|
||||
@@ -164,14 +203,16 @@ func (p *cryptoTickerPlugin) OnTextMessage(input websocket.OnTextMessageRequest)
|
||||
// Calculate 24h change percentage
|
||||
change := calculatePercentChange(ticker.Open24h, ticker.Price)
|
||||
|
||||
// Log ticker information
|
||||
pdk.Log(pdk.LogInfo, fmt.Sprintf("💰 %s: $%s (24h: %s%%) Bid: $%s Ask: $%s",
|
||||
ticker.ProductID,
|
||||
ticker.Price,
|
||||
change,
|
||||
ticker.BestBid,
|
||||
ticker.BestAsk,
|
||||
))
|
||||
// Log ticker information (only if enabled in config)
|
||||
if shouldLogPrices() {
|
||||
pdk.Log(pdk.LogInfo, fmt.Sprintf("💰 %s: $%s (24h: %s%%) Bid: $%s Ask: $%s",
|
||||
ticker.ProductID,
|
||||
ticker.Price,
|
||||
change,
|
||||
ticker.BestBid,
|
||||
ticker.BestAsk,
|
||||
))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -196,10 +237,11 @@ func (p *cryptoTickerPlugin) OnClose(input websocket.OnCloseRequest) error {
|
||||
|
||||
// Only attempt reconnect for our connection
|
||||
if input.ConnectionID == connectionID {
|
||||
pdk.Log(pdk.LogInfo, "Scheduling reconnection attempt in 5 seconds...")
|
||||
delay := getReconnectDelay()
|
||||
pdk.Log(pdk.LogInfo, fmt.Sprintf("Scheduling reconnection attempt in %d seconds...", delay))
|
||||
|
||||
// Schedule a one-time reconnection attempt
|
||||
_, err := host.SchedulerScheduleOneTime(5, "reconnect", reconnectScheduleID)
|
||||
_, err := host.SchedulerScheduleOneTime(delay, "reconnect", reconnectScheduleID)
|
||||
if err != nil {
|
||||
pdk.Log(pdk.LogError, fmt.Sprintf("Failed to schedule reconnection: %v", err))
|
||||
}
|
||||
@@ -218,20 +260,16 @@ func (p *cryptoTickerPlugin) OnCallback(input scheduler.SchedulerCallbackRequest
|
||||
pdk.Log(pdk.LogInfo, "Attempting to reconnect to Coinbase WebSocket API...")
|
||||
|
||||
// Get ticker configuration
|
||||
tickerConfig, ok := pdk.GetConfig("tickers")
|
||||
if !ok || tickerConfig == "" {
|
||||
tickerConfig = "BTC,ETH"
|
||||
}
|
||||
|
||||
tickers := parseTickerSymbols(tickerConfig)
|
||||
symbols := getSymbols()
|
||||
|
||||
// Try to connect and subscribe
|
||||
err := connectAndSubscribe(tickers)
|
||||
err := connectAndSubscribe(symbols)
|
||||
if err != nil {
|
||||
pdk.Log(pdk.LogError, fmt.Sprintf("Reconnection failed: %v - will retry in 10 seconds", err))
|
||||
delay := getReconnectDelay() * 2 // Double delay on failure
|
||||
pdk.Log(pdk.LogError, fmt.Sprintf("Reconnection failed: %v - will retry in %d seconds", err, delay))
|
||||
|
||||
// Schedule another attempt
|
||||
_, err := host.SchedulerScheduleOneTime(10, "reconnect", reconnectScheduleID)
|
||||
_, err := host.SchedulerScheduleOneTime(delay, "reconnect", reconnectScheduleID)
|
||||
if err != nil {
|
||||
pdk.Log(pdk.LogError, fmt.Sprintf("Failed to schedule retry: %v", err))
|
||||
}
|
||||
|
||||
@@ -4,6 +4,61 @@
|
||||
"version": "1.0.0",
|
||||
"description": "Real-time cryptocurrency price ticker using Coinbase WebSocket API",
|
||||
"website": "https://github.com/navidrome/navidrome/tree/master/plugins/examples/crypto-ticker",
|
||||
"config": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"symbols": {
|
||||
"type": "array",
|
||||
"title": "Trading Pairs",
|
||||
"description": "Cryptocurrency trading pairs to track (default: BTC-USD)",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"title": "Trading Pair",
|
||||
"pattern": "^[A-Z]{3,5}-[A-Z]{3,5}$",
|
||||
"description": "Trading pair in the format BASE-QUOTE (e.g., BTC-USD, ETH-USD)"
|
||||
},
|
||||
"default": ["BTC-USD"]
|
||||
},
|
||||
"reconnectDelay": {
|
||||
"type": "integer",
|
||||
"title": "Reconnect Delay",
|
||||
"description": "Delay in seconds before attempting to reconnect after connection loss",
|
||||
"default": 5,
|
||||
"minimum": 1,
|
||||
"maximum": 60
|
||||
},
|
||||
"logPrices": {
|
||||
"type": "boolean",
|
||||
"title": "Log Prices",
|
||||
"description": "Whether to log price updates to the server log",
|
||||
"default": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"uiSchema": {
|
||||
"type": "VerticalLayout",
|
||||
"elements": [
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/symbols"
|
||||
},
|
||||
{
|
||||
"type": "HorizontalLayout",
|
||||
"elements": [
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/reconnectDelay"
|
||||
},
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/logPrices"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"permissions": {
|
||||
"config": {
|
||||
"reason": "To read ticker symbols configuration"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Discord Rich Presence Plugin (Rust)
|
||||
|
||||
A Navidrome plugin that displays your currently playing track on Discord using Rich Presence. This is the Rust implementation demonstrating how to use the generated `nd-host` library.
|
||||
A Navidrome plugin that displays your currently playing track on Discord using Rich Presence. This is the Rust implementation demonstrating how to use the `nd-pdk` library.
|
||||
|
||||
## ⚠️ Warning
|
||||
|
||||
@@ -21,20 +21,20 @@ This plugin is for **demonstration purposes only**. It requires storing your Dis
|
||||
|
||||
## Capabilities
|
||||
|
||||
This plugin implements three capabilities to demonstrate the nd-host library:
|
||||
This plugin implements multiple capabilities to demonstrate the nd-pdk library:
|
||||
|
||||
- **Scrobbler**: Receives now-playing events from Navidrome
|
||||
- **SchedulerCallback**: Handles heartbeat and activity clearing timers
|
||||
- **WebSocketCallback**: Communicates with Discord gateway
|
||||
- **WebSocketCallback**: Communicates with Discord gateway (text, binary, error, and close handlers)
|
||||
|
||||
## Configuration
|
||||
|
||||
Configure in the Navidrome UI (Settings → Plugins → discord-rich-presence):
|
||||
|
||||
| Key | Description | Example |
|
||||
|---------------|-------------------------------------------|--------------------------------|
|
||||
| `clientid` | Your Discord application ID | `123456789012345678` |
|
||||
| `user.<name>` | Discord token for the specified user | `user.alice` = `token123` |
|
||||
| Key | Description | Example |
|
||||
|---------------|--------------------------------------|---------------------------|
|
||||
| `clientid` | Your Discord application ID | `123456789012345678` |
|
||||
| `user.<name>` | Discord token for the specified user | `user.alice` = `token123` |
|
||||
|
||||
Each user is configured as a separate key with the `user.` prefix.
|
||||
|
||||
@@ -69,27 +69,30 @@ make discord-rich-presence-rs.ndp
|
||||
3. Enable and configure the plugin in the Navidrome UI (Settings → Plugins)
|
||||
4. Restart Navidrome if needed
|
||||
|
||||
## Using nd-host Library
|
||||
## Using nd-pdk Library
|
||||
|
||||
This plugin demonstrates how to use the generated Rust host function wrappers:
|
||||
This plugin demonstrates how to use the Rust plugin development kit:
|
||||
|
||||
```rust
|
||||
use nd_host::{artwork, cache, scheduler, websocket};
|
||||
use nd_pdk::host::{artwork, cache, scheduler, websocket};
|
||||
use std::collections::HashMap;
|
||||
|
||||
// Get artwork URL
|
||||
let (url, _) = artwork::artwork_get_track_url(track_id, 300)?;
|
||||
let url = artwork::get_track_url(track_id, 300)?;
|
||||
|
||||
// Cache operations
|
||||
cache::cache_set_string("key", "value", 3600)?;
|
||||
let (value, exists) = cache::cache_get_string("key")?;
|
||||
cache::set_string("key", "value", 3600)?;
|
||||
if let Some(value) = cache::get_string("key")? {
|
||||
// Use the cached value
|
||||
}
|
||||
|
||||
// Schedule tasks
|
||||
scheduler::scheduler_schedule_one_time(60, "payload", "task-id")?;
|
||||
scheduler::scheduler_schedule_recurring("@every 30s", "heartbeat", "heartbeat-task")?;
|
||||
scheduler::schedule_one_time(60, "payload", "task-id")?;
|
||||
scheduler::schedule_recurring("@every 30s", "heartbeat", "heartbeat-task")?;
|
||||
|
||||
// WebSocket operations
|
||||
let conn_id = websocket::websocket_connect("wss://example.com/socket")?;
|
||||
websocket::websocket_send_text(&conn_id, "Hello")?;
|
||||
let conn_id = websocket::connect("wss://example.com/socket", HashMap::new(), "my-conn")?;
|
||||
websocket::send_text(&conn_id, "Hello")?;
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
@@ -25,5 +25,74 @@
|
||||
"artwork": {
|
||||
"reason": "To get track artwork URLs for rich presence display"
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"clientid": {
|
||||
"type": "string",
|
||||
"title": "Discord Application Client ID",
|
||||
"description": "The Client ID from your Discord Developer Application. Create one at https://discord.com/developers/applications",
|
||||
"minLength": 17,
|
||||
"maxLength": 20,
|
||||
"pattern": "^[0-9]+$"
|
||||
},
|
||||
"users": {
|
||||
"type": "array",
|
||||
"title": "User Tokens",
|
||||
"description": "Discord tokens for each Navidrome user. WARNING: Store tokens securely!",
|
||||
"minItems": 1,
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string",
|
||||
"title": "Navidrome Username",
|
||||
"description": "The Navidrome username to associate with this Discord token",
|
||||
"minLength": 1
|
||||
},
|
||||
"token": {
|
||||
"type": "string",
|
||||
"title": "Discord Token",
|
||||
"description": "The user's Discord token (keep this secret!)",
|
||||
"minLength": 1
|
||||
}
|
||||
},
|
||||
"required": ["username", "token"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["clientid", "users"]
|
||||
},
|
||||
"uiSchema": {
|
||||
"type": "VerticalLayout",
|
||||
"elements": [
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/clientid"
|
||||
},
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/users",
|
||||
"options": {
|
||||
"elementLabelProp": "username",
|
||||
"detail": {
|
||||
"type": "HorizontalLayout",
|
||||
"elements": [
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/username"
|
||||
},
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/token"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
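For reference, a configuration document that satisfies the schema above: `clientid` must be a 17–20 digit string and `users` needs at least one username/token pair. The values below are placeholders; `userToken` mirrors the struct the Go implementation later in this diff uses to decode the array.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// userToken mirrors the struct the Go plugin uses to decode the "users" array.
type userToken struct {
	Username string `json:"username"`
	Token    string `json:"token"`
}

func main() {
	// Placeholder values; a real client ID is the numeric Discord application ID.
	configJSON := `{
		"clientid": "123456789012345678",
		"users": [
			{"username": "alice", "token": "discord_token_for_alice"}
		]
	}`

	var cfg struct {
		ClientID string      `json:"clientid"`
		Users    []userToken `json:"users"`
	}
	if err := json.Unmarshal([]byte(configJSON), &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.ClientID, cfg.Users[0].Username)
}
```

When this is saved, the `users` array reaches the plugin serialized as a JSON string, per the string-only config flattening elsewhere in this changeset.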
|
||||
@@ -8,12 +8,9 @@
|
||||
//!
|
||||
//! ## Configuration
|
||||
//!
|
||||
//! ```toml
|
||||
//! [PluginConfig.discord-rich-presence-rs]
|
||||
//! clientid = "YOUR_DISCORD_APPLICATION_ID"
|
||||
//! "user.username1" = "discord_token1"
|
||||
//! "user.username2" = "discord_token2"
|
||||
//! ```
|
||||
//! Configure this plugin through the Navidrome UI with:
|
||||
//! - Discord Application Client ID
|
||||
//! - User tokens array mapping Navidrome usernames to Discord tokens
|
||||
//!
|
||||
//! **WARNING**: This plugin is for demonstration purposes only. Storing Discord tokens
|
||||
//! in configuration files is not secure and may violate Discord's terms of service.
|
||||
@@ -32,6 +29,7 @@ use nd_pdk::websocket::{
|
||||
OnBinaryMessageRequest, OnCloseRequest, OnErrorRequest, OnTextMessageRequest,
|
||||
TextMessageProvider,
|
||||
};
|
||||
use serde::Deserialize;
|
||||
|
||||
mod rpc;
|
||||
|
||||
@@ -48,7 +46,7 @@ nd_pdk::register_websocket_close!(DiscordPlugin);
|
||||
// ============================================================================
|
||||
|
||||
const CLIENT_ID_KEY: &str = "clientid";
|
||||
const USER_KEY_PREFIX: &str = "user.";
|
||||
const USERS_KEY: &str = "users";
|
||||
const PAYLOAD_HEARTBEAT: &str = "heartbeat";
|
||||
const PAYLOAD_CLEAR_ACTIVITY: &str = "clear-activity";
|
||||
|
||||
@@ -64,19 +62,31 @@ struct DiscordPlugin;
|
||||
// Configuration
|
||||
// ============================================================================
|
||||
|
||||
/// User token entry from the config schema
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct UserToken {
|
||||
username: String,
|
||||
token: String,
|
||||
}
|
||||
|
||||
fn get_config() -> Result<(String, std::collections::HashMap<String, String>), Error> {
|
||||
let client_id = config::get(CLIENT_ID_KEY)?
|
||||
.filter(|s| !s.is_empty())
|
||||
.ok_or_else(|| Error::msg("missing clientid in configuration"))?;
|
||||
|
||||
// Get all user keys with the "user." prefix
|
||||
let user_keys = config::keys(USER_KEY_PREFIX)?;
|
||||
|
||||
// Get users array from config (JSON format)
|
||||
let users_json = config::get(USERS_KEY)?.unwrap_or_default();
|
||||
|
||||
let mut users = std::collections::HashMap::new();
|
||||
for key in user_keys {
|
||||
let username = key.strip_prefix(USER_KEY_PREFIX).unwrap_or(&key);
|
||||
if let Some(token) = config::get(&key)?.filter(|s| !s.is_empty()) {
|
||||
users.insert(username.to_string(), token);
|
||||
if !users_json.is_empty() {
|
||||
// Parse JSON array of user tokens
|
||||
let user_tokens: Vec<UserToken> = serde_json::from_str(&users_json)
|
||||
.map_err(|e| Error::msg(format!("failed to parse users config: {}", e)))?;
|
||||
|
||||
for user_token in user_tokens {
|
||||
if !user_token.username.is_empty() && !user_token.token.is_empty() {
|
||||
users.insert(user_token.username, user_token.token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
@@ -24,10 +25,16 @@ import (
|
||||
|
||||
// Configuration keys
|
||||
const (
|
||||
clientIDKey = "clientid"
|
||||
userKeyPrefix = "user."
|
||||
clientIDKey = "clientid"
|
||||
usersKey = "users"
|
||||
)
|
||||
|
||||
// userToken represents a user-token mapping from the config
|
||||
type userToken struct {
|
||||
Username string `json:"username"`
|
||||
Token string `json:"token"`
|
||||
}
|
||||
|
||||
// discordPlugin implements the scrobbler and scheduler interfaces.
|
||||
type discordPlugin struct{}
|
||||
|
||||
@@ -49,24 +56,35 @@ func getConfig() (clientID string, users map[string]string, err error) {
|
||||
return "", nil, nil
|
||||
}
|
||||
|
||||
// Get all user keys with the "user." prefix
|
||||
userKeys := host.ConfigKeys(userKeyPrefix)
|
||||
if len(userKeys) == 0 {
|
||||
// Get the users array from config
|
||||
usersJSON, ok := pdk.GetConfig(usersKey)
|
||||
if !ok || usersJSON == "" {
|
||||
pdk.Log(pdk.LogWarn, "no users configured")
|
||||
return clientID, nil, nil
|
||||
}
|
||||
|
||||
// Parse the JSON array
|
||||
var userTokens []userToken
|
||||
if err := json.Unmarshal([]byte(usersJSON), &userTokens); err != nil {
|
||||
pdk.Log(pdk.LogError, fmt.Sprintf("failed to parse users config: %v", err))
|
||||
return clientID, nil, nil
|
||||
}
|
||||
|
||||
if len(userTokens) == 0 {
|
||||
pdk.Log(pdk.LogWarn, "no users configured")
|
||||
return clientID, nil, nil
|
||||
}
|
||||
|
||||
// Build the users map
|
||||
users = make(map[string]string)
|
||||
for _, key := range userKeys {
|
||||
username := strings.TrimPrefix(key, userKeyPrefix)
|
||||
token, exists := host.ConfigGet(key)
|
||||
if exists && token != "" {
|
||||
users[username] = token
|
||||
for _, ut := range userTokens {
|
||||
if ut.Username != "" && ut.Token != "" {
|
||||
users[ut.Username] = ut.Token
|
||||
}
|
||||
}
|
||||
|
||||
if len(users) == 0 {
|
||||
pdk.Log(pdk.LogWarn, "no users configured")
|
||||
pdk.Log(pdk.LogWarn, "no valid users configured")
|
||||
return clientID, nil, nil
|
||||
}
|
||||
|
||||
|
||||
@@ -29,5 +29,74 @@
|
||||
"artwork": {
|
||||
"reason": "To get track artwork URLs for rich presence display"
|
||||
}
|
||||
},
|
||||
"config": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"clientid": {
|
||||
"type": "string",
|
||||
"title": "Discord Application Client ID",
|
||||
"description": "The Client ID from your Discord Developer Application. Create one at https://discord.com/developers/applications",
|
||||
"minLength": 17,
|
||||
"maxLength": 20,
|
||||
"pattern": "^[0-9]+$"
|
||||
},
|
||||
"users": {
|
||||
"type": "array",
|
||||
"title": "User Tokens",
|
||||
"description": "Discord tokens for each Navidrome user. WARNING: Store tokens securely!",
|
||||
"minItems": 1,
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string",
|
||||
"title": "Navidrome Username",
|
||||
"description": "The Navidrome username to associate with this Discord token",
|
||||
"minLength": 1
|
||||
},
|
||||
"token": {
|
||||
"type": "string",
|
||||
"title": "Discord Token",
|
||||
"description": "The user's Discord token (keep this secret!)",
|
||||
"minLength": 1
|
||||
}
|
||||
},
|
||||
"required": ["username", "token"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["clientid", "users"]
|
||||
},
|
||||
"uiSchema": {
|
||||
"type": "VerticalLayout",
|
||||
"elements": [
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/clientid"
|
||||
},
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/users",
|
||||
"options": {
|
||||
"elementLabelProp": "username",
|
||||
"detail": {
|
||||
"type": "HorizontalLayout",
|
||||
"elements": [
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/username"
|
||||
},
|
||||
{
|
||||
"type": "Control",
|
||||
"scope": "#/properties/token"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,6 +20,105 @@ import (
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
// testConfigInput is the input for nd_test_config callback.
|
||||
type testConfigInput struct {
|
||||
Operation string `json:"operation"`
|
||||
Key string `json:"key,omitempty"`
|
||||
Prefix string `json:"prefix,omitempty"`
|
||||
}
|
||||
|
||||
// testConfigOutput is the output from nd_test_config callback.
|
||||
type testConfigOutput struct {
|
||||
StringVal string `json:"string_val,omitempty"`
|
||||
IntVal int64 `json:"int_val,omitempty"`
|
||||
Keys []string `json:"keys,omitempty"`
|
||||
Exists bool `json:"exists,omitempty"`
|
||||
Error *string `json:"error,omitempty"`
|
||||
}
|
||||
|
||||
// setupTestConfigPlugin sets up a test environment with the test-config plugin loaded.
|
||||
// Returns a cleanup function and a helper to call the plugin's nd_test_config function.
|
||||
func setupTestConfigPlugin(configJSON string) (*Manager, func(context.Context, testConfigInput) (*testConfigOutput, error)) {
|
||||
tmpDir, err := os.MkdirTemp("", "config-test-*")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Copy the test-config plugin
|
||||
srcPath := filepath.Join(testdataDir, "test-config"+PackageExtension)
|
||||
destPath := filepath.Join(tmpDir, "test-config"+PackageExtension)
|
||||
data, err := os.ReadFile(srcPath)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
err = os.WriteFile(destPath, data, 0600)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Compute SHA256 for the plugin
|
||||
hash := sha256.Sum256(data)
|
||||
hashHex := hex.EncodeToString(hash[:])
|
||||
|
||||
// Setup config
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.Plugins.Enabled = true
|
||||
conf.Server.Plugins.Folder = tmpDir
|
||||
conf.Server.Plugins.AutoReload = false
|
||||
conf.Server.CacheFolder = filepath.Join(tmpDir, "cache")
|
||||
|
||||
// Setup mock DataStore
|
||||
mockPluginRepo := tests.CreateMockPluginRepo()
|
||||
mockPluginRepo.Permitted = true
|
||||
mockPluginRepo.SetData(model.Plugins{{
|
||||
ID: "test-config",
|
||||
Path: destPath,
|
||||
SHA256: hashHex,
|
||||
Enabled: true,
|
||||
AllUsers: true,
|
||||
Config: configJSON,
|
||||
}})
|
||||
dataStore := &tests.MockDataStore{MockedPlugin: mockPluginRepo}
|
||||
|
||||
// Create and start manager
|
||||
manager := &Manager{
|
||||
plugins: make(map[string]*plugin),
|
||||
ds: dataStore,
|
||||
subsonicRouter: http.NotFoundHandler(),
|
||||
}
|
||||
err = manager.Start(GinkgoT().Context())
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
DeferCleanup(func() {
|
||||
_ = manager.Stop()
|
||||
_ = os.RemoveAll(tmpDir)
|
||||
})
|
||||
|
||||
// Helper to call test plugin's exported function
|
||||
callTestConfig := func(ctx context.Context, input testConfigInput) (*testConfigOutput, error) {
|
||||
manager.mu.RLock()
|
||||
p := manager.plugins["test-config"]
|
||||
manager.mu.RUnlock()
|
||||
|
||||
instance, err := p.instance(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer instance.Close(ctx)
|
||||
|
||||
inputBytes, _ := json.Marshal(input)
|
||||
_, outputBytes, err := instance.Call("nd_test_config", inputBytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var output testConfigOutput
|
||||
if err := json.Unmarshal(outputBytes, &output); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if output.Error != nil {
|
||||
return nil, errors.New(*output.Error)
|
||||
}
|
||||
return &output, nil
|
||||
}
|
||||
|
||||
return manager, callTestConfig
|
||||
}
|
||||
|
||||
var _ = Describe("ConfigService", func() {
|
||||
var service *configServiceImpl
|
||||
var ctx context.Context
|
||||
@@ -144,59 +243,12 @@ var _ = Describe("ConfigService", func() {
|
||||
|
||||
var _ = Describe("ConfigService Integration", Ordered, func() {
|
||||
var (
|
||||
manager *Manager
|
||||
tmpDir string
|
||||
manager *Manager
|
||||
callTestConfig func(context.Context, testConfigInput) (*testConfigOutput, error)
|
||||
)
|
||||
|
||||
BeforeAll(func() {
|
||||
var err error
|
||||
tmpDir, err = os.MkdirTemp("", "config-test-*")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Copy the test-config plugin
|
||||
srcPath := filepath.Join(testdataDir, "test-config"+PackageExtension)
|
||||
destPath := filepath.Join(tmpDir, "test-config"+PackageExtension)
|
||||
data, err := os.ReadFile(srcPath)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
err = os.WriteFile(destPath, data, 0600)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// Compute SHA256 for the plugin
|
||||
hash := sha256.Sum256(data)
|
||||
hashHex := hex.EncodeToString(hash[:])
|
||||
|
||||
// Setup config
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.Plugins.Enabled = true
|
||||
conf.Server.Plugins.Folder = tmpDir
|
||||
conf.Server.Plugins.AutoReload = false
|
||||
conf.Server.CacheFolder = filepath.Join(tmpDir, "cache")
|
||||
|
||||
// Setup mock DataStore with pre-enabled plugin and config
|
||||
mockPluginRepo := tests.CreateMockPluginRepo()
|
||||
mockPluginRepo.Permitted = true
|
||||
mockPluginRepo.SetData(model.Plugins{{
|
||||
ID: "test-config",
|
||||
Path: destPath,
|
||||
SHA256: hashHex,
|
||||
Enabled: true,
|
||||
Config: `{"api_key":"test_secret","max_retries":"5","timeout":"30"}`,
|
||||
}})
|
||||
dataStore := &tests.MockDataStore{MockedPlugin: mockPluginRepo}
|
||||
|
||||
// Create and start manager
|
||||
manager = &Manager{
|
||||
plugins: make(map[string]*plugin),
|
||||
ds: dataStore,
|
||||
subsonicRouter: http.NotFoundHandler(),
|
||||
}
|
||||
err = manager.Start(GinkgoT().Context())
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
DeferCleanup(func() {
|
||||
_ = manager.Stop()
|
||||
_ = os.RemoveAll(tmpDir)
|
||||
})
|
||||
manager, callTestConfig = setupTestConfigPlugin(`{"api_key":"test_secret","max_retries":"5","timeout":"30"}`)
|
||||
})
|
||||
|
||||
Describe("Plugin Loading", func() {
|
||||
@@ -205,54 +257,11 @@ var _ = Describe("ConfigService Integration", Ordered, func() {
|
||||
p, ok := manager.plugins["test-config"]
|
||||
manager.mu.RUnlock()
|
||||
Expect(ok).To(BeTrue())
|
||||
// Config service doesn't require permission, so Permissions can be nil
|
||||
// Just verify the plugin loaded
|
||||
Expect(p.manifest.Name).To(Equal("Test Config Plugin"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("Config Operations via Plugin", func() {
|
||||
type testConfigInput struct {
|
||||
Operation string `json:"operation"`
|
||||
Key string `json:"key,omitempty"`
|
||||
Prefix string `json:"prefix,omitempty"`
|
||||
}
|
||||
type testConfigOutput struct {
|
||||
StringVal string `json:"string_val,omitempty"`
|
||||
IntVal int64 `json:"int_val,omitempty"`
|
||||
Keys []string `json:"keys,omitempty"`
|
||||
Exists bool `json:"exists,omitempty"`
|
||||
Error *string `json:"error,omitempty"`
|
||||
}
|
||||
|
||||
// Helper to call test plugin's exported function
|
||||
callTestConfig := func(ctx context.Context, input testConfigInput) (*testConfigOutput, error) {
|
||||
manager.mu.RLock()
|
||||
p := manager.plugins["test-config"]
|
||||
manager.mu.RUnlock()
|
||||
|
||||
instance, err := p.instance(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer instance.Close(ctx)
|
||||
|
||||
inputBytes, _ := json.Marshal(input)
|
||||
_, outputBytes, err := instance.Call("nd_test_config", inputBytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var output testConfigOutput
|
||||
if err := json.Unmarshal(outputBytes, &output); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if output.Error != nil {
|
||||
return nil, errors.New(*output.Error)
|
||||
}
|
||||
return &output, nil
|
||||
}
|
||||
|
||||
It("should get string value", func() {
|
||||
output, err := callTestConfig(GinkgoT().Context(), testConfigInput{
|
||||
Operation: "get",
|
||||
@@ -285,7 +294,7 @@ var _ = Describe("ConfigService Integration", Ordered, func() {
|
||||
It("should return not exists for non-integer value", func() {
|
||||
output, err := callTestConfig(GinkgoT().Context(), testConfigInput{
|
||||
Operation: "get_int",
|
||||
Key: "api_key", // This is a string, not an integer
|
||||
Key: "api_key",
|
||||
})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(output.Exists).To(BeFalse())
|
||||
@@ -310,3 +319,64 @@ var _ = Describe("ConfigService Integration", Ordered, func() {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
var _ = Describe("Complex Config Values Integration", Ordered, func() {
|
||||
var callTestConfig func(context.Context, testConfigInput) (*testConfigOutput, error)
|
||||
|
||||
BeforeAll(func() {
|
||||
// Config with arrays and objects - these should be properly serialized as JSON strings
|
||||
_, callTestConfig = setupTestConfigPlugin(`{"api_key":"secret123","users":[{"username":"admin","token":"tok1"},{"username":"user2","token":"tok2"}],"settings":{"enabled":true,"count":5}}`)
|
||||
})
|
||||
|
||||
Describe("Config Serialization", func() {
|
||||
It("should make simple string config values accessible to plugin", func() {
|
||||
output, err := callTestConfig(GinkgoT().Context(), testConfigInput{
|
||||
Operation: "get",
|
||||
Key: "api_key",
|
||||
})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(output.Exists).To(BeTrue())
|
||||
Expect(output.StringVal).To(Equal("secret123"))
|
||||
})
|
||||
|
||||
It("should serialize array config values as JSON strings", func() {
|
||||
output, err := callTestConfig(GinkgoT().Context(), testConfigInput{
|
||||
Operation: "get",
|
||||
Key: "users",
|
||||
})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(output.Exists).To(BeTrue())
|
||||
// Array values are serialized as JSON strings - parse to verify structure
|
||||
var users []map[string]string
|
||||
Expect(json.Unmarshal([]byte(output.StringVal), &users)).To(Succeed())
|
||||
Expect(users).To(HaveLen(2))
|
||||
Expect(users[0]).To(HaveKeyWithValue("username", "admin"))
|
||||
Expect(users[0]).To(HaveKeyWithValue("token", "tok1"))
|
||||
Expect(users[1]).To(HaveKeyWithValue("username", "user2"))
|
||||
Expect(users[1]).To(HaveKeyWithValue("token", "tok2"))
|
||||
})
|
||||
|
||||
It("should serialize object config values as JSON strings", func() {
|
||||
output, err := callTestConfig(GinkgoT().Context(), testConfigInput{
|
||||
Operation: "get",
|
||||
Key: "settings",
|
||||
})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(output.Exists).To(BeTrue())
|
||||
// Object values are serialized as JSON strings - parse to verify structure
|
||||
var settings map[string]any
|
||||
Expect(json.Unmarshal([]byte(output.StringVal), &settings)).To(Succeed())
|
||||
Expect(settings).To(HaveKeyWithValue("enabled", true))
|
||||
Expect(settings).To(HaveKeyWithValue("count", float64(5)))
|
||||
})
|
||||
|
||||
It("should list all config keys including complex values", func() {
|
||||
output, err := callTestConfig(GinkgoT().Context(), testConfigInput{
|
||||
Operation: "list",
|
||||
Prefix: "",
|
||||
})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(output.Keys).To(ConsistOf("api_key", "users", "settings"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -381,6 +381,30 @@ func (m *Manager) DisablePlugin(ctx context.Context, id string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidatePluginConfig validates a config JSON string against the plugin's config schema.
|
||||
// If the plugin has no config schema defined, it returns an error.
|
||||
// Returns nil if validation passes, or an error describing the validation failure.
|
||||
func (m *Manager) ValidatePluginConfig(ctx context.Context, id, configJSON string) error {
|
||||
if m.ds == nil {
|
||||
return fmt.Errorf("datastore not configured")
|
||||
}
|
||||
|
||||
adminCtx := adminContext(ctx)
|
||||
repo := m.ds.Plugin(adminCtx)
|
||||
|
||||
plugin, err := repo.Get(id)
|
||||
if err != nil {
|
||||
return fmt.Errorf("getting plugin from DB: %w", err)
|
||||
}
|
||||
|
||||
manifest, err := readManifest(plugin.Path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("reading manifest: %w", err)
|
||||
}
|
||||
|
||||
return ValidateConfig(manifest, configJSON)
|
||||
}
|
||||
|
||||
// UpdatePluginConfig updates the configuration for a plugin.
|
||||
// If the plugin is enabled, it will be reloaded with the new config.
|
||||
func (m *Manager) UpdatePluginConfig(ctx context.Context, id, configJSON string) error {
|
||||
|
||||
@@ -230,11 +230,9 @@ func (m *Manager) loadPluginWithConfig(p *model.Plugin) error {
|
||||
}
|
||||
|
||||
// Parse config from JSON
|
||||
var pluginConfig map[string]string
|
||||
if p.Config != "" {
|
||||
if err := json.Unmarshal([]byte(p.Config), &pluginConfig); err != nil {
|
||||
return fmt.Errorf("parsing plugin config: %w", err)
|
||||
}
|
||||
pluginConfig, err := parsePluginConfig(p.Config)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Parse users from JSON
|
||||
@@ -379,3 +377,30 @@ func (m *Manager) loadPluginWithConfig(p *model.Plugin) error {
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// parsePluginConfig parses a JSON config string into a map of string values.
|
||||
// For Extism, all config values must be strings, so non-string values are serialized as JSON.
|
||||
func parsePluginConfig(configJSON string) (map[string]string, error) {
|
||||
if configJSON == "" {
|
||||
return nil, nil
|
||||
}
|
||||
var rawConfig map[string]any
|
||||
if err := json.Unmarshal([]byte(configJSON), &rawConfig); err != nil {
|
||||
return nil, fmt.Errorf("parsing plugin config: %w", err)
|
||||
}
|
||||
pluginConfig := make(map[string]string)
|
||||
for key, value := range rawConfig {
|
||||
switch v := value.(type) {
|
||||
case string:
|
||||
pluginConfig[key] = v
|
||||
default:
|
||||
// Serialize non-string values as JSON
|
||||
jsonBytes, err := json.Marshal(v)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("serializing config value %q: %w", key, err)
|
||||
}
|
||||
pluginConfig[key] = string(jsonBytes)
|
||||
}
|
||||
}
|
||||
return pluginConfig, nil
|
||||
}
|
||||
|
||||
60
plugins/manager_loader_test.go
Normal file
@@ -0,0 +1,60 @@
|
||||
//go:build !windows
|
||||
|
||||
package plugins
|
||||
|
||||
import (
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("parsePluginConfig", func() {
|
||||
It("returns nil for empty string", func() {
|
||||
result, err := parsePluginConfig("")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(result).To(BeNil())
|
||||
})
|
||||
|
||||
It("serializes object values as JSON strings", func() {
|
||||
result, err := parsePluginConfig(`{"settings": {"enabled": true, "count": 5}}`)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(result).To(HaveLen(1))
|
||||
Expect(result["settings"]).To(Equal(`{"count":5,"enabled":true}`))
|
||||
})
|
||||
|
||||
It("handles mixed value types", func() {
|
||||
result, err := parsePluginConfig(`{"api_key": "secret", "timeout": 30, "rate": 1.5, "enabled": true, "tags": ["a", "b"]}`)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(result).To(HaveLen(5))
|
||||
Expect(result["api_key"]).To(Equal("secret"))
|
||||
Expect(result["timeout"]).To(Equal("30"))
|
||||
Expect(result["rate"]).To(Equal("1.5"))
|
||||
Expect(result["enabled"]).To(Equal("true"))
|
||||
Expect(result["tags"]).To(Equal(`["a","b"]`))
|
||||
})
|
||||
|
||||
It("returns error for invalid JSON", func() {
|
||||
_, err := parsePluginConfig(`{invalid json}`)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("parsing plugin config"))
|
||||
})
|
||||
|
||||
It("returns error for non-object JSON", func() {
|
||||
_, err := parsePluginConfig(`["array", "not", "object"]`)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("parsing plugin config"))
|
||||
})
|
||||
|
||||
It("handles null values", func() {
|
||||
result, err := parsePluginConfig(`{"key": null}`)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(result).To(HaveLen(1))
|
||||
Expect(result["key"]).To(Equal("null"))
|
||||
})
|
||||
|
||||
It("handles empty object", func() {
|
||||
result, err := parsePluginConfig(`{}`)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(result).To(HaveLen(0))
|
||||
Expect(result).ToNot(BeNil())
|
||||
})
|
||||
})
|
||||
@@ -36,9 +36,28 @@
|
||||
},
|
||||
"experimental": {
|
||||
"$ref": "#/$defs/Experimental"
|
||||
},
|
||||
"config": {
|
||||
"$ref": "#/$defs/ConfigDefinition"
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"ConfigDefinition": {
|
||||
"type": "object",
|
||||
"description": "Configuration schema for the plugin using JSON Schema (draft-07) and optional JSONForms UI Schema",
|
||||
"additionalProperties": false,
|
||||
"required": ["schema"],
|
||||
"properties": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"description": "JSON Schema (draft-07) defining the plugin's configuration options"
|
||||
},
|
||||
"uiSchema": {
|
||||
"type": "object",
|
||||
"description": "Optional JSONForms UI Schema for customizing form layout"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Experimental": {
|
||||
"type": "object",
|
||||
"description": "Experimental features that may change or be removed in future versions",
|
||||
|
||||
@@ -3,6 +3,8 @@ package plugins
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
)
|
||||
|
||||
//go:generate go tool go-jsonschema -p plugins --struct-name-from-title -o manifest_gen.go manifest-schema.json
|
||||
@@ -29,6 +31,26 @@ func (m *Manifest) Validate() error {
|
||||
return fmt.Errorf("'subsonicapi' permission requires 'users' permission to be declared")
|
||||
}
|
||||
}
|
||||
|
||||
// Validate config schema if present
|
||||
if m.Config != nil && m.Config.Schema != nil {
|
||||
if err := validateConfigSchema(m.Config.Schema); err != nil {
|
||||
return fmt.Errorf("invalid config schema: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// validateConfigSchema validates that the schema is a valid JSON Schema that can be compiled.
|
||||
func validateConfigSchema(schema map[string]any) error {
|
||||
compiler := jsonschema.NewCompiler()
|
||||
if err := compiler.AddResource("schema.json", schema); err != nil {
|
||||
return fmt.Errorf("invalid schema structure: %w", err)
|
||||
}
|
||||
if _, err := compiler.Compile("schema.json"); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
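ValidateConfig itself is not shown in this hunk; assuming it reuses the same jsonschema/v6 compiler as validateConfigSchema above, a minimal sketch could look like the following. The helper name and error wrapping are illustrative, not the actual implementation.

```go
package plugins

import (
	"encoding/json"
	"fmt"

	"github.com/santhosh-tekuri/jsonschema/v6"
)

// validateConfigAgainstSchema is a sketch of how a compiled config schema
// could be used to validate actual config JSON (ValidateConfig itself is not
// shown in this changeset, so this is an assumption, not the real code).
func validateConfigAgainstSchema(schema map[string]any, configJSON string) error {
	if configJSON == "" {
		return nil // assumed: an empty config is only rejected by "required" rules elsewhere
	}
	compiler := jsonschema.NewCompiler()
	if err := compiler.AddResource("schema.json", schema); err != nil {
		return fmt.Errorf("invalid schema structure: %w", err)
	}
	sch, err := compiler.Compile("schema.json")
	if err != nil {
		return err
	}
	var instance any
	if err := json.Unmarshal([]byte(configJSON), &instance); err != nil {
		return fmt.Errorf("parsing config: %w", err)
	}
	return sch.Validate(instance)
}
```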
|
||||
|
||||
|
||||
@@ -17,6 +17,34 @@ type CachePermission struct {
|
||||
Reason *string `json:"reason,omitempty" yaml:"reason,omitempty" mapstructure:"reason,omitempty"`
|
||||
}
|
||||
|
||||
// Configuration schema for the plugin using JSON Schema (draft-07) and optional
|
||||
// JSONForms UI Schema
|
||||
type ConfigDefinition struct {
|
||||
// JSON Schema (draft-07) defining the plugin's configuration options
|
||||
Schema map[string]interface{} `json:"schema" yaml:"schema" mapstructure:"schema"`
|
||||
|
||||
// Optional JSONForms UI Schema for customizing form layout
|
||||
UiSchema map[string]interface{} `json:"uiSchema,omitempty" yaml:"uiSchema,omitempty" mapstructure:"uiSchema,omitempty"`
|
||||
}
|
||||
|
||||
// UnmarshalJSON implements json.Unmarshaler.
|
||||
func (j *ConfigDefinition) UnmarshalJSON(value []byte) error {
|
||||
var raw map[string]interface{}
|
||||
if err := json.Unmarshal(value, &raw); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, ok := raw["schema"]; raw != nil && !ok {
|
||||
return fmt.Errorf("field schema in ConfigDefinition: required")
|
||||
}
|
||||
type Plain ConfigDefinition
|
||||
var plain Plain
|
||||
if err := json.Unmarshal(value, &plain); err != nil {
|
||||
return err
|
||||
}
|
||||
*j = ConfigDefinition(plain)
|
||||
return nil
|
||||
}
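A consequence of the generated UnmarshalJSON above is that a manifest config block without a schema field fails to decode, while uiSchema stays optional. A plain-testing sketch of that behavior (the repository's own tests use Ginkgo; this is only for illustration):

```go
package plugins

import (
	"encoding/json"
	"testing"
)

// Illustrates the behavior enforced by ConfigDefinition.UnmarshalJSON above:
// "schema" is required, "uiSchema" is optional.
func TestConfigDefinitionRequiresSchema(t *testing.T) {
	var def ConfigDefinition

	// Missing "schema" is rejected with: field schema in ConfigDefinition: required
	if err := json.Unmarshal([]byte(`{"uiSchema": {}}`), &def); err == nil {
		t.Fatal("expected an error for a config block without a schema")
	}

	// A minimal valid block only needs "schema".
	if err := json.Unmarshal([]byte(`{"schema": {"type": "object"}}`), &def); err != nil {
		t.Fatalf("minimal config block should decode: %v", err)
	}
}
```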
|
||||
|
||||
// Configuration access permissions for a plugin
|
||||
type ConfigPermission struct {
|
||||
// Explanation for why config access is needed
|
||||
@@ -81,6 +109,9 @@ type Manifest struct {
|
||||
// The author of the plugin
|
||||
Author string `json:"author" yaml:"author" mapstructure:"author"`
|
||||
|
||||
// Config corresponds to the JSON schema field "config".
|
||||
Config *ConfigDefinition `json:"config,omitempty" yaml:"config,omitempty" mapstructure:"config,omitempty"`
|
||||
|
||||
// A brief description of what the plugin does
|
||||
Description *string `json:"description,omitempty" yaml:"description,omitempty" mapstructure:"description,omitempty"`
|
||||
|
||||
|
||||
@@ -286,6 +286,107 @@ var _ = Describe("Manifest", func() {
|
||||
err := m.Validate()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("validates manifest with valid config schema", func() {
|
||||
m := &Manifest{
|
||||
Name: "Test",
|
||||
Author: "Author",
|
||||
Version: "1.0.0",
|
||||
Config: &ConfigDefinition{
|
||||
Schema: map[string]any{
|
||||
"type": "object",
|
||||
"properties": map[string]any{
|
||||
"api_key": map[string]any{
|
||||
"type": "string",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err := m.Validate()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("validates manifest with complex config schema", func() {
|
||||
m := &Manifest{
|
||||
Name: "Test",
|
||||
Author: "Author",
|
||||
Version: "1.0.0",
|
||||
Config: &ConfigDefinition{
|
||||
Schema: map[string]any{
|
||||
"type": "object",
|
||||
"properties": map[string]any{
|
||||
"users": map[string]any{
|
||||
"type": "array",
|
||||
"items": map[string]any{
|
||||
"type": "object",
|
||||
"properties": map[string]any{
|
||||
"username": map[string]any{"type": "string"},
|
||||
"token": map[string]any{"type": "string"},
|
||||
},
|
||||
"required": []any{"username", "token"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err := m.Validate()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("returns error for invalid config schema - bad type", func() {
|
||||
m := &Manifest{
|
||||
Name: "Test",
|
||||
Author: "Author",
|
||||
Version: "1.0.0",
|
||||
Config: &ConfigDefinition{
|
||||
Schema: map[string]any{
|
||||
"type": "invalid_type",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err := m.Validate()
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("config schema"))
|
||||
})
|
||||
|
||||
It("returns error for invalid config schema - bad minLength", func() {
|
||||
m := &Manifest{
|
||||
Name: "Test",
|
||||
Author: "Author",
|
||||
Version: "1.0.0",
|
||||
Config: &ConfigDefinition{
|
||||
Schema: map[string]any{
|
||||
"type": "object",
|
||||
"properties": map[string]any{
|
||||
"name": map[string]any{
|
||||
"type": "string",
|
||||
"minLength": "not_a_number",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err := m.Validate()
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(err.Error()).To(ContainSubstring("config schema"))
|
||||
})
|
||||
|
||||
It("validates manifest without config", func() {
|
||||
m := &Manifest{
|
||||
Name: "Test",
|
||||
Author: "Author",
|
||||
Version: "1.0.0",
|
||||
}
|
||||
|
||||
err := m.Validate()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
})
|
||||
})
|
||||
|
||||
Describe("ValidateWithCapabilities", func() {
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
@@ -42,10 +43,11 @@ func TestPlugins(t *testing.T) {
|
||||
|
||||
func buildTestPlugins(t *testing.T, path string) {
|
||||
t.Helper()
|
||||
start := time.Now()
|
||||
t.Logf("[BeforeSuite] Current working directory: %s", path)
|
||||
cmd := exec.Command("make", "-C", path)
|
||||
out, err := cmd.CombinedOutput()
|
||||
t.Logf("[BeforeSuite] Make output: %s", string(out))
|
||||
t.Logf("[BeforeSuite] Make output: %s elapsed: %s", string(out), time.Since(start))
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to build test plugins: %v", err)
|
||||
}
|
||||
|
||||
57
plugins/testdata/test-config/manifest.json
vendored
@@ -2,5 +2,60 @@
|
||||
"name": "Test Config Plugin",
|
||||
"author": "Navidrome Test",
|
||||
"version": "1.0.0",
|
||||
"description": "A test plugin for config service integration testing"
|
||||
"description": "A test plugin for config service integration testing",
|
||||
"config": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"api_key": {
|
||||
"type": "string",
|
||||
"title": "API Key",
|
||||
"minLength": 1
|
||||
},
|
||||
"max_retries": {
|
||||
"type": "string",
|
||||
"title": "Max Retries"
|
||||
},
|
||||
"timeout": {
|
||||
"type": "string",
|
||||
"title": "Timeout"
|
||||
},
|
||||
"users": {
|
||||
"type": "array",
|
||||
"title": "Users",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string",
|
||||
"title": "Username",
|
||||
"minLength": 1
|
||||
},
|
||||
"token": {
|
||||
"type": "string",
|
||||
"title": "Token",
|
||||
"minLength": 1
|
||||
}
|
||||
},
|
||||
"required": ["username", "token"]
|
||||
}
|
||||
},
|
||||
"settings": {
|
||||
"type": "object",
|
||||
"title": "Settings",
|
||||
"properties": {
|
||||
"enabled": {
|
||||
"type": "boolean",
|
||||
"title": "Enabled"
|
||||
},
|
||||
"count": {
|
||||
"type": "integer",
|
||||
"title": "Count"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["api_key"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,12 +12,16 @@
|
||||
"artist": "Artista",
|
||||
"album": "Álbum",
|
||||
"path": "Ruta del archivo",
|
||||
"libraryName": "Biblioteca",
|
||||
"genre": "Género",
|
||||
"compilation": "Compilación",
|
||||
"year": "Año",
|
||||
"size": "Tamaño del archivo",
|
||||
"updatedAt": "Actualizado el",
|
||||
"bitRate": "Tasa de bits",
|
||||
"bitDepth": "Profundidad de bits",
|
||||
"sampleRate": "Frecuencia de muestreo",
|
||||
"channels": "Canales",
|
||||
"discSubtitle": "Subtítulo del disco",
|
||||
"starred": "Favorito",
|
||||
"comment": "Comentario",
|
||||
@@ -25,7 +29,6 @@
|
||||
"quality": "Calidad",
|
||||
"bpm": "BPM",
|
||||
"playDate": "Últimas reproducciones",
|
||||
"channels": "Canales",
|
||||
"createdAt": "Creado el",
|
||||
"grouping": "Agrupación",
|
||||
"mood": "Estado de ánimo",
|
||||
@@ -33,20 +36,17 @@
|
||||
"tags": "Etiquetas",
|
||||
"mappedTags": "Etiquetas asignadas",
|
||||
"rawTags": "Etiquetas sin procesar",
|
||||
"bitDepth": "Profundidad de bits",
|
||||
"sampleRate": "Frecuencia de muestreo",
|
||||
"missing": "Faltante",
|
||||
"libraryName": "Biblioteca"
|
||||
"missing": "Faltante"
|
||||
},
|
||||
"actions": {
|
||||
"addToQueue": "Reproducir después",
|
||||
"playNow": "Reproducir ahora",
|
||||
"addToPlaylist": "Agregar a la playlist",
|
||||
"showInPlaylist": "Mostrar en la lista de reproducción",
|
||||
"shuffleAll": "Todas aleatorias",
|
||||
"download": "Descarga",
|
||||
"playNext": "Siguiente",
|
||||
"info": "Obtener información",
|
||||
"showInPlaylist": "Mostrar en la lista de reproducción"
|
||||
"info": "Obtener información"
|
||||
}
|
||||
},
|
||||
"album": {
|
||||
@@ -57,38 +57,38 @@
|
||||
"duration": "Duración",
|
||||
"songCount": "Canciones",
|
||||
"playCount": "Reproducciones",
|
||||
"size": "Tamaño del archivo",
|
||||
"name": "Nombre",
|
||||
"libraryName": "Biblioteca",
|
||||
"genre": "Género",
|
||||
"compilation": "Compilación",
|
||||
"year": "Año",
|
||||
"updatedAt": "Actualizado el",
|
||||
"comment": "Comentario",
|
||||
"rating": "Calificación",
|
||||
"createdAt": "Creado el",
|
||||
"size": "Tamaño del archivo",
|
||||
"date": "Fecha de grabación",
|
||||
"originalDate": "Original",
|
||||
"releaseDate": "Publicado",
|
||||
"releases": "Lanzamiento |||| Lanzamientos",
|
||||
"released": "Publicado",
|
||||
"updatedAt": "Actualizado el",
|
||||
"comment": "Comentario",
|
||||
"rating": "Calificación",
|
||||
"createdAt": "Creado el",
|
||||
"recordLabel": "Discográfica",
|
||||
"catalogNum": "Número de catálogo",
|
||||
"releaseType": "Tipo de lanzamiento",
|
||||
"grouping": "Agrupación",
|
||||
"media": "Medios",
|
||||
"mood": "Estado de ánimo",
|
||||
"date": "Fecha de grabación",
|
||||
"missing": "Faltante",
|
||||
"libraryName": "Biblioteca"
|
||||
"missing": "Faltante"
|
||||
},
|
||||
"actions": {
|
||||
"playAll": "Reproducir",
|
||||
"playNext": "Reproducir siguiente",
|
||||
"addToQueue": "Reproducir después",
|
||||
"share": "Compartir",
|
||||
"shuffle": "Aleatorio",
|
||||
"addToPlaylist": "Agregar a la lista",
|
||||
"download": "Descargar",
|
||||
"info": "Obtener información",
|
||||
"share": "Compartir"
|
||||
"info": "Obtener información"
|
||||
},
|
||||
"lists": {
|
||||
"all": "Todos",
|
||||
@@ -106,33 +106,33 @@
|
||||
"name": "Nombre",
|
||||
"albumCount": "Número de álbumes",
|
||||
"songCount": "Número de canciones",
|
||||
"size": "Tamaño",
|
||||
"playCount": "Reproducciones",
|
||||
"rating": "Calificación",
|
||||
"genre": "Género",
|
||||
"size": "Tamaño",
|
||||
"role": "Rol",
|
||||
"missing": "Faltante"
|
||||
},
|
||||
"roles": {
|
||||
"albumartist": "Artista del álbum",
|
||||
"artist": "Artista",
|
||||
"composer": "Compositor",
|
||||
"conductor": "Director de orquesta",
|
||||
"lyricist": "Letrista",
|
||||
"arranger": "Arreglista",
|
||||
"producer": "Productor",
|
||||
"director": "Director",
|
||||
"engineer": "Ingeniero de sonido",
|
||||
"mixer": "Mezclador",
|
||||
"remixer": "Remixer",
|
||||
"djmixer": "DJ Mixer",
|
||||
"performer": "Intérprete",
|
||||
"albumartist": "Artista del álbum |||| Artistas del álbum",
|
||||
"artist": "Artista |||| Artistas",
|
||||
"composer": "Compositor |||| Compositores",
|
||||
"conductor": "Director de orquesta |||| Directores de orquesta",
|
||||
"lyricist": "Letrista |||| Letristas",
|
||||
"arranger": "Arreglista |||| Arreglistas",
|
||||
"producer": "Productor |||| Productores",
|
||||
"director": "Director |||| Directores",
|
||||
"engineer": "Ingeniero de sonido |||| Ingenieros de sonido",
|
||||
"mixer": "Mezclador |||| Mezcladores",
|
||||
"remixer": "Remezclador |||| Remezcladores",
|
||||
"djmixer": "DJ Mezclador |||| DJ Mezcladores",
|
||||
"performer": "Intérprete |||| Intérpretes",
|
||||
"maincredit": "Artista del álbum o Artista |||| Artistas del álbum o Artistas"
|
||||
},
|
||||
"actions": {
|
||||
"topSongs": "Más destacadas",
|
||||
"shuffle": "Aleatorio",
|
||||
"radio": "Radio",
|
||||
"topSongs": "Más destacadas"
|
||||
"radio": "Radio"
|
||||
}
|
||||
},
|
||||
"user": {
|
||||
@@ -141,6 +141,7 @@
|
||||
"userName": "Nombre de usuario",
|
||||
"isAdmin": "Es administrador",
|
||||
"lastLoginAt": "Último inicio de sesión",
|
||||
"lastAccessAt": "Último acceso",
|
||||
"updatedAt": "Actualizado el",
|
||||
"name": "Nombre",
|
||||
"password": "Contraseña",
|
||||
@@ -149,7 +150,6 @@
|
||||
"currentPassword": "Contraseña actual",
|
||||
"newPassword": "Nueva contraseña",
|
||||
"token": "Token",
|
||||
"lastAccessAt": "Último acceso",
|
||||
"libraries": "Bibliotecas"
|
||||
},
|
||||
"helperTexts": {
|
||||
@@ -189,7 +189,7 @@
|
||||
"fields": {
|
||||
"name": "Nombre",
|
||||
"targetFormat": "Formato de destino",
|
||||
"defaultBitRate": "Tasa de bits default",
|
||||
"defaultBitRate": "Tasa de bits por defecto",
|
||||
"command": "Comando"
|
||||
}
|
||||
},
|
||||
@@ -211,9 +211,9 @@
|
||||
"selectPlaylist": "Seleccione una lista:",
|
||||
"addNewPlaylist": "Creada \"%{name}\"",
|
||||
"export": "Exportar",
|
||||
"saveQueue": "Guardar la fila de reproducción en una playlist",
|
||||
"makePublic": "Hazla pública",
|
||||
"makePrivate": "Hazla privada",
|
||||
"saveQueue": "Guardar la fila de reproducción en una playlist",
|
||||
"searchOrCreate": "Buscar listas de reproducción o escribe para crear una nueva…",
|
||||
"pressEnterToCreate": "Pulsa Enter para crear una nueva lista de reproducción",
|
||||
"removeFromSelection": "Quitar de la selección"
|
||||
@@ -239,11 +239,12 @@
|
||||
}
|
||||
},
|
||||
"share": {
|
||||
"name": "Compartir",
|
||||
"name": "Compartir |||| Compartidos",
|
||||
"fields": {
|
||||
"username": "Nombre de usuario",
|
||||
"username": "Compartido por",
|
||||
"url": "URL",
|
||||
"description": "Descripción",
|
||||
"downloadable": "¿Permitir descargas?",
|
||||
"contents": "Contenido",
|
||||
"expiresAt": "Caduca el",
|
||||
"lastVisitedAt": "Visitado por última vez el",
|
||||
@@ -251,12 +252,14 @@
|
||||
"format": "Formato",
|
||||
"maxBitRate": "Tasa de bits Máx.",
|
||||
"updatedAt": "Actualizado el",
|
||||
"createdAt": "Creado el",
|
||||
"downloadable": "¿Permitir descargas?"
|
||||
}
|
||||
"createdAt": "Creado el"
|
||||
},
|
||||
"notifications": {},
|
||||
"actions": {}
|
||||
},
|
||||
"missing": {
|
||||
"name": "Faltante",
|
||||
"name": "Fichero faltante |||| Ficheros faltantes",
|
||||
"empty": "No faltan archivos",
|
||||
"fields": {
|
||||
"path": "Ruta",
|
||||
"size": "Tamaño",
|
||||
@@ -269,8 +272,7 @@
|
||||
},
|
||||
"notifications": {
|
||||
"removed": "Eliminado"
|
||||
},
|
||||
"empty": "No hay archivos perdidos"
|
||||
}
|
||||
},
|
||||
"library": {
|
||||
"name": "Biblioteca |||| Bibliotecas",
|
||||
@@ -290,7 +292,7 @@
|
||||
"totalMissingFiles": "Archivos faltantes",
|
||||
"totalSize": "Tamaño total",
|
||||
"totalDuration": "Duración",
|
||||
"defaultNewUsers": "Valor por defecto para los nuevos usuarios",
|
||||
"defaultNewUsers": "Por defecto para nuevos usuarios",
|
||||
"createdAt": "Creado",
|
||||
"updatedAt": "Actualizado"
|
||||
},
|
||||
@@ -300,20 +302,20 @@
|
||||
},
|
||||
"actions": {
|
||||
"scan": "Escanear biblioteca",
|
||||
"manageUsers": "Gestionar el acceso de usarios",
|
||||
"viewDetails": "Ver detalles",
|
||||
"quickScan": "Escaneo rápido",
|
||||
"fullScan": "Escaneo completo"
|
||||
"fullScan": "Escaneo completo",
|
||||
"manageUsers": "Gestionar el acceso de usarios",
|
||||
"viewDetails": "Ver detalles"
|
||||
},
|
||||
"notifications": {
|
||||
"created": "La biblioteca se creó correctamente",
|
||||
"updated": "La biblioteca se actualizó correctamente",
|
||||
"deleted": "La biblioteca se eliminó correctamente",
|
||||
"scanStarted": "El escaneo de la biblioteca ha comenzado",
|
||||
"scanCompleted": "El escaneo de la biblioteca se completó",
|
||||
"quickScanStarted": "Escaneo rápido ha comenzado",
|
||||
"fullScanStarted": "Escaneo completo ha comenzado",
|
||||
"scanError": "Error al iniciar el escaneo. Revisa los registros"
|
||||
"scanError": "Error al iniciar el escaneo. Revisa los registros",
|
||||
"scanCompleted": "El escaneo de la biblioteca se completó"
|
||||
},
|
||||
"validation": {
|
||||
"nameRequired": "El nombre de la biblioteca es obligatorio",
|
||||
@@ -328,6 +330,78 @@
|
||||
"scanInProgress": "Escaneo en curso...",
|
||||
"noLibrariesAssigned": "No hay bibliotecas asignadas a este usuario"
|
||||
}
|
||||
},
|
||||
"plugin": {
|
||||
"name": "Plugin |||| Plugins",
|
||||
"fields": {
|
||||
"id": "ID",
|
||||
"name": "Nombre",
|
||||
"description": "Descripción",
|
||||
"version": "Versión",
|
||||
"author": "Autor",
|
||||
"website": "Web",
|
||||
"permissions": "Permisos",
|
||||
"enabled": "Activado",
|
||||
"status": "Estado",
|
||||
"path": "Ruta",
|
||||
"lastError": "Error",
|
||||
"hasError": "Error",
|
||||
"updatedAt": "Actualizado",
|
||||
"createdAt": "Instalado",
|
||||
"configKey": "Clave",
|
||||
"configValue": "Valor",
|
||||
"allUsers": "Permitir todos los usuarios",
|
||||
"selectedUsers": "Usuarios seleccionados",
|
||||
"allLibraries": "Permitir todas las bibliotecas",
|
||||
"selectedLibraries": "Bibliotecas seleccionadas"
|
||||
},
|
||||
"sections": {
|
||||
"status": "Estado",
|
||||
"info": "Información del Plugin",
|
||||
"configuration": "Configuración",
|
||||
"manifest": "Manifiesto",
|
||||
"usersPermission": "Permiso del usuario",
|
||||
"libraryPermission": "Permiso de la biblioteca"
|
||||
},
|
||||
"status": {
|
||||
"enabled": "Activado",
|
||||
"disabled": "Deshabilitado"
|
||||
},
|
||||
"actions": {
|
||||
"enable": "Activar",
|
||||
"disable": "Desactivar",
|
||||
"disabledDueToError": "Corrige el error antes de activar",
|
||||
"disabledUsersRequired": "Selecciona usuarios antes de activar",
|
||||
"disabledLibrariesRequired": "Selecciona bibliotecas antes de activar",
|
||||
"addConfig": "Añadir configuración",
|
||||
"rescan": "Reescanear"
|
||||
},
|
||||
"notifications": {
|
||||
"enabled": "Plugin activado",
|
||||
"disabled": "Plugin deshabilitado",
|
||||
"updated": "Plugin actualizado",
|
||||
"error": "Error al actualizar el plugin"
|
||||
},
|
||||
"validation": {
|
||||
"invalidJson": "La configuración debe ser un JSON válido"
|
||||
},
|
||||
"messages": {
|
||||
"configHelp": "Configura el plugin utilizando pares de clave-valor. Déjalo en blanco si el plugin no requiere configuración.",
|
||||
"clickPermissions": "Haz clic en un permiso para ver los detalles",
|
||||
"noConfig": "No hay configuración establecida",
|
||||
"allUsersHelp": "Cuando se active, el plugin tendrá acceso a todos los usuarios, incluidos los que se creen en el futuro.",
|
||||
"noUsers": "Ningún usuario seleccionado",
|
||||
"permissionReason": "Razón",
|
||||
"usersRequired": "Este plugin requiere acceso a la información de los usuarios. Selecciona a qué usuarios puede acceder el plugin, o activa 'Permitir todos los usuarios'.",
|
||||
"allLibrariesHelp": "Cuando se active, el plugin tendrá acceso a todas las bibliotecas, incluidas las que se creen en el futuro.",
|
||||
"noLibraries": "Ninguna biblioteca seleccionada",
|
||||
"librariesRequired": "Este plugin requiere acceso a la información de las bibliotecas. Selecciona a qué bibliotecas puede acceder el plugin, o activa 'Permitir todas las bibliotecas'.",
|
||||
"requiredHosts": "Hosts requeridos"
|
||||
},
|
||||
"placeholders": {
|
||||
"configKey": "clave",
|
||||
"configValue": "valor"
|
||||
}
|
||||
}
|
||||
},
|
||||
"ra": {
|
||||
@@ -365,6 +439,7 @@
|
||||
"add": "Añadir",
|
||||
"back": "Ir atrás",
|
||||
"bulk_actions": "1 elemento seleccionado |||| %{smart_count} elementos seleccionados",
|
||||
"bulk_actions_mobile": "1 |||| %{smart_count}",
|
||||
"cancel": "Cancelar",
|
||||
"clear_input_value": "Limpiar valor",
|
||||
"clone": "Duplicar",
|
||||
@@ -388,7 +463,6 @@
|
||||
"close_menu": "Cerrar menú",
|
||||
"unselect": "Deseleccionado",
|
||||
"skip": "Omitir",
|
||||
"bulk_actions_mobile": "1 |||| %{smart_count}",
|
||||
"share": "Compartir",
|
||||
"download": "Descargar"
|
||||
},
|
||||
@@ -480,41 +554,47 @@
|
||||
"transcodingDisabled": "Cambiar la configuración de la transcodificación a través de la interfaz web esta deshabilitado por motivos de seguridad. Si quieres cambiar (editar o agregar) opciones de transcodificación, reinicia el servidor con la %{config} opción de configuración.",
|
||||
"transcodingEnabled": "Navidrom se esta ejecutando con %{config}, lo que hace posible ejecutar comandos de sistema desde el apartado de transcodificación en la interfaz web. Recomendamos deshabilitarlo por motivos de seguridad y solo habilitarlo cuando se este configurando opciones de transcodificación.",
|
||||
"songsAddedToPlaylist": "1 canción agregada a la lista |||| %{smart_count} canciones agregadas a la lista",
|
||||
"noSimilarSongsFound": "No se encontraron canciones similares",
|
||||
"noTopSongsFound": "No se encontraron canciones destacadas",
|
||||
"noPlaylistsAvailable": "Ninguna lista disponible",
|
||||
"delete_user_title": "Eliminar usuario '%{name}'",
|
||||
"delete_user_content": "¿Esta seguro de eliminar a este usuario y todos sus datos (incluyendo listas y preferencias)?",
|
||||
"remove_missing_title": "Eliminar archivos faltantes",
|
||||
"remove_missing_content": "¿Realmente desea eliminar los archivos faltantes seleccionados de la base de datos? Esto eliminará permanentemente cualquier referencia a ellos, incluidas sus reproducciones y valoraciones.",
|
||||
"remove_all_missing_title": "Eliminar todos los archivos faltantes",
|
||||
"remove_all_missing_content": "¿Realmente desea eliminar todos los archivos faltantes de la base de datos? Esto eliminará permanentemente cualquier referencia a ellos, incluidas sus reproducciones y valoraciones.",
|
||||
"notifications_blocked": "Las notificaciones de este sitio están bloqueadas en tu navegador",
|
||||
"notifications_not_available": "Este navegador no soporta notificaciones o no ingresaste a Navidrome usando https",
|
||||
"lastfmLinkSuccess": "Last.fm esta conectado y el scrobbling esta activado",
|
||||
"lastfmLinkFailure": "No se pudo conectar con Last.fm",
|
||||
"lastfmUnlinkSuccess": "Last.fm se ha desconectado y el scrobbling se desactivo",
|
||||
"lastfmUnlinkFailure": "No se pudo desconectar Last.fm",
|
||||
"listenBrainzLinkSuccess": "Se ha conectado correctamente a ListenBrainz y se activó el scrobbling como el usuario: %{user}",
|
||||
"listenBrainzLinkFailure": "No se pudo conectar con ListenBrainz: %{error}",
|
||||
"listenBrainzUnlinkSuccess": "Se desconectó ListenBrainz y se desactivó el scrobbling",
|
||||
"listenBrainzUnlinkFailure": "No se pudo desconectar ListenBrainz",
|
||||
"openIn": {
|
||||
"lastfm": "Ver en Last.fm",
|
||||
"musicbrainz": "Ver en MusicBrainz"
|
||||
},
|
||||
"lastfmLink": "Leer más...",
|
||||
"listenBrainzLinkSuccess": "Se ha conectado correctamente a ListenBrainz y se activo el scrobbling como el usuario: %{user}",
|
||||
"listenBrainzLinkFailure": "No se pudo conectar con ListenBrainz: %{error}",
|
||||
"listenBrainzUnlinkSuccess": "Se desconecto ListenBrainz y se desactivo el scrobbling",
|
||||
"listenBrainzUnlinkFailure": "No se pudo desconectar ListenBrainz",
|
||||
"downloadOriginalFormat": "Descargar formato original",
|
||||
"shareOriginalFormat": "Compartir formato original",
|
||||
"shareDialogTitle": "Compartir %{resource} '%{name}'",
|
||||
"shareBatchDialogTitle": "Compartir 1 %{resource} |||| Compartir %{smart_count} %{resource}",
|
||||
"shareCopyToClipboard": "Copiar al portapapeles: Ctrl+C, Intro",
|
||||
"shareSuccess": "URL copiada al portapapeles: %{url}",
|
||||
"shareFailure": "Error al copiar la URL %{url} al portapapeles",
|
||||
"downloadDialogTitle": "Descargar %{resource} '%{name}' (%{size})",
|
||||
"shareCopyToClipboard": "Copiar al portapapeles: Ctrl+C, Intro",
|
||||
"remove_missing_title": "Eliminar elemento faltante",
|
||||
"remove_missing_content": "¿Realmente desea eliminar los archivos faltantes seleccionados de la base de datos? Esto eliminará permanentemente cualquier referencia a ellos, incluidas sus reproducciones y valoraciones.",
|
||||
"remove_all_missing_title": "Eliminar todos los archivos perdidos",
|
||||
"remove_all_missing_content": "¿Realmente desea eliminar todos los archivos faltantes de la base de datos? Esto eliminará permanentemente cualquier referencia a ellos, incluidas sus reproducciones y valoraciones.",
|
||||
"noSimilarSongsFound": "No se encontraron canciones similares",
|
||||
"noTopSongsFound": "No se encontraron canciones destacadas"
|
||||
"downloadOriginalFormat": "Descargar formato original"
|
||||
},
|
||||
"menu": {
|
||||
"library": "Biblioteca",
|
||||
"librarySelector": {
|
||||
"allLibraries": "Todas las bibliotecas (%{count})",
|
||||
"multipleLibraries": "%{selected} de %{total} bibliotecas",
|
||||
"selectLibraries": "Seleccionar bibliotecas",
|
||||
"none": "Ninguno"
|
||||
},
|
||||
"settings": "Ajustes",
|
||||
"version": "Versión",
|
||||
"theme": "Tema",
|
||||
@@ -525,28 +605,22 @@
|
||||
"language": "Idioma",
|
||||
"defaultView": "Vista por defecto",
|
||||
"desktop_notifications": "Notificaciones de escritorio",
|
||||
"lastfmNotConfigured": "La clave API de Last.fm no está configurada",
|
||||
"lastfmScrobbling": "Scrobble a Last.fm",
|
||||
"listenBrainzScrobbling": "Scrobble a ListenBrainz",
|
||||
"replaygain": "Modo de ReplayGain",
|
||||
"preAmp": "ReplayGain PreAmp (dB)",
|
||||
"gain": {
|
||||
"none": "Ninguno",
|
||||
"album": "Álbum",
|
||||
"track": "Pista"
|
||||
},
|
||||
"lastfmNotConfigured": "La clave API de Last.fm no está configurada"
|
||||
"none": "Desactivado",
|
||||
"album": "Ganancia del álbum",
|
||||
"track": "Ganancia de pista"
|
||||
}
|
||||
}
|
||||
},
|
||||
"albumList": "Álbumes",
|
||||
"about": "Acerca de",
|
||||
"playlists": "Playlists",
|
||||
"sharedPlaylists": "Playlists Compartidas",
|
||||
"librarySelector": {
|
||||
"allLibraries": "Todas las bibliotecas (%{count})",
|
||||
"multipleLibraries": "%{selected} de %{total} bibliotecas",
|
||||
"selectLibraries": "Seleccionar bibliotecas",
|
||||
"none": "Ninguno"
|
||||
}
|
||||
"about": "Acerca de"
|
||||
},
|
||||
"player": {
|
||||
"playListsText": "Fila de reproducción",
|
||||
@@ -605,12 +679,17 @@
|
||||
"totalScanned": "Total de carpetas escaneadas",
|
||||
"quickScan": "Escaneo rápido",
|
||||
"fullScan": "Escaneo completo",
|
||||
"selectiveScan": "Selectivo",
|
||||
"serverUptime": "Uptime del servidor",
|
||||
"serverDown": "OFFLINE",
|
||||
"scanType": "Tipo",
|
||||
"status": "Error de escaneo",
|
||||
"elapsedTime": "Tiempo transcurrido",
|
||||
"selectiveScan": "Selectivo"
|
||||
"elapsedTime": "Tiempo transcurrido"
|
||||
},
|
||||
"nowPlaying": {
|
||||
"title": "En reproducción",
|
||||
"empty": "Nada en reproducción",
|
||||
"minutesAgo": "Hace %{smart_count} minuto |||| Hace %{smart_count} minutos"
|
||||
},
|
||||
"help": {
|
||||
"title": "Atajos de teclado de Navidrome",
|
||||
@@ -620,15 +699,10 @@
|
||||
"toggle_play": "Reproducir / Pausar",
|
||||
"prev_song": "Canción anterior",
|
||||
"next_song": "Siguiente canción",
|
||||
"current_song": "Canción actual",
|
||||
"vol_up": "Subir volumen",
|
||||
"vol_down": "Bajar volumen",
|
||||
"toggle_love": "Marca esta canción como favorita",
|
||||
"current_song": "Canción actual"
|
||||
"toggle_love": "Marca esta canción como favorita"
|
||||
}
|
||||
},
|
||||
"nowPlaying": {
|
||||
"title": "En reproducción",
|
||||
"empty": "Nada en reproducción",
|
||||
"minutesAgo": "Hace %{smart_count} minuto |||| Hace %{smart_count} minutos"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -387,6 +387,8 @@
},
"messages": {
"configHelp": "Configure o plugin usando pares chave-valor. Deixe vazio se o plugin não precisa de configuração.",
"configValidationError": "Falha na validação da configuração:",
"schemaRenderError": "Não foi possível renderizar o formulário de configuração. O schema do plugin pode estar inválido.",
"clickPermissions": "Clique em uma permissão para ver detalhes",
"noConfig": "Nenhuma configuração definida",
"allUsersHelp": "Quando habilitado, o plugin terá acesso a todos os usuários, incluindo os criados no futuro.",

@@ -10,6 +10,7 @@
"playCount": "Spelningar",
"title": "Titel",
"artist": "Artist",
"composer": "Kompositör",
"album": "Album",
"path": "Sökväg",
"genre": "Genre",

@@ -14,8 +14,8 @@ import (
)

func TestScanner(t *testing.T) {
// Only run goleak checks when not in CI environment
if os.Getenv("CI") == "" {
// Only run goleak checks when the GOLEAK env var is set
if os.Getenv("GOLEAK") != "" {
// Detect any goroutine leaks in the scanner code under test
defer goleak.VerifyNone(t,
goleak.IgnoreTopFunction("github.com/onsi/ginkgo/v2/internal/interrupt_handler.(*InterruptHandler).registerForInterrupts.func2"),

@@ -25,6 +25,7 @@ import (
type PluginManager interface {
EnablePlugin(ctx context.Context, id string) error
DisablePlugin(ctx context.Context, id string) error
ValidatePluginConfig(ctx context.Context, id, configJSON string) error
UpdatePluginConfig(ctx context.Context, id, configJSON string) error
UpdatePluginUsers(ctx context.Context, id, usersJSON string, allUsers bool) error
UpdatePluginLibraries(ctx context.Context, id, librariesJSON string, allLibraries bool) error

@@ -171,13 +171,26 @@ func isValidJSON(s string) bool {
return json.Unmarshal([]byte(s), &js) == nil
}

// validateAndUpdateConfig validates the config JSON and updates the plugin.
// validateAndUpdateConfig validates the config JSON against the plugin's schema and updates the plugin.
// Returns an error if validation or update fails (error response already written).
func validateAndUpdateConfig(ctx context.Context, pm PluginManager, id, configJSON string, w http.ResponseWriter) error {
// Basic JSON syntax check
if configJSON != "" && !isValidJSON(configJSON) {
http.Error(w, "Invalid JSON in config field", http.StatusBadRequest)
return errors.New("invalid JSON")
}

// Validate against plugin's config schema
if err := pm.ValidatePluginConfig(ctx, id, configJSON); err != nil {
log.Warn(ctx, "Config validation failed", "id", id, err)
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusBadRequest)
// Try to return structured validation errors if available
response := map[string]any{"message": err.Error()}
_ = json.NewEncoder(w).Encode(response)
return err
}

if err := pm.UpdatePluginConfig(ctx, id, configJSON); err != nil {
log.Error(ctx, "Error updating plugin config", "id", id, err)
http.Error(w, "Error updating plugin configuration: "+err.Error(), http.StatusInternalServerError)

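Editor's note (not part of the diff): a minimal, hypothetical sketch of how the two-step check in validateAndUpdateConfig behaves. The function is unexported, so this would live in the same package; stubPM, its fields, the demo function and the "my-plugin" id are all invented here, and only the two PluginManager methods the function actually calls are given bodies (the embedded interface covers the rest).

import (
	"context"
	"fmt"
	"net/http/httptest"
)

// stubPM is a hypothetical partial stub of PluginManager, for illustration only.
type stubPM struct {
	PluginManager // methods not overridden below are never called in this sketch
	validateErr   error
}

func (s stubPM) ValidatePluginConfig(ctx context.Context, id, configJSON string) error {
	return s.validateErr
}

func (s stubPM) UpdatePluginConfig(ctx context.Context, id, configJSON string) error {
	return nil
}

func demoValidateFlow() {
	w := httptest.NewRecorder()
	// Malformed JSON is caught by the syntax check: ValidatePluginConfig is never reached
	// and a 400 response has already been written by the time the error is returned.
	err := validateAndUpdateConfig(context.Background(), stubPM{}, "my-plugin", `{"broken"`, w)
	fmt.Println(err != nil, w.Code) // prints: true 400
}
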
@@ -35,7 +35,7 @@ func (pub *Router) handleImages(w http.ResponseWriter, r *http.Request) {
artId, err := decodeArtworkID(id)
if err != nil {
log.Error(r, "Error decoding artwork id", "id", id, err)
http.Error(w, err.Error(), http.StatusBadRequest)
http.Error(w, "invalid request", http.StatusBadRequest)
return
}
size := p.IntOr("size", 0)

@@ -410,6 +410,9 @@ func (api *Router) buildArtistDirectory(ctx context.Context, artist *model.Artis
}
dir.AlbumCount = getArtistAlbumCount(artist)
dir.UserRating = int32(artist.Rating)
if conf.Server.Subsonic.EnableAverageRating {
dir.AverageRating = artist.AverageRating
}
if artist.Starred {
dir.Starred = artist.StarredAt
}
@@ -447,6 +450,9 @@ func (api *Router) buildAlbumDirectory(ctx context.Context, album *model.Album)
dir.Played = album.PlayDate
}
dir.UserRating = int32(album.Rating)
if conf.Server.Subsonic.EnableAverageRating {
dir.AverageRating = album.AverageRating
}
dir.SongCount = int32(album.SongCount)
dir.CoverArt = album.CoverArtID().String()
if album.Starred {

@@ -101,6 +101,9 @@ func toArtist(r *http.Request, a model.Artist) responses.Artist {
CoverArt: a.CoverArtID().String(),
ArtistImageUrl: publicurl.ImageURL(r, a.CoverArtID(), 600),
}
if conf.Server.Subsonic.EnableAverageRating {
artist.AverageRating = a.AverageRating
}
if a.Starred {
artist.Starred = a.StarredAt
}
@@ -116,6 +119,9 @@ func toArtistID3(r *http.Request, a model.Artist) responses.ArtistID3 {
ArtistImageUrl: publicurl.ImageURL(r, a.CoverArtID(), 600),
UserRating: int32(a.Rating),
}
if conf.Server.Subsonic.EnableAverageRating {
artist.AverageRating = a.AverageRating
}
if a.Starred {
artist.Starred = a.StarredAt
}
@@ -166,11 +172,30 @@ func getTranscoding(ctx context.Context) (format string, bitRate int) {
return
}

func isClientInList(clientList, client string) bool {
if clientList == "" || client == "" {
return false
}
clients := strings.Split(clientList, ",")
for _, c := range clients {
if strings.TrimSpace(c) == client {
return true
}
}
return false
}

func childFromMediaFile(ctx context.Context, mf model.MediaFile) responses.Child {
child := responses.Child{}
child.Id = mf.ID
child.Title = mf.FullTitle()
child.IsDir = false

player, ok := request.PlayerFrom(ctx)
if ok && isClientInList(conf.Server.Subsonic.MinimalClients, player.Client) {
return child
}

child.Parent = mf.AlbumID
child.Album = mf.Album
child.Year = int32(mf.Year)
@@ -183,7 +208,7 @@ func childFromMediaFile(ctx context.Context, mf model.MediaFile) responses.Child
child.BitRate = int32(mf.BitRate)
child.CoverArt = mf.CoverArtID().String()
child.ContentType = mf.ContentType()
player, ok := request.PlayerFrom(ctx)

if ok && player.ReportRealPath {
child.Path = mf.AbsolutePath()
} else {
@@ -199,6 +224,9 @@ func childFromMediaFile(ctx context.Context, mf model.MediaFile) responses.Child
child.Starred = mf.StarredAt
}
child.UserRating = int32(mf.Rating)
if conf.Server.Subsonic.EnableAverageRating {
child.AverageRating = mf.AverageRating
}

format, _ := getTranscoding(ctx)
if mf.Suffix != "" && format != "" && mf.Suffix != format {
@@ -211,8 +239,8 @@ func childFromMediaFile(ctx context.Context, mf model.MediaFile) responses.Child
}

func osChildFromMediaFile(ctx context.Context, mf model.MediaFile) *responses.OpenSubsonicChild {
player, _ := request.PlayerFrom(ctx)
if strings.Contains(conf.Server.Subsonic.LegacyClients, player.Client) {
player, ok := request.PlayerFrom(ctx)
if ok && isClientInList(conf.Server.Subsonic.MinimalClients, player.Client) {
return nil
}
child := responses.OpenSubsonicChild{}
@@ -310,6 +338,9 @@ func childFromAlbum(ctx context.Context, al model.Album) responses.Child {
}
child.PlayCount = al.PlayCount
child.UserRating = int32(al.Rating)
if conf.Server.Subsonic.EnableAverageRating {
child.AverageRating = al.AverageRating
}
child.OpenSubsonicChild = osChildFromAlbum(ctx, al)
return child
}
@@ -403,6 +434,9 @@ func buildOSAlbumID3(ctx context.Context, album model.Album) *responses.OpenSubs
dir.Played = album.PlayDate
}
dir.UserRating = int32(album.Rating)
if conf.Server.Subsonic.EnableAverageRating {
dir.AverageRating = album.AverageRating
}
dir.RecordLabels = slice.Map(album.Tags.Values(model.TagRecordLabel), func(s string) responses.RecordLabel {
return responses.RecordLabel{Name: s}
})

@@ -4,8 +4,10 @@ import (
|
||||
"context"
|
||||
"net/http/httptest"
|
||||
|
||||
"github.com/go-chi/jwtauth/v5"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/auth"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
"github.com/navidrome/navidrome/server/subsonic/responses"
|
||||
@@ -17,6 +19,7 @@ import (
|
||||
var _ = Describe("helpers", func() {
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
auth.TokenAuth = jwtauth.New("HS256", []byte("test secret"), nil)
|
||||
})
|
||||
|
||||
Describe("fakePath", func() {
|
||||
@@ -169,6 +172,190 @@ var _ = Describe("helpers", func() {
|
||||
})
|
||||
})
|
||||
|
||||
DescribeTable("isClientInList",
|
||||
func(list, client string, expected bool) {
|
||||
Expect(isClientInList(list, client)).To(Equal(expected))
|
||||
},
|
||||
Entry("returns false when clientList is empty", "", "some-client", false),
|
||||
Entry("returns false when client is empty", "client1,client2", "", false),
|
||||
Entry("returns false when both are empty", "", "", false),
|
||||
Entry("returns true when client matches single entry", "my-client", "my-client", true),
|
||||
Entry("returns true when client matches first in list", "client1,client2,client3", "client1", true),
|
||||
Entry("returns true when client matches middle in list", "client1,client2,client3", "client2", true),
|
||||
Entry("returns true when client matches last in list", "client1,client2,client3", "client3", true),
|
||||
Entry("returns false when client does not match", "client1,client2", "client3", false),
|
||||
Entry("trims whitespace from client list entries", "client1, client2 , client3", "client2", true),
|
||||
Entry("does not trim the client parameter", "client1,client2", " client1", false),
|
||||
)
|
||||
|
||||
Describe("childFromMediaFile", func() {
|
||||
var mf model.MediaFile
|
||||
var ctx context.Context
|
||||
|
||||
BeforeEach(func() {
|
||||
mf = model.MediaFile{
|
||||
ID: "mf-1",
|
||||
Title: "Test Song",
|
||||
Album: "Test Album",
|
||||
AlbumID: "album-1",
|
||||
Artist: "Test Artist",
|
||||
ArtistID: "artist-1",
|
||||
Year: 2023,
|
||||
Genre: "Rock",
|
||||
TrackNumber: 5,
|
||||
Duration: 180.5,
|
||||
Size: 5000000,
|
||||
Suffix: "mp3",
|
||||
BitRate: 320,
|
||||
}
|
||||
ctx = context.Background()
|
||||
})
|
||||
|
||||
Context("with minimal client", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.Subsonic.MinimalClients = "minimal-client"
|
||||
player := model.Player{Client: "minimal-client"}
|
||||
ctx = request.WithPlayer(ctx, player)
|
||||
})
|
||||
|
||||
It("returns only basic fields", func() {
|
||||
child := childFromMediaFile(ctx, mf)
|
||||
Expect(child.Id).To(Equal("mf-1"))
|
||||
Expect(child.Title).To(Equal("Test Song"))
|
||||
Expect(child.IsDir).To(BeFalse())
|
||||
|
||||
// These should not be set
|
||||
Expect(child.Album).To(BeEmpty())
|
||||
Expect(child.Artist).To(BeEmpty())
|
||||
Expect(child.Parent).To(BeEmpty())
|
||||
Expect(child.Year).To(BeZero())
|
||||
Expect(child.Genre).To(BeEmpty())
|
||||
Expect(child.Track).To(BeZero())
|
||||
Expect(child.Duration).To(BeZero())
|
||||
Expect(child.Size).To(BeZero())
|
||||
Expect(child.Suffix).To(BeEmpty())
|
||||
Expect(child.BitRate).To(BeZero())
|
||||
Expect(child.CoverArt).To(BeEmpty())
|
||||
Expect(child.ContentType).To(BeEmpty())
|
||||
Expect(child.Path).To(BeEmpty())
|
||||
})
|
||||
|
||||
It("does not include OpenSubsonic extension", func() {
|
||||
child := childFromMediaFile(ctx, mf)
|
||||
Expect(child.OpenSubsonicChild).To(BeNil())
|
||||
})
|
||||
})
|
||||
|
||||
Context("with non-minimal client", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.Subsonic.MinimalClients = "minimal-client"
|
||||
player := model.Player{Client: "regular-client"}
|
||||
ctx = request.WithPlayer(ctx, player)
|
||||
})
|
||||
|
||||
It("returns all fields", func() {
|
||||
child := childFromMediaFile(ctx, mf)
|
||||
Expect(child.Id).To(Equal("mf-1"))
|
||||
Expect(child.Title).To(Equal("Test Song"))
|
||||
Expect(child.IsDir).To(BeFalse())
|
||||
Expect(child.Album).To(Equal("Test Album"))
|
||||
Expect(child.Artist).To(Equal("Test Artist"))
|
||||
Expect(child.Parent).To(Equal("album-1"))
|
||||
Expect(child.Year).To(Equal(int32(2023)))
|
||||
Expect(child.Genre).To(Equal("Rock"))
|
||||
Expect(child.Track).To(Equal(int32(5)))
|
||||
Expect(child.Duration).To(Equal(int32(180)))
|
||||
Expect(child.Size).To(Equal(int64(5000000)))
|
||||
Expect(child.Suffix).To(Equal("mp3"))
|
||||
Expect(child.BitRate).To(Equal(int32(320)))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when minimal clients list is empty", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.Subsonic.MinimalClients = ""
|
||||
player := model.Player{Client: "any-client"}
|
||||
ctx = request.WithPlayer(ctx, player)
|
||||
})
|
||||
|
||||
It("returns all fields", func() {
|
||||
child := childFromMediaFile(ctx, mf)
|
||||
Expect(child.Album).To(Equal("Test Album"))
|
||||
Expect(child.Artist).To(Equal("Test Artist"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when no player in context", func() {
|
||||
It("returns all fields", func() {
|
||||
child := childFromMediaFile(ctx, mf)
|
||||
Expect(child.Album).To(Equal("Test Album"))
|
||||
Expect(child.Artist).To(Equal("Test Artist"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("osChildFromMediaFile", func() {
|
||||
var mf model.MediaFile
|
||||
var ctx context.Context
|
||||
|
||||
BeforeEach(func() {
|
||||
mf = model.MediaFile{
|
||||
ID: "mf-1",
|
||||
Title: "Test Song",
|
||||
Artist: "Test Artist",
|
||||
Comment: "Test Comment",
|
||||
}
|
||||
ctx = context.Background()
|
||||
})
|
||||
|
||||
Context("with minimal client", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.Subsonic.MinimalClients = "minimal-client"
|
||||
player := model.Player{Client: "minimal-client"}
|
||||
ctx = request.WithPlayer(ctx, player)
|
||||
})
|
||||
|
||||
It("returns nil", func() {
|
||||
osChild := osChildFromMediaFile(ctx, mf)
|
||||
Expect(osChild).To(BeNil())
|
||||
})
|
||||
})
|
||||
|
||||
Context("with non-minimal client", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.Subsonic.MinimalClients = "minimal-client"
|
||||
player := model.Player{Client: "regular-client"}
|
||||
ctx = request.WithPlayer(ctx, player)
|
||||
})
|
||||
|
||||
It("returns OpenSubsonic child fields", func() {
|
||||
osChild := osChildFromMediaFile(ctx, mf)
|
||||
Expect(osChild).ToNot(BeNil())
|
||||
Expect(osChild.Comment).To(Equal("Test Comment"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when minimal clients list is empty", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.Subsonic.MinimalClients = ""
|
||||
player := model.Player{Client: "any-client"}
|
||||
ctx = request.WithPlayer(ctx, player)
|
||||
})
|
||||
|
||||
It("returns OpenSubsonic child fields", func() {
|
||||
osChild := osChildFromMediaFile(ctx, mf)
|
||||
Expect(osChild).ToNot(BeNil())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when no player in context", func() {
|
||||
It("returns OpenSubsonic child fields", func() {
|
||||
osChild := osChildFromMediaFile(ctx, mf)
|
||||
Expect(osChild).ToNot(BeNil())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("selectedMusicFolderIds", func() {
|
||||
var user model.User
|
||||
var ctx context.Context
|
||||
@@ -272,4 +459,131 @@ var _ = Describe("helpers", func() {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Describe("AverageRating in responses", func() {
|
||||
var ctx context.Context
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = context.Background()
|
||||
conf.Server.Subsonic.EnableAverageRating = true
|
||||
})
|
||||
|
||||
Describe("childFromMediaFile", func() {
|
||||
It("includes averageRating when set", func() {
|
||||
mf := model.MediaFile{
|
||||
ID: "mf-avg-1",
|
||||
Title: "Test Song",
|
||||
Annotations: model.Annotations{
|
||||
AverageRating: 4.5,
|
||||
},
|
||||
}
|
||||
child := childFromMediaFile(ctx, mf)
|
||||
Expect(child.AverageRating).To(Equal(4.5))
|
||||
})
|
||||
|
||||
It("returns 0 for averageRating when not set", func() {
|
||||
mf := model.MediaFile{
|
||||
ID: "mf-avg-2",
|
||||
Title: "Test Song No Rating",
|
||||
}
|
||||
child := childFromMediaFile(ctx, mf)
|
||||
Expect(child.AverageRating).To(Equal(0.0))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("childFromAlbum", func() {
|
||||
It("includes averageRating when set", func() {
|
||||
al := model.Album{
|
||||
ID: "al-avg-1",
|
||||
Name: "Test Album",
|
||||
Annotations: model.Annotations{
|
||||
AverageRating: 3.75,
|
||||
},
|
||||
}
|
||||
child := childFromAlbum(ctx, al)
|
||||
Expect(child.AverageRating).To(Equal(3.75))
|
||||
})
|
||||
|
||||
It("returns 0 for averageRating when not set", func() {
|
||||
al := model.Album{
|
||||
ID: "al-avg-2",
|
||||
Name: "Test Album No Rating",
|
||||
}
|
||||
child := childFromAlbum(ctx, al)
|
||||
Expect(child.AverageRating).To(Equal(0.0))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("toArtist", func() {
|
||||
It("includes averageRating when set", func() {
|
||||
conf.Server.Subsonic.EnableAverageRating = true
|
||||
r := httptest.NewRequest("GET", "/test", nil)
|
||||
a := model.Artist{
|
||||
ID: "ar-avg-1",
|
||||
Name: "Test Artist",
|
||||
Annotations: model.Annotations{
|
||||
AverageRating: 5.0,
|
||||
},
|
||||
}
|
||||
artist := toArtist(r, a)
|
||||
Expect(artist.AverageRating).To(Equal(5.0))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("toArtistID3", func() {
|
||||
It("includes averageRating when set", func() {
|
||||
conf.Server.Subsonic.EnableAverageRating = true
|
||||
r := httptest.NewRequest("GET", "/test", nil)
|
||||
a := model.Artist{
|
||||
ID: "ar-avg-2",
|
||||
Name: "Test Artist ID3",
|
||||
Annotations: model.Annotations{
|
||||
AverageRating: 2.5,
|
||||
},
|
||||
}
|
||||
artist := toArtistID3(r, a)
|
||||
Expect(artist.AverageRating).To(Equal(2.5))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("EnableAverageRating config", func() {
|
||||
It("excludes averageRating when disabled", func() {
|
||||
conf.Server.Subsonic.EnableAverageRating = false
|
||||
|
||||
mf := model.MediaFile{
|
||||
ID: "mf-cfg-1",
|
||||
Title: "Test Song",
|
||||
Annotations: model.Annotations{
|
||||
AverageRating: 4.5,
|
||||
},
|
||||
}
|
||||
child := childFromMediaFile(ctx, mf)
|
||||
Expect(child.AverageRating).To(Equal(0.0))
|
||||
|
||||
al := model.Album{
|
||||
ID: "al-cfg-1",
|
||||
Name: "Test Album",
|
||||
Annotations: model.Annotations{
|
||||
AverageRating: 3.75,
|
||||
},
|
||||
}
|
||||
albumChild := childFromAlbum(ctx, al)
|
||||
Expect(albumChild.AverageRating).To(Equal(0.0))
|
||||
|
||||
r := httptest.NewRequest("GET", "/test", nil)
|
||||
a := model.Artist{
|
||||
ID: "ar-cfg-1",
|
||||
Name: "Test Artist",
|
||||
Annotations: model.Annotations{
|
||||
AverageRating: 5.0,
|
||||
},
|
||||
}
|
||||
artist := toArtist(r, a)
|
||||
Expect(artist.AverageRating).To(Equal(0.0))
|
||||
|
||||
artistID3 := toArtistID3(r, a)
|
||||
Expect(artistID3.AverageRating).To(Equal(0.0))
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -7,8 +7,10 @@ import (
"net/http"
"time"

"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/server/subsonic/responses"
"github.com/navidrome/navidrome/utils/req"
"github.com/navidrome/navidrome/utils/slice"
@@ -23,7 +25,7 @@ func (api *Router) GetPlaylists(r *http.Request) (*responses.Subsonic, error) {
}
response := newResponse()
response.Playlists = &responses.Playlists{
Playlist: slice.Map(allPls, api.buildPlaylist),
Playlist: slice.MapWithArg(allPls, ctx, api.buildPlaylist),
}
return response, nil
}
@@ -51,7 +53,7 @@ func (api *Router) getPlaylist(ctx context.Context, id string) (*responses.Subso

response := newResponse()
response.Playlist = &responses.PlaylistWithSongs{
Playlist: api.buildPlaylist(*pls),
Playlist: api.buildPlaylist(ctx, *pls),
}
response.Playlist.Entry = slice.MapWithArg(pls.MediaFiles(), ctx, childFromMediaFile)
return response, nil
@@ -152,21 +154,28 @@ func (api *Router) UpdatePlaylist(r *http.Request) (*responses.Subsonic, error)
return newResponse(), nil
}

func (api *Router) buildPlaylist(p model.Playlist) responses.Playlist {
func (api *Router) buildPlaylist(ctx context.Context, p model.Playlist) responses.Playlist {
pls := responses.Playlist{}
pls.Id = p.ID
pls.Name = p.Name
pls.Comment = p.Comment
pls.SongCount = int32(p.SongCount)
pls.Owner = p.OwnerName
pls.Duration = int32(p.Duration)
pls.Public = p.Public
pls.Created = p.CreatedAt
pls.CoverArt = p.CoverArtID().String()
if p.IsSmartPlaylist() {
pls.Changed = time.Now()
} else {
pls.Changed = p.UpdatedAt
}

player, ok := request.PlayerFrom(ctx)
if ok && isClientInList(conf.Server.Subsonic.MinimalClients, player.Client) {
return pls
}

pls.Comment = p.Comment
pls.Owner = p.OwnerName
pls.Public = p.Public
pls.CoverArt = p.CoverArtID().String()

return pls
}

@@ -2,9 +2,12 @@ package subsonic
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
@@ -12,6 +15,108 @@ import (
|
||||
|
||||
var _ core.Playlists = (*fakePlaylists)(nil)
|
||||
|
||||
var _ = Describe("buildPlaylist", func() {
|
||||
var router *Router
|
||||
var ds model.DataStore
|
||||
var ctx context.Context
|
||||
var playlist model.Playlist
|
||||
|
||||
BeforeEach(func() {
|
||||
ds = &tests.MockDataStore{}
|
||||
router = New(ds, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil)
|
||||
ctx = context.Background()
|
||||
|
||||
createdAt := time.Date(2023, 1, 15, 10, 30, 0, 0, time.UTC)
|
||||
updatedAt := time.Date(2023, 2, 20, 14, 45, 0, 0, time.UTC)
|
||||
|
||||
playlist = model.Playlist{
|
||||
ID: "pls-1",
|
||||
Name: "My Playlist",
|
||||
Comment: "Test comment",
|
||||
OwnerName: "admin",
|
||||
Public: true,
|
||||
SongCount: 10,
|
||||
Duration: 600,
|
||||
CreatedAt: createdAt,
|
||||
UpdatedAt: updatedAt,
|
||||
}
|
||||
})
|
||||
|
||||
Context("with minimal client", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.Subsonic.MinimalClients = "minimal-client"
|
||||
player := model.Player{Client: "minimal-client"}
|
||||
ctx = request.WithPlayer(ctx, player)
|
||||
})
|
||||
|
||||
It("returns only basic fields", func() {
|
||||
result := router.buildPlaylist(ctx, playlist)
|
||||
|
||||
Expect(result.Id).To(Equal("pls-1"))
|
||||
Expect(result.Name).To(Equal("My Playlist"))
|
||||
Expect(result.SongCount).To(Equal(int32(10)))
|
||||
Expect(result.Duration).To(Equal(int32(600)))
|
||||
Expect(result.Created).To(Equal(playlist.CreatedAt))
|
||||
Expect(result.Changed).To(Equal(playlist.UpdatedAt))
|
||||
|
||||
// These should not be set
|
||||
Expect(result.Comment).To(BeEmpty())
|
||||
Expect(result.Owner).To(BeEmpty())
|
||||
Expect(result.Public).To(BeFalse())
|
||||
Expect(result.CoverArt).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("with non-minimal client", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.Subsonic.MinimalClients = "minimal-client"
|
||||
player := model.Player{Client: "regular-client"}
|
||||
ctx = request.WithPlayer(ctx, player)
|
||||
})
|
||||
|
||||
It("returns all fields", func() {
|
||||
result := router.buildPlaylist(ctx, playlist)
|
||||
|
||||
Expect(result.Id).To(Equal("pls-1"))
|
||||
Expect(result.Name).To(Equal("My Playlist"))
|
||||
Expect(result.SongCount).To(Equal(int32(10)))
|
||||
Expect(result.Duration).To(Equal(int32(600)))
|
||||
Expect(result.Created).To(Equal(playlist.CreatedAt))
|
||||
Expect(result.Changed).To(Equal(playlist.UpdatedAt))
|
||||
Expect(result.Comment).To(Equal("Test comment"))
|
||||
Expect(result.Owner).To(Equal("admin"))
|
||||
Expect(result.Public).To(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when minimal clients list is empty", func() {
|
||||
BeforeEach(func() {
|
||||
conf.Server.Subsonic.MinimalClients = ""
|
||||
player := model.Player{Client: "any-client"}
|
||||
ctx = request.WithPlayer(ctx, player)
|
||||
})
|
||||
|
||||
It("returns all fields", func() {
|
||||
result := router.buildPlaylist(ctx, playlist)
|
||||
|
||||
Expect(result.Comment).To(Equal("Test comment"))
|
||||
Expect(result.Owner).To(Equal("admin"))
|
||||
Expect(result.Public).To(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when no player in context", func() {
|
||||
It("returns all fields", func() {
|
||||
result := router.buildPlaylist(ctx, playlist)
|
||||
|
||||
Expect(result.Comment).To(Equal("Test comment"))
|
||||
Expect(result.Owner).To(Equal("admin"))
|
||||
Expect(result.Public).To(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
var _ = Describe("UpdatePlaylist", func() {
|
||||
var router *Router
|
||||
var ds model.DataStore
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"isVideo": false,
|
||||
"bpm": 0,
|
||||
"comment": "",
|
||||
"sortName": "sort name",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<albumList>
|
||||
<album id="1" isDir="false" isVideo="false" sortName="sort name" mediaType="album" musicBrainzId="00000000-0000-0000-0000-000000000000" displayArtist="Display artist" displayAlbumArtist="Display album artist" explicitStatus="explicit">
|
||||
<album id="1" isDir="false" sortName="sort name" mediaType="album" musicBrainzId="00000000-0000-0000-0000-000000000000" displayArtist="Display artist" displayAlbumArtist="Display album artist" explicitStatus="explicit">
|
||||
<genres name="Genre 1"></genres>
|
||||
<genres name="Genre 2"></genres>
|
||||
<moods>mood1</moods>
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"title": "title",
|
||||
"isVideo": false
|
||||
"title": "title"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<albumList>
|
||||
<album id="1" isDir="false" title="title" isVideo="false"></album>
|
||||
<album id="1" isDir="false" title="title"></album>
|
||||
</albumList>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -93,7 +93,6 @@
|
||||
"transcodedSuffix": "mp3",
|
||||
"duration": 146,
|
||||
"bitRate": 320,
|
||||
"isVideo": false,
|
||||
"bpm": 127,
|
||||
"comment": "a comment",
|
||||
"sortName": "sorted song",
|
||||
@@ -185,7 +184,6 @@
|
||||
"transcodedSuffix": "mp3",
|
||||
"duration": 146,
|
||||
"bitRate": 320,
|
||||
"isVideo": false,
|
||||
"bpm": 0,
|
||||
"comment": "",
|
||||
"sortName": "",
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
<moods>sad</moods>
|
||||
<artists id="1" name="artist1"></artists>
|
||||
<artists id="2" name="artist2"></artists>
|
||||
<song id="1" isDir="true" title="title" album="album" artist="artist" track="1" year="1985" genre="Rock" coverArt="1" size="8421341" contentType="audio/flac" suffix="flac" starred="2016-03-02T20:30:00Z" transcodedContentType="audio/mpeg" transcodedSuffix="mp3" duration="146" bitRate="320" isVideo="false" bpm="127" comment="a comment" sortName="sorted song" mediaType="song" musicBrainzId="4321" channelCount="2" samplingRate="44100" bitDepth="16" displayArtist="artist1 & artist2" displayAlbumArtist="album artist1 & album artist2" displayComposer="composer 1 & composer 2" explicitStatus="clean">
|
||||
<song id="1" isDir="true" title="title" album="album" artist="artist" track="1" year="1985" genre="Rock" coverArt="1" size="8421341" contentType="audio/flac" suffix="flac" starred="2016-03-02T20:30:00Z" transcodedContentType="audio/mpeg" transcodedSuffix="mp3" duration="146" bitRate="320" bpm="127" comment="a comment" sortName="sorted song" mediaType="song" musicBrainzId="4321" channelCount="2" samplingRate="44100" bitDepth="16" displayArtist="artist1 & artist2" displayAlbumArtist="album artist1 & album artist2" displayComposer="composer 1 & composer 2" explicitStatus="clean">
|
||||
<isrc>ISRC-1</isrc>
|
||||
<genres name="rock"></genres>
|
||||
<genres name="progressive"></genres>
|
||||
@@ -33,7 +33,7 @@
|
||||
<artist id="2" name="artist2"></artist>
|
||||
</contributors>
|
||||
</song>
|
||||
<song id="2" isDir="true" title="title" album="album" artist="artist" track="1" year="1985" genre="Rock" coverArt="1" size="8421341" contentType="audio/flac" suffix="flac" starred="2016-03-02T20:30:00Z" transcodedContentType="audio/mpeg" transcodedSuffix="mp3" duration="146" bitRate="320" isVideo="false">
|
||||
<song id="2" isDir="true" title="title" album="album" artist="artist" track="1" year="1985" genre="Rock" coverArt="1" size="8421341" contentType="audio/flac" suffix="flac" starred="2016-03-02T20:30:00Z" transcodedContentType="audio/mpeg" transcodedSuffix="mp3" duration="146" bitRate="320">
|
||||
<replayGain trackGain="0" albumGain="0" trackPeak="0" albumPeak="0" baseGain="0" fallbackGain="0"></replayGain>
|
||||
</song>
|
||||
</album>
|
||||
|
||||
@@ -10,8 +10,7 @@
|
||||
"entry": {
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"title": "title",
|
||||
"isVideo": false
|
||||
"title": "title"
|
||||
},
|
||||
"position": 123,
|
||||
"username": "user2",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<bookmarks>
|
||||
<bookmark position="123" username="user2" comment="a comment" created="0001-01-01T00:00:00Z" changed="0001-01-01T00:00:00Z">
|
||||
<entry id="1" isDir="false" title="title" isVideo="false"></entry>
|
||||
<entry id="1" isDir="false" title="title"></entry>
|
||||
</bookmark>
|
||||
</bookmarks>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -24,7 +24,6 @@
|
||||
"transcodedSuffix": "mp3",
|
||||
"duration": 146,
|
||||
"bitRate": 320,
|
||||
"isVideo": false,
|
||||
"bpm": 127,
|
||||
"comment": "a comment",
|
||||
"sortName": "sorted title",
|
||||
@@ -116,7 +115,6 @@
|
||||
{
|
||||
"id": "",
|
||||
"isDir": false,
|
||||
"isVideo": false,
|
||||
"bpm": 0,
|
||||
"comment": "",
|
||||
"sortName": "",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<directory id="1" name="N">
|
||||
<child id="1" isDir="true" title="title" album="album" artist="artist" track="1" year="1985" genre="Rock" coverArt="1" size="8421341" contentType="audio/flac" suffix="flac" starred="2016-03-02T20:30:00Z" transcodedContentType="audio/mpeg" transcodedSuffix="mp3" duration="146" bitRate="320" isVideo="false" bpm="127" comment="a comment" sortName="sorted title" mediaType="song" musicBrainzId="4321" channelCount="2" samplingRate="44100" bitDepth="16" displayArtist="artist 1 & artist 2" displayAlbumArtist="album artist 1 & album artist 2" displayComposer="composer 1 & composer 2" explicitStatus="clean">
|
||||
<child id="1" isDir="true" title="title" album="album" artist="artist" track="1" year="1985" genre="Rock" coverArt="1" size="8421341" contentType="audio/flac" suffix="flac" starred="2016-03-02T20:30:00Z" transcodedContentType="audio/mpeg" transcodedSuffix="mp3" duration="146" bitRate="320" bpm="127" comment="a comment" sortName="sorted title" mediaType="song" musicBrainzId="4321" channelCount="2" samplingRate="44100" bitDepth="16" displayArtist="artist 1 & artist 2" displayAlbumArtist="album artist 1 & album artist 2" displayComposer="composer 1 & composer 2" explicitStatus="clean">
|
||||
<isrc>ISRC-1</isrc>
|
||||
<isrc>ISRC-2</isrc>
|
||||
<genres name="rock"></genres>
|
||||
@@ -25,7 +25,7 @@
|
||||
<artist id="4" name="composer2"></artist>
|
||||
</contributors>
|
||||
</child>
|
||||
<child id="" isDir="false" isVideo="false">
|
||||
<child id="" isDir="false">
|
||||
<replayGain trackGain="0" albumGain="0" trackPeak="0" albumPeak="0" baseGain="0" fallbackGain="0"></replayGain>
|
||||
</child>
|
||||
</directory>
|
||||
|
||||
@@ -8,8 +8,7 @@
|
||||
"child": [
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"isVideo": false
|
||||
"isDir": false
|
||||
}
|
||||
],
|
||||
"id": "",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<directory id="" name="">
|
||||
<child id="1" isDir="false" isVideo="false"></child>
|
||||
<child id="1" isDir="false"></child>
|
||||
</directory>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"isVideo": false,
|
||||
"bpm": 0,
|
||||
"comment": "",
|
||||
"sortName": "",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<directory id="" name="">
|
||||
<child id="1" isDir="false" isVideo="false"></child>
|
||||
<child id="1" isDir="false"></child>
|
||||
</directory>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"title": "title",
|
||||
"isVideo": false
|
||||
"title": "title"
|
||||
}
|
||||
],
|
||||
"id": "1",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<directory id="1" name="N">
|
||||
<child id="1" isDir="false" title="title" isVideo="false"></child>
|
||||
<child id="1" isDir="false" title="title"></child>
|
||||
</directory>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"title": "title",
|
||||
"isVideo": false
|
||||
"title": "title"
|
||||
}
|
||||
],
|
||||
"current": "111",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<playQueue current="111" position="243" username="user1" changed="0001-01-01T00:00:00Z" changedBy="a_client">
|
||||
<entry id="1" isDir="false" title="title" isVideo="false"></entry>
|
||||
<entry id="1" isDir="false" title="title"></entry>
|
||||
</playQueue>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"title": "title",
|
||||
"isVideo": false
|
||||
"title": "title"
|
||||
}
|
||||
],
|
||||
"currentIndex": 0,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<playQueueByIndex currentIndex="0" position="243" username="user1" changed="0001-01-01T00:00:00Z" changedBy="a_client">
|
||||
<entry id="1" isDir="false" title="title" isVideo="false"></entry>
|
||||
<entry id="1" isDir="false" title="title"></entry>
|
||||
</playQueueByIndex>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -23,7 +23,6 @@
|
||||
"name": "bbb",
|
||||
"songCount": 0,
|
||||
"duration": 0,
|
||||
"public": false,
|
||||
"created": "0001-01-01T00:00:00Z",
|
||||
"changed": "0001-01-01T00:00:00Z"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<playlists>
|
||||
<playlist id="111" name="aaa" comment="comment" songCount="2" duration="120" public="true" owner="admin" created="0001-01-01T00:00:00Z" changed="0001-01-01T00:00:00Z" coverArt="pl-123123123123"></playlist>
|
||||
<playlist id="222" name="bbb" songCount="0" duration="0" public="false" created="0001-01-01T00:00:00Z" changed="0001-01-01T00:00:00Z"></playlist>
|
||||
<playlist id="222" name="bbb" songCount="0" duration="0" created="0001-01-01T00:00:00Z" changed="0001-01-01T00:00:00Z"></playlist>
|
||||
</playlists>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -14,8 +14,7 @@
|
||||
"title": "title",
|
||||
"album": "album",
|
||||
"artist": "artist",
|
||||
"duration": 120,
|
||||
"isVideo": false
|
||||
"duration": 120
|
||||
},
|
||||
{
|
||||
"id": "2",
|
||||
@@ -23,8 +22,7 @@
|
||||
"title": "title 2",
|
||||
"album": "album",
|
||||
"artist": "artist",
|
||||
"duration": 300,
|
||||
"isVideo": false
|
||||
"duration": 300
|
||||
}
|
||||
],
|
||||
"id": "ABC123",
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<shares>
|
||||
<share id="ABC123" url="http://localhost/p/ABC123" description="Check it out!" username="deluan" created="2016-03-02T20:30:00Z" expires="2016-03-02T20:30:00Z" lastVisited="2016-03-02T20:30:00Z" visitCount="2">
|
||||
<entry id="1" isDir="false" title="title" album="album" artist="artist" duration="120" isVideo="false"></entry>
|
||||
<entry id="2" isDir="false" title="title 2" album="album" artist="artist" duration="300" isVideo="false"></entry>
|
||||
<entry id="1" isDir="false" title="title" album="album" artist="artist" duration="120"></entry>
|
||||
<entry id="2" isDir="false" title="title 2" album="album" artist="artist" duration="300"></entry>
|
||||
</share>
|
||||
</shares>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"title": "title",
|
||||
"isVideo": false
|
||||
"title": "title"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<similarSongs>
|
||||
<song id="1" isDir="false" title="title" isVideo="false"></song>
|
||||
<song id="1" isDir="false" title="title"></song>
|
||||
</similarSongs>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"title": "title",
|
||||
"isVideo": false
|
||||
"title": "title"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<similarSongs2>
|
||||
<song id="1" isDir="false" title="title" isVideo="false"></song>
|
||||
<song id="1" isDir="false" title="title"></song>
|
||||
</similarSongs2>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -9,8 +9,7 @@
|
||||
{
|
||||
"id": "1",
|
||||
"isDir": false,
|
||||
"title": "title",
|
||||
"isVideo": false
|
||||
"title": "title"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<subsonic-response xmlns="http://subsonic.org/restapi" status="ok" version="1.16.1" type="navidrome" serverVersion="v0.55.0" openSubsonic="true">
|
||||
<topSongs>
|
||||
<song id="1" isDir="false" title="title" isVideo="false"></song>
|
||||
<song id="1" isDir="false" title="title"></song>
|
||||
</topSongs>
|
||||
</subsonic-response>
|
||||
|
||||
@@ -95,11 +95,9 @@ type Artist struct {
Name string `xml:"name,attr" json:"name"`
Starred *time.Time `xml:"starred,attr,omitempty" json:"starred,omitempty"`
UserRating int32 `xml:"userRating,attr,omitempty" json:"userRating,omitempty"`
AverageRating float64 `xml:"averageRating,attr,omitempty" json:"averageRating,omitempty"`
CoverArt string `xml:"coverArt,attr,omitempty" json:"coverArt,omitempty"`
ArtistImageUrl string `xml:"artistImageUrl,attr,omitempty" json:"artistImageUrl,omitempty"`
/* TODO:
<xs:attribute name="averageRating" type="sub:AverageRating" use="optional"/> <!-- Added in 1.13.0 -->
*/
}

type Index struct {
@@ -160,13 +158,11 @@ type Child struct {
ArtistId string `xml:"artistId,attr,omitempty" json:"artistId,omitempty"`
Type string `xml:"type,attr,omitempty" json:"type,omitempty"`
UserRating int32 `xml:"userRating,attr,omitempty" json:"userRating,omitempty"`
AverageRating float64 `xml:"averageRating,attr,omitempty" json:"averageRating,omitempty"`
SongCount int32 `xml:"songCount,attr,omitempty" json:"songCount,omitempty"`
IsVideo bool `xml:"isVideo,attr" json:"isVideo"`
IsVideo bool `xml:"isVideo,attr,omitempty" json:"isVideo,omitempty"`
BookmarkPosition int64 `xml:"bookmarkPosition,attr,omitempty" json:"bookmarkPosition,omitempty"`
/*
<xs:attribute name="averageRating" type="sub:AverageRating" use="optional"/> <!-- Added in 1.6.0 -->
*/
*OpenSubsonicChild `xml:",omitempty" json:",omitempty"`
}

type OpenSubsonicChild struct {
@@ -177,7 +173,7 @@ type OpenSubsonicChild struct {
SortName string `xml:"sortName,attr,omitempty" json:"sortName"`
MediaType MediaType `xml:"mediaType,attr,omitempty" json:"mediaType"`
MusicBrainzId string `xml:"musicBrainzId,attr,omitempty" json:"musicBrainzId"`
Isrc Array[string] `xml:"isrc,omitempty" json:"isrc"`
Genres Array[ItemGenre] `xml:"genres,omitempty" json:"genres"`
ReplayGain ReplayGain `xml:"replayGain,omitempty" json:"replayGain"`
ChannelCount int32 `xml:"channelCount,attr,omitempty" json:"channelCount"`
@@ -198,14 +194,15 @@ type Songs struct {
}

type Directory struct {
Child []Child `xml:"child" json:"child,omitempty"`
Id string `xml:"id,attr" json:"id"`
Name string `xml:"name,attr" json:"name"`
Parent string `xml:"parent,attr,omitempty" json:"parent,omitempty"`
Starred *time.Time `xml:"starred,attr,omitempty" json:"starred,omitempty"`
PlayCount int64 `xml:"playCount,attr,omitempty" json:"playCount,omitempty"`
Played *time.Time `xml:"played,attr,omitempty" json:"played,omitempty"`
UserRating int32 `xml:"userRating,attr,omitempty" json:"userRating,omitempty"`
AverageRating float64 `xml:"averageRating,attr,omitempty" json:"averageRating,omitempty"`

// ID3
Artist string `xml:"artist,attr,omitempty" json:"artist,omitempty"`
@@ -217,10 +214,6 @@ type Directory struct {
Created *time.Time `xml:"created,attr,omitempty" json:"created,omitempty"`
Year int32 `xml:"year,attr,omitempty" json:"year,omitempty"`
Genre string `xml:"genre,attr,omitempty" json:"genre,omitempty"`

/*
<xs:attribute name="averageRating" type="sub:AverageRating" use="optional"/> <!-- Added in 1.13.0 -->
*/
}

// ArtistID3Ref is a reference to an artist, a simplified version of ArtistID3. This is used to resolve the
@@ -237,6 +230,7 @@ type ArtistID3 struct {
AlbumCount int32 `xml:"albumCount,attr" json:"albumCount"`
Starred *time.Time `xml:"starred,attr,omitempty" json:"starred,omitempty"`
UserRating int32 `xml:"userRating,attr,omitempty" json:"userRating,omitempty"`
AverageRating float64 `xml:"averageRating,attr,omitempty" json:"averageRating,omitempty"`
ArtistImageUrl string `xml:"artistImageUrl,attr,omitempty" json:"artistImageUrl,omitempty"`
*OpenSubsonicArtistID3 `xml:",omitempty" json:",omitempty"`
}
@@ -268,6 +262,7 @@ type OpenSubsonicAlbumID3 struct {
// OpenSubsonic extensions
Played *time.Time `xml:"played,attr,omitempty" json:"played,omitempty"`
UserRating int32 `xml:"userRating,attr,omitempty" json:"userRating"`
AverageRating float64 `xml:"averageRating,attr,omitempty" json:"averageRating,omitempty"`
Genres Array[ItemGenre] `xml:"genres,omitempty" json:"genres"`
MusicBrainzId string `xml:"musicBrainzId,attr,omitempty" json:"musicBrainzId"`
IsCompilation bool `xml:"isCompilation,attr,omitempty" json:"isCompilation"`
@@ -308,7 +303,7 @@ type Playlist struct {
Comment string `xml:"comment,attr,omitempty" json:"comment,omitempty"`
SongCount int32 `xml:"songCount,attr" json:"songCount"`
Duration int32 `xml:"duration,attr" json:"duration"`
Public bool `xml:"public,attr" json:"public"`
Public bool `xml:"public,attr,omitempty" json:"public,omitempty"`
Owner string `xml:"owner,attr,omitempty" json:"owner,omitempty"`
Created time.Time `xml:"created,attr" json:"created"`
Changed time.Time `xml:"changed,attr" json:"changed"`

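Editor's note (not part of the diff): a small, hypothetical illustration of what adding omitempty to an attribute tag does, which is why the isVideo="false" and public="false" attributes disappear from the fixture outputs above. The child struct below is a stripped-down stand-in, not the real responses.Child; encoding/xml drops an attribute tagged omitempty when its value is the zero value.

package main

import (
	"encoding/xml"
	"fmt"
)

// child mirrors only the two fields relevant to the tag change, for illustration.
type child struct {
	XMLName xml.Name `xml:"child"`
	Id      string   `xml:"id,attr"`
	IsVideo bool     `xml:"isVideo,attr,omitempty"`
}

func main() {
	a, _ := xml.Marshal(child{Id: "1"})
	b, _ := xml.Marshal(child{Id: "2", IsVideo: true})
	fmt.Println(string(a)) // <child id="1"></child> – false is omitted
	fmt.Println(string(b)) // <child id="2" isVideo="true"></child>
}
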
11
tests/fixtures/playlists/private_playlist.nsp
vendored
Normal file
@@ -0,0 +1,11 @@
{
"name": "Private Playlist",
"comment": "A smart playlist that is explicitly private",
"public": false,
"all": [
{"is": {"loved": true}}
],
"sort": "title",
"order": "asc",
"limit": 100
}
11
tests/fixtures/playlists/public_playlist.nsp
vendored
Normal file
@@ -0,0 +1,11 @@
{
"name": "Public Playlist",
"comment": "A smart playlist that is public",
"public": true,
"all": [
{"inTheLast": {"lastPlayed": 30}}
],
"sort": "lastPlayed",
"order": "desc",
"limit": 50
}

@@ -5,7 +5,7 @@ import (
)

// MockPluginManager is a mock implementation of plugins.PluginManager for testing.
// It implements EnablePlugin, DisablePlugin, UpdatePluginConfig, UpdatePluginUsers, UpdatePluginLibraries and RescanPlugins methods.
// It implements EnablePlugin, DisablePlugin, UpdatePluginConfig, ValidatePluginConfig, UpdatePluginUsers, UpdatePluginLibraries and RescanPlugins methods.
type MockPluginManager struct {
// EnablePluginFn is called when EnablePlugin is invoked. If nil, returns EnableError.
EnablePluginFn func(ctx context.Context, id string) error
@@ -13,6 +13,8 @@ type MockPluginManager struct {
DisablePluginFn func(ctx context.Context, id string) error
// UpdatePluginConfigFn is called when UpdatePluginConfig is invoked. If nil, returns ConfigError.
UpdatePluginConfigFn func(ctx context.Context, id, configJSON string) error
// ValidatePluginConfigFn is called when ValidatePluginConfig is invoked. If nil, returns ValidateError.
ValidatePluginConfigFn func(ctx context.Context, id, configJSON string) error
// UpdatePluginUsersFn is called when UpdatePluginUsers is invoked. If nil, returns UsersError.
UpdatePluginUsersFn func(ctx context.Context, id, usersJSON string, allUsers bool) error
// UpdatePluginLibrariesFn is called when UpdatePluginLibraries is invoked. If nil, returns LibrariesError.
@@ -24,6 +26,7 @@ type MockPluginManager struct {
EnableError error
DisableError error
ConfigError error
ValidateError error
UsersError error
LibrariesError error
RescanError error
@@ -35,6 +38,10 @@ type MockPluginManager struct {
ID string
ConfigJSON string
}
ValidatePluginConfigCalls []struct {
ID string
ConfigJSON string
}
UpdatePluginUsersCalls []struct {
ID string
UsersJSON string
@@ -75,6 +82,17 @@ func (m *MockPluginManager) UpdatePluginConfig(ctx context.Context, id, configJS
return m.ConfigError
}

func (m *MockPluginManager) ValidatePluginConfig(ctx context.Context, id, configJSON string) error {
m.ValidatePluginConfigCalls = append(m.ValidatePluginConfigCalls, struct {
ID string
ConfigJSON string
}{ID: id, ConfigJSON: configJSON})
if m.ValidatePluginConfigFn != nil {
return m.ValidatePluginConfigFn(ctx, id, configJSON)
}
return m.ValidateError
}

func (m *MockPluginManager) UpdatePluginUsers(ctx context.Context, id, usersJSON string, allUsers bool) error {
m.UpdatePluginUsersCalls = append(m.UpdatePluginUsersCalls, struct {
ID string

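Editor's note (not part of the diff): a hypothetical sketch of how the new ValidatePluginConfig support on the mock might be used in a test. The demo function, the error text and the "lastfm-plugin" id are invented; setting ValidateError forces a failure, and ValidatePluginConfigCalls records the arguments for assertions.

func demoMockValidate() {
	mock := &MockPluginManager{ValidateError: errors.New("missing required field: apiKey")}

	// With ValidatePluginConfigFn left nil, the configured ValidateError is returned.
	err := mock.ValidatePluginConfig(context.Background(), "lastfm-plugin", `{"apiKey":""}`)
	fmt.Println(err)                                  // missing required field: apiKey
	fmt.Println(len(mock.ValidatePluginConfigCalls))  // 1
	fmt.Println(mock.ValidatePluginConfigCalls[0].ID) // lastfm-plugin
}
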
@@ -27,6 +27,10 @@
<meta property="og:image:width" content="300">
<meta property="og:image:height" content="300">
<title>Navidrome</title>
<script>
// Shim for libraries that check for Node.js process object
window.process = { env: {} };
</script>
<script>
window.__APP_CONFIG__ = {{ .AppConfig }}
</script>

4742
ui/package-lock.json
generated
File diff suppressed because it is too large
@@ -16,6 +16,9 @@
"postinstall": "bin/update-workbox.sh"
},
"dependencies": {
"@jsonforms/core": "^2.5.2",
"@jsonforms/material-renderers": "^2.5.2",
"@jsonforms/react": "^2.5.2",
"@material-ui/core": "^4.12.4",
"@material-ui/icons": "^4.11.3",
"@material-ui/lab": "^4.0.0-alpha.61",

@@ -108,6 +108,9 @@ const AlbumSongs = (props) => {
/>
),
artist: isDesktop && <ArtistLinkField source="artist" sortable={false} />,
composer: isDesktop && (
<ArtistLinkField source="composer" sortable={false} />
),
duration: <DurationField source="duration" sortable={false} />,
year: isDesktop && (
<FunctionField
@@ -148,6 +151,7 @@ const AlbumSongs = (props) => {
columns: toggleableFields,
omittedColumns: ['title'],
defaultOff: [
'composer',
'channels',
'bpm',
'year',

@@ -95,6 +95,19 @@ const Player = () => {
}
}, [audioInstance, context, gainNode, playerState, gainInfo])

useEffect(() => {
const handleBeforeUnload = (e) => {
// Check there's a current track and it is actually playing (not paused)
if (playerState.current?.uuid && audioInstance && !audioInstance.paused) {
e.preventDefault()
e.returnValue = '' // Chrome requires returnValue to be set
}
}

window.addEventListener('beforeunload', handleBeforeUnload)
return () => window.removeEventListener('beforeunload', handleBeforeUnload)
}, [playerState, audioInstance])

const defaultOptions = useMemo(
() => ({
theme: playerTheme,

Some files were not shown because too many files have changed in this diff