Mirror of https://github.com/navidrome/navidrome.git (synced 2026-01-01 19:38:06 -05:00)

Compare commits: plugin-spi ... v0.54.5 (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 287079a9e4 |  |
@@ -4,7 +4,7 @@
"dockerfile": "Dockerfile",
"args": {
// Update the VARIANT arg to pick a version of Go: 1, 1.15, 1.14
"VARIANT": "1.24",
"VARIANT": "1.23",
// Options
"INSTALL_NODE": "true",
"NODE_VERSION": "v20"
2 .github/workflows/pipeline.yml vendored
@@ -71,7 +71,7 @@ jobs:
version: ${{ env.CROSS_TAGLIB_VERSION }}

- name: golangci-lint
uses: golangci/golangci-lint-action@v7
uses: golangci/golangci-lint-action@v6
with:
version: latest
problem-matchers: true
52 .github/workflows/update-translations.sh vendored
@@ -9,7 +9,6 @@ process_json() {
jq 'walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)' "$1"
}

# Function to check differences between local and remote translations
check_lang_diff() {
filename=${I18N_DIR}/"$1".json
url=$(curl -s -X POST https://poeditor.com/api/ \
@@ -36,58 +35,19 @@ check_lang_diff() {
rm -f poeditor.json poeditor.tmp "$filename".tmp
}

# Function to get the list of languages
get_language_list() {
response=$(curl -s -X POST https://api.poeditor.com/v2/languages/list \
-d api_token="${POEDITOR_APIKEY}" \
-d id="${POEDITOR_PROJECTID}")

echo $response
}

# Function to get the language name from the language code
get_language_name() {
lang_code="$1"
lang_list="$2"

lang_name=$(echo "$lang_list" | jq -r ".result.languages[] | select(.code == \"$lang_code\") | .name")

if [ -z "$lang_name" ]; then
echo "Error: Language code '$lang_code' not found" >&2
return 1
fi

echo "$lang_name"
}

# Function to get the language code from the file path
get_lang_code() {
filepath="$1"
# Extract just the filename
filename=$(basename "$filepath")

# Remove the extension
lang_code="${filename%.*}"

echo "$lang_code"
}

lang_list=$(get_language_list)

# Check differences for each language
for file in ${I18N_DIR}/*.json; do
code=$(get_lang_code "$file")
name=$(basename "$file")
code=$(echo "$name" | cut -f1 -d.)
lang=$(jq -r .languageName < "$file")
lang_name=$(get_language_name "$code" "$lang_list")
echo "Downloading $lang_name - $lang ($code)"
echo "Downloading $lang ($code)"
check_lang_diff "$code"
done


# List changed languages to stderr
languages=""
for file in $(git diff --name-only --exit-code | grep json); do
lang_code=$(get_lang_code "$file")
lang_name=$(get_language_name "$lang_code" "$lang_list")
languages="${languages}$(echo "$lang_name" | tr -d '\n'), "
lang=$(jq -r .languageName < "$file")
languages="${languages}$(echo $lang | tr -d '\n'), "
done
echo "${languages%??}" 1>&2
4 .gitignore vendored
@@ -23,5 +23,5 @@ music
docker-compose.yml
!contrib/docker-compose.yml
binaries
navidrome-master
*.exe
taglib
navidrome-master
@@ -1,7 +1,7 @@
version: "2"
run:
build-tags:
- netgo

linters:
enable:
- asasalint
@@ -11,48 +11,31 @@ linters:
- copyloopvar
- dogsled
- durationcheck
- errcheck
- errorlint
- gocritic
- gocyclo
- goprintffuncname
- gosec
- gosimple
- govet
- ineffassign
- misspell
- nakedret
- nilerr
- rowserrcheck
- unconvert
- whitespace
disable:
- staticcheck
settings:
gocritic:
disable-all: true
enabled-checks:
- deprecatedComment
gosec:
excludes:
- G501
- G401
- G505
- G115
govet:
enable:
- nilness
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- third_party$
- builtin$
- examples$
formatters:
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$
- typecheck
- unconvert
- unused
- whitespace

linters-settings:
govet:
enable:
- nilness
gosec:
excludes:
- G501
- G401
- G505
- G115 # Can't check context, where the warning is clearly a false positive. See discussion in https://github.com/securego/gosec/pull/1149
11 Dockerfile
@@ -61,7 +61,7 @@ COPY --from=ui /build /build

########################################################################################################################
### Build Navidrome binary
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.24-bookworm AS base
FROM --platform=$BUILDPLATFORM public.ecr.aws/docker/library/golang:1.23-bookworm AS base
RUN apt-get update && apt-get install -y clang lld
COPY --from=xx / /
WORKDIR /workspace
@@ -70,6 +70,8 @@ FROM --platform=$BUILDPLATFORM base AS build

# Install build dependencies for the target platform
ARG TARGETPLATFORM
ARG GIT_SHA
ARG GIT_TAG

RUN xx-apt install -y binutils gcc g++ libc6-dev zlib1g-dev
RUN xx-verify --setup
@@ -79,9 +81,6 @@ RUN --mount=type=bind,source=. \
--mount=type=cache,target=/go/pkg/mod \
go mod download

ARG GIT_SHA
ARG GIT_TAG

RUN --mount=type=bind,source=. \
--mount=from=ui,source=/build,target=./ui/build,ro \
--mount=from=osxcross,src=/osxcross/SDK,target=/xx-sdk,ro \
@@ -125,7 +124,7 @@ LABEL maintainer="deluan@navidrome.org"
LABEL org.opencontainers.image.source="https://github.com/navidrome/navidrome"

# Install ffmpeg and mpv
RUN apk add -U --no-cache ffmpeg mpv sqlite
RUN apk add -U --no-cache ffmpeg mpv

# Copy navidrome binary
COPY --from=build /out/navidrome /app/
@@ -133,12 +132,12 @@ COPY --from=build /out/navidrome /app/
VOLUME ["/data", "/music"]
ENV ND_MUSICFOLDER=/music
ENV ND_DATAFOLDER=/data
ENV ND_CONFIGFILE=/data/navidrome.toml
ENV ND_PORT=4533
ENV GODEBUG="asyncpreemptoff=1"
RUN touch /.nddockerenv

EXPOSE ${ND_PORT}
HEALTHCHECK CMD wget -O- http://localhost:${ND_PORT}/ping || exit 1
WORKDIR /app

ENTRYPOINT ["/app/navidrome"]
18 Makefile
@@ -29,27 +29,23 @@ dev: check_env ##@Development Start Navidrome in development mode, with hot-re
.PHONY: dev

server: check_go_env buildjs ##@Development Start the backend in development mode
@ND_ENABLEINSIGHTSCOLLECTOR="false" go tool reflex -d none -c reflex.conf
@ND_ENABLEINSIGHTSCOLLECTOR="false" go run github.com/cespare/reflex@latest -d none -c reflex.conf
.PHONY: server

watch: ##@Development Start Go tests in watch mode (re-run when code changes)
go tool ginkgo watch -tags=netgo -notify ./...
go run github.com/onsi/ginkgo/v2/ginkgo@latest watch -tags netgo -notify ./...
.PHONY: watch

test: ##@Development Run Go tests
go test -tags netgo ./...
.PHONY: test

testrace: ##@Development Run Go tests with race detector
go test -tags netgo -race -shuffle=on ./...
.PHONY: test

testall: testrace ##@Development Run Go and JS tests
testall: test ##@Development Run Go and JS tests
@(cd ./ui && npm run test:ci)
.PHONY: testall

lint: ##@Development Lint Go code
go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest run -v --timeout 5m
go run github.com/golangci/golangci-lint/cmd/golangci-lint@latest run -v --timeout 5m
.PHONY: lint

lintall: lint ##@Development Lint Go and JS code
@@ -59,16 +55,16 @@ lintall: lint ##@Development Lint Go and JS code

format: ##@Development Format code
@(cd ./ui && npm run prettier)
@go tool goimports -w `find . -name '*.go' | grep -v _gen.go$$`
@go run golang.org/x/tools/cmd/goimports@latest -w `find . -name '*.go' | grep -v _gen.go$$`
@go mod tidy
.PHONY: format

wire: check_go_env ##@Development Update Dependency Injection
go tool wire gen -tags=netgo ./...
go run github.com/google/wire/cmd/wire@latest gen -tags=netgo ./...
.PHONY: wire

snapshots: ##@Development Update (GoLang) Snapshot tests
UPDATE_SNAPSHOTS=true go tool ginkgo ./server/subsonic/responses/...
UPDATE_SNAPSHOTS=true go run github.com/onsi/ginkgo/v2/ginkgo@latest ./server/subsonic/...
.PHONY: snapshots

migration-sql: ##@Development Create an empty SQL migration file

@@ -1,2 +1,2 @@
JS: sh -c "cd ./ui && npm start"
GO: go tool reflex -d none -c reflex.conf
GO: go run github.com/cespare/reflex@latest -d none -c reflex.conf
@@ -1,154 +0,0 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/djherbis/times"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type testFileInfo struct {
|
||||
fs.FileInfo
|
||||
}
|
||||
|
||||
func (t testFileInfo) BirthTime() time.Time {
|
||||
if ts := times.Get(t.FileInfo); ts.HasBirthTime() {
|
||||
return ts.BirthTime()
|
||||
}
|
||||
return t.FileInfo.ModTime()
|
||||
}
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
toP := func(name, sortName, mbid string) model.Participant {
|
||||
return model.Participant{
|
||||
Artist: model.Artist{Name: name, SortArtistName: sortName, MbzArtistID: mbid},
|
||||
}
|
||||
}
|
||||
|
||||
roles := []struct {
|
||||
model.Role
|
||||
model.ParticipantList
|
||||
}{
|
||||
{model.RoleComposer, model.ParticipantList{
|
||||
toP("coma a", "a, coma", "bf13b584-f27c-43db-8f42-32898d33d4e2"),
|
||||
toP("comb", "comb", "924039a2-09c6-4d29-9b4f-50cc54447d36"),
|
||||
}},
|
||||
{model.RoleLyricist, model.ParticipantList{
|
||||
toP("la a", "a, la", "c84f648f-68a6-40a2-a0cb-d135b25da3c2"),
|
||||
toP("lb", "lb", "0a7c582d-143a-4540-b4e9-77200835af65"),
|
||||
}},
|
||||
{model.RoleArranger, model.ParticipantList{
|
||||
toP("aa", "", "4605a1d4-8d15-42a3-bd00-9c20e42f71e6"),
|
||||
toP("ab", "", "002f0ff8-77bf-42cc-8216-61a9c43dc145"),
|
||||
}},
|
||||
{model.RoleConductor, model.ParticipantList{
|
||||
toP("cona", "", "af86879b-2141-42af-bad2-389a4dc91489"),
|
||||
toP("conb", "", "3dfa3c70-d7d3-4b97-b953-c298dd305e12"),
|
||||
}},
|
||||
{model.RoleDirector, model.ParticipantList{
|
||||
toP("dia", "", "f943187f-73de-4794-be47-88c66f0fd0f4"),
|
||||
toP("dib", "", "bceb75da-1853-4b3d-b399-b27f0cafc389"),
|
||||
}},
|
||||
{model.RoleEngineer, model.ParticipantList{
|
||||
toP("ea", "", "f634bf6d-d66a-425d-888a-28ad39392759"),
|
||||
toP("eb", "", "243d64ae-d514-44e1-901a-b918d692baee"),
|
||||
}},
|
||||
{model.RoleProducer, model.ParticipantList{
|
||||
toP("pra", "", "d971c8d7-999c-4a5f-ac31-719721ab35d6"),
|
||||
toP("prb", "", "f0a09070-9324-434f-a599-6d25ded87b69"),
|
||||
}},
|
||||
{model.RoleRemixer, model.ParticipantList{
|
||||
toP("ra", "", "c7dc6095-9534-4c72-87cc-aea0103462cf"),
|
||||
toP("rb", "", "8ebeef51-c08c-4736-992f-c37870becedd"),
|
||||
}},
|
||||
{model.RoleDJMixer, model.ParticipantList{
|
||||
toP("dja", "", "d063f13b-7589-4efc-ab7f-c60e6db17247"),
|
||||
toP("djb", "", "3636670c-385f-4212-89c8-0ff51d6bc456"),
|
||||
}},
|
||||
{model.RoleMixer, model.ParticipantList{
|
||||
toP("ma", "", "53fb5a2d-7016-427e-a563-d91819a5f35a"),
|
||||
toP("mb", "", "64c13e65-f0da-4ab9-a300-71ee53b0376a"),
|
||||
}},
|
||||
}
|
||||
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("Participants", func() {
|
||||
DescribeTable("test tags consistent across formats", func(format string) {
|
||||
path := "tests/fixtures/test." + format
|
||||
mds, err := e.Parse(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
info := mds[path]
|
||||
fileInfo, _ := os.Stat(path)
|
||||
info.FileInfo = testFileInfo{FileInfo: fileInfo}
|
||||
|
||||
metadata := metadata.New(path, info)
|
||||
mf := metadata.ToMediaFile(1, "folderID")
|
||||
|
||||
for _, data := range roles {
|
||||
role := data.Role
|
||||
artists := data.ParticipantList
|
||||
|
||||
actual := mf.Participants[role]
|
||||
Expect(actual).To(HaveLen(len(artists)))
|
||||
|
||||
for i := range artists {
|
||||
actualArtist := actual[i]
|
||||
expectedArtist := artists[i]
|
||||
|
||||
Expect(actualArtist.Name).To(Equal(expectedArtist.Name))
|
||||
Expect(actualArtist.SortArtistName).To(Equal(expectedArtist.SortArtistName))
|
||||
Expect(actualArtist.MbzArtistID).To(Equal(expectedArtist.MbzArtistID))
|
||||
}
|
||||
}
|
||||
|
||||
if format != "m4a" {
|
||||
performers := mf.Participants[model.RolePerformer]
|
||||
Expect(performers).To(HaveLen(8))
|
||||
|
||||
rules := map[string][]string{
|
||||
"pgaa": {"2fd0b311-9fa8-4ff9-be5d-f6f3d16b835e", "Guitar"},
|
||||
"pgbb": {"223d030b-bf97-4c2a-ad26-b7f7bbe25c93", "Guitar", ""},
|
||||
"pvaa": {"cb195f72-448f-41c8-b962-3f3c13d09d38", "Vocals"},
|
||||
"pvbb": {"60a1f832-8ca2-49f6-8660-84d57f07b520", "Vocals", "Flute"},
|
||||
"pfaa": {"51fb40c-0305-4bf9-a11b-2ee615277725", "", "Flute"},
|
||||
}
|
||||
|
||||
for name, rule := range rules {
|
||||
mbid := rule[0]
|
||||
for i := 1; i < len(rule); i++ {
|
||||
found := false
|
||||
|
||||
for _, mapped := range performers {
|
||||
if mapped.Name == name && mapped.MbzArtistID == mbid && mapped.SubRole == rule[i] {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
Expect(found).To(BeTrue(), "Could not find matching artist")
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Entry("FLAC format", "flac"),
|
||||
Entry("M4a format", "m4a"),
|
||||
Entry("OGG format", "ogg"),
|
||||
Entry("WMA format", "wv"),
|
||||
|
||||
Entry("MP3 format", "mp3"),
|
||||
Entry("WAV format", "wav"),
|
||||
Entry("AIFF format", "aiff"),
|
||||
)
|
||||
})
|
||||
})
|
||||
@@ -1,151 +0,0 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/core/storage/local"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
)
|
||||
|
||||
type extractor struct {
|
||||
baseDir string
|
||||
}
|
||||
|
||||
func (e extractor) Parse(files ...string) (map[string]metadata.Info, error) {
|
||||
results := make(map[string]metadata.Info)
|
||||
for _, path := range files {
|
||||
props, err := e.extractMetadata(path)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
results[path] = *props
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (e extractor) Version() string {
|
||||
return Version()
|
||||
}
|
||||
|
||||
func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
|
||||
fullPath := filepath.Join(e.baseDir, filePath)
|
||||
tags, err := Read(fullPath)
|
||||
if err != nil {
|
||||
log.Warn("extractor: Error reading metadata from file. Skipping", "filePath", fullPath, err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse audio properties
|
||||
ap := metadata.AudioProperties{}
|
||||
if length, ok := tags["_lengthinmilliseconds"]; ok && len(length) > 0 {
|
||||
millis, _ := strconv.Atoi(length[0])
|
||||
if millis > 0 {
|
||||
ap.Duration = (time.Millisecond * time.Duration(millis)).Round(time.Millisecond * 10)
|
||||
}
|
||||
delete(tags, "_lengthinmilliseconds")
|
||||
}
|
||||
parseProp := func(prop string, target *int) {
|
||||
if value, ok := tags[prop]; ok && len(value) > 0 {
|
||||
*target, _ = strconv.Atoi(value[0])
|
||||
delete(tags, prop)
|
||||
}
|
||||
}
|
||||
parseProp("_bitrate", &ap.BitRate)
|
||||
parseProp("_channels", &ap.Channels)
|
||||
parseProp("_samplerate", &ap.SampleRate)
|
||||
parseProp("_bitspersample", &ap.BitDepth)
|
||||
|
||||
// Parse track/disc totals
|
||||
parseTuple := func(prop string) {
|
||||
tagName := prop + "number"
|
||||
tagTotal := prop + "total"
|
||||
if value, ok := tags[tagName]; ok && len(value) > 0 {
|
||||
parts := strings.Split(value[0], "/")
|
||||
tags[tagName] = []string{parts[0]}
|
||||
if len(parts) == 2 {
|
||||
tags[tagTotal] = []string{parts[1]}
|
||||
}
|
||||
}
|
||||
}
|
||||
parseTuple("track")
|
||||
parseTuple("disc")
|
||||
|
||||
// Adjust some ID3 tags
|
||||
parseLyrics(tags)
|
||||
parseTIPL(tags)
|
||||
delete(tags, "tmcl") // TMCL is already parsed by TagLib
|
||||
|
||||
return &metadata.Info{
|
||||
Tags: tags,
|
||||
AudioProperties: ap,
|
||||
HasPicture: tags["has_picture"] != nil && len(tags["has_picture"]) > 0 && tags["has_picture"][0] == "true",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// parseLyrics make sure lyrics tags have language
|
||||
func parseLyrics(tags map[string][]string) {
|
||||
lyrics := tags["lyrics"]
|
||||
if len(lyrics) > 0 {
|
||||
tags["lyrics:xxx"] = lyrics
|
||||
delete(tags, "lyrics")
|
||||
}
|
||||
}
|
||||
|
||||
// These are the only roles we support, based on Picard's tag map:
|
||||
// https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
var tiplMapping = map[string]string{
|
||||
"arranger": "arranger",
|
||||
"engineer": "engineer",
|
||||
"producer": "producer",
|
||||
"mix": "mixer",
|
||||
"DJ-mix": "djmixer",
|
||||
}
|
||||
|
||||
// parseTIPL parses the ID3v2.4 TIPL frame string, which is received from TagLib in the format:
|
||||
//
|
||||
// "arranger Andrew Powell engineer Chris Blair engineer Pat Stapley producer Eric Woolfson".
|
||||
//
|
||||
// and breaks it down into a map of roles and names, e.g.:
|
||||
//
|
||||
// {"arranger": ["Andrew Powell"], "engineer": ["Chris Blair", "Pat Stapley"], "producer": ["Eric Woolfson"]}.
|
||||
func parseTIPL(tags map[string][]string) {
|
||||
tipl := tags["tipl"]
|
||||
if len(tipl) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
addRole := func(currentRole string, currentValue []string) {
|
||||
if currentRole != "" && len(currentValue) > 0 {
|
||||
role := tiplMapping[currentRole]
|
||||
tags[role] = append(tags[role], strings.Join(currentValue, " "))
|
||||
}
|
||||
}
|
||||
|
||||
var currentRole string
|
||||
var currentValue []string
|
||||
for _, part := range strings.Split(tipl[0], " ") {
|
||||
if _, ok := tiplMapping[part]; ok {
|
||||
addRole(currentRole, currentValue)
|
||||
currentRole = part
|
||||
currentValue = nil
|
||||
continue
|
||||
}
|
||||
currentValue = append(currentValue, part)
|
||||
}
|
||||
addRole(currentRole, currentValue)
|
||||
delete(tags, "tipl")
|
||||
}
|
||||
|
||||
var _ local.Extractor = (*extractor)(nil)
|
||||
|
||||
func init() {
|
||||
local.RegisterExtractor("taglib", func(_ fs.FS, baseDir string) local.Extractor {
|
||||
// ignores fs, as taglib extractor only works with local files
|
||||
return &extractor{baseDir}
|
||||
})
|
||||
}
|
||||
@@ -1,296 +0,0 @@
|
||||
package taglib
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Extractor", func() {
|
||||
var e *extractor
|
||||
|
||||
BeforeEach(func() {
|
||||
e = &extractor{}
|
||||
})
|
||||
|
||||
Describe("Parse", func() {
|
||||
It("correctly parses metadata from all files in folder", func() {
|
||||
mds, err := e.Parse(
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
|
||||
// Test MP3
|
||||
m := mds["tests/fixtures/test.mp3"]
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Song"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
|
||||
Expect(m.HasPicture).To(BeTrue())
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal("1.02s"))
|
||||
Expect(m.AudioProperties.BitRate).To(Equal(192))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(44100))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("tcmp", []string{"1"})),
|
||||
)
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014-05-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("originaldate", []string{"1996-11-21"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("releasedate", []string{"2020-12-31"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("discnumber", []string{"1"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_gain", []string{"+3.21518 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_album_peak", []string{"0.9125"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_gain", []string{"-1.48 dB"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("replaygain_track_peak", []string{"0.4512"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracknumber", []string{"2"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
|
||||
Expect(m.Tags).ToNot(HaveKey("lyrics"))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}), HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
"[00:00.00]This is\n[00:02.50]English SYLT\n",
|
||||
})))
|
||||
Expect(m.Tags).To(Or(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}), HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]unspecified SYLT\n",
|
||||
})))
|
||||
|
||||
// Test OGG
|
||||
m = mds["tests/fixtures/test.ogg"]
|
||||
Expect(err).To(BeNil())
|
||||
Expect(m.Tags).To(HaveKeyWithValue("fbpm", []string{"141.7"}))
|
||||
|
||||
// TabLib 1.12 returns 18, previous versions return 39.
|
||||
// See https://github.com/taglib/taglib/commit/2f238921824741b2cfe6fbfbfc9701d9827ab06b
|
||||
Expect(m.AudioProperties.BitRate).To(BeElementOf(18, 39, 40, 43, 49))
|
||||
Expect(m.AudioProperties.Channels).To(BeElementOf(2))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.AudioProperties.SampleRate).To(BeElementOf(8000))
|
||||
Expect(m.HasPicture).To(BeFalse())
|
||||
})
|
||||
|
||||
DescribeTable("Format-Specific tests",
|
||||
func(file, duration string, channels, samplerate, bitdepth int, albumGain, albumPeak, trackGain, trackPeak string, id3Lyrics bool) {
|
||||
file = "tests/fixtures/" + file
|
||||
mds, err := e.Parse(file)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(1))
|
||||
|
||||
m := mds[file]
|
||||
|
||||
Expect(m.HasPicture).To(BeFalse())
|
||||
Expect(m.AudioProperties.Duration.String()).To(Equal(duration))
|
||||
Expect(m.AudioProperties.Channels).To(Equal(channels))
|
||||
Expect(m.AudioProperties.SampleRate).To(Equal(samplerate))
|
||||
Expect(m.AudioProperties.BitDepth).To(Equal(bitdepth))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_gain", []string{albumGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{albumGain}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_album_peak", []string{albumPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_album_peak", []string{albumPeak}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_gain", []string{trackGain}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_gain", []string{trackGain}),
|
||||
))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("replaygain_track_peak", []string{trackPeak}),
|
||||
HaveKeyWithValue("----:com.apple.itunes:replaygain_track_peak", []string{trackPeak}),
|
||||
))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("title", []string{"Title"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("album", []string{"Album"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("artist", []string{"Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("albumartist", []string{"Album Artist"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("genre", []string{"Rock"}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("date", []string{"2014"}))
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("bpm", []string{"123"}))
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("tracknumber", []string{"3"}),
|
||||
HaveKeyWithValue("tracknumber", []string{"3/10"}),
|
||||
))
|
||||
if !strings.HasSuffix(file, "test.wma") {
|
||||
// TODO Not sure why this is not working for WMA
|
||||
Expect(m.Tags).To(HaveKeyWithValue("tracktotal", []string{"10"}))
|
||||
}
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("discnumber", []string{"1"}),
|
||||
HaveKeyWithValue("discnumber", []string{"1/2"}),
|
||||
))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("disctotal", []string{"2"}))
|
||||
|
||||
// WMA does not have a "compilation" tag, but "wm/iscompilation"
|
||||
Expect(m.Tags).To(Or(
|
||||
HaveKeyWithValue("compilation", []string{"1"}),
|
||||
HaveKeyWithValue("wm/iscompilation", []string{"1"})),
|
||||
)
|
||||
|
||||
if id3Lyrics {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:eng", []string{
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
}))
|
||||
} else {
|
||||
Expect(m.Tags).To(HaveKeyWithValue("lyrics:xxx", []string{
|
||||
"[00:00.00]This is\n[00:02.50]unspecified",
|
||||
"[00:00.00]This is\n[00:02.50]English",
|
||||
}))
|
||||
}
|
||||
|
||||
Expect(m.Tags).To(HaveKeyWithValue("comment", []string{"Comment1\nComment2"}))
|
||||
},
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1200:duration=1" test.flac
|
||||
Entry("correctly parses flac tags", "test.flac", "1s", 1, 44100, 16, "+4.06 dB", "0.12496948", "+4.06 dB", "0.12496948", false),
|
||||
|
||||
Entry("correctly parses m4a (aac) gain tags", "01 Invisible (RED) Edit Version.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false),
|
||||
Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false),
|
||||
Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
|
||||
// Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
|
||||
Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=800:duration=1" test.wv
|
||||
Entry("correctly parses wv (wavpak) tags", "test.wv", "1s", 1, 44100, 16, "3.43 dB", "0.125061", "3.43 dB", "0.125061", false),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1000:duration=1" test.wav
|
||||
Entry("correctly parses wav tags", "test.wav", "1s", 1, 44100, 16, "3.06 dB", "0.125056", "3.06 dB", "0.125056", true),
|
||||
|
||||
// ffmpeg -f lavfi -i "sine=frequency=1400:duration=1" test.aiff
|
||||
Entry("correctly parses aiff tags", "test.aiff", "1s", 1, 44100, 16, "2.00 dB", "0.124972", "2.00 dB", "0.124972", true),
|
||||
)
|
||||
|
||||
// Skip these tests when running as root
|
||||
Context("Access Forbidden", func() {
|
||||
var accessForbiddenFile string
|
||||
var RegularUserContext = XContext
|
||||
var isRegularUser = os.Getuid() != 0
|
||||
if isRegularUser {
|
||||
RegularUserContext = Context
|
||||
}
|
||||
|
||||
// Only run permission tests if we are not root
|
||||
RegularUserContext("when run without root privileges", func() {
|
||||
BeforeEach(func() {
|
||||
accessForbiddenFile = utils.TempFileName("access_forbidden-", ".mp3")
|
||||
|
||||
f, err := os.OpenFile(accessForbiddenFile, os.O_WRONLY|os.O_CREATE, 0222)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
DeferCleanup(func() {
|
||||
Expect(f.Close()).To(Succeed())
|
||||
Expect(os.Remove(accessForbiddenFile)).To(Succeed())
|
||||
})
|
||||
})
|
||||
|
||||
It("correctly handle unreadable file due to insufficient read permission", func() {
|
||||
_, err := e.extractMetadata(accessForbiddenFile)
|
||||
Expect(err).To(MatchError(os.ErrPermission))
|
||||
})
|
||||
|
||||
It("skips the file if it cannot be read", func() {
|
||||
files := []string{
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
accessForbiddenFile,
|
||||
}
|
||||
mds, err := e.Parse(files...)
|
||||
Expect(err).NotTo(HaveOccurred())
|
||||
Expect(mds).To(HaveLen(2))
|
||||
Expect(mds).ToNot(HaveKey(accessForbiddenFile))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
Describe("Error Checking", func() {
|
||||
It("returns a generic ErrPath if file does not exist", func() {
|
||||
testFilePath := "tests/fixtures/NON_EXISTENT.ogg"
|
||||
_, err := e.extractMetadata(testFilePath)
|
||||
Expect(err).To(MatchError(fs.ErrNotExist))
|
||||
})
|
||||
It("does not throw a SIGSEGV error when reading a file with an invalid frame", func() {
|
||||
// File has an empty TDAT frame
|
||||
md, err := e.extractMetadata("tests/fixtures/invalid-files/test-invalid-frame.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(md.Tags).To(HaveKeyWithValue("albumartist", []string{"Elvis Presley"}))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("parseTIPL", func() {
|
||||
var tags map[string][]string
|
||||
|
||||
BeforeEach(func() {
|
||||
tags = make(map[string][]string)
|
||||
})
|
||||
|
||||
Context("when the TIPL string is populated", func() {
|
||||
It("correctly parses roles and names", func() {
|
||||
tags["tipl"] = []string{"arranger Andrew Powell DJ-mix François Kevorkian DJ-mix Jane Doe engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["arranger"]).To(ConsistOf("Andrew Powell"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Chris Blair"))
|
||||
Expect(tags["djmixer"]).To(ConsistOf("François Kevorkian", "Jane Doe"))
|
||||
})
|
||||
|
||||
It("handles multiple names for a single role", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer Eric Woolfson engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags["producer"]).To(ConsistOf("Eric Woolfson"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
|
||||
It("discards roles without names", func() {
|
||||
tags["tipl"] = []string{"engineer Pat Stapley producer engineer Chris Blair"}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).ToNot(HaveKey("producer"))
|
||||
Expect(tags["engineer"]).To(ConsistOf("Pat Stapley", "Chris Blair"))
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL string is empty", func() {
|
||||
It("does nothing", func() {
|
||||
tags["tipl"] = []string{""}
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
|
||||
Context("when the TIPL is not present", func() {
|
||||
It("does nothing", func() {
|
||||
parseTIPL(tags)
|
||||
Expect(tags).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
@@ -1,157 +0,0 @@
|
||||
package taglib
|
||||
|
||||
/*
|
||||
#cgo !windows pkg-config: --define-prefix taglib
|
||||
#cgo windows pkg-config: taglib
|
||||
#cgo illumos LDFLAGS: -lstdc++ -lsendfile
|
||||
#cgo linux darwin CXXFLAGS: -std=c++11
|
||||
#cgo darwin LDFLAGS: -L/opt/homebrew/opt/taglib/lib
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include "taglib_wrapper.h"
|
||||
*/
|
||||
import "C"
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime/debug"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"unsafe"
|
||||
|
||||
"github.com/navidrome/navidrome/log"
|
||||
)
|
||||
|
||||
const iTunesKeyPrefix = "----:com.apple.itunes:"
|
||||
|
||||
func Version() string {
|
||||
return C.GoString(C.taglib_version())
|
||||
}
|
||||
|
||||
func Read(filename string) (tags map[string][]string, err error) {
|
||||
// Do not crash on failures in the C code/library
|
||||
debug.SetPanicOnFault(true)
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
log.Error("extractor: recovered from panic when reading tags", "file", filename, "error", r)
|
||||
err = fmt.Errorf("extractor: recovered from panic: %s", r)
|
||||
}
|
||||
}()
|
||||
|
||||
fp := getFilename(filename)
|
||||
defer C.free(unsafe.Pointer(fp))
|
||||
id, m, release := newMap()
|
||||
defer release()
|
||||
|
||||
log.Trace("extractor: reading tags", "filename", filename, "map_id", id)
|
||||
res := C.taglib_read(fp, C.ulong(id))
|
||||
switch res {
|
||||
case C.TAGLIB_ERR_PARSE:
|
||||
// Check additional case whether the file is unreadable due to permission
|
||||
file, fileErr := os.OpenFile(filename, os.O_RDONLY, 0600)
|
||||
defer file.Close()
|
||||
|
||||
if os.IsPermission(fileErr) {
|
||||
return nil, fmt.Errorf("navidrome does not have permission: %w", fileErr)
|
||||
} else if fileErr != nil {
|
||||
return nil, fmt.Errorf("cannot parse file media file: %w", fileErr)
|
||||
} else {
|
||||
return nil, fmt.Errorf("cannot parse file media file")
|
||||
}
|
||||
case C.TAGLIB_ERR_AUDIO_PROPS:
|
||||
return nil, fmt.Errorf("can't get audio properties from file")
|
||||
}
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||
j, _ := json.Marshal(m)
|
||||
log.Trace("extractor: read tags", "tags", string(j), "filename", filename, "id", id)
|
||||
} else {
|
||||
log.Trace("extractor: read tags", "tags", m, "filename", filename, "id", id)
|
||||
}
|
||||
|
||||
return m, nil
|
||||
}
|
||||
|
||||
type tagMap map[string][]string
|
||||
|
||||
var allMaps sync.Map
|
||||
var mapsNextID atomic.Uint32
|
||||
|
||||
func newMap() (uint32, tagMap, func()) {
|
||||
id := mapsNextID.Add(1)
|
||||
|
||||
m := tagMap{}
|
||||
allMaps.Store(id, m)
|
||||
|
||||
return id, m, func() {
|
||||
allMaps.Delete(id)
|
||||
}
|
||||
}
|
||||
|
||||
func doPutTag(id C.ulong, key string, val *C.char) {
|
||||
if key == "" {
|
||||
return
|
||||
}
|
||||
|
||||
r, _ := allMaps.Load(uint32(id))
|
||||
m := r.(tagMap)
|
||||
k := strings.ToLower(key)
|
||||
v := strings.TrimSpace(C.GoString(val))
|
||||
m[k] = append(m[k], v)
|
||||
}
|
||||
|
||||
//export goPutM4AStr
|
||||
func goPutM4AStr(id C.ulong, key *C.char, val *C.char) {
|
||||
k := C.GoString(key)
|
||||
|
||||
// Special for M4A, do not catch keys that have no actual name
|
||||
k = strings.TrimPrefix(k, iTunesKeyPrefix)
|
||||
doPutTag(id, k, val)
|
||||
}
|
||||
|
||||
//export goPutStr
|
||||
func goPutStr(id C.ulong, key *C.char, val *C.char) {
|
||||
doPutTag(id, C.GoString(key), val)
|
||||
}
|
||||
|
||||
//export goPutInt
|
||||
func goPutInt(id C.ulong, key *C.char, val C.int) {
|
||||
valStr := strconv.Itoa(int(val))
|
||||
vp := C.CString(valStr)
|
||||
defer C.free(unsafe.Pointer(vp))
|
||||
goPutStr(id, key, vp)
|
||||
}
|
||||
|
||||
//export goPutLyrics
|
||||
func goPutLyrics(id C.ulong, lang *C.char, val *C.char) {
|
||||
doPutTag(id, "lyrics:"+C.GoString(lang), val)
|
||||
}
|
||||
|
||||
//export goPutLyricLine
|
||||
func goPutLyricLine(id C.ulong, lang *C.char, text *C.char, time C.int) {
|
||||
language := C.GoString(lang)
|
||||
line := C.GoString(text)
|
||||
timeGo := int64(time)
|
||||
|
||||
ms := timeGo % 1000
|
||||
timeGo /= 1000
|
||||
sec := timeGo % 60
|
||||
timeGo /= 60
|
||||
minimum := timeGo % 60
|
||||
formattedLine := fmt.Sprintf("[%02d:%02d.%02d]%s\n", minimum, sec, ms/10, line)
|
||||
|
||||
key := "lyrics:" + language
|
||||
|
||||
r, _ := allMaps.Load(uint32(id))
|
||||
m := r.(tagMap)
|
||||
k := strings.ToLower(key)
|
||||
existing, ok := m[k]
|
||||
if ok {
|
||||
existing[0] += formattedLine
|
||||
} else {
|
||||
m[k] = []string{formattedLine}
|
||||
}
|
||||
}
|
||||
@@ -1,24 +0,0 @@
#define TAGLIB_ERR_PARSE -1
#define TAGLIB_ERR_AUDIO_PROPS -2

#ifdef __cplusplus
extern "C" {
#endif

#ifdef WIN32
#define FILENAME_CHAR_T wchar_t
#else
#define FILENAME_CHAR_T char
#endif

extern void goPutM4AStr(unsigned long id, char *key, char *val);
extern void goPutStr(unsigned long id, char *key, char *val);
extern void goPutInt(unsigned long id, char *key, int val);
extern void goPutLyrics(unsigned long id, char *lang, char *val);
extern void goPutLyricLine(unsigned long id, char *lang, char *text, int time);
int taglib_read(const FILENAME_CHAR_T *filename, unsigned long id);
char* taglib_version();

#ifdef __cplusplus
}
#endif
@@ -5,20 +5,25 @@ import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/scanner/metadata"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var (
|
||||
format string
|
||||
extractor string
|
||||
format string
|
||||
)
|
||||
|
||||
func init() {
|
||||
inspectCmd.Flags().StringVarP(&format, "format", "f", "jsonindent", "output format (pretty, toml, yaml, json, jsonindent)")
|
||||
inspectCmd.Flags().StringVarP(&extractor, "extractor", "x", "", "extractor to use (ffmpeg or taglib, default: auto)")
|
||||
inspectCmd.Flags().StringVarP(&format, "format", "f", "pretty", "output format (pretty, toml, yaml, json, jsonindent)")
|
||||
rootCmd.AddCommand(inspectCmd)
|
||||
}
|
||||
|
||||
@@ -43,7 +48,7 @@ var marshalers = map[string]func(interface{}) ([]byte, error){
|
||||
}
|
||||
|
||||
func prettyMarshal(v interface{}) ([]byte, error) {
|
||||
out := v.([]core.InspectOutput)
|
||||
out := v.([]inspectorOutput)
|
||||
var res strings.Builder
|
||||
for i := range out {
|
||||
res.WriteString(fmt.Sprintf("====================\nFile: %s\n\n", out[i].File))
|
||||
@@ -55,24 +60,39 @@ func prettyMarshal(v interface{}) ([]byte, error) {
|
||||
return []byte(res.String()), nil
|
||||
}
|
||||
|
||||
type inspectorOutput struct {
|
||||
File string
|
||||
RawTags metadata.ParsedTags
|
||||
MappedTags model.MediaFile
|
||||
}
|
||||
|
||||
func runInspector(args []string) {
|
||||
if extractor != "" {
|
||||
conf.Server.Scanner.Extractor = extractor
|
||||
}
|
||||
log.Info("Using extractor", "extractor", conf.Server.Scanner.Extractor)
|
||||
md, err := metadata.Extract(args...)
|
||||
if err != nil {
|
||||
log.Fatal("Error extracting tags", err)
|
||||
}
|
||||
mapper := scanner.NewMediaFileMapper(conf.Server.MusicFolder, &tests.MockedGenreRepo{})
|
||||
marshal := marshalers[format]
|
||||
if marshal == nil {
|
||||
log.Fatal("Invalid format", "format", format)
|
||||
}
|
||||
var out []core.InspectOutput
|
||||
for _, filePath := range args {
|
||||
if !model.IsAudioFile(filePath) {
|
||||
log.Warn("Not an audio file", "file", filePath)
|
||||
var out []inspectorOutput
|
||||
for k, v := range md {
|
||||
if !model.IsAudioFile(k) {
|
||||
continue
|
||||
}
|
||||
output, err := core.Inspect(filePath, 1, "")
|
||||
if err != nil {
|
||||
log.Warn("Unable to process file", "file", filePath, "error", err)
|
||||
if len(v.Tags) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
out = append(out, *output)
|
||||
out = append(out, inspectorOutput{
|
||||
File: k,
|
||||
RawTags: v.Tags,
|
||||
MappedTags: mapper.ToMediaFile(v),
|
||||
})
|
||||
}
|
||||
data, _ := marshal(out)
|
||||
fmt.Println(string(data))
|
||||
|
||||
@@ -69,7 +69,7 @@ func runExporter() {
sqlDB := db.Db()
ds := persistence.New(sqlDB)
ctx := auth.WithAdminUser(context.Background(), ds)
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true, false)
playlist, err := ds.Playlist(ctx).GetWithTracks(playlistID, true)
if err != nil && !errors.Is(err, model.ErrNotFound) {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
}
@@ -79,7 +79,7 @@ func runExporter() {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
}
if len(playlists) > 0 {
playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true, false)
playlist, err = ds.Playlist(ctx).GetWithTracks(playlists[0].ID, true)
if err != nil {
log.Fatal("Error retrieving playlist", "name", playlistID, err)
}
131 cmd/root.go
@@ -9,14 +9,11 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/go-chi/chi/v5/middleware"
|
||||
_ "github.com/navidrome/navidrome/adapters/taglib"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/resources"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/scheduler"
|
||||
"github.com/navidrome/navidrome/server/backgrounds"
|
||||
"github.com/spf13/cobra"
|
||||
@@ -48,11 +45,8 @@ Complete documentation is available at https://www.navidrome.org/docs`,
|
||||
|
||||
// Execute runs the root cobra command, which will start the Navidrome server by calling the runNavidrome function.
|
||||
func Execute() {
|
||||
ctx, cancel := mainContext(context.Background())
|
||||
defer cancel()
|
||||
|
||||
rootCmd.SetVersionTemplate(`{{println .Version}}`)
|
||||
if err := rootCmd.ExecuteContext(ctx); err != nil {
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
@@ -61,7 +55,7 @@ func preRun() {
|
||||
if !noBanner {
|
||||
println(resources.Banner())
|
||||
}
|
||||
conf.Load(noBanner)
|
||||
conf.Load()
|
||||
}
|
||||
|
||||
func postRun() {
|
||||
@@ -72,23 +66,19 @@ func postRun() {
|
||||
// If any of the services returns an error, it will log it and exit. If the process receives a signal to exit,
|
||||
// it will cancel the context and exit gracefully.
|
||||
func runNavidrome(ctx context.Context) {
|
||||
defer db.Init(ctx)()
|
||||
defer db.Init()()
|
||||
|
||||
ctx, cancel := mainContext(ctx)
|
||||
defer cancel()
|
||||
|
||||
g, ctx := errgroup.WithContext(ctx)
|
||||
g.Go(startServer(ctx))
|
||||
g.Go(startSignaller(ctx))
|
||||
g.Go(startScheduler(ctx))
|
||||
g.Go(startPlaybackServer(ctx))
|
||||
g.Go(schedulePeriodicScan(ctx))
|
||||
g.Go(schedulePeriodicBackup(ctx))
|
||||
g.Go(startInsightsCollector(ctx))
|
||||
g.Go(scheduleDBOptimizer(ctx))
|
||||
if conf.Server.Scanner.Enabled {
|
||||
g.Go(runInitialScan(ctx))
|
||||
g.Go(startScanWatcher(ctx))
|
||||
g.Go(schedulePeriodicScan(ctx))
|
||||
} else {
|
||||
log.Warn(ctx, "Automatic Scanning is DISABLED")
|
||||
}
|
||||
|
||||
if err := g.Wait(); err != nil {
|
||||
log.Error("Fatal error in Navidrome. Aborting", err)
|
||||
@@ -108,9 +98,9 @@ func mainContext(ctx context.Context) (context.Context, context.CancelFunc) {
|
||||
// startServer starts the Navidrome web server, adding all the necessary routers.
|
||||
func startServer(ctx context.Context) func() error {
|
||||
return func() error {
|
||||
a := CreateServer()
|
||||
a := CreateServer(conf.Server.MusicFolder)
|
||||
a.MountRouter("Native API", consts.URLPathNativeAPI, CreateNativeAPIRouter())
|
||||
a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter(ctx))
|
||||
a.MountRouter("Subsonic API", consts.URLPathSubsonicAPI, CreateSubsonicAPIRouter())
|
||||
a.MountRouter("Public Endpoints", consts.URLPathPublic, CreatePublicRouter())
|
||||
if conf.Server.LastFM.Enabled {
|
||||
a.MountRouter("LastFM Auth", consts.URLPathNativeAPI+"/lastfm", CreateLastFMRouter())
|
||||
@@ -137,97 +127,29 @@ func startServer(ctx context.Context) func() error {
|
||||
// schedulePeriodicScan schedules a periodic scan of the music library, if configured.
|
||||
func schedulePeriodicScan(ctx context.Context) func() error {
|
||||
return func() error {
|
||||
schedule := conf.Server.Scanner.Schedule
|
||||
schedule := conf.Server.ScanSchedule
|
||||
if schedule == "" {
|
||||
log.Info(ctx, "Periodic scan is DISABLED")
|
||||
log.Warn("Periodic scan is DISABLED")
|
||||
return nil
|
||||
}
|
||||
|
||||
s := CreateScanner(ctx)
|
||||
scanner := GetScanner()
|
||||
schedulerInstance := scheduler.GetInstance()
|
||||
|
||||
log.Info("Scheduling periodic scan", "schedule", schedule)
|
||||
err := schedulerInstance.Add(schedule, func() {
|
||||
_, err := s.ScanAll(ctx, false)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error executing periodic scan", err)
|
||||
}
|
||||
_ = scanner.RescanAll(ctx, false)
|
||||
})
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error scheduling periodic scan", err)
|
||||
log.Error("Error scheduling periodic scan", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func pidHashChanged(ds model.DataStore) (bool, error) {
|
||||
pidAlbum, err := ds.Property(context.Background()).DefaultGet(consts.PIDAlbumKey, "")
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
pidTrack, err := ds.Property(context.Background()).DefaultGet(consts.PIDTrackKey, "")
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return !strings.EqualFold(pidAlbum, conf.Server.PID.Album) || !strings.EqualFold(pidTrack, conf.Server.PID.Track), nil
|
||||
}
|
||||
|
||||
func runInitialScan(ctx context.Context) func() error {
|
||||
return func() error {
|
||||
ds := CreateDataStore()
|
||||
fullScanRequired, err := ds.Property(ctx).DefaultGet(consts.FullScanAfterMigrationFlagKey, "0")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
inProgress, err := ds.Library(ctx).ScanInProgress()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
pidHasChanged, err := pidHashChanged(ds)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
scanNeeded := conf.Server.Scanner.ScanOnStartup || inProgress || fullScanRequired == "1" || pidHasChanged
|
||||
time.Sleep(2 * time.Second) // Wait 2 seconds before the initial scan
|
||||
if scanNeeded {
|
||||
scanner := CreateScanner(ctx)
|
||||
switch {
|
||||
case fullScanRequired == "1":
|
||||
log.Warn(ctx, "Full scan required after migration")
|
||||
_ = ds.Property(ctx).Delete(consts.FullScanAfterMigrationFlagKey)
|
||||
case pidHasChanged:
|
||||
log.Warn(ctx, "PID config changed, performing full scan")
|
||||
fullScanRequired = "1"
|
||||
case inProgress:
|
||||
log.Warn(ctx, "Resuming interrupted scan")
|
||||
default:
|
||||
log.Info("Executing initial scan")
|
||||
}
|
||||
|
||||
_, err = scanner.ScanAll(ctx, fullScanRequired == "1")
|
||||
if err != nil {
|
||||
log.Error(ctx, "Scan failed", err)
|
||||
} else {
|
||||
log.Info(ctx, "Scan completed")
|
||||
}
|
||||
} else {
|
||||
log.Debug(ctx, "Initial scan not needed")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func startScanWatcher(ctx context.Context) func() error {
|
||||
return func() error {
|
||||
if conf.Server.Scanner.WatcherWait == 0 {
|
||||
log.Debug("Folder watcher is DISABLED")
|
||||
return nil
|
||||
}
|
||||
w := CreateScanWatcher(ctx)
|
||||
err := w.Run(ctx)
|
||||
if err != nil {
|
||||
log.Error("Error starting watcher", err)
|
||||
log.Debug("Executing initial scan")
|
||||
if err := scanner.RescanAll(ctx, false); err != nil {
|
||||
log.Error("Error executing initial scan", err)
|
||||
}
|
||||
log.Debug("Finished initial scan")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
@@ -236,7 +158,7 @@ func schedulePeriodicBackup(ctx context.Context) func() error {
|
||||
return func() error {
|
||||
schedule := conf.Server.Backup.Schedule
|
||||
if schedule == "" {
|
||||
log.Info(ctx, "Periodic backup is DISABLED")
|
||||
log.Warn("Periodic backup is DISABLED")
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -267,21 +189,6 @@ func schedulePeriodicBackup(ctx context.Context) func() error {
|
||||
}
|
||||
}
|
||||
|
||||
func scheduleDBOptimizer(ctx context.Context) func() error {
|
||||
return func() error {
|
||||
log.Info(ctx, "Scheduling DB optimizer", "schedule", consts.OptimizeDBSchedule)
|
||||
schedulerInstance := scheduler.GetInstance()
|
||||
err := schedulerInstance.Add(consts.OptimizeDBSchedule, func() {
|
||||
if scanner.IsScanning() {
|
||||
log.Debug(ctx, "Skipping DB optimization because a scan is in progress")
|
||||
return
|
||||
}
|
||||
db.Optimize(ctx)
|
||||
})
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// startScheduler starts the Navidrome scheduler, which is used to run periodic tasks.
|
||||
func startScheduler(ctx context.Context) func() error {
|
||||
return func() error {
|
||||
|
||||
64 cmd/scan.go
@@ -2,28 +2,15 @@ package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/gob"
|
||||
"os"
|
||||
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/artwork"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/persistence"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/utils/pl"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
fullScan bool
|
||||
subprocess bool
|
||||
)
|
||||
var fullRescan bool
|
||||
|
||||
func init() {
|
||||
scanCmd.Flags().BoolVarP(&fullScan, "full", "f", false, "check all subfolders, ignoring timestamps")
|
||||
scanCmd.Flags().BoolVarP(&subprocess, "subprocess", "", false, "run as subprocess (internal use)")
|
||||
scanCmd.Flags().BoolVarP(&fullRescan, "full", "f", false, "check all subfolders, ignoring timestamps")
|
||||
rootCmd.AddCommand(scanCmd)
|
||||
}
|
||||
|
||||
@@ -32,53 +19,16 @@ var scanCmd = &cobra.Command{
|
||||
Short: "Scan music folder",
|
||||
Long: "Scan music folder for updates",
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
runScanner(cmd.Context())
|
||||
runScanner()
|
||||
},
|
||||
}
|
||||
|
||||
func trackScanInteractively(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
|
||||
for status := range pl.ReadOrDone(ctx, progress) {
|
||||
if status.Warning != "" {
|
||||
log.Warn(ctx, "Scan warning", "error", status.Warning)
|
||||
}
|
||||
if status.Error != "" {
|
||||
log.Error(ctx, "Scan error", "error", status.Error)
|
||||
}
|
||||
// Discard the progress status, we only care about errors
|
||||
}
|
||||
|
||||
if fullScan {
|
||||
func runScanner() {
|
||||
scanner := GetScanner()
|
||||
_ = scanner.RescanAll(context.Background(), fullRescan)
|
||||
if fullRescan {
|
||||
log.Info("Finished full rescan")
|
||||
} else {
|
||||
log.Info("Finished rescan")
|
||||
}
|
||||
}
|
||||
|
||||
func trackScanAsSubprocess(ctx context.Context, progress <-chan *scanner.ProgressInfo) {
|
||||
encoder := gob.NewEncoder(os.Stdout)
|
||||
for status := range pl.ReadOrDone(ctx, progress) {
|
||||
err := encoder.Encode(status)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Failed to encode status", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func runScanner(ctx context.Context) {
|
||||
sqlDB := db.Db()
|
||||
defer db.Db().Close()
|
||||
ds := persistence.New(sqlDB)
|
||||
pls := core.NewPlaylists(ds)
|
||||
|
||||
progress, err := scanner.CallScan(ctx, ds, artwork.NoopCacheWarmer(), pls, metrics.NewNoopInstance(), fullScan)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "Failed to scan", err)
|
||||
}
|
||||
|
||||
// Wait for the scanner to finish
|
||||
if subprocess {
|
||||
trackScanAsSubprocess(ctx, progress)
|
||||
} else {
|
||||
trackScanInteractively(ctx, progress)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ const triggerScanSignal = syscall.SIGUSR1

func startSignaller(ctx context.Context) func() error {
log.Info(ctx, "Starting signaler")
scanner := CreateScanner(ctx)
scanner := GetScanner()

return func() error {
var sigChan = make(chan os.Signal, 1)
@@ -27,11 +27,11 @@ func startSignaller(ctx context.Context) func() error {
case sig := <-sigChan:
log.Info(ctx, "Received signal, triggering a new scan", "signal", sig)
start := time.Now()
_, err := scanner.ScanAll(ctx, false)
err := scanner.RescanAll(ctx, false)
if err != nil {
log.Error(ctx, "Error scanning", err)
}
log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start))
log.Info(ctx, "Triggered scan complete", "elapsed", time.Since(start).Round(100*time.Millisecond))
case <-ctx.Done():
return nil
}
@@ -7,20 +7,17 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
"github.com/navidrome/navidrome/core/agents/listenbrainz"
|
||||
"github.com/navidrome/navidrome/core/artwork"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
"github.com/navidrome/navidrome/core/scrobbler"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/persistence"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/server"
|
||||
@@ -30,19 +27,9 @@ import (
|
||||
"github.com/navidrome/navidrome/server/subsonic"
|
||||
)
|
||||
|
||||
import (
|
||||
_ "github.com/navidrome/navidrome/adapters/taglib"
|
||||
)
|
||||
|
||||
// Injectors from wire_injectors.go:
|
||||
|
||||
func CreateDataStore() model.DataStore {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
return dataStore
|
||||
}
|
||||
|
||||
func CreateServer() *server.Server {
|
||||
func CreateServer(musicFolder string) *server.Server {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
broker := events.GetBroker()
|
||||
@@ -61,27 +48,27 @@ func CreateNativeAPIRouter() *nativeapi.Router {
|
||||
return router
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
|
||||
func CreateSubsonicAPIRouter() *subsonic.Router {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
transcodingCache := core.GetTranscodingCache()
|
||||
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
|
||||
share := core.NewShare(dataStore)
|
||||
archiver := core.NewArchiver(mediaStreamer, dataStore, share)
|
||||
players := core.NewPlayers(dataStore)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
scannerScanner := scanner.GetInstance(dataStore, playlists, cacheWarmer, broker, metricsMetrics)
|
||||
playTracker := scrobbler.GetPlayTracker(dataStore, broker)
|
||||
playbackServer := playback.GetInstance(dataStore)
|
||||
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, scannerScanner, broker, playlists, playTracker, share, playbackServer)
|
||||
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, externalMetadata, scannerScanner, broker, playlists, playTracker, share, playbackServer)
|
||||
return router
|
||||
}
|
||||
|
||||
@@ -90,9 +77,9 @@ func CreatePublicRouter() *public.Router {
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
transcodingCache := core.GetTranscodingCache()
|
||||
mediaStreamer := core.NewMediaStreamer(dataStore, fFmpeg, transcodingCache)
|
||||
share := core.NewShare(dataStore)
|
||||
@@ -129,39 +116,22 @@ func CreatePrometheus() metrics.Metrics {
|
||||
return metricsMetrics
|
||||
}
|
||||
|
||||
func CreateScanner(ctx context.Context) scanner.Scanner {
|
||||
func GetScanner() scanner.Scanner {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
agentsAgents := agents.New(dataStore)
|
||||
externalMetadata := core.NewExternalMetadata(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, externalMetadata)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
scannerScanner := scanner.GetInstance(dataStore, playlists, cacheWarmer, broker, metricsMetrics)
|
||||
return scannerScanner
|
||||
}
|
||||
|
||||
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
fileCache := artwork.GetImageCache()
|
||||
fFmpeg := ffmpeg.New()
|
||||
agentsAgents := agents.GetAgents(dataStore)
|
||||
provider := external.NewProvider(dataStore, agentsAgents)
|
||||
artworkArtwork := artwork.NewArtwork(dataStore, fileCache, fFmpeg, provider)
|
||||
cacheWarmer := artwork.NewCacheWarmer(artworkArtwork, fileCache)
|
||||
broker := events.GetBroker()
|
||||
playlists := core.NewPlaylists(dataStore)
|
||||
metricsMetrics := metrics.NewPrometheusInstance(dataStore)
|
||||
scannerScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
|
||||
watcher := scanner.NewWatcher(dataStore, scannerScanner)
|
||||
return watcher
|
||||
}
|
||||
|
||||
func GetPlaybackServer() playback.PlaybackServer {
|
||||
sqlDB := db.Db()
|
||||
dataStore := persistence.New(sqlDB)
|
||||
@@ -171,4 +141,4 @@ func GetPlaybackServer() playback.PlaybackServer {
|
||||
|
||||
// wire_injectors.go:
|
||||
|
||||
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.New, scanner.NewWatcher, metrics.NewPrometheusInstance, db.Db)
|
||||
var allProviders = wire.NewSet(core.Set, artwork.Set, server.New, subsonic.New, nativeapi.New, public.New, persistence.New, lastfm.NewRouter, listenbrainz.NewRouter, events.GetBroker, scanner.GetInstance, db.Db, metrics.NewPrometheusInstance)
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/agents/lastfm"
|
||||
@@ -13,7 +11,6 @@ import (
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/persistence"
|
||||
"github.com/navidrome/navidrome/scanner"
|
||||
"github.com/navidrome/navidrome/server"
|
||||
@@ -34,19 +31,12 @@ var allProviders = wire.NewSet(
|
||||
lastfm.NewRouter,
|
||||
listenbrainz.NewRouter,
|
||||
events.GetBroker,
|
||||
scanner.New,
|
||||
scanner.NewWatcher,
|
||||
metrics.NewPrometheusInstance,
|
||||
scanner.GetInstance,
|
||||
db.Db,
|
||||
metrics.NewPrometheusInstance,
|
||||
)
|
||||
|
||||
func CreateDataStore() model.DataStore {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateServer() *server.Server {
|
||||
func CreateServer(musicFolder string) *server.Server {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
@@ -58,7 +48,7 @@ func CreateNativeAPIRouter() *nativeapi.Router {
|
||||
))
|
||||
}
|
||||
|
||||
func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
|
||||
func CreateSubsonicAPIRouter() *subsonic.Router {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
@@ -94,13 +84,7 @@ func CreatePrometheus() metrics.Metrics {
|
||||
))
|
||||
}
|
||||
|
||||
func CreateScanner(ctx context.Context) scanner.Scanner {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
}
|
||||
|
||||
func CreateScanWatcher(ctx context.Context) scanner.Watcher {
|
||||
func GetScanner() scanner.Scanner {
|
||||
panic(wire.Build(
|
||||
allProviders,
|
||||
))
|
||||
|
||||
@@ -9,12 +9,9 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"github.com/go-viper/encoding/ini"
|
||||
"github.com/kr/pretty"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/utils/chain"
|
||||
"github.com/robfig/cron/v3"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
@@ -30,6 +27,8 @@ type configOptions struct {
|
||||
DbPath string
|
||||
LogLevel string
|
||||
LogFile string
|
||||
ScanInterval time.Duration
|
||||
ScanSchedule string
|
||||
SessionTimeout time.Duration
|
||||
BaseURL string
|
||||
BasePath string
|
||||
@@ -60,6 +59,7 @@ type configOptions struct {
|
||||
PreferSortTags bool
|
||||
IgnoredArticles string
|
||||
IndexGroups string
|
||||
SubsonicArtistParticipations bool
|
||||
FFmpegPath string
|
||||
MPVPath string
|
||||
MPVCmdTemplate string
|
||||
@@ -90,15 +90,11 @@ type configOptions struct {
|
||||
Scanner scannerOptions
|
||||
Jukebox jukeboxOptions
|
||||
Backup backupOptions
|
||||
PID pidOptions
|
||||
Inspect inspectOptions
|
||||
Subsonic subsonicOptions
|
||||
|
||||
Agents string
|
||||
LastFM lastfmOptions
|
||||
Spotify spotifyOptions
|
||||
ListenBrainz listenBrainzOptions
|
||||
Tags map[string]TagConf
|
||||
|
||||
// DevFlags. These are used to enable/disable debugging and incomplete features
|
||||
DevLogSourceLine bool
|
||||
@@ -117,37 +113,14 @@ type configOptions struct {
|
||||
DevArtworkThrottleBacklogTimeout time.Duration
|
||||
DevArtistInfoTimeToLive time.Duration
|
||||
DevAlbumInfoTimeToLive time.Duration
|
||||
DevExternalScanner bool
|
||||
DevScannerThreads uint
|
||||
DevInsightsInitialDelay time.Duration
|
||||
DevEnablePlayerInsights bool
|
||||
}
|
||||
|
||||
type scannerOptions struct {
|
||||
Enabled bool
|
||||
Schedule string
|
||||
WatcherWait time.Duration
|
||||
ScanOnStartup bool
|
||||
Extractor string
|
||||
ArtistJoiner string
|
||||
GenreSeparators string // Deprecated: Use Tags.genre.Split instead
|
||||
GroupAlbumReleases bool // Deprecated: Use PID.Album instead
|
||||
}
|
||||
|
||||
type subsonicOptions struct {
|
||||
AppendSubtitle bool
|
||||
ArtistParticipations bool
|
||||
DefaultReportRealPath bool
|
||||
LegacyClients string
|
||||
}
|
||||
|
||||
type TagConf struct {
|
||||
Ignore bool `yaml:"ignore"`
|
||||
Aliases []string `yaml:"aliases"`
|
||||
Type string `yaml:"type"`
|
||||
MaxLength int `yaml:"maxLength"`
|
||||
Split []string `yaml:"split"`
|
||||
Album bool `yaml:"album"`
|
||||
GenreSeparators string
|
||||
GroupAlbumReleases bool
|
||||
}
|
||||
|
||||
type lastfmOptions struct {
|
||||
@@ -192,18 +165,6 @@ type backupOptions struct {
|
||||
Schedule string
|
||||
}
|
||||
|
||||
type pidOptions struct {
|
||||
Track string
|
||||
Album string
|
||||
}
|
||||
|
||||
type inspectOptions struct {
|
||||
Enabled bool
|
||||
MaxRequests int
|
||||
BacklogLimit int
|
||||
BacklogTimeout int
|
||||
}
|
||||
|
||||
var (
|
||||
Server = &configOptions{}
|
||||
hooks []func()
|
||||
@@ -216,10 +177,10 @@ func LoadFromFile(confFile string) {
|
||||
_, _ = fmt.Fprintln(os.Stderr, "FATAL: Error reading config file:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
Load(true)
|
||||
Load()
|
||||
}
|
||||
|
||||
func Load(noConfigDump bool) {
|
||||
func Load() {
|
||||
parseIniFileConfiguration()
|
||||
|
||||
err := viper.Unmarshal(&Server)
|
||||
@@ -271,12 +232,11 @@ func Load(noConfigDump bool) {
|
||||
log.SetLogSourceLine(Server.DevLogSourceLine)
|
||||
log.SetRedacting(Server.EnableLogRedacting)
|
||||
|
||||
err = chain.RunSequentially(
|
||||
validateScanSchedule,
|
||||
validateBackupSchedule,
|
||||
validatePlaylistsPath,
|
||||
)
|
||||
if err != nil {
|
||||
if err := validateScanSchedule(); err != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if err := validateBackupSchedule(); err != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@@ -294,7 +254,7 @@ func Load(noConfigDump bool) {
|
||||
}
|
||||
|
||||
// Print current configuration if log level is Debug
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) && !noConfigDump {
|
||||
if log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||
prettyConf := pretty.Sprintf("Loaded configuration from '%s': %# v", Server.ConfigFile, Server)
|
||||
if Server.EnableLogRedacting {
|
||||
prettyConf = log.Redact(prettyConf)
|
||||
@@ -306,31 +266,12 @@ func Load(noConfigDump bool) {
|
||||
disableExternalServices()
|
||||
}
|
||||
|
||||
if Server.Scanner.Extractor != consts.DefaultScannerExtractor {
|
||||
log.Warn(fmt.Sprintf("Extractor '%s' is not implemented, using 'taglib'", Server.Scanner.Extractor))
|
||||
Server.Scanner.Extractor = consts.DefaultScannerExtractor
|
||||
}
|
||||
logDeprecatedOptions("Scanner.GenreSeparators")
|
||||
logDeprecatedOptions("Scanner.GroupAlbumReleases")
|
||||
|
||||
// Call init hooks
|
||||
for _, hook := range hooks {
|
||||
hook()
|
||||
}
|
||||
}
|
||||
|
||||
func logDeprecatedOptions(options ...string) {
|
||||
for _, option := range options {
|
||||
envVar := "ND_" + strings.ToUpper(strings.ReplaceAll(option, ".", "_"))
|
||||
if os.Getenv(envVar) != "" {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", envVar))
|
||||
}
|
||||
if viper.InConfig(option) {
|
||||
log.Warn(fmt.Sprintf("Option '%s' is deprecated and will be ignored in a future release", option))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parseIniFileConfiguration is used to parse the config file when it is in INI format. For INI files, it
// would require a nested structure, so instead we unmarshal it to a map and then merge the nested [default]
// section into the root level.
@@ -368,24 +309,26 @@ func disableExternalServices() {
}
}

func validatePlaylistsPath() error {
for _, path := range strings.Split(Server.PlaylistsPath, string(filepath.ListSeparator)) {
_, err := doublestar.Match(path, "")
if err != nil {
log.Error("Invalid PlaylistsPath", "path", path, err)
return err
func validateScanSchedule() error {
if Server.ScanInterval != -1 {
log.Warn("ScanInterval is DEPRECATED. Please use ScanSchedule. See docs at https://navidrome.org/docs/usage/configuration-options/")
if Server.ScanSchedule != "@every 1m" {
log.Error("You cannot specify both ScanInterval and ScanSchedule, ignoring ScanInterval")
} else {
if Server.ScanInterval == 0 {
Server.ScanSchedule = ""
} else {
Server.ScanSchedule = fmt.Sprintf("@every %s", Server.ScanInterval)
}
log.Warn("Setting ScanSchedule", "schedule", Server.ScanSchedule)
}
}
return nil
}

func validateScanSchedule() error {
if Server.Scanner.Schedule == "0" || Server.Scanner.Schedule == "" {
Server.Scanner.Schedule = ""
if Server.ScanSchedule == "0" || Server.ScanSchedule == "" {
Server.ScanSchedule = ""
return nil
}
var err error
Server.Scanner.Schedule, err = validateSchedule(Server.Scanner.Schedule, "Scanner.Schedule")
Server.ScanSchedule, err = validateSchedule(Server.ScanSchedule, "ScanSchedule")
return err
}

@@ -394,8 +337,10 @@ func validateBackupSchedule() error {
Server.Backup.Schedule = ""
return nil
}

var err error
Server.Backup.Schedule, err = validateSchedule(Server.Backup.Schedule, "Backup.Schedule")
Server.Backup.Schedule, err = validateSchedule(Server.Backup.Schedule, "BackupSchedule")

return err
}

@@ -406,7 +351,7 @@ func validateSchedule(schedule, field string) (string, error) {
c := cron.New()
id, err := c.AddFunc(schedule, func() {})
if err != nil {
log.Error(fmt.Sprintf("Invalid %s. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", field), "schedule", schedule, err)
log.Error(fmt.Sprintf("Invalid %s. Please read format spec at https://pkg.go.dev/github.com/robfig/cron#hdr-CRON_Expression_Format", field), "schedule", field, err)
} else {
c.Remove(id)
}
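Editor's note: both sides validate schedules the same way, by handing the string to robfig/cron and accepting it only if a no-op job can be registered. A self-contained sketch of that check (the example schedules below are illustrative, not Navidrome defaults):

package main

import (
	"fmt"

	"github.com/robfig/cron/v3"
)

// isValidSchedule reports whether robfig/cron accepts the given spec.
func isValidSchedule(spec string) error {
	c := cron.New()
	id, err := c.AddFunc(spec, func() {})
	if err != nil {
		return err
	}
	c.Remove(id) // we only wanted the parse check, not a running job
	return nil
}

func main() {
	for _, spec := range []string{"@every 1h", "0 3 * * *", "not-a-schedule"} {
		fmt.Println(spec, "->", isValidSchedule(spec))
	}
}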
@@ -428,6 +373,8 @@ func init() {
|
||||
viper.SetDefault("port", 4533)
|
||||
viper.SetDefault("unixsocketperm", "0660")
|
||||
viper.SetDefault("sessiontimeout", consts.DefaultSessionTimeout)
|
||||
viper.SetDefault("scaninterval", -1)
|
||||
viper.SetDefault("scanschedule", "@every 1m")
|
||||
viper.SetDefault("baseurl", "")
|
||||
viper.SetDefault("tlscert", "")
|
||||
viper.SetDefault("tlskey", "")
|
||||
@@ -441,7 +388,7 @@ func init() {
|
||||
viper.SetDefault("enableartworkprecache", true)
|
||||
viper.SetDefault("autoimportplaylists", true)
|
||||
viper.SetDefault("defaultplaylistpublicvisibility", false)
|
||||
viper.SetDefault("playlistspath", "")
|
||||
viper.SetDefault("playlistspath", consts.DefaultPlaylistsPath)
|
||||
viper.SetDefault("smartPlaylistRefreshDelay", 5*time.Second)
|
||||
viper.SetDefault("enabledownloads", true)
|
||||
viper.SetDefault("enableexternalservices", true)
|
||||
@@ -453,6 +400,7 @@ func init() {
|
||||
viper.SetDefault("prefersorttags", false)
|
||||
viper.SetDefault("ignoredarticles", "The El La Los Las Le Les Os As O A")
|
||||
viper.SetDefault("indexgroups", "A B C D E F G H I J K L M N O P Q R S T U V W X-Z(XYZ) [Unknown]([)")
|
||||
viper.SetDefault("subsonicartistparticipations", false)
|
||||
viper.SetDefault("ffmpegpath", "")
|
||||
viper.SetDefault("mpvcmdtemplate", "mpv --audio-device=%d --no-audio-display --pause %f --input-ipc-server=%s")
|
||||
|
||||
@@ -468,9 +416,6 @@ func init() {
|
||||
viper.SetDefault("defaultuivolume", consts.DefaultUIVolume)
|
||||
viper.SetDefault("enablereplaygain", true)
|
||||
viper.SetDefault("enablecoveranimation", true)
|
||||
viper.SetDefault("enablesharing", false)
|
||||
viper.SetDefault("shareurl", "")
|
||||
viper.SetDefault("defaultdownloadableshare", false)
|
||||
viper.SetDefault("gatrackingid", "")
|
||||
viper.SetDefault("enableinsightscollector", true)
|
||||
viper.SetDefault("enablelogredacting", true)
|
||||
@@ -490,20 +435,10 @@ func init() {
|
||||
viper.SetDefault("jukebox.default", "")
|
||||
viper.SetDefault("jukebox.adminonly", true)
|
||||
|
||||
viper.SetDefault("scanner.enabled", true)
|
||||
viper.SetDefault("scanner.schedule", "0")
|
||||
viper.SetDefault("scanner.extractor", consts.DefaultScannerExtractor)
|
||||
viper.SetDefault("scanner.watcherwait", consts.DefaultWatcherWait)
|
||||
viper.SetDefault("scanner.scanonstartup", true)
|
||||
viper.SetDefault("scanner.artistjoiner", consts.ArtistJoiner)
|
||||
viper.SetDefault("scanner.genreseparators", "")
|
||||
viper.SetDefault("scanner.genreseparators", ";/,")
|
||||
viper.SetDefault("scanner.groupalbumreleases", false)
|
||||
|
||||
viper.SetDefault("subsonic.appendsubtitle", true)
|
||||
viper.SetDefault("subsonic.artistparticipations", false)
|
||||
viper.SetDefault("subsonic.defaultreportrealpath", false)
|
||||
viper.SetDefault("subsonic.legacyclients", "DSub")
|
||||
|
||||
viper.SetDefault("agents", "lastfm,spotify")
|
||||
viper.SetDefault("lastfm.enabled", true)
|
||||
viper.SetDefault("lastfm.language", "en")
|
||||
@@ -520,14 +455,6 @@ func init() {
|
||||
viper.SetDefault("backup.schedule", "")
|
||||
viper.SetDefault("backup.count", 0)
|
||||
|
||||
viper.SetDefault("pid.track", consts.DefaultTrackPID)
|
||||
viper.SetDefault("pid.album", consts.DefaultAlbumPID)
|
||||
|
||||
viper.SetDefault("inspect.enabled", true)
|
||||
viper.SetDefault("inspect.maxrequests", 1)
|
||||
viper.SetDefault("inspect.backloglimit", consts.RequestThrottleBacklogLimit)
|
||||
viper.SetDefault("inspect.backlogtimeout", consts.RequestThrottleBacklogTimeout)
|
||||
|
||||
// DevFlags. These are used to enable/disable debugging and incomplete features
|
||||
viper.SetDefault("devlogsourceline", false)
|
||||
viper.SetDefault("devenableprofiler", false)
|
||||
@@ -535,6 +462,9 @@ func init() {
|
||||
viper.SetDefault("devautologinusername", "")
|
||||
viper.SetDefault("devactivitypanel", true)
|
||||
viper.SetDefault("devactivitypanelupdaterate", 300*time.Millisecond)
|
||||
viper.SetDefault("enablesharing", false)
|
||||
viper.SetDefault("shareurl", "")
|
||||
viper.SetDefault("defaultdownloadableshare", false)
|
||||
viper.SetDefault("devenablebufferedscrobble", true)
|
||||
viper.SetDefault("devsidebarplaylists", true)
|
||||
viper.SetDefault("devshowartistpage", true)
|
||||
@@ -544,17 +474,11 @@ func init() {
|
||||
viper.SetDefault("devartworkthrottlebacklogtimeout", consts.RequestThrottleBacklogTimeout)
|
||||
viper.SetDefault("devartistinfotimetolive", consts.ArtistInfoTimeToLive)
|
||||
viper.SetDefault("devalbuminfotimetolive", consts.AlbumInfoTimeToLive)
|
||||
viper.SetDefault("devexternalscanner", true)
|
||||
viper.SetDefault("devscannerthreads", 5)
|
||||
viper.SetDefault("devinsightsinitialdelay", consts.InsightsInitialDelay)
|
||||
viper.SetDefault("devenableplayerinsights", true)
|
||||
}
|
||||
|
||||
func InitConfig(cfgFile string) {
codecRegistry := viper.NewCodecRegistry()
_ = codecRegistry.RegisterCodec("ini", ini.Codec{})
viper.SetOptions(viper.WithCodecRegistry(codecRegistry))

cfgFile = getConfigFile(cfgFile)
if cfgFile != "" {
// Use config file from the flag.
@@ -578,17 +502,9 @@ func InitConfig(cfgFile string) {
}
}

// getConfigFile returns the path to the config file, either from the flag or from the environment variable.
// If it is defined in the environment variable, it will check if the file exists.
func getConfigFile(cfgFile string) string {
if cfgFile != "" {
return cfgFile
}
cfgFile = os.Getenv("ND_CONFIGFILE")
if cfgFile != "" {
if _, err := os.Stat(cfgFile); err == nil {
return cfgFile
}
}
return ""
return os.Getenv("ND_CONFIGFILE")
}
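Editor's note: the two getConfigFile variants above differ in one detail: one honors ND_CONFIGFILE only when the referenced file actually exists, the other returns the value unconditionally. A hedged sketch of the stricter behavior, with the helper name invented for illustration:

package main

import (
	"fmt"
	"os"
)

// configFileFromEnv mirrors the stricter variant: ignore ND_CONFIGFILE unless
// the file it points to is actually present.
func configFileFromEnv() string {
	cfg := os.Getenv("ND_CONFIGFILE")
	if cfg == "" {
		return ""
	}
	if _, err := os.Stat(cfg); err != nil {
		return "" // dangling path: treat as unset
	}
	return cfg
}

func main() {
	fmt.Println("config file:", configFileFromEnv())
}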
@@ -1,50 +0,0 @@
|
||||
package conf_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
. "github.com/navidrome/navidrome/conf"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
func TestConfiguration(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Configuration Suite")
|
||||
}
|
||||
|
||||
var _ = Describe("Configuration", func() {
|
||||
BeforeEach(func() {
|
||||
// Reset viper configuration
|
||||
viper.Reset()
|
||||
viper.SetDefault("datafolder", GinkgoT().TempDir())
|
||||
viper.SetDefault("loglevel", "error")
|
||||
ResetConf()
|
||||
})
|
||||
|
||||
DescribeTable("should load configuration from",
|
||||
func(format string) {
|
||||
filename := filepath.Join("testdata", "cfg."+format)
|
||||
|
||||
// Initialize config with the test file
|
||||
InitConfig(filename)
|
||||
// Load the configuration (with noConfigDump=true)
|
||||
Load(true)
|
||||
|
||||
// Execute the format-specific assertions
|
||||
Expect(Server.MusicFolder).To(Equal(fmt.Sprintf("/%s/music", format)))
|
||||
Expect(Server.UIWelcomeMessage).To(Equal("Welcome " + format))
|
||||
Expect(Server.Tags["custom"].Aliases).To(Equal([]string{format, "test"}))
|
||||
|
||||
// The config file used should be the one we created
|
||||
Expect(Server.ConfigFile).To(Equal(filename))
|
||||
},
|
||||
Entry("TOML format", "toml"),
|
||||
Entry("YAML format", "yaml"),
|
||||
Entry("INI format", "ini"),
|
||||
Entry("JSON format", "json"),
|
||||
)
|
||||
})
|
||||
@@ -1,5 +0,0 @@
|
||||
package conf
|
||||
|
||||
func ResetConf() {
|
||||
Server = &configOptions{}
|
||||
}
|
||||
6
conf/testdata/cfg.ini
vendored
@@ -1,6 +0,0 @@
|
||||
[default]
|
||||
MusicFolder = /ini/music
|
||||
UIWelcomeMessage = Welcome ini
|
||||
|
||||
[Tags]
|
||||
Custom.Aliases = ini,test
|
||||
12
conf/testdata/cfg.json
vendored
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"musicFolder": "/json/music",
|
||||
"uiWelcomeMessage": "Welcome json",
|
||||
"Tags": {
|
||||
"custom": {
|
||||
"aliases": [
|
||||
"json",
|
||||
"test"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
5
conf/testdata/cfg.toml
vendored
@@ -1,5 +0,0 @@
|
||||
musicFolder = "/toml/music"
|
||||
uiWelcomeMessage = "Welcome toml"
|
||||
|
||||
[Tags.custom]
|
||||
aliases = ["toml", "test"]
|
||||
7
conf/testdata/cfg.yaml
vendored
@@ -1,7 +0,0 @@
|
||||
musicFolder: "/yaml/music"
|
||||
uiWelcomeMessage: "Welcome yaml"
|
||||
Tags:
|
||||
custom:
|
||||
aliases:
|
||||
- yaml
|
||||
- test
|
||||
@@ -1,29 +1,27 @@
|
||||
package consts
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/model/id"
|
||||
)
|
||||
|
||||
const (
|
||||
AppName = "navidrome"
|
||||
|
||||
DefaultDbPath = "navidrome.db?cache=shared&_busy_timeout=15000&_journal_mode=WAL&_foreign_keys=on&synchronous=normal"
|
||||
InitialSetupFlagKey = "InitialSetup"
|
||||
FullScanAfterMigrationFlagKey = "FullScanAfterMigration"
|
||||
DefaultDbPath = "navidrome.db?cache=shared&_busy_timeout=15000&_journal_mode=WAL&_foreign_keys=on"
|
||||
InitialSetupFlagKey = "InitialSetup"
|
||||
|
||||
UIAuthorizationHeader = "X-ND-Authorization"
|
||||
UIClientUniqueIDHeader = "X-ND-Client-Unique-Id"
|
||||
JWTSecretKey = "JWTSecret"
|
||||
JWTIssuer = "ND"
|
||||
DefaultSessionTimeout = 48 * time.Hour
|
||||
DefaultSessionTimeout = 24 * time.Hour
|
||||
CookieExpiry = 365 * 24 * 3600 // One year
|
||||
|
||||
OptimizeDBSchedule = "@every 24h"
|
||||
|
||||
// DefaultEncryptionKey This is the encryption key used if none is specified in the `PasswordEncryptionKey` option
|
||||
// Never ever change this! Or it will break all Navidrome installations that don't set the config option
|
||||
DefaultEncryptionKey = "just for obfuscation"
|
||||
@@ -53,13 +51,11 @@ const (
|
||||
|
||||
ServerReadHeaderTimeout = 3 * time.Second
|
||||
|
||||
ArtistInfoTimeToLive = 24 * time.Hour
|
||||
AlbumInfoTimeToLive = 7 * 24 * time.Hour
|
||||
UpdateLastAccessFrequency = time.Minute
|
||||
UpdatePlayerFrequency = time.Minute
|
||||
ArtistInfoTimeToLive = 24 * time.Hour
|
||||
AlbumInfoTimeToLive = 7 * 24 * time.Hour
|
||||
|
||||
I18nFolder = "i18n"
|
||||
ScanIgnoreFile = ".ndignore"
|
||||
I18nFolder = "i18n"
|
||||
SkipScanFile = ".ndignore"
|
||||
|
||||
PlaceholderArtistArt = "artist-placeholder.webp"
|
||||
PlaceholderAlbumArt = "album-placeholder.webp"
|
||||
@@ -70,8 +66,8 @@ const (
|
||||
DefaultHttpClientTimeOut = 10 * time.Second
|
||||
|
||||
DefaultScannerExtractor = "taglib"
|
||||
DefaultWatcherWait = 5 * time.Second
|
||||
Zwsp = string('\u200b')
|
||||
|
||||
Zwsp = string('\u200b')
|
||||
)
|
||||
|
||||
// Prometheus options
|
||||
@@ -97,14 +93,6 @@ const (
|
||||
AlbumPlayCountModeNormalized = "normalized"
|
||||
)
|
||||
|
||||
const (
|
||||
//DefaultAlbumPID = "album_legacy"
|
||||
DefaultAlbumPID = "musicbrainz_albumid|albumartistid,album,albumversion,releasedate"
|
||||
DefaultTrackPID = "musicbrainz_trackid|albumid,discnumber,tracknumber,title"
|
||||
PIDAlbumKey = "PIDAlbum"
|
||||
PIDTrackKey = "PIDTrack"
|
||||
)
|
||||
|
||||
const (
|
||||
InsightsIDKey = "InsightsID"
|
||||
InsightsEndpoint = "https://insights.navidrome.org/collect"
|
||||
@@ -139,29 +127,25 @@ var (
|
||||
Command: "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a aac -f adts -",
|
||||
},
|
||||
}
|
||||
|
||||
DefaultPlaylistsPath = strings.Join([]string{".", "**/**"}, string(filepath.ListSeparator))
|
||||
)
|
||||
|
||||
var (
|
||||
VariousArtists = "Various Artists"
|
||||
// TODO This will be dynamic when using disambiguation
|
||||
VariousArtistsID = "63sqASlAfjbGMuLP4JhnZU"
|
||||
UnknownAlbum = "[Unknown Album]"
|
||||
UnknownArtist = "[Unknown Artist]"
|
||||
// TODO This will be dynamic when using disambiguation
|
||||
UnknownArtistID = id.NewHash(strings.ToLower(UnknownArtist))
|
||||
VariousArtists = "Various Artists"
|
||||
VariousArtistsID = fmt.Sprintf("%x", md5.Sum([]byte(strings.ToLower(VariousArtists))))
|
||||
UnknownAlbum = "[Unknown Album]"
|
||||
UnknownArtist = "[Unknown Artist]"
|
||||
UnknownArtistID = fmt.Sprintf("%x", md5.Sum([]byte(strings.ToLower(UnknownArtist))))
|
||||
VariousArtistsMbzId = "89ad4ac3-39f7-470e-963a-56509c546377"
|
||||
|
||||
ArtistJoiner = " • "
|
||||
)
|
||||
|
||||
var (
|
||||
ServerStart = time.Now()
|
||||
|
||||
InContainer = func() bool {
|
||||
// Check if the /.nddockerenv file exists
|
||||
if _, err := os.Stat("/.nddockerenv"); err == nil {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}()
|
||||
)
|
||||
|
||||
var InContainer = func() bool {
|
||||
// Check if the /.nddockerenv file exists
|
||||
if _, err := os.Stat("/.nddockerenv"); err == nil {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}()
|
||||
|
||||
@@ -10,7 +10,6 @@ import (
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/utils"
"github.com/navidrome/navidrome/utils/singleton"
)

type Agents struct {
@@ -18,36 +17,22 @@ type Agents struct {
agents []Interface
}

func GetAgents(ds model.DataStore) *Agents {
return singleton.GetInstance(func() *Agents {
return createAgents(ds)
})
}

func createAgents(ds model.DataStore) *Agents {
func New(ds model.DataStore) *Agents {
var order []string
if conf.Server.Agents != "" {
order = strings.Split(conf.Server.Agents, ",")
}
order = append(order, LocalAgentName)
var res []Interface
var enabled []string
for _, name := range order {
init, ok := Map[name]
if !ok {
log.Error("Invalid agent. Check `Agents` configuration", "name", name, "conf", conf.Server.Agents)
log.Error("Agent not available. Check configuration", "name", name)
continue
}

agent := init(ds)
if agent == nil {
log.Debug("Agent not available. Missing configuration?", "name", name)
continue
}
enabled = append(enabled, name)
res = append(res, init(ds))
}
log.Debug("List of agents enabled", "names", enabled)

return &Agents{ds: ds, agents: res}
}
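Editor's note: both versions of the constructor derive the agent order the same way: the Agents config string is split on commas and the local agent is always appended last, so "lastfm,spotify" resolves to lastfm, spotify, local. A toy illustration of just that ordering rule (the real constructor also resolves and instantiates each agent):

package main

import (
	"fmt"
	"strings"
)

// agentOrder reproduces only the ordering logic from the hunk above.
func agentOrder(conf string) []string {
	var order []string
	if conf != "" {
		order = strings.Split(conf, ",")
	}
	return append(order, "local") // LocalAgentName is always appended
}

func main() {
	fmt.Println(agentOrder("lastfm,spotify")) // [lastfm spotify local]
}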
@@ -7,7 +7,6 @@ import (
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
@@ -29,7 +28,7 @@ var _ = Describe("Agents", func() {
|
||||
var ag *Agents
|
||||
BeforeEach(func() {
|
||||
conf.Server.Agents = ""
|
||||
ag = createAgents(ds)
|
||||
ag = New(ds)
|
||||
})
|
||||
|
||||
It("calls the placeholder GetArtistImages", func() {
|
||||
@@ -45,21 +44,19 @@ var _ = Describe("Agents", func() {
|
||||
var mock *mockAgent
|
||||
BeforeEach(func() {
|
||||
mock = &mockAgent{}
|
||||
Register("fake", func(model.DataStore) Interface { return mock })
|
||||
Register("disabled", func(model.DataStore) Interface { return nil })
|
||||
Register("empty", func(model.DataStore) Interface { return &emptyAgent{} })
|
||||
conf.Server.Agents = "empty,fake,disabled"
|
||||
ag = createAgents(ds)
|
||||
Register("fake", func(ds model.DataStore) Interface {
|
||||
return mock
|
||||
})
|
||||
Register("empty", func(ds model.DataStore) Interface {
|
||||
return struct {
|
||||
Interface
|
||||
}{}
|
||||
})
|
||||
conf.Server.Agents = "empty,fake"
|
||||
ag = New(ds)
|
||||
Expect(ag.AgentName()).To(Equal("agents"))
|
||||
})
|
||||
|
||||
It("does not register disabled agents", func() {
|
||||
ags := slice.Map(ag.agents, func(a Interface) string { return a.AgentName() })
|
||||
// local agent is always appended to the end of the agents list
|
||||
Expect(ags).To(HaveExactElements("empty", "fake", "local"))
|
||||
Expect(ags).ToNot(ContainElement("disabled"))
|
||||
})
|
||||
|
||||
Describe("GetArtistMBID", func() {
|
||||
It("returns on first match", func() {
|
||||
Expect(ag.GetArtistMBID(ctx, "123", "test")).To(Equal("mbid"))
|
||||
@@ -347,11 +344,3 @@ func (a *mockAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string)
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
type emptyAgent struct {
|
||||
Interface
|
||||
}
|
||||
|
||||
func (e *emptyAgent) AgentName() string {
|
||||
return "empty"
|
||||
}
|
||||
|
||||
@@ -8,7 +8,6 @@ import (
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/andybalholm/cascadia"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
@@ -32,19 +31,15 @@ var ignoredBiographies = []string{
|
||||
}
|
||||
|
||||
type lastfmAgent struct {
|
||||
ds model.DataStore
|
||||
sessionKeys *agents.SessionKeys
|
||||
apiKey string
|
||||
secret string
|
||||
lang string
|
||||
client *client
|
||||
getInfoMutex sync.Mutex
|
||||
ds model.DataStore
|
||||
sessionKeys *agents.SessionKeys
|
||||
apiKey string
|
||||
secret string
|
||||
lang string
|
||||
client *client
|
||||
}
|
||||
|
||||
func lastFMConstructor(ds model.DataStore) *lastfmAgent {
|
||||
if !conf.Server.LastFM.Enabled || conf.Server.LastFM.ApiKey == "" || conf.Server.LastFM.Secret == "" {
|
||||
return nil
|
||||
}
|
||||
l := &lastfmAgent{
|
||||
ds: ds,
|
||||
lang: conf.Server.LastFM.Language,
|
||||
@@ -112,7 +107,7 @@ func (l *lastfmAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid strin
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
|
||||
a, err := l.callArtistGetInfo(ctx, name)
|
||||
a, err := l.callArtistGetInfo(ctx, name, "")
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -123,7 +118,7 @@ func (l *lastfmAgent) GetArtistMBID(ctx context.Context, id string, name string)
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetArtistURL(ctx context.Context, id, name, mbid string) (string, error) {
|
||||
a, err := l.callArtistGetInfo(ctx, name)
|
||||
a, err := l.callArtistGetInfo(ctx, name, mbid)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -134,7 +129,7 @@ func (l *lastfmAgent) GetArtistURL(ctx context.Context, id, name, mbid string) (
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error) {
|
||||
a, err := l.callArtistGetInfo(ctx, name)
|
||||
a, err := l.callArtistGetInfo(ctx, name, mbid)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -151,7 +146,7 @@ func (l *lastfmAgent) GetArtistBiography(ctx context.Context, id, name, mbid str
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
|
||||
resp, err := l.callArtistGetSimilar(ctx, name, limit)
|
||||
resp, err := l.callArtistGetSimilar(ctx, name, mbid, limit)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -169,7 +164,7 @@ func (l *lastfmAgent) GetSimilarArtists(ctx context.Context, id, name, mbid stri
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
|
||||
resp, err := l.callArtistGetTopTracks(ctx, artistName, count)
|
||||
resp, err := l.callArtistGetTopTracks(ctx, artistName, mbid, count)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -189,19 +184,15 @@ func (l *lastfmAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbi
|
||||
var artistOpenGraphQuery = cascadia.MustCompile(`html > head > meta[property="og:image"]`)
|
||||
|
||||
func (l *lastfmAgent) GetArtistImages(ctx context.Context, _, name, mbid string) ([]agents.ExternalImage, error) {
|
||||
log.Debug(ctx, "Getting artist images from Last.fm", "name", name)
|
||||
hc := http.Client{
|
||||
Timeout: consts.DefaultHttpClientTimeOut,
|
||||
}
|
||||
a, err := l.callArtistGetInfo(ctx, name)
|
||||
a, err := l.callArtistGetInfo(ctx, name, mbid)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get artist info: %w", err)
|
||||
}
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, a.URL, nil)
|
||||
req, err := http.NewRequest(http.MethodGet, a.URL, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("create artist image request: %w", err)
|
||||
}
|
||||
resp, err := hc.Do(req)
|
||||
resp, err := l.client.hc.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get artist url: %w", err)
|
||||
}
|
||||
@@ -249,31 +240,48 @@ func (l *lastfmAgent) callAlbumGetInfo(ctx context.Context, name, artist, mbid s
return a, nil
}

func (l *lastfmAgent) callArtistGetInfo(ctx context.Context, name string) (*Artist, error) {
l.getInfoMutex.Lock()
defer l.getInfoMutex.Unlock()
func (l *lastfmAgent) callArtistGetInfo(ctx context.Context, name string, mbid string) (*Artist, error) {
a, err := l.client.artistGetInfo(ctx, name, mbid)
var lfErr *lastFMError
isLastFMError := errors.As(err, &lfErr)

if mbid != "" && ((err == nil && a.Name == "[unknown]") || (isLastFMError && lfErr.Code == 6)) {
log.Debug(ctx, "LastFM/artist.getInfo could not find artist by mbid, trying again", "artist", name, "mbid", mbid)
return l.callArtistGetInfo(ctx, name, "")
}

a, err := l.client.artistGetInfo(ctx, name)
if err != nil {
log.Error(ctx, "Error calling LastFM/artist.getInfo", "artist", name, err)
log.Error(ctx, "Error calling LastFM/artist.getInfo", "artist", name, "mbid", mbid, err)
return nil, err
}
return a, nil
}

func (l *lastfmAgent) callArtistGetSimilar(ctx context.Context, name string, limit int) ([]Artist, error) {
s, err := l.client.artistGetSimilar(ctx, name, limit)
func (l *lastfmAgent) callArtistGetSimilar(ctx context.Context, name string, mbid string, limit int) ([]Artist, error) {
s, err := l.client.artistGetSimilar(ctx, name, mbid, limit)
var lfErr *lastFMError
isLastFMError := errors.As(err, &lfErr)
if mbid != "" && ((err == nil && s.Attr.Artist == "[unknown]") || (isLastFMError && lfErr.Code == 6)) {
log.Debug(ctx, "LastFM/artist.getSimilar could not find artist by mbid, trying again", "artist", name, "mbid", mbid)
return l.callArtistGetSimilar(ctx, name, "", limit)
}
if err != nil {
log.Error(ctx, "Error calling LastFM/artist.getSimilar", "artist", name, err)
log.Error(ctx, "Error calling LastFM/artist.getSimilar", "artist", name, "mbid", mbid, err)
return nil, err
}
return s.Artists, nil
}

func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName string, count int) ([]Track, error) {
t, err := l.client.artistGetTopTracks(ctx, artistName, count)
func (l *lastfmAgent) callArtistGetTopTracks(ctx context.Context, artistName, mbid string, count int) ([]Track, error) {
t, err := l.client.artistGetTopTracks(ctx, artistName, mbid, count)
var lfErr *lastFMError
isLastFMError := errors.As(err, &lfErr)
if mbid != "" && ((err == nil && t.Attr.Artist == "[unknown]") || (isLastFMError && lfErr.Code == 6)) {
log.Debug(ctx, "LastFM/artist.getTopTracks could not find artist by mbid, trying again", "artist", artistName, "mbid", mbid)
return l.callArtistGetTopTracks(ctx, artistName, "", count)
}
if err != nil {
log.Error(ctx, "Error calling LastFM/artist.getTopTracks", "artist", artistName, err)
log.Error(ctx, "Error calling LastFM/artist.getTopTracks", "artist", artistName, "mbid", mbid, err)
return nil, err
}
return t.Track, nil
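Editor's note: the three helpers above share the same fallback: when a lookup by MusicBrainz ID comes back as the placeholder "[unknown]" artist or as Last.fm error 6 ("not found"), the call is retried once with the mbid dropped so the name-based lookup can still succeed. A simplified, self-contained sketch of that shape (the names and the stub below are made up):

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("last.fm error 6: artist not found")

// getInfo stands in for client.artistGetInfo; this stub only "knows" lookups by name.
func getInfo(name, mbid string) (string, error) {
	if mbid != "" {
		return "", errNotFound
	}
	return name, nil
}

// getInfoWithFallback retries without the MBID when the MBID lookup fails,
// mirroring the retry in callArtistGetInfo above.
func getInfoWithFallback(name, mbid string) (string, error) {
	res, err := getInfo(name, mbid)
	if mbid != "" && (res == "[unknown]" || errors.Is(err, errNotFound)) {
		return getInfo(name, "")
	}
	return res, err
}

func main() {
	fmt.Println(getInfoWithFallback("U2", "mbid-1234")) // U2 <nil>
}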
@@ -296,7 +304,7 @@ func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *mode
|
||||
})
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Last.fm client.updateNowPlaying returned error", "track", track.Title, err)
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
return scrobbler.ErrUnrecoverable
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -304,7 +312,7 @@ func (l *lastfmAgent) NowPlaying(ctx context.Context, userId string, track *mode
|
||||
func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.Scrobble) error {
|
||||
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||
if err != nil || sk == "" {
|
||||
return errors.Join(err, scrobbler.ErrNotAuthorized)
|
||||
return scrobbler.ErrNotAuthorized
|
||||
}
|
||||
|
||||
if s.Duration <= 30 {
|
||||
@@ -328,12 +336,12 @@ func (l *lastfmAgent) Scrobble(ctx context.Context, userId string, s scrobbler.S
|
||||
isLastFMError := errors.As(err, &lfErr)
|
||||
if !isLastFMError {
|
||||
log.Warn(ctx, "Last.fm client.scrobble returned error", "track", s.Title, err)
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
return scrobbler.ErrRetryLater
|
||||
}
|
||||
if lfErr.Code == 11 || lfErr.Code == 16 {
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
return scrobbler.ErrRetryLater
|
||||
}
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
return scrobbler.ErrUnrecoverable
|
||||
}
|
||||
|
||||
func (l *lastfmAgent) IsAuthorized(ctx context.Context, userId string) bool {
|
||||
@@ -343,19 +351,15 @@ func (l *lastfmAgent) IsAuthorized(ctx context.Context, userId string) bool {
|
||||
|
||||
func init() {
|
||||
conf.AddHook(func() {
|
||||
agents.Register(lastFMAgentName, func(ds model.DataStore) agents.Interface {
|
||||
a := lastFMConstructor(ds)
|
||||
if a != nil {
|
||||
return a
|
||||
if conf.Server.LastFM.Enabled {
|
||||
if conf.Server.LastFM.ApiKey != "" && conf.Server.LastFM.Secret != "" {
|
||||
agents.Register(lastFMAgentName, func(ds model.DataStore) agents.Interface {
|
||||
return lastFMConstructor(ds)
|
||||
})
|
||||
scrobbler.Register(lastFMAgentName, func(ds model.DataStore) scrobbler.Scrobbler {
|
||||
return lastFMConstructor(ds)
|
||||
})
|
||||
}
|
||||
return nil
|
||||
})
|
||||
scrobbler.Register(lastFMAgentName, func(ds model.DataStore) scrobbler.Scrobbler {
|
||||
a := lastFMConstructor(ds)
|
||||
if a != nil {
|
||||
return a
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -11,7 +11,6 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/scrobbler"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
@@ -31,38 +30,16 @@ var _ = Describe("lastfmAgent", func() {
|
||||
BeforeEach(func() {
|
||||
ds = &tests.MockDataStore{}
|
||||
ctx = context.Background()
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
conf.Server.LastFM.Enabled = true
|
||||
conf.Server.LastFM.ApiKey = "123"
|
||||
conf.Server.LastFM.Secret = "secret"
|
||||
})
|
||||
Describe("lastFMConstructor", func() {
|
||||
When("Agent is properly configured", func() {
|
||||
It("uses configured api key and language", func() {
|
||||
conf.Server.LastFM.Language = "pt"
|
||||
agent := lastFMConstructor(ds)
|
||||
Expect(agent.apiKey).To(Equal("123"))
|
||||
Expect(agent.secret).To(Equal("secret"))
|
||||
Expect(agent.lang).To(Equal("pt"))
|
||||
})
|
||||
})
|
||||
When("Agent is disabled", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.Enabled = false
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
When("ApiKey is empty", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.ApiKey = ""
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
})
|
||||
When("Secret is empty", func() {
|
||||
It("returns nil", func() {
|
||||
conf.Server.LastFM.Secret = ""
|
||||
Expect(lastFMConstructor(ds)).To(BeNil())
|
||||
})
|
||||
It("uses configured api key and language", func() {
|
||||
conf.Server.LastFM.ApiKey = "123"
|
||||
conf.Server.LastFM.Secret = "secret"
|
||||
conf.Server.LastFM.Language = "pt"
|
||||
agent := lastFMConstructor(ds)
|
||||
Expect(agent.apiKey).To(Equal("123"))
|
||||
Expect(agent.secret).To(Equal("secret"))
|
||||
Expect(agent.lang).To(Equal("pt"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -79,25 +56,48 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("returns the biography", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetArtistBiography(ctx, "123", "U2", "")).To(Equal("U2 é uma das mais importantes bandas de rock de todos os tempos. Formada em 1976 em Dublin, composta por Bono (vocalista e guitarrista), The Edge (guitarrista, pianista e backing vocal), Adam Clayton (baixista), Larry Mullen, Jr. (baterista e percussionista).\n\nDesde a década de 80, U2 é uma das bandas mais populares no mundo. Seus shows são únicos e um verdadeiro festival de efeitos especiais, além de serem um dos que mais arrecadam anualmente. <a href=\"https://www.last.fm/music/U2\">Read more on Last.fm</a>"))
|
||||
Expect(agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")).To(Equal("U2 é uma das mais importantes bandas de rock de todos os tempos. Formada em 1976 em Dublin, composta por Bono (vocalista e guitarrista), The Edge (guitarrista, pianista e backing vocal), Adam Clayton (baixista), Larry Mullen, Jr. (baterista e percussionista).\n\nDesde a década de 80, U2 é uma das bandas mais populares no mundo. Seus shows são únicos e um verdadeiro festival de efeitos especiais, além de serem um dos que mais arrecadam anualmente. <a href=\"https://www.last.fm/music/U2\">Read more on Last.fm</a>"))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetArtistBiography(ctx, "123", "U2", "")
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when the response is artist == [unknown]", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, _ = agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetArtistBiography(ctx, "123", "U2", "mbid-1234")
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -114,28 +114,51 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("returns similar artists", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetSimilarArtists(ctx, "123", "U2", "", 2)).To(Equal([]agents.Artist{
|
||||
Expect(agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)).To(Equal([]agents.Artist{
|
||||
{Name: "Passengers", MBID: "e110c11f-1c94-4471-a350-c38f46b29389"},
|
||||
{Name: "INXS", MBID: "481bf5f9-2e7c-4c44-b08a-05b32bc7c00d"},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetSimilarArtists(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when the response is artist == [unknown]", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, _ = agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetSimilarArtists(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -152,28 +175,51 @@ var _ = Describe("lastfmAgent", func() {
|
||||
It("returns top songs", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
Expect(agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)).To(Equal([]agents.Song{
|
||||
Expect(agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)).To(Equal([]agents.Song{
|
||||
{Name: "Beautiful Day", MBID: "f7f264d0-a89b-4682-9cd7-a4e7c37637af"},
|
||||
{Name: "With or Without You", MBID: "6b9a509f-6907-4a6e-9345-2f12da09ba4b"},
|
||||
}))
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call fails", func() {
|
||||
httpClient.Err = errors.New("error")
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError3)), StatusCode: 200}
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(Equal("mbid-1234"))
|
||||
})
|
||||
|
||||
It("returns an error if Last.fm call returns an error 6 and mbid is empty", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, err := agent.GetArtistTopSongs(ctx, "123", "U2", "", 2)
|
||||
Expect(err).To(HaveOccurred())
|
||||
Expect(httpClient.RequestCount).To(Equal(1))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("artist")).To(Equal("U2"))
|
||||
})
|
||||
|
||||
Context("MBID non existent in Last.fm", func() {
|
||||
It("calls again when the response is artist == [unknown]", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.unknown.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
_, _ = agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
It("calls again when last.fm returns an error 6", func() {
|
||||
httpClient.Res = http.Response{Body: io.NopCloser(bytes.NewBufferString(lastfmError6)), StatusCode: 200}
|
||||
_, _ = agent.GetArtistTopSongs(ctx, "123", "U2", "mbid-1234", 2)
|
||||
Expect(httpClient.RequestCount).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.Query().Get("mbid")).To(BeEmpty())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -59,10 +59,11 @@ func (c *client) albumGetInfo(ctx context.Context, name string, artist string, m
|
||||
return &response.Album, nil
|
||||
}
|
||||
|
||||
func (c *client) artistGetInfo(ctx context.Context, name string) (*Artist, error) {
|
||||
func (c *client) artistGetInfo(ctx context.Context, name string, mbid string) (*Artist, error) {
|
||||
params := url.Values{}
|
||||
params.Add("method", "artist.getInfo")
|
||||
params.Add("artist", name)
|
||||
params.Add("mbid", mbid)
|
||||
params.Add("lang", c.lang)
|
||||
response, err := c.makeRequest(ctx, http.MethodGet, params, false)
|
||||
if err != nil {
|
||||
@@ -71,10 +72,11 @@ func (c *client) artistGetInfo(ctx context.Context, name string) (*Artist, error
|
||||
return &response.Artist, nil
|
||||
}
|
||||
|
||||
func (c *client) artistGetSimilar(ctx context.Context, name string, limit int) (*SimilarArtists, error) {
|
||||
func (c *client) artistGetSimilar(ctx context.Context, name string, mbid string, limit int) (*SimilarArtists, error) {
|
||||
params := url.Values{}
|
||||
params.Add("method", "artist.getSimilar")
|
||||
params.Add("artist", name)
|
||||
params.Add("mbid", mbid)
|
||||
params.Add("limit", strconv.Itoa(limit))
|
||||
response, err := c.makeRequest(ctx, http.MethodGet, params, false)
|
||||
if err != nil {
|
||||
@@ -83,10 +85,11 @@ func (c *client) artistGetSimilar(ctx context.Context, name string, limit int) (
|
||||
return &response.SimilarArtists, nil
|
||||
}
|
||||
|
||||
func (c *client) artistGetTopTracks(ctx context.Context, name string, limit int) (*TopTracks, error) {
|
||||
func (c *client) artistGetTopTracks(ctx context.Context, name string, mbid string, limit int) (*TopTracks, error) {
|
||||
params := url.Values{}
|
||||
params.Add("method", "artist.getTopTracks")
|
||||
params.Add("artist", name)
|
||||
params.Add("mbid", mbid)
|
||||
params.Add("limit", strconv.Itoa(limit))
|
||||
response, err := c.makeRequest(ctx, http.MethodGet, params, false)
|
||||
if err != nil {
|
||||
|
||||
@@ -42,10 +42,10 @@ var _ = Describe("client", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getinfo.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
artist, err := client.artistGetInfo(context.Background(), "U2")
|
||||
artist, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
Expect(err).To(BeNil())
|
||||
Expect(artist.Name).To(Equal("U2"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&lang=pt&method=artist.getInfo"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&lang=pt&mbid=123&method=artist.getInfo"))
|
||||
})
|
||||
|
||||
It("fails if Last.fm returns an http status != 200", func() {
|
||||
@@ -54,7 +54,7 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 500,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
Expect(err).To(MatchError("last.fm http status: (500)"))
|
||||
})
|
||||
|
||||
@@ -64,7 +64,7 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 400,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
Expect(err).To(MatchError(&lastFMError{Code: 3, Message: "Invalid Method - No method with that name in this package"}))
|
||||
})
|
||||
|
||||
@@ -74,14 +74,14 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 200,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
Expect(err).To(MatchError(&lastFMError{Code: 6, Message: "The artist you supplied could not be found"}))
|
||||
})
|
||||
|
||||
It("fails if HttpClient.Do() returns error", func() {
|
||||
httpClient.Err = errors.New("generic error")
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
Expect(err).To(MatchError("generic error"))
|
||||
})
|
||||
|
||||
@@ -91,7 +91,7 @@ var _ = Describe("client", func() {
|
||||
StatusCode: 200,
|
||||
}
|
||||
|
||||
_, err := client.artistGetInfo(context.Background(), "U2")
|
||||
_, err := client.artistGetInfo(context.Background(), "U2", "123")
|
||||
Expect(err).To(MatchError("invalid character '<' looking for beginning of value"))
|
||||
})
|
||||
|
||||
@@ -102,10 +102,10 @@ var _ = Describe("client", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.getsimilar.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
similar, err := client.artistGetSimilar(context.Background(), "U2", 2)
|
||||
similar, err := client.artistGetSimilar(context.Background(), "U2", "123", 2)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(len(similar.Artists)).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&method=artist.getSimilar"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&mbid=123&method=artist.getSimilar"))
|
||||
})
|
||||
})
|
||||
|
||||
@@ -114,10 +114,10 @@ var _ = Describe("client", func() {
|
||||
f, _ := os.Open("tests/fixtures/lastfm.artist.gettoptracks.json")
|
||||
httpClient.Res = http.Response{Body: f, StatusCode: 200}
|
||||
|
||||
top, err := client.artistGetTopTracks(context.Background(), "U2", 2)
|
||||
top, err := client.artistGetTopTracks(context.Background(), "U2", "123", 2)
|
||||
Expect(err).To(BeNil())
|
||||
Expect(len(top.Track)).To(Equal(2))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&method=artist.getTopTracks"))
|
||||
Expect(httpClient.SavedRequest.URL.String()).To(Equal(apiBaseUrl + "?api_key=API_KEY&artist=U2&format=json&limit=2&mbid=123&method=artist.getTopTracks"))
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -12,7 +12,6 @@ import (
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/cache"
|
||||
"github.com/navidrome/navidrome/utils/slice"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -46,12 +45,6 @@ func (l *listenBrainzAgent) AgentName() string {
|
||||
}
|
||||
|
||||
func (l *listenBrainzAgent) formatListen(track *model.MediaFile) listenInfo {
|
||||
artistMBIDs := slice.Map(track.Participants[model.RoleArtist], func(p model.Participant) string {
|
||||
return p.MbzArtistID
|
||||
})
|
||||
artistNames := slice.Map(track.Participants[model.RoleArtist], func(p model.Participant) string {
|
||||
return p.Name
|
||||
})
|
||||
li := listenInfo{
|
||||
TrackMetadata: trackMetadata{
|
||||
ArtistName: track.Artist,
|
||||
@@ -61,11 +54,9 @@ func (l *listenBrainzAgent) formatListen(track *model.MediaFile) listenInfo {
|
||||
SubmissionClient: consts.AppName,
|
||||
SubmissionClientVersion: consts.Version,
|
||||
TrackNumber: track.TrackNumber,
|
||||
ArtistNames: artistNames,
|
||||
ArtistMBIDs: artistMBIDs,
|
||||
RecordingMBID: track.MbzRecordingID,
|
||||
ReleaseMBID: track.MbzAlbumID,
|
||||
ReleaseGroupMBID: track.MbzReleaseGroupID,
|
||||
ArtistMbzIDs: []string{track.MbzArtistID},
|
||||
RecordingMbzID: track.MbzRecordingID,
|
||||
ReleaseMbID: track.MbzAlbumID,
|
||||
DurationMs: int(track.Duration * 1000),
|
||||
},
|
||||
},
|
||||
@@ -76,14 +67,14 @@ func (l *listenBrainzAgent) formatListen(track *model.MediaFile) listenInfo {
|
||||
func (l *listenBrainzAgent) NowPlaying(ctx context.Context, userId string, track *model.MediaFile) error {
|
||||
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||
if err != nil || sk == "" {
|
||||
return errors.Join(err, scrobbler.ErrNotAuthorized)
|
||||
return scrobbler.ErrNotAuthorized
|
||||
}
|
||||
|
||||
li := l.formatListen(track)
|
||||
err = l.client.updateNowPlaying(ctx, sk, li)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "ListenBrainz updateNowPlaying returned error", "track", track.Title, err)
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
return scrobbler.ErrUnrecoverable
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -91,7 +82,7 @@ func (l *listenBrainzAgent) NowPlaying(ctx context.Context, userId string, track
|
||||
func (l *listenBrainzAgent) Scrobble(ctx context.Context, userId string, s scrobbler.Scrobble) error {
|
||||
sk, err := l.sessionKeys.Get(ctx, userId)
|
||||
if err != nil || sk == "" {
|
||||
return errors.Join(err, scrobbler.ErrNotAuthorized)
|
||||
return scrobbler.ErrNotAuthorized
|
||||
}
|
||||
|
||||
li := l.formatListen(&s.MediaFile)
|
||||
@@ -105,12 +96,12 @@ func (l *listenBrainzAgent) Scrobble(ctx context.Context, userId string, s scrob
|
||||
isListenBrainzError := errors.As(err, &lbErr)
|
||||
if !isListenBrainzError {
|
||||
log.Warn(ctx, "ListenBrainz Scrobble returned HTTP error", "track", s.Title, err)
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
return scrobbler.ErrRetryLater
|
||||
}
|
||||
if lbErr.Code == 500 || lbErr.Code == 503 {
|
||||
return errors.Join(err, scrobbler.ErrRetryLater)
|
||||
return scrobbler.ErrRetryLater
|
||||
}
|
||||
return errors.Join(err, scrobbler.ErrUnrecoverable)
|
||||
return scrobbler.ErrUnrecoverable
|
||||
}
|
||||
|
||||
func (l *listenBrainzAgent) IsAuthorized(ctx context.Context, userId string) bool {
|
||||
|
||||
@@ -32,26 +32,24 @@ var _ = Describe("listenBrainzAgent", func() {
|
||||
agent = listenBrainzConstructor(ds)
|
||||
agent.client = newClient("http://localhost:8080", httpClient)
|
||||
track = &model.MediaFile{
|
||||
ID: "123",
|
||||
Title: "Track Title",
|
||||
Album: "Track Album",
|
||||
Artist: "Track Artist",
|
||||
TrackNumber: 1,
|
||||
MbzRecordingID: "mbz-123",
|
||||
MbzAlbumID: "mbz-456",
|
||||
MbzReleaseGroupID: "mbz-789",
|
||||
Duration: 142.2,
|
||||
Participants: map[model.Role]model.ParticipantList{
|
||||
model.RoleArtist: []model.Participant{
|
||||
{Artist: model.Artist{ID: "ar-1", Name: "Artist 1", MbzArtistID: "mbz-111"}},
|
||||
{Artist: model.Artist{ID: "ar-2", Name: "Artist 2", MbzArtistID: "mbz-222"}},
|
||||
},
|
||||
},
|
||||
ID: "123",
|
||||
Title: "Track Title",
|
||||
Album: "Track Album",
|
||||
Artist: "Track Artist",
|
||||
TrackNumber: 1,
|
||||
MbzRecordingID: "mbz-123",
|
||||
MbzAlbumID: "mbz-456",
|
||||
MbzArtistID: "mbz-789",
|
||||
Duration: 142.2,
|
||||
}
|
||||
})
|
||||
|
||||
Describe("formatListen", func() {
|
||||
It("constructs the listenInfo properly", func() {
|
||||
var idArtistId = func(element interface{}) string {
|
||||
return element.(string)
|
||||
}
|
||||
|
||||
lr := agent.formatListen(track)
|
||||
Expect(lr).To(MatchAllFields(Fields{
|
||||
"ListenedAt": Equal(0),
|
||||
@@ -63,12 +61,12 @@ var _ = Describe("listenBrainzAgent", func() {
|
||||
"SubmissionClient": Equal(consts.AppName),
|
||||
"SubmissionClientVersion": Equal(consts.Version),
|
||||
"TrackNumber": Equal(track.TrackNumber),
|
||||
"RecordingMBID": Equal(track.MbzRecordingID),
|
||||
"ReleaseMBID": Equal(track.MbzAlbumID),
|
||||
"ReleaseGroupMBID": Equal(track.MbzReleaseGroupID),
|
||||
"ArtistNames": ConsistOf("Artist 1", "Artist 2"),
|
||||
"ArtistMBIDs": ConsistOf("mbz-111", "mbz-222"),
|
||||
"DurationMs": Equal(142200),
|
||||
"RecordingMbzID": Equal(track.MbzRecordingID),
|
||||
"ReleaseMbID": Equal(track.MbzAlbumID),
|
||||
"ArtistMbzIDs": MatchAllElements(idArtistId, Elements{
|
||||
"mbz-789": Equal(track.MbzArtistID),
|
||||
}),
|
||||
"DurationMs": Equal(142200),
|
||||
}),
|
||||
}),
|
||||
}))
|
||||
|
||||
@@ -76,11 +76,9 @@ type additionalInfo struct {
|
||||
SubmissionClient string `json:"submission_client,omitempty"`
|
||||
SubmissionClientVersion string `json:"submission_client_version,omitempty"`
|
||||
TrackNumber int `json:"tracknumber,omitempty"`
|
||||
ArtistNames []string `json:"artist_names,omitempty"`
|
||||
ArtistMBIDs []string `json:"artist_mbids,omitempty"`
|
||||
RecordingMBID string `json:"recording_mbid,omitempty"`
|
||||
ReleaseMBID string `json:"release_mbid,omitempty"`
|
||||
ReleaseGroupMBID string `json:"release_group_mbid,omitempty"`
|
||||
RecordingMbzID string `json:"recording_mbid,omitempty"`
|
||||
ArtistMbzIDs []string `json:"artist_mbids,omitempty"`
|
||||
ReleaseMbID string `json:"release_mbid,omitempty"`
|
||||
DurationMs int `json:"duration_ms,omitempty"`
|
||||
}
|
||||
|
||||
|
||||
@@ -74,12 +74,11 @@ var _ = Describe("client", func() {
|
||||
TrackName: "Track Title",
|
||||
ReleaseName: "Track Album",
|
||||
AdditionalInfo: additionalInfo{
|
||||
TrackNumber: 1,
|
||||
ArtistNames: []string{"Artist 1", "Artist 2"},
|
||||
ArtistMBIDs: []string{"mbz-789", "mbz-012"},
|
||||
RecordingMBID: "mbz-123",
|
||||
ReleaseMBID: "mbz-456",
|
||||
DurationMs: 142200,
|
||||
TrackNumber: 1,
|
||||
RecordingMbzID: "mbz-123",
|
||||
ArtistMbzIDs: []string{"mbz-789"},
|
||||
ReleaseMbID: "mbz-456",
|
||||
DurationMs: 142200,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -27,9 +27,6 @@ type spotifyAgent struct {
|
||||
}
|
||||
|
||||
func spotifyConstructor(ds model.DataStore) agents.Interface {
|
||||
if conf.Server.Spotify.ID == "" || conf.Server.Spotify.Secret == "" {
|
||||
return nil
|
||||
}
|
||||
l := &spotifyAgent{
|
||||
ds: ds,
|
||||
id: conf.Server.Spotify.ID,
|
||||
@@ -91,6 +88,8 @@ func (s *spotifyAgent) searchArtist(ctx context.Context, name string) (*Artist,
|
||||
|
||||
func init() {
|
||||
conf.AddHook(func() {
|
||||
agents.Register(spotifyAgentName, spotifyConstructor)
|
||||
if conf.Server.Spotify.ID != "" && conf.Server.Spotify.Secret != "" {
|
||||
agents.Register(spotifyAgentName, spotifyConstructor)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -53,11 +53,11 @@ func (a *archiver) zipAlbums(ctx context.Context, id string, format string, bitr
|
||||
})
|
||||
for _, album := range albums {
|
||||
discs := slice.Group(album, func(mf model.MediaFile) int { return mf.DiscNumber })
|
||||
isMultiDisc := len(discs) > 1
|
||||
isMultDisc := len(discs) > 1
|
||||
log.Debug(ctx, "Zipping album", "name", album[0].Album, "artist", album[0].AlbumArtist,
|
||||
"format", format, "bitrate", bitrate, "isMultiDisc", isMultiDisc, "numTracks", len(album))
|
||||
"format", format, "bitrate", bitrate, "isMultDisc", isMultDisc, "numTracks", len(album))
|
||||
for _, mf := range album {
|
||||
file := a.albumFilename(mf, format, isMultiDisc)
|
||||
file := a.albumFilename(mf, format, isMultDisc)
|
||||
_ = a.addFileToZip(ctx, z, mf, format, bitrate, file)
|
||||
}
|
||||
}
|
||||
@@ -78,12 +78,12 @@ func createZipWriter(out io.Writer, format string, bitrate int) *zip.Writer {
|
||||
return z
|
||||
}
|
||||
|
||||
func (a *archiver) albumFilename(mf model.MediaFile, format string, isMultiDisc bool) string {
|
||||
func (a *archiver) albumFilename(mf model.MediaFile, format string, isMultDisc bool) string {
|
||||
_, file := filepath.Split(mf.Path)
|
||||
if format != "raw" {
|
||||
file = strings.TrimSuffix(file, mf.Suffix) + format
|
||||
}
|
||||
if isMultiDisc {
|
||||
if isMultDisc {
|
||||
file = fmt.Sprintf("Disc %02d/%s", mf.DiscNumber, file)
|
||||
}
|
||||
return fmt.Sprintf("%s/%s", sanitizeName(mf.Album), file)
|
||||
@@ -91,18 +91,18 @@ func (a *archiver) albumFilename(mf model.MediaFile, format string, isMultiDisc
|
||||
|
||||
func (a *archiver) ZipShare(ctx context.Context, id string, out io.Writer) error {
|
||||
s, err := a.shares.Load(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !s.Downloadable {
|
||||
return model.ErrNotAuthorized
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debug(ctx, "Zipping share", "name", s.ID, "format", s.Format, "bitrate", s.MaxBitRate, "numTracks", len(s.Tracks))
|
||||
return a.zipMediaFiles(ctx, id, s.Format, s.MaxBitRate, out, s.Tracks)
|
||||
}
|
||||
|
||||
func (a *archiver) ZipPlaylist(ctx context.Context, id string, format string, bitrate int, out io.Writer) error {
|
||||
pls, err := a.ds.Playlist(ctx).GetWithTracks(id, true, false)
|
||||
pls, err := a.ds.Playlist(ctx).GetWithTracks(id, true)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error loading mediafiles from playlist", "id", id, err)
|
||||
return err
|
||||
@@ -138,14 +138,13 @@ func sanitizeName(target string) string {
|
||||
}
|
||||
|
||||
func (a *archiver) addFileToZip(ctx context.Context, z *zip.Writer, mf model.MediaFile, format string, bitrate int, filename string) error {
|
||||
path := mf.AbsolutePath()
|
||||
w, err := z.CreateHeader(&zip.FileHeader{
|
||||
Name: filename,
|
||||
Modified: mf.UpdatedAt,
|
||||
Method: zip.Store,
|
||||
})
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error creating zip entry", "file", path, err)
|
||||
log.Error(ctx, "Error creating zip entry", "file", mf.Path, err)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -153,22 +152,22 @@ func (a *archiver) addFileToZip(ctx context.Context, z *zip.Writer, mf model.Med
|
||||
if format != "raw" && format != "" {
|
||||
r, err = a.ms.DoStream(ctx, &mf, format, bitrate, 0)
|
||||
} else {
|
||||
r, err = os.Open(path)
|
||||
r, err = os.Open(mf.Path)
|
||||
}
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error opening file for zipping", "file", path, "format", format, err)
|
||||
log.Error(ctx, "Error opening file for zipping", "file", mf.Path, "format", format, err)
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := r.Close(); err != nil && log.IsGreaterOrEqualTo(log.LevelDebug) {
|
||||
log.Error(ctx, "Error closing stream", "id", mf.ID, "file", path, err)
|
||||
log.Error(ctx, "Error closing stream", "id", mf.ID, "file", mf.Path, err)
|
||||
}
|
||||
}()
|
||||
|
||||
_, err = io.Copy(w, r)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error zipping file", "file", path, err)
|
||||
log.Error(ctx, "Error zipping file", "file", mf.Path, err)
|
||||
return err
|
||||
}
|
||||
|
||||
|
||||
@@ -25,8 +25,8 @@ var _ = Describe("Archiver", func() {
|
||||
|
||||
BeforeEach(func() {
|
||||
ms = &mockMediaStreamer{}
|
||||
sh = &mockShare{}
|
||||
ds = &mockDataStore{}
|
||||
sh = &mockShare{}
|
||||
arch = core.NewArchiver(ms, ds, sh)
|
||||
})
|
||||
|
||||
@@ -134,7 +134,7 @@ var _ = Describe("Archiver", func() {
|
||||
}
|
||||
|
||||
plRepo := &mockPlaylistRepository{}
|
||||
plRepo.On("GetWithTracks", "1", true, false).Return(pls, nil)
|
||||
plRepo.On("GetWithTracks", "1", true).Return(pls, nil)
|
||||
ds.On("Playlist", mock.Anything).Return(plRepo)
|
||||
ms.On("DoStream", mock.Anything, mock.Anything, "mp3", 128, 0).Return(io.NopCloser(strings.NewReader("test")), nil).Times(2)
|
||||
|
||||
@@ -167,19 +167,6 @@ func (m *mockDataStore) Playlist(ctx context.Context) model.PlaylistRepository {
|
||||
return args.Get(0).(model.PlaylistRepository)
|
||||
}
|
||||
|
||||
func (m *mockDataStore) Library(context.Context) model.LibraryRepository {
|
||||
return &mockLibraryRepository{}
|
||||
}
|
||||
|
||||
type mockLibraryRepository struct {
|
||||
mock.Mock
|
||||
model.LibraryRepository
|
||||
}
|
||||
|
||||
func (m *mockLibraryRepository) GetPath(id int) (string, error) {
|
||||
return "/music", nil
|
||||
}
|
||||
|
||||
type mockMediaFileRepository struct {
|
||||
mock.Mock
|
||||
model.MediaFileRepository
|
||||
@@ -195,8 +182,8 @@ type mockPlaylistRepository struct {
|
||||
model.PlaylistRepository
|
||||
}
|
||||
|
||||
func (m *mockPlaylistRepository) GetWithTracks(id string, refreshSmartPlaylists, includeMissing bool) (*model.Playlist, error) {
|
||||
args := m.Called(id, refreshSmartPlaylists, includeMissing)
|
||||
func (m *mockPlaylistRepository) GetWithTracks(id string, includeTracks bool) (*model.Playlist, error) {
|
||||
args := m.Called(id, includeTracks)
|
||||
return args.Get(0).(*model.Playlist), args.Error(1)
|
||||
}
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
@@ -24,15 +24,15 @@ type Artwork interface {
|
||||
GetOrPlaceholder(ctx context.Context, id string, size int, square bool) (io.ReadCloser, time.Time, error)
|
||||
}
|
||||
|
||||
func NewArtwork(ds model.DataStore, cache cache.FileCache, ffmpeg ffmpeg.FFmpeg, provider external.Provider) Artwork {
|
||||
return &artwork{ds: ds, cache: cache, ffmpeg: ffmpeg, provider: provider}
|
||||
func NewArtwork(ds model.DataStore, cache cache.FileCache, ffmpeg ffmpeg.FFmpeg, em core.ExternalMetadata) Artwork {
|
||||
return &artwork{ds: ds, cache: cache, ffmpeg: ffmpeg, em: em}
|
||||
}
|
||||
|
||||
type artwork struct {
|
||||
ds model.DataStore
|
||||
cache cache.FileCache
|
||||
ffmpeg ffmpeg.FFmpeg
|
||||
provider external.Provider
|
||||
ds model.DataStore
|
||||
cache cache.FileCache
|
||||
ffmpeg ffmpeg.FFmpeg
|
||||
em core.ExternalMetadata
|
||||
}
|
||||
|
||||
type artworkReader interface {
|
||||
@@ -115,9 +115,9 @@ func (a *artwork) getArtworkReader(ctx context.Context, artID model.ArtworkID, s
|
||||
} else {
|
||||
switch artID.Kind {
|
||||
case model.KindArtistArtwork:
|
||||
artReader, err = newArtistReader(ctx, a, artID, a.provider)
|
||||
artReader, err = newArtistReader(ctx, a, artID, a.em)
|
||||
case model.KindAlbumArtwork:
|
||||
artReader, err = newAlbumArtworkReader(ctx, a, artID, a.provider)
|
||||
artReader, err = newAlbumArtworkReader(ctx, a, artID, a.em)
|
||||
case model.KindMediaFileArtwork:
|
||||
artReader, err = newMediafileArtworkReader(ctx, a, artID)
|
||||
case model.KindPlaylistArtwork:
|
||||
|
||||
@@ -4,10 +4,15 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"image"
|
||||
"image/jpeg"
|
||||
"image/png"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
@@ -15,8 +20,7 @@ import (
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
// TODO Fix tests
|
||||
var _ = XDescribe("Artwork", func() {
|
||||
var _ = Describe("Artwork", func() {
|
||||
var aw *artwork
|
||||
var ds model.DataStore
|
||||
var ffmpeg *tests.MockFFmpeg
|
||||
@@ -33,17 +37,17 @@ var _ = XDescribe("Artwork", func() {
|
||||
ds = &tests.MockDataStore{MockedTranscoding: &tests.MockTranscodingRepo{}}
|
||||
alOnlyEmbed = model.Album{ID: "222", Name: "Only embed", EmbedArtPath: "tests/fixtures/artist/an-album/test.mp3"}
|
||||
alEmbedNotFound = model.Album{ID: "333", Name: "Embed not found", EmbedArtPath: "tests/fixtures/NON_EXISTENT.mp3"}
|
||||
//alOnlyExternal = model.Album{ID: "444", Name: "Only external", ImageFiles: "tests/fixtures/artist/an-album/front.png"}
|
||||
//alExternalNotFound = model.Album{ID: "555", Name: "External not found", ImageFiles: "tests/fixtures/NON_EXISTENT.png"}
|
||||
alOnlyExternal = model.Album{ID: "444", Name: "Only external", ImageFiles: "tests/fixtures/artist/an-album/front.png"}
|
||||
alExternalNotFound = model.Album{ID: "555", Name: "External not found", ImageFiles: "tests/fixtures/NON_EXISTENT.png"}
|
||||
arMultipleCovers = model.Artist{ID: "777", Name: "All options"}
|
||||
alMultipleCovers = model.Album{
|
||||
ID: "666",
|
||||
Name: "All options",
|
||||
EmbedArtPath: "tests/fixtures/artist/an-album/test.mp3",
|
||||
//Paths: []string{"tests/fixtures/artist/an-album"},
|
||||
//ImageFiles: "tests/fixtures/artist/an-album/cover.jpg" + consts.Zwsp +
|
||||
// "tests/fixtures/artist/an-album/front.png" + consts.Zwsp +
|
||||
// "tests/fixtures/artist/an-album/artist.png",
|
||||
Paths: "tests/fixtures/artist/an-album",
|
||||
ImageFiles: "tests/fixtures/artist/an-album/cover.jpg" + consts.Zwsp +
|
||||
"tests/fixtures/artist/an-album/front.png" + consts.Zwsp +
|
||||
"tests/fixtures/artist/an-album/artist.png",
|
||||
AlbumArtistID: "777",
|
||||
}
|
||||
mfWithEmbed = model.MediaFile{ID: "22", Path: "tests/fixtures/test.mp3", HasCoverArt: true, AlbumID: "222"}
|
||||
@@ -241,11 +245,11 @@ var _ = XDescribe("Artwork", func() {
|
||||
DescribeTable("resize",
|
||||
func(format string, landscape bool, size int) {
|
||||
coverFileName := "cover." + format
|
||||
//dirName := createImage(format, landscape, size)
|
||||
dirName := createImage(format, landscape, size)
|
||||
alCover = model.Album{
|
||||
ID: "444",
|
||||
Name: "Only external",
|
||||
//ImageFiles: filepath.Join(dirName, coverFileName),
|
||||
ID: "444",
|
||||
Name: "Only external",
|
||||
ImageFiles: filepath.Join(dirName, coverFileName),
|
||||
}
|
||||
ds.Album(ctx).(*tests.MockAlbumRepo).SetData(model.Albums{
|
||||
alCover,
|
||||
@@ -270,24 +274,24 @@ var _ = XDescribe("Artwork", func() {
|
||||
})
|
||||
})
|
||||
|
||||
//func createImage(format string, landscape bool, size int) string {
|
||||
// var img image.Image
|
||||
//
|
||||
// if landscape {
|
||||
// img = image.NewRGBA(image.Rect(0, 0, size, size/2))
|
||||
// } else {
|
||||
// img = image.NewRGBA(image.Rect(0, 0, size/2, size))
|
||||
// }
|
||||
//
|
||||
// tmpDir := GinkgoT().TempDir()
|
||||
// f, _ := os.Create(filepath.Join(tmpDir, "cover."+format))
|
||||
// defer f.Close()
|
||||
// switch format {
|
||||
// case "png":
|
||||
// _ = png.Encode(f, img)
|
||||
// case "jpg":
|
||||
// _ = jpeg.Encode(f, img, &jpeg.Options{Quality: 75})
|
||||
// }
|
||||
//
|
||||
// return tmpDir
|
||||
//}
|
||||
func createImage(format string, landscape bool, size int) string {
|
||||
var img image.Image
|
||||
|
||||
if landscape {
|
||||
img = image.NewRGBA(image.Rect(0, 0, size, size/2))
|
||||
} else {
|
||||
img = image.NewRGBA(image.Rect(0, 0, size/2, size))
|
||||
}
|
||||
|
||||
tmpDir := GinkgoT().TempDir()
|
||||
f, _ := os.Create(filepath.Join(tmpDir, "cover."+format))
|
||||
defer f.Close()
|
||||
switch format {
|
||||
case "png":
|
||||
_ = png.Encode(f, img)
|
||||
case "jpg":
|
||||
_ = jpeg.Encode(f, img, &jpeg.Options{Quality: 75})
|
||||
}
|
||||
|
||||
return tmpDir
|
||||
}
|
||||
|
||||
@@ -22,9 +22,6 @@ type CacheWarmer interface {
|
||||
PreCache(artID model.ArtworkID)
|
||||
}
|
||||
|
||||
// NewCacheWarmer creates a new CacheWarmer instance. The CacheWarmer will pre-cache Artwork images in the background
|
||||
// to speed up the response time when the image is requested by the UI. The cache is pre-populated with the original
|
||||
// image size, as well as the size defined in the UICoverArtSize constant.
|
||||
func NewCacheWarmer(artwork Artwork, cache cache.FileCache) CacheWarmer {
|
||||
// If image cache is disabled, return a NOOP implementation
|
||||
if conf.Server.ImageCacheSize == "0" || !conf.Server.EnableArtworkPrecache {
|
||||
@@ -52,7 +49,15 @@ type cacheWarmer struct {
|
||||
wakeSignal chan struct{}
|
||||
}
|
||||
|
||||
var ignoredIds = map[string]struct{}{
|
||||
consts.VariousArtistsID: {},
|
||||
consts.UnknownArtistID: {},
|
||||
}
|
||||
|
||||
func (a *cacheWarmer) PreCache(artID model.ArtworkID) {
|
||||
if _, shouldIgnore := ignoredIds[artID.ID]; shouldIgnore {
|
||||
return
|
||||
}
|
||||
a.mutex.Lock()
|
||||
defer a.mutex.Unlock()
|
||||
a.buffer[artID] = struct{}{}
|
||||
@@ -99,8 +104,14 @@ func (a *cacheWarmer) run(ctx context.Context) {
|
||||
}
|
||||
|
||||
func (a *cacheWarmer) waitSignal(ctx context.Context, timeout time.Duration) {
|
||||
var to <-chan time.Time
|
||||
if !a.cache.Available(ctx) {
|
||||
tmr := time.NewTimer(timeout)
|
||||
defer tmr.Stop()
|
||||
to = tmr.C
|
||||
}
|
||||
select {
|
||||
case <-time.After(timeout):
|
||||
case <-to:
|
||||
case <-a.wakeSignal:
|
||||
case <-ctx.Done():
|
||||
}
|
||||
@@ -131,10 +142,6 @@ func (a *cacheWarmer) doCacheImage(ctx context.Context, id model.ArtworkID) erro
|
||||
return nil
|
||||
}
|
||||
|
||||
func NoopCacheWarmer() CacheWarmer {
|
||||
return &noopCacheWarmer{}
|
||||
}
|
||||
|
||||
type noopCacheWarmer struct{}
|
||||
|
||||
func (a *noopCacheWarmer) PreCache(model.ArtworkID) {}
|
||||
|
||||
@@ -5,52 +5,34 @@ import (
|
||||
"crypto/md5"
|
||||
"fmt"
|
||||
"io"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Masterminds/squirrel"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
)
|
||||
|
||||
type albumArtworkReader struct {
|
||||
cacheKey
|
||||
a *artwork
|
||||
provider external.Provider
|
||||
album model.Album
|
||||
updatedAt *time.Time
|
||||
imgFiles []string
|
||||
rootFolder string
|
||||
a *artwork
|
||||
em core.ExternalMetadata
|
||||
album model.Album
|
||||
}
|
||||
|
||||
func newAlbumArtworkReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, provider external.Provider) (*albumArtworkReader, error) {
|
||||
func newAlbumArtworkReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, em core.ExternalMetadata) (*albumArtworkReader, error) {
|
||||
al, err := artwork.ds.Album(ctx).Get(artID.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_, imgFiles, imagesUpdateAt, err := loadAlbumFoldersPaths(ctx, artwork.ds, *al)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a := &albumArtworkReader{
|
||||
a: artwork,
|
||||
provider: provider,
|
||||
album: *al,
|
||||
updatedAt: imagesUpdateAt,
|
||||
imgFiles: imgFiles,
|
||||
rootFolder: core.AbsolutePath(ctx, artwork.ds, al.LibraryID, ""),
|
||||
a: artwork,
|
||||
em: em,
|
||||
album: *al,
|
||||
}
|
||||
a.cacheKey.artID = artID
|
||||
if a.updatedAt != nil && a.updatedAt.After(al.UpdatedAt) {
|
||||
a.cacheKey.lastUpdate = *a.updatedAt
|
||||
} else {
|
||||
a.cacheKey.lastUpdate = al.UpdatedAt
|
||||
}
|
||||
a.cacheKey.lastUpdate = al.UpdatedAt
|
||||
return a, nil
|
||||
}
|
||||
|
||||
@@ -81,43 +63,12 @@ func (a *albumArtworkReader) fromCoverArtPriority(ctx context.Context, ffmpeg ff
|
||||
pattern = strings.TrimSpace(pattern)
|
||||
switch {
|
||||
case pattern == "embedded":
|
||||
embedArtPath := filepath.Join(a.rootFolder, a.album.EmbedArtPath)
|
||||
ff = append(ff, fromTag(ctx, embedArtPath), fromFFmpegTag(ctx, ffmpeg, embedArtPath))
|
||||
ff = append(ff, fromTag(ctx, a.album.EmbedArtPath), fromFFmpegTag(ctx, ffmpeg, a.album.EmbedArtPath))
|
||||
case pattern == "external":
|
||||
ff = append(ff, fromAlbumExternalSource(ctx, a.album, a.provider))
|
||||
case len(a.imgFiles) > 0:
|
||||
ff = append(ff, fromExternalFile(ctx, a.imgFiles, pattern))
|
||||
ff = append(ff, fromAlbumExternalSource(ctx, a.album, a.em))
|
||||
case a.album.ImageFiles != "":
|
||||
ff = append(ff, fromExternalFile(ctx, a.album.ImageFiles, pattern))
|
||||
}
|
||||
}
|
||||
return ff
|
||||
}
|
||||
|
||||
func loadAlbumFoldersPaths(ctx context.Context, ds model.DataStore, albums ...model.Album) ([]string, []string, *time.Time, error) {
|
||||
var folderIDs []string
|
||||
for _, album := range albums {
|
||||
folderIDs = append(folderIDs, album.FolderIDs...)
|
||||
}
|
||||
folders, err := ds.Folder(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"folder.id": folderIDs, "missing": false}})
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
var paths []string
|
||||
var imgFiles []string
|
||||
var updatedAt time.Time
|
||||
for _, f := range folders {
|
||||
path := f.AbsolutePath()
|
||||
paths = append(paths, path)
|
||||
if f.ImagesUpdatedAt.After(updatedAt) {
|
||||
updatedAt = f.ImagesUpdatedAt
|
||||
}
|
||||
for _, img := range f.ImageFiles {
|
||||
imgFiles = append(imgFiles, filepath.Join(path, img))
|
||||
}
|
||||
}
|
||||
|
||||
// Sort image files to ensure consistent selection of cover art
|
||||
// This prioritizes files from lower-numbered disc folders by sorting the paths
|
||||
slices.Sort(imgFiles)
|
||||
|
||||
return paths, imgFiles, &updatedAt, nil
|
||||
}
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
package artwork
|
||||
|
||||
import (
|
||||
"context"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("Album Artwork Reader", func() {
|
||||
Describe("loadAlbumFoldersPaths", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
ds *fakeDataStore
|
||||
repo *fakeFolderRepo
|
||||
album model.Album
|
||||
now time.Time
|
||||
expectedAt time.Time
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = context.Background()
|
||||
now = time.Now().Truncate(time.Second)
|
||||
expectedAt = now.Add(5 * time.Minute)
|
||||
|
||||
// Set up the test folders with image files
|
||||
repo = &fakeFolderRepo{
|
||||
result: []model.Folder{
|
||||
{
|
||||
Path: "Artist/Album/Disc1",
|
||||
ImagesUpdatedAt: expectedAt,
|
||||
ImageFiles: []string{"cover.jpg", "back.jpg"},
|
||||
},
|
||||
{
|
||||
Path: "Artist/Album/Disc2",
|
||||
ImagesUpdatedAt: now,
|
||||
ImageFiles: []string{"cover.jpg"},
|
||||
},
|
||||
{
|
||||
Path: "Artist/Album/Disc10",
|
||||
ImagesUpdatedAt: now,
|
||||
ImageFiles: []string{"cover.jpg"},
|
||||
},
|
||||
},
|
||||
err: nil,
|
||||
}
|
||||
ds = &fakeDataStore{
|
||||
folderRepo: repo,
|
||||
}
|
||||
album = model.Album{
|
||||
ID: "album1",
|
||||
Name: "Album",
|
||||
FolderIDs: []string{"folder1", "folder2", "folder3"},
|
||||
}
|
||||
})
|
||||
|
||||
It("returns sorted image files", func() {
|
||||
_, imgFiles, imagesUpdatedAt, err := loadAlbumFoldersPaths(ctx, ds, album)
|
||||
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(*imagesUpdatedAt).To(Equal(expectedAt))
|
||||
|
||||
// Check that image files are sorted alphabetically
|
||||
Expect(imgFiles).To(HaveLen(4))
|
||||
|
||||
// The files should be sorted by full path
|
||||
Expect(imgFiles[0]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/back.jpg")))
|
||||
Expect(imgFiles[1]).To(Equal(filepath.FromSlash("Artist/Album/Disc1/cover.jpg")))
|
||||
Expect(imgFiles[2]).To(Equal(filepath.FromSlash("Artist/Album/Disc10/cover.jpg")))
|
||||
Expect(imgFiles[3]).To(Equal(filepath.FromSlash("Artist/Album/Disc2/cover.jpg")))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -13,8 +13,8 @@ import (
|
||||
|
||||
"github.com/Masterminds/squirrel"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/utils/str"
|
||||
@@ -23,49 +23,42 @@ import (
|
||||
type artistReader struct {
|
||||
cacheKey
|
||||
a *artwork
|
||||
provider external.Provider
|
||||
em core.ExternalMetadata
|
||||
artist model.Artist
|
||||
artistFolder string
|
||||
imgFiles []string
|
||||
files string
|
||||
}
|
||||
|
||||
func newArtistReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, provider external.Provider) (*artistReader, error) {
|
||||
func newArtistReader(ctx context.Context, artwork *artwork, artID model.ArtworkID, em core.ExternalMetadata) (*artistReader, error) {
|
||||
ar, err := artwork.ds.Artist(ctx).Get(artID.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Only consider albums where the artist is the sole album artist.
|
||||
als, err := artwork.ds.Album(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.And{
|
||||
squirrel.Eq{"album_artist_id": artID.ID},
|
||||
squirrel.Eq{"json_array_length(participants, '$.albumartist')": 1},
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
albumPaths, imgFiles, imagesUpdatedAt, err := loadAlbumFoldersPaths(ctx, artwork.ds, als...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
artistFolder, artistFolderLastUpdate, err := loadArtistFolder(ctx, artwork.ds, als, albumPaths)
|
||||
als, err := artwork.ds.Album(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"album_artist_id": artID.ID}})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a := &artistReader{
|
||||
a: artwork,
|
||||
provider: provider,
|
||||
artist: *ar,
|
||||
artistFolder: artistFolder,
|
||||
imgFiles: imgFiles,
|
||||
a: artwork,
|
||||
em: em,
|
||||
artist: *ar,
|
||||
}
|
||||
// TODO Find a way to factor in the ExternalUpdateInfoAt in the cache key. Problem is that it can
|
||||
// change _after_ retrieving from external sources, making the key invalid
|
||||
//a.cacheKey.lastUpdate = ar.ExternalInfoUpdatedAt
|
||||
|
||||
a.cacheKey.lastUpdate = *imagesUpdatedAt
|
||||
if artistFolderLastUpdate.After(a.cacheKey.lastUpdate) {
|
||||
a.cacheKey.lastUpdate = artistFolderLastUpdate
|
||||
var files []string
|
||||
var paths []string
|
||||
for _, al := range als {
|
||||
files = append(files, al.ImageFiles)
|
||||
paths = append(paths, splitList(al.Paths)...)
|
||||
if a.cacheKey.lastUpdate.Before(al.UpdatedAt) {
|
||||
a.cacheKey.lastUpdate = al.UpdatedAt
|
||||
}
|
||||
}
|
||||
a.files = strings.Join(files, consts.Zwsp)
|
||||
a.artistFolder = str.LongestCommonPrefix(paths)
|
||||
if !strings.HasSuffix(a.artistFolder, string(filepath.Separator)) {
|
||||
a.artistFolder, _ = filepath.Split(a.artistFolder)
|
||||
}
|
||||
a.cacheKey.artID = artID
|
||||
return a, nil
|
||||
@@ -96,9 +89,9 @@ func (a *artistReader) fromArtistArtPriority(ctx context.Context, priority strin
|
||||
pattern = strings.TrimSpace(pattern)
|
||||
switch {
|
||||
case pattern == "external":
|
||||
ff = append(ff, fromArtistExternalSource(ctx, a.artist, a.provider))
|
||||
ff = append(ff, fromArtistExternalSource(ctx, a.artist, a.em))
|
||||
case strings.HasPrefix(pattern, "album/"):
|
||||
ff = append(ff, fromExternalFile(ctx, a.imgFiles, strings.TrimPrefix(pattern, "album/")))
|
||||
ff = append(ff, fromExternalFile(ctx, a.files, strings.TrimPrefix(pattern, "album/")))
|
||||
default:
|
||||
ff = append(ff, fromArtistFolder(ctx, a.artistFolder, pattern))
|
||||
}
|
||||
@@ -132,33 +125,3 @@ func fromArtistFolder(ctx context.Context, artistFolder string, pattern string)
|
||||
return nil, "", nil
|
||||
}
|
||||
}
|
||||
|
||||
func loadArtistFolder(ctx context.Context, ds model.DataStore, albums model.Albums, paths []string) (string, time.Time, error) {
|
||||
if len(albums) == 0 {
|
||||
return "", time.Time{}, nil
|
||||
}
|
||||
libID := albums[0].LibraryID // Just need one of the albums, as they should all be in the same Library
|
||||
|
||||
folderPath := str.LongestCommonPrefix(paths)
|
||||
if !strings.HasSuffix(folderPath, string(filepath.Separator)) {
|
||||
folderPath, _ = filepath.Split(folderPath)
|
||||
}
|
||||
folderPath = filepath.Dir(folderPath)
|
||||
|
||||
// Manipulate the path to get the folder ID
|
||||
// TODO: This is a bit hacky, but it's the easiest way to get the folder ID, ATM
|
||||
libPath := core.AbsolutePath(ctx, ds, libID, "")
|
||||
folderID := model.FolderID(model.Library{ID: libID, Path: libPath}, folderPath)
|
||||
|
||||
log.Trace(ctx, "Calculating artist folder details", "folderPath", folderPath, "folderID", folderID,
|
||||
"libPath", libPath, "libID", libID, "albumPaths", paths)
|
||||
|
||||
// Get the last update time for the folder
|
||||
folders, err := ds.Folder(ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"folder.id": folderID, "missing": false}})
|
||||
if err != nil || len(folders) == 0 {
|
||||
log.Warn(ctx, "Could not find folder for artist", "folderPath", folderPath, "id", folderID,
|
||||
"libPath", libPath, "libID", libID, err)
|
||||
return "", time.Time{}, err
|
||||
}
|
||||
return folderPath, folders[0].ImagesUpdatedAt, nil
|
||||
}
|
||||
|
||||
@@ -1,141 +0,0 @@
|
||||
package artwork
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = Describe("artistReader", func() {
|
||||
var _ = Describe("loadArtistFolder", func() {
|
||||
var (
|
||||
ctx context.Context
|
||||
fds *fakeDataStore
|
||||
repo *fakeFolderRepo
|
||||
albums model.Albums
|
||||
paths []string
|
||||
now time.Time
|
||||
expectedUpdTime time.Time
|
||||
)
|
||||
|
||||
BeforeEach(func() {
|
||||
ctx = context.Background()
|
||||
DeferCleanup(stubCoreAbsolutePath())
|
||||
|
||||
now = time.Now().Truncate(time.Second)
|
||||
expectedUpdTime = now.Add(5 * time.Minute)
|
||||
repo = &fakeFolderRepo{
|
||||
result: []model.Folder{
|
||||
{
|
||||
ImagesUpdatedAt: expectedUpdTime,
|
||||
},
|
||||
},
|
||||
err: nil,
|
||||
}
|
||||
fds = &fakeDataStore{
|
||||
folderRepo: repo,
|
||||
}
|
||||
albums = model.Albums{
|
||||
{LibraryID: 1, ID: "album1", Name: "Album 1"},
|
||||
}
|
||||
})
|
||||
|
||||
When("no albums provided", func() {
|
||||
It("returns empty and zero time", func() {
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, model.Albums{}, []string{"/dummy/path"})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(BeEmpty())
|
||||
Expect(upd).To(BeZero())
|
||||
})
|
||||
})
|
||||
|
||||
When("artist has only one album", func() {
|
||||
It("returns the parent folder", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/artist/album1"),
|
||||
}
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(Equal("/music/artist"))
|
||||
Expect(upd).To(Equal(expectedUpdTime))
|
||||
})
|
||||
})
|
||||
|
||||
When("the artist have multiple albums", func() {
|
||||
It("returns the common prefix for the albums paths", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/library/artist/one"),
|
||||
filepath.FromSlash("/music/library/artist/two"),
|
||||
}
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(Equal(filepath.FromSlash("/music/library/artist")))
|
||||
Expect(upd).To(Equal(expectedUpdTime))
|
||||
})
|
||||
})
|
||||
|
||||
When("the album paths contain same prefix", func() {
|
||||
It("returns the common prefix", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/artist/album1"),
|
||||
filepath.FromSlash("/music/artist/album2"),
|
||||
}
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(folder).To(Equal("/music/artist"))
|
||||
Expect(upd).To(Equal(expectedUpdTime))
|
||||
})
|
||||
})
|
||||
|
||||
When("ds.Folder().GetAll returns an error", func() {
|
||||
It("returns an error", func() {
|
||||
paths = []string{
|
||||
filepath.FromSlash("/music/artist/album1"),
|
||||
filepath.FromSlash("/music/artist/album2"),
|
||||
}
|
||||
repo.err = errors.New("fake error")
|
||||
folder, upd, err := loadArtistFolder(ctx, fds, albums, paths)
|
||||
Expect(err).To(MatchError(ContainSubstring("fake error")))
|
||||
// Folder and time are empty on error.
|
||||
Expect(folder).To(BeEmpty())
|
||||
Expect(upd).To(BeZero())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
type fakeFolderRepo struct {
|
||||
model.FolderRepository
|
||||
result []model.Folder
|
||||
err error
|
||||
}
|
||||
|
||||
func (f *fakeFolderRepo) GetAll(...model.QueryOptions) ([]model.Folder, error) {
|
||||
return f.result, f.err
|
||||
}
|
||||
|
||||
type fakeDataStore struct {
|
||||
model.DataStore
|
||||
folderRepo *fakeFolderRepo
|
||||
}
|
||||
|
||||
func (fds *fakeDataStore) Folder(_ context.Context) model.FolderRepository {
|
||||
return fds.folderRepo
|
||||
}
|
||||
|
||||
func stubCoreAbsolutePath() func() {
|
||||
// Override core.AbsolutePath to return a fixed string during tests.
|
||||
original := core.AbsolutePath
|
||||
core.AbsolutePath = func(_ context.Context, ds model.DataStore, libID int, p string) string {
|
||||
return filepath.FromSlash("/music")
|
||||
}
|
||||
return func() {
|
||||
core.AbsolutePath = original
|
||||
}
|
||||
}
|
||||
@@ -54,10 +54,9 @@ func (a *mediafileArtworkReader) LastUpdated() time.Time {
|
||||
func (a *mediafileArtworkReader) Reader(ctx context.Context) (io.ReadCloser, string, error) {
|
||||
var ff []sourceFunc
|
||||
if a.mediafile.CoverArtID().Kind == model.KindMediaFileArtwork {
|
||||
path := a.mediafile.AbsolutePath()
|
||||
ff = []sourceFunc{
|
||||
fromTag(ctx, path),
|
||||
fromFFmpegTag(ctx, a.a.ffmpeg, path),
|
||||
fromTag(ctx, a.mediafile.Path),
|
||||
fromFFmpegTag(ctx, a.a.ffmpeg, a.mediafile.Path),
|
||||
}
|
||||
}
|
||||
ff = append(ff, fromAlbum(ctx, a.a, a.mediafile.AlbumCoverArtID()))
|
||||
|
||||
@@ -61,7 +61,7 @@ func (a *playlistArtworkReader) fromGeneratedTiledCover(ctx context.Context) sou
|
||||
}
|
||||
}
|
||||
|
||||
func toAlbumArtworkIDs(albumIDs []string) []model.ArtworkID {
|
||||
func toArtworkIDs(albumIDs []string) []model.ArtworkID {
|
||||
return slice.Map(albumIDs, func(id string) model.ArtworkID {
|
||||
al := model.Album{ID: id}
|
||||
return al.CoverArtID()
|
||||
@@ -75,21 +75,24 @@ func (a *playlistArtworkReader) loadTiles(ctx context.Context) ([]image.Image, e
|
||||
log.Error(ctx, "Error getting album IDs for playlist", "id", a.pl.ID, "name", a.pl.Name, err)
|
||||
return nil, err
|
||||
}
|
||||
ids := toAlbumArtworkIDs(albumIds)
|
||||
ids := toArtworkIDs(albumIds)
|
||||
|
||||
var tiles []image.Image
|
||||
for _, id := range ids {
|
||||
r, _, err := fromAlbum(ctx, a.a, id)()
|
||||
if err == nil {
|
||||
tile, err := a.createTile(ctx, r)
|
||||
if err == nil {
|
||||
tiles = append(tiles, tile)
|
||||
}
|
||||
_ = r.Close()
|
||||
}
|
||||
if len(tiles) == 4 {
|
||||
for len(tiles) < 4 {
|
||||
if len(ids) == 0 {
|
||||
break
|
||||
}
|
||||
id := ids[len(ids)-1]
|
||||
ids = ids[0 : len(ids)-1]
|
||||
r, _, err := fromAlbum(ctx, a.a, id)()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
tile, err := a.createTile(ctx, r)
|
||||
if err == nil {
|
||||
tiles = append(tiles, tile)
|
||||
}
|
||||
_ = r.Close()
|
||||
}
|
||||
switch len(tiles) {
|
||||
case 0:
|
||||
|
||||
@@ -63,12 +63,12 @@ func (a *resizedArtworkReader) Reader(ctx context.Context) (io.ReadCloser, strin
|
||||
|
||||
resized, origSize, err := resizeImage(orig, a.size, a.square)
|
||||
if resized == nil {
|
||||
log.Trace(ctx, "Image smaller than requested size", "artID", a.artID, "original", origSize, "resized", a.size, "square", a.square)
|
||||
log.Trace(ctx, "Image smaller than requested size", "artID", a.artID, "original", origSize, "resized", a.size)
|
||||
} else {
|
||||
log.Trace(ctx, "Resizing artwork", "artID", a.artID, "original", origSize, "resized", a.size, "square", a.square)
|
||||
log.Trace(ctx, "Resizing artwork", "artID", a.artID, "original", origSize, "resized", a.size)
|
||||
}
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Could not resize image. Will return image as is", "artID", a.artID, "size", a.size, "square", a.square, err)
|
||||
log.Warn(ctx, "Could not resize image. Will return image as is", "artID", a.artID, "size", a.size, err)
|
||||
}
|
||||
if err != nil || resized == nil {
|
||||
// if we couldn't resize the image, return the original
|
||||
|
||||
@@ -17,7 +17,7 @@ import (
|
||||
|
||||
"github.com/dhowden/tag"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
@@ -53,9 +53,13 @@ func (f sourceFunc) String() string {
|
||||
return name
|
||||
}
|
||||
|
||||
func fromExternalFile(ctx context.Context, files []string, pattern string) sourceFunc {
|
||||
func splitList(s string) []string {
|
||||
return strings.Split(s, consts.Zwsp)
|
||||
}
|
||||
|
||||
func fromExternalFile(ctx context.Context, files string, pattern string) sourceFunc {
|
||||
return func() (io.ReadCloser, string, error) {
|
||||
for _, file := range files {
|
||||
for _, file := range splitList(files) {
|
||||
_, name := filepath.Split(file)
|
||||
match, err := filepath.Match(pattern, strings.ToLower(name))
|
||||
if err != nil {
|
||||
@@ -157,9 +161,9 @@ func fromAlbumPlaceholder() sourceFunc {
|
||||
return r, consts.PlaceholderAlbumArt, nil
|
||||
}
|
||||
}
|
||||
func fromArtistExternalSource(ctx context.Context, ar model.Artist, provider external.Provider) sourceFunc {
|
||||
func fromArtistExternalSource(ctx context.Context, ar model.Artist, em core.ExternalMetadata) sourceFunc {
|
||||
return func() (io.ReadCloser, string, error) {
|
||||
imageUrl, err := provider.ArtistImage(ctx, ar.ID)
|
||||
imageUrl, err := em.ArtistImage(ctx, ar.ID)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
@@ -168,9 +172,9 @@ func fromArtistExternalSource(ctx context.Context, ar model.Artist, provider ext
|
||||
}
|
||||
}
|
||||
|
||||
func fromAlbumExternalSource(ctx context.Context, al model.Album, provider external.Provider) sourceFunc {
|
||||
func fromAlbumExternalSource(ctx context.Context, al model.Album, em core.ExternalMetadata) sourceFunc {
|
||||
return func() (io.ReadCloser, string, error) {
|
||||
imageUrl, err := provider.AlbumImage(ctx, al.ID)
|
||||
imageUrl, err := em.AlbumImage(ctx, al.ID)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
@@ -8,12 +8,12 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/go-chi/jwtauth/v5"
|
||||
"github.com/google/uuid"
|
||||
"github.com/lestrrat-go/jwx/v2/jwt"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/id"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
)
|
||||
@@ -125,7 +125,7 @@ func WithAdminUser(ctx context.Context, ds model.DataStore) context.Context {
|
||||
}
|
||||
|
||||
func createNewSecret(ctx context.Context, ds model.DataStore) string {
|
||||
secret := id.NewRandom()
|
||||
secret := uuid.NewString()
|
||||
encSecret, err := utils.Encrypt(ctx, getEncKey(), secret)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Could not encrypt JWT secret", err)
|
||||
|
||||
@@ -2,9 +2,7 @@ package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
)
|
||||
|
||||
@@ -15,13 +13,3 @@ func userName(ctx context.Context) string {
|
||||
return user.UserName
|
||||
}
|
||||
}
|
||||
|
||||
// BFR We should only access files through the `storage.Storage` interface. This will require changing how
|
||||
// TagLib and ffmpeg access files
|
||||
var AbsolutePath = func(ctx context.Context, ds model.DataStore, libId int, path string) string {
|
||||
libPath, err := ds.Library(ctx).GetPath(libId)
|
||||
if err != nil {
|
||||
return path
|
||||
}
|
||||
return filepath.Join(libPath, path)
|
||||
}
|
||||
|
||||
270
core/external/extdata_helper_test.go
vendored
270
core/external/extdata_helper_test.go
vendored
@@ -1,270 +0,0 @@
|
||||
package external_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/stretchr/testify/mock"
|
||||
)
|
||||
|
||||
// --- Shared Mock Implementations ---
|
||||
|
||||
// mockArtistRepo mocks model.ArtistRepository
|
||||
type mockArtistRepo struct {
|
||||
mock.Mock
|
||||
model.ArtistRepository
|
||||
}
|
||||
|
||||
func newMockArtistRepo() *mockArtistRepo {
|
||||
return &mockArtistRepo{}
|
||||
}
|
||||
|
||||
// SetData sets up basic Get expectations.
|
||||
func (m *mockArtistRepo) SetData(artists model.Artists) {
|
||||
for _, a := range artists {
|
||||
artistCopy := a
|
||||
m.On("Get", artistCopy.ID).Return(&artistCopy, nil)
|
||||
}
|
||||
}
|
||||
|
||||
// Get implements model.ArtistRepository.
|
||||
func (m *mockArtistRepo) Get(id string) (*model.Artist, error) {
|
||||
args := m.Called(id)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*model.Artist), args.Error(1)
|
||||
}
|
||||
|
||||
// GetAll implements model.ArtistRepository.
|
||||
func (m *mockArtistRepo) GetAll(options ...model.QueryOptions) (model.Artists, error) {
|
||||
argsSlice := make([]interface{}, len(options))
|
||||
for i, v := range options {
|
||||
argsSlice[i] = v
|
||||
}
|
||||
args := m.Called(argsSlice...)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(model.Artists), args.Error(1)
|
||||
}
|
||||
|
||||
// SetError is a helper to set up a generic error for GetAll.
|
||||
func (m *mockArtistRepo) SetError(hasError bool) {
|
||||
if hasError {
|
||||
m.On("GetAll", mock.Anything).Return(nil, errors.New("mock repo error"))
|
||||
}
|
||||
}
|
||||
|
||||
// FindByName is a helper to set up a GetAll expectation for finding by name.
|
||||
func (m *mockArtistRepo) FindByName(name string, artist model.Artist) {
|
||||
m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Filters != nil
|
||||
})).Return(model.Artists{artist}, nil).Once()
|
||||
}
|
||||
|
||||
// mockMediaFileRepo mocks model.MediaFileRepository
|
||||
type mockMediaFileRepo struct {
|
||||
mock.Mock
|
||||
model.MediaFileRepository
|
||||
}
|
||||
|
||||
func newMockMediaFileRepo() *mockMediaFileRepo {
|
||||
return &mockMediaFileRepo{}
|
||||
}
|
||||
|
||||
// SetData sets up basic Get expectations.
|
||||
func (m *mockMediaFileRepo) SetData(mediaFiles model.MediaFiles) {
|
||||
for _, mf := range mediaFiles {
|
||||
mfCopy := mf
|
||||
m.On("Get", mfCopy.ID).Return(&mfCopy, nil)
|
||||
}
|
||||
}
|
||||
|
||||
// Get implements model.MediaFileRepository.
|
||||
func (m *mockMediaFileRepo) Get(id string) (*model.MediaFile, error) {
|
||||
args := m.Called(id)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*model.MediaFile), args.Error(1)
|
||||
}
|
||||
|
||||
// GetAll implements model.MediaFileRepository.
|
||||
func (m *mockMediaFileRepo) GetAll(options ...model.QueryOptions) (model.MediaFiles, error) {
|
||||
argsSlice := make([]interface{}, len(options))
|
||||
for i, v := range options {
|
||||
argsSlice[i] = v
|
||||
}
|
||||
args := m.Called(argsSlice...)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(model.MediaFiles), args.Error(1)
|
||||
}
|
||||
|
||||
// SetError is a helper to set up a generic error for GetAll.
|
||||
func (m *mockMediaFileRepo) SetError(hasError bool) {
|
||||
if hasError {
|
||||
m.On("GetAll", mock.Anything).Return(nil, errors.New("mock repo error"))
|
||||
}
|
||||
}
|
||||
|
||||
// FindByMBID is a helper to set up a GetAll expectation for finding by MBID.
|
||||
func (m *mockMediaFileRepo) FindByMBID(mbid string, mediaFile model.MediaFile) {
|
||||
m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Filters != nil
|
||||
})).Return(model.MediaFiles{mediaFile}, nil).Once()
|
||||
}
|
||||
|
||||
// FindByArtistAndTitle is a helper to set up a GetAll expectation for finding by artist/title.
|
||||
func (m *mockMediaFileRepo) FindByArtistAndTitle(artistID string, title string, mediaFile model.MediaFile) {
|
||||
m.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
|
||||
return opt.Filters != nil
|
||||
})).Return(model.MediaFiles{mediaFile}, nil).Once()
|
||||
}
|
||||
|
||||
// mockAlbumRepo mocks model.AlbumRepository
|
||||
type mockAlbumRepo struct {
|
||||
mock.Mock
|
||||
model.AlbumRepository
|
||||
}
|
||||
|
||||
func newMockAlbumRepo() *mockAlbumRepo {
|
||||
return &mockAlbumRepo{}
|
||||
}
|
||||
|
||||
// Get implements model.AlbumRepository.
|
||||
func (m *mockAlbumRepo) Get(id string) (*model.Album, error) {
|
||||
args := m.Called(id)
|
||||
if args.Get(0) == nil {
|
||||
return nil, args.Error(1)
|
||||
}
|
||||
return args.Get(0).(*model.Album), args.Error(1)
|
||||
}
|
// GetAll implements model.AlbumRepository.
func (m *mockAlbumRepo) GetAll(options ...model.QueryOptions) (model.Albums, error) {
	argsSlice := make([]interface{}, len(options))
	for i, v := range options {
		argsSlice[i] = v
	}
	args := m.Called(argsSlice...)
	if args.Get(0) == nil {
		return nil, args.Error(1)
	}
	return args.Get(0).(model.Albums), args.Error(1)
}

// mockSimilarArtistAgent mocks agents implementing ArtistTopSongsRetriever and ArtistSimilarRetriever
type mockSimilarArtistAgent struct {
	mock.Mock
	agents.Interface // Embed to satisfy methods not explicitly mocked
}

func (m *mockSimilarArtistAgent) AgentName() string {
	return "mockSimilar"
}

func (m *mockSimilarArtistAgent) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
	args := m.Called(ctx, id, artistName, mbid, count)
	if args.Get(0) != nil {
		return args.Get(0).([]agents.Song), args.Error(1)
	}
	return nil, args.Error(1)
}

func (m *mockSimilarArtistAgent) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
	args := m.Called(ctx, id, name, mbid, limit)
	if args.Get(0) != nil {
		return args.Get(0).([]agents.Artist), args.Error(1)
	}
	return nil, args.Error(1)
}

// mockAgents mocks the main Agents interface used by Provider
type mockAgents struct {
	mock.Mock // Embed testify mock
	topSongsAgent  agents.ArtistTopSongsRetriever
	similarAgent   agents.ArtistSimilarRetriever
	imageAgent     agents.ArtistImageRetriever
	albumInfoAgent agents.AlbumInfoRetriever
	bioAgent       agents.ArtistBiographyRetriever
	mbidAgent      agents.ArtistMBIDRetriever
	urlAgent       agents.ArtistURLRetriever
	agents.Interface
}

func (m *mockAgents) AgentName() string {
	return "mockCombined"
}

func (m *mockAgents) GetSimilarArtists(ctx context.Context, id, name, mbid string, limit int) ([]agents.Artist, error) {
	if m.similarAgent != nil {
		return m.similarAgent.GetSimilarArtists(ctx, id, name, mbid, limit)
	}
	args := m.Called(ctx, id, name, mbid, limit)
	if args.Get(0) != nil {
		return args.Get(0).([]agents.Artist), args.Error(1)
	}
	return nil, args.Error(1)
}

func (m *mockAgents) GetArtistTopSongs(ctx context.Context, id, artistName, mbid string, count int) ([]agents.Song, error) {
	if m.topSongsAgent != nil {
		return m.topSongsAgent.GetArtistTopSongs(ctx, id, artistName, mbid, count)
	}
	args := m.Called(ctx, id, artistName, mbid, count)
	if args.Get(0) != nil {
		return args.Get(0).([]agents.Song), args.Error(1)
	}
	return nil, args.Error(1)
}

func (m *mockAgents) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*agents.AlbumInfo, error) {
	if m.albumInfoAgent != nil {
		return m.albumInfoAgent.GetAlbumInfo(ctx, name, artist, mbid)
	}
	args := m.Called(ctx, name, artist, mbid)
	if args.Get(0) != nil {
		return args.Get(0).(*agents.AlbumInfo), args.Error(1)
	}
	return nil, args.Error(1)
}

func (m *mockAgents) GetArtistMBID(ctx context.Context, id string, name string) (string, error) {
	if m.mbidAgent != nil {
		return m.mbidAgent.GetArtistMBID(ctx, id, name)
	}
	args := m.Called(ctx, id, name)
	return args.String(0), args.Error(1)
}

func (m *mockAgents) GetArtistURL(ctx context.Context, id, name, mbid string) (string, error) {
	if m.urlAgent != nil {
		return m.urlAgent.GetArtistURL(ctx, id, name, mbid)
	}
	args := m.Called(ctx, id, name, mbid)
	return args.String(0), args.Error(1)
}

func (m *mockAgents) GetArtistBiography(ctx context.Context, id, name, mbid string) (string, error) {
	if m.bioAgent != nil {
		return m.bioAgent.GetArtistBiography(ctx, id, name, mbid)
	}
	args := m.Called(ctx, id, name, mbid)
	return args.String(0), args.Error(1)
}

func (m *mockAgents) GetArtistImages(ctx context.Context, id, name, mbid string) ([]agents.ExternalImage, error) {
	if m.imageAgent != nil {
		return m.imageAgent.GetArtistImages(ctx, id, name, mbid)
	}
	args := m.Called(ctx, id, name, mbid)
	if args.Get(0) != nil {
		return args.Get(0).([]agents.ExternalImage), args.Error(1)
	}
	return nil, args.Error(1)
}
17
core/external/extdata_suite_test.go
vendored
17
core/external/extdata_suite_test.go
vendored
@@ -1,17 +0,0 @@
package external

import (
	"testing"

	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestExternal(t *testing.T) {
	tests.Init(t, false)
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "External Suite")
}
303
core/external/provider_albumimage_test.go
vendored
303
core/external/provider_albumimage_test.go
vendored
@@ -1,303 +0,0 @@
package external_test

import (
	"context"
	"errors"
	"net/url"

	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/conf/configtest"
	"github.com/navidrome/navidrome/core/agents"
	. "github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
	"github.com/stretchr/testify/mock"
)

var _ = Describe("Provider - AlbumImage", func() {
	var ds *tests.MockDataStore
	var provider Provider
	var mockArtistRepo *mockArtistRepo
	var mockAlbumRepo *mockAlbumRepo
	var mockMediaFileRepo *mockMediaFileRepo
	var mockAlbumAgent *mockAlbumInfoAgent
	var agentsCombined *mockAgents
	var ctx context.Context

	BeforeEach(func() {
		ctx = GinkgoT().Context()
		DeferCleanup(configtest.SetupConfig())
		conf.Server.Agents = "mockAlbum" // Configure mock agent

		mockArtistRepo = newMockArtistRepo()
		mockAlbumRepo = newMockAlbumRepo()
		mockMediaFileRepo = newMockMediaFileRepo()

		ds = &tests.MockDataStore{
			MockedArtist:    mockArtistRepo,
			MockedAlbum:     mockAlbumRepo,
			MockedMediaFile: mockMediaFileRepo,
		}

		mockAlbumAgent = newMockAlbumInfoAgent()

		agentsCombined = &mockAgents{
			albumInfoAgent: mockAlbumAgent,
		}

		provider = NewProvider(ds, agentsCombined)

		// Default mocks
		// Mocks for GetEntityByID sequence (initial failed lookups)
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
		mockArtistRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
		mockAlbumRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()

		// Default mock for non-existent entities - Use Maybe() for flexibility
		mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
		mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
		mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
	})

	It("returns the largest image URL when successful", func() {
		// Arrange
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
		mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
		// Explicitly mock agent call for this test
		mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
			Return(&agents.AlbumInfo{
				Images: []agents.ExternalImage{
					{URL: "http://example.com/large.jpg", Size: 1000},
					{URL: "http://example.com/medium.jpg", Size: 500},
					{URL: "http://example.com/small.jpg", Size: 200},
				},
			}, nil).Once()

		expectedURL, _ := url.Parse("http://example.com/large.jpg")
		imgURL, err := provider.AlbumImage(ctx, "album-1")

		Expect(err).ToNot(HaveOccurred())
		Expect(imgURL).To(Equal(expectedURL))
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1") // From GetEntityByID
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockArtistRepo.AssertNotCalled(GinkgoT(), "Get", "artist-1") // Artist lookup no longer happens in getAlbum
		mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "") // Expect empty artist name
	})

	It("returns ErrNotFound if the album is not found in the DB", func() {
		// Arrange: Explicitly expect the full GetEntityByID sequence for "not-found"
		mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
		mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
		mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()

		imgURL, err := provider.AlbumImage(ctx, "not-found")

		Expect(err).To(MatchError("data not found"))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "not-found")
		mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "not-found")
		mockAlbumAgent.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("returns the agent error if the agent fails", func() {
		// Arrange
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
		mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()

		agentErr := errors.New("agent failure")
		// Explicitly mock agent call for this test
		mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").Return(nil, agentErr).Once() // Expect empty artist

		imgURL, err := provider.AlbumImage(ctx, "album-1")

		Expect(err).To(MatchError("agent failure"))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockArtistRepo.AssertNotCalled(GinkgoT(), "Get", "artist-1")
		mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "") // Expect empty artist
	})

	It("returns ErrNotFound if the agent returns ErrNotFound", func() {
		// Arrange
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
		mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()

		// Explicitly mock agent call for this test
		mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").Return(nil, agents.ErrNotFound).Once() // Expect empty artist

		imgURL, err := provider.AlbumImage(ctx, "album-1")

		Expect(err).To(MatchError("data not found"))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "") // Expect empty artist
	})

	It("returns ErrNotFound if the agent returns no images", func() {
		// Arrange
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
		mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()

		// Explicitly mock agent call for this test
		mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
			Return(&agents.AlbumInfo{Images: []agents.ExternalImage{}}, nil).Once() // Expect empty artist

		imgURL, err := provider.AlbumImage(ctx, "album-1")

		Expect(err).To(MatchError("data not found"))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "") // Expect empty artist
	})

	It("returns context error if context is canceled", func() {
		// Arrange
		cctx, cancelCtx := context.WithCancel(ctx)
		// Mock the necessary DB calls *before* canceling the context
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
		mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
		// Expect the agent call even if context is cancelled, returning the context error
		mockAlbumAgent.On("GetAlbumInfo", cctx, "Album One", "", "").Return(nil, context.Canceled).Once()
		// Cancel the context *before* calling the function under test
		cancelCtx()

		imgURL, err := provider.AlbumImage(cctx, "album-1")

		Expect(err).To(MatchError("context canceled"))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		// Agent should now be called, verify this expectation
		mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", cctx, "Album One", "", "")
	})

	It("derives album ID from MediaFile ID", func() {
		// Arrange: Mock full GetEntityByID for "mf-1" and recursive "album-1"
		mockArtistRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
		mockAlbumRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
		mockMediaFileRepo.On("Get", "mf-1").Return(&model.MediaFile{ID: "mf-1", Title: "Track One", ArtistID: "artist-1", AlbumID: "album-1"}, nil).Once()
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
		mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()

		// Explicitly mock agent call for this test
		mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
			Return(&agents.AlbumInfo{
				Images: []agents.ExternalImage{
					{URL: "http://example.com/large.jpg", Size: 1000},
					{URL: "http://example.com/medium.jpg", Size: 500},
					{URL: "http://example.com/small.jpg", Size: 200},
				},
			}, nil).Once()

		expectedURL, _ := url.Parse("http://example.com/large.jpg")
		imgURL, err := provider.AlbumImage(ctx, "mf-1")

		Expect(err).ToNot(HaveOccurred())
		Expect(imgURL).To(Equal(expectedURL))
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
		mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockArtistRepo.AssertNotCalled(GinkgoT(), "Get", "artist-1")
		mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "")
	})

	It("handles different image orders from agent", func() {
		// Arrange
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
		mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
		// Explicitly mock agent call for this test
		mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
			Return(&agents.AlbumInfo{
				Images: []agents.ExternalImage{
					{URL: "http://example.com/small.jpg", Size: 200},
					{URL: "http://example.com/large.jpg", Size: 1000},
					{URL: "http://example.com/medium.jpg", Size: 500},
				},
			}, nil).Once()

		expectedURL, _ := url.Parse("http://example.com/large.jpg")
		imgURL, err := provider.AlbumImage(ctx, "album-1")

		Expect(err).ToNot(HaveOccurred())
		Expect(imgURL).To(Equal(expectedURL)) // Should still pick the largest
		mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "")
	})

	It("handles agent returning only one image", func() {
		// Arrange
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once() // Expect GetEntityByID sequence
		mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Once()
		// Explicitly mock agent call for this test
		mockAlbumAgent.On("GetAlbumInfo", ctx, "Album One", "", "").
			Return(&agents.AlbumInfo{
				Images: []agents.ExternalImage{
					{URL: "http://example.com/single.jpg", Size: 700},
				},
			}, nil).Once()

		expectedURL, _ := url.Parse("http://example.com/single.jpg")
		imgURL, err := provider.AlbumImage(ctx, "album-1")

		Expect(err).ToNot(HaveOccurred())
		Expect(imgURL).To(Equal(expectedURL))
		mockAlbumAgent.AssertCalled(GinkgoT(), "GetAlbumInfo", ctx, "Album One", "", "")
	})

	It("returns ErrNotFound if deriving album ID fails", func() {
		// Arrange: Mock full GetEntityByID for "mf-no-album" and recursive "not-found"
		mockArtistRepo.On("Get", "mf-no-album").Return(nil, model.ErrNotFound).Once()
		mockAlbumRepo.On("Get", "mf-no-album").Return(nil, model.ErrNotFound).Once()
		mockMediaFileRepo.On("Get", "mf-no-album").Return(&model.MediaFile{ID: "mf-no-album", Title: "Track No Album", ArtistID: "artist-1", AlbumID: "not-found"}, nil).Once()
		mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
		mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()
		mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Once()

		imgURL, err := provider.AlbumImage(ctx, "mf-no-album")

		Expect(err).To(MatchError("data not found"))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-no-album")
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-no-album")
		mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-no-album")
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "not-found")
		mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "not-found")
		mockAlbumAgent.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})
})

// mockAlbumInfoAgent implementation
type mockAlbumInfoAgent struct {
	mock.Mock
	agents.AlbumInfoRetriever // Embed interface
}

func newMockAlbumInfoAgent() *mockAlbumInfoAgent {
	m := new(mockAlbumInfoAgent)
	m.On("AgentName").Return("mockAlbum").Maybe()
	return m
}

func (m *mockAlbumInfoAgent) AgentName() string {
	args := m.Called()
	return args.String(0)
}

func (m *mockAlbumInfoAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*agents.AlbumInfo, error) {
	args := m.Called(ctx, name, artist, mbid)
	if args.Get(0) == nil {
		return nil, args.Error(1)
	}
	return args.Get(0).(*agents.AlbumInfo), args.Error(1)
}

// Ensure mockAgent implements the interface
var _ agents.AlbumInfoRetriever = (*mockAlbumInfoAgent)(nil)
301
core/external/provider_artistimage_test.go
vendored
301
core/external/provider_artistimage_test.go
vendored
@@ -1,301 +0,0 @@
package external_test

import (
	"context"
	"errors"
	"net/url"

	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/conf/configtest"
	"github.com/navidrome/navidrome/core/agents"
	. "github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
	"github.com/stretchr/testify/mock"
)

var _ = Describe("Provider - ArtistImage", func() {
	var ds *tests.MockDataStore
	var provider Provider
	var mockArtistRepo *mockArtistRepo
	var mockAlbumRepo *mockAlbumRepo
	var mockMediaFileRepo *mockMediaFileRepo
	var mockImageAgent *mockArtistImageAgent
	var agentsCombined *mockAgents
	var ctx context.Context

	BeforeEach(func() {
		DeferCleanup(configtest.SetupConfig())
		conf.Server.Agents = "mockImage" // Configure only the mock agent
		ctx = GinkgoT().Context()

		mockArtistRepo = newMockArtistRepo()
		mockAlbumRepo = newMockAlbumRepo()
		mockMediaFileRepo = newMockMediaFileRepo()

		ds = &tests.MockDataStore{
			MockedArtist:    mockArtistRepo,
			MockedAlbum:     mockAlbumRepo,
			MockedMediaFile: mockMediaFileRepo,
		}

		mockImageAgent = newMockArtistImageAgent()

		// Use the mockAgents from helper, setting the specific agent
		agentsCombined = &mockAgents{
			imageAgent: mockImageAgent,
		}

		provider = NewProvider(ds, agentsCombined)

		// Default mocks for successful Get calls
		mockArtistRepo.On("Get", "artist-1").Return(&model.Artist{ID: "artist-1", Name: "Artist One"}, nil).Maybe()
		mockAlbumRepo.On("Get", "album-1").Return(&model.Album{ID: "album-1", Name: "Album One", AlbumArtistID: "artist-1"}, nil).Maybe()
		mockMediaFileRepo.On("Get", "mf-1").Return(&model.MediaFile{ID: "mf-1", Title: "Track One", ArtistID: "artist-1"}, nil).Maybe()
		// Default mock for non-existent entities
		mockArtistRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
		mockAlbumRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
		mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()

		// Default successful image agent response
		mockImageAgent.On("GetArtistImages", mock.Anything, "artist-1", "Artist One", "").
			Return([]agents.ExternalImage{
				{URL: "http://example.com/large.jpg", Size: 1000},
				{URL: "http://example.com/medium.jpg", Size: 500},
				{URL: "http://example.com/small.jpg", Size: 200},
			}, nil).Maybe()
	})

	AfterEach(func() {
		mockArtistRepo.AssertExpectations(GinkgoT())
		mockAlbumRepo.AssertExpectations(GinkgoT())
		mockMediaFileRepo.AssertExpectations(GinkgoT())
		mockImageAgent.AssertExpectations(GinkgoT())
	})

	It("returns the largest image URL when successful", func() {
		// Arrange
		expectedURL, _ := url.Parse("http://example.com/large.jpg")

		// Act
		imgURL, err := provider.ArtistImage(ctx, "artist-1")

		// Assert
		Expect(err).ToNot(HaveOccurred())
		Expect(imgURL).To(Equal(expectedURL))
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
		mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
	})

	It("returns ErrNotFound if the artist is not found in the DB", func() {
		// Arrange

		// Act
		imgURL, err := provider.ArtistImage(ctx, "not-found")

		// Assert
		Expect(err).To(MatchError(model.ErrNotFound))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
		mockImageAgent.AssertNotCalled(GinkgoT(), "GetArtistImages", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("returns the agent error if the agent fails", func() {
		// Arrange
		agentErr := errors.New("agent failure")
		mockImageAgent.Mock = mock.Mock{} // Reset default expectation
		mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").Return(nil, agentErr).Once()

		// Act
		imgURL, err := provider.ArtistImage(ctx, "artist-1")

		// Assert
		Expect(err).To(MatchError(model.ErrNotFound)) // Corrected Expectation: The provider maps agent errors (other than canceled) to ErrNotFound if no image was found/populated
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
		mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
	})

	It("returns ErrNotFound if the agent returns ErrNotFound", func() {
		// Arrange
		mockImageAgent.Mock = mock.Mock{} // Reset default expectation
		mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").Return(nil, agents.ErrNotFound).Once()

		// Act
		imgURL, err := provider.ArtistImage(ctx, "artist-1")

		// Assert
		Expect(err).To(MatchError(model.ErrNotFound))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
		mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
	})

	It("returns ErrNotFound if the agent returns no images", func() {
		// Arrange
		mockImageAgent.Mock = mock.Mock{} // Reset default expectation
		mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").Return([]agents.ExternalImage{}, nil).Once()

		// Act
		imgURL, err := provider.ArtistImage(ctx, "artist-1")

		// Assert
		Expect(err).To(MatchError(model.ErrNotFound)) // Implementation maps empty result to ErrNotFound
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
		mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
	})

	It("returns context error if context is canceled before agent call", func() {
		// Arrange
		cctx, cancelCtx := context.WithCancel(context.Background())
		mockArtistRepo.Mock = mock.Mock{} // Reset default expectation for artist repo as well
		mockArtistRepo.On("Get", "artist-1").Return(&model.Artist{ID: "artist-1", Name: "Artist One"}, nil).Run(func(args mock.Arguments) {
			cancelCtx() // Cancel context *during* the DB call simulation
		}).Once()

		// Act
		imgURL, err := provider.ArtistImage(cctx, "artist-1")

		// Assert
		Expect(err).To(MatchError(context.Canceled))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
	})

	It("derives artist ID from MediaFile ID", func() {
		// Arrange: Add mocks for the initial GetEntityByID lookups
		mockArtistRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
		mockAlbumRepo.On("Get", "mf-1").Return(nil, model.ErrNotFound).Once()
		// Default mocks for MediaFileRepo.Get("mf-1") and ArtistRepo.Get("artist-1") handle the rest
		expectedURL, _ := url.Parse("http://example.com/large.jpg")

		// Act
		imgURL, err := provider.ArtistImage(ctx, "mf-1")

		// Assert
		Expect(err).ToNot(HaveOccurred())
		Expect(imgURL).To(Equal(expectedURL))
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-1") // GetEntityByID sequence
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-1")  // GetEntityByID sequence
		mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-1")
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1") // Should be called after getting MF
		mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
	})

	It("derives artist ID from Album ID", func() {
		// Arrange: Add mock for the initial GetEntityByID lookup
		mockArtistRepo.On("Get", "album-1").Return(nil, model.ErrNotFound).Once()
		// Default mocks for AlbumRepo.Get("album-1") and ArtistRepo.Get("artist-1") handle the rest
		expectedURL, _ := url.Parse("http://example.com/large.jpg")

		// Act
		imgURL, err := provider.ArtistImage(ctx, "album-1")

		// Assert
		Expect(err).ToNot(HaveOccurred())
		Expect(imgURL).To(Equal(expectedURL))
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "album-1") // GetEntityByID sequence
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "album-1")
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1") // Should be called after getting Album
		mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
	})

	It("returns ErrNotFound if derived artist is not found", func() {
		// Arrange
		// Add mocks for the initial GetEntityByID lookups
		mockArtistRepo.On("Get", "mf-bad-artist").Return(nil, model.ErrNotFound).Once()
		mockAlbumRepo.On("Get", "mf-bad-artist").Return(nil, model.ErrNotFound).Once()
		mockMediaFileRepo.On("Get", "mf-bad-artist").Return(&model.MediaFile{ID: "mf-bad-artist", ArtistID: "not-found"}, nil).Once()
		// Add expectation for the recursive GetEntityByID call for the MediaFileRepo
		mockMediaFileRepo.On("Get", "not-found").Return(nil, model.ErrNotFound).Maybe()
		// The default mocks for ArtistRepo/AlbumRepo handle the final "not-found" lookups

		// Act
		imgURL, err := provider.ArtistImage(ctx, "mf-bad-artist")

		// Assert
		Expect(err).To(MatchError(model.ErrNotFound))
		Expect(imgURL).To(BeNil())
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "mf-bad-artist") // GetEntityByID sequence
		mockAlbumRepo.AssertCalled(GinkgoT(), "Get", "mf-bad-artist")  // GetEntityByID sequence
		mockMediaFileRepo.AssertCalled(GinkgoT(), "Get", "mf-bad-artist")
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "not-found")
		mockImageAgent.AssertNotCalled(GinkgoT(), "GetArtistImages", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("handles different image orders from agent", func() {
		// Arrange
		mockImageAgent.Mock = mock.Mock{} // Reset default expectation
		mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").
			Return([]agents.ExternalImage{
				{URL: "http://example.com/small.jpg", Size: 200},
				{URL: "http://example.com/large.jpg", Size: 1000},
				{URL: "http://example.com/medium.jpg", Size: 500},
			}, nil).Once()
		expectedURL, _ := url.Parse("http://example.com/large.jpg")

		// Act
		imgURL, err := provider.ArtistImage(ctx, "artist-1")

		// Assert
		Expect(err).ToNot(HaveOccurred())
		Expect(imgURL).To(Equal(expectedURL)) // Still picks the largest
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
		mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
	})

	It("handles agent returning only one image", func() {
		// Arrange
		mockImageAgent.Mock = mock.Mock{} // Reset default expectation
		mockImageAgent.On("GetArtistImages", ctx, "artist-1", "Artist One", "").
			Return([]agents.ExternalImage{
				{URL: "http://example.com/medium.jpg", Size: 500},
			}, nil).Once()
		expectedURL, _ := url.Parse("http://example.com/medium.jpg")

		// Act
		imgURL, err := provider.ArtistImage(ctx, "artist-1")

		// Assert
		Expect(err).ToNot(HaveOccurred())
		Expect(imgURL).To(Equal(expectedURL))
		mockArtistRepo.AssertCalled(GinkgoT(), "Get", "artist-1")
		mockImageAgent.AssertCalled(GinkgoT(), "GetArtistImages", ctx, "artist-1", "Artist One", "")
	})
})

// mockArtistImageAgent implementation using testify/mock
// This remains local as it's specific to testing the ArtistImage functionality
type mockArtistImageAgent struct {
	mock.Mock
	agents.ArtistImageRetriever // Embed interface
}

// Constructor for the mock agent
func newMockArtistImageAgent() *mockArtistImageAgent {
	mock := new(mockArtistImageAgent)
	// Set default AgentName if needed, although usually called via mockAgents
	mock.On("AgentName").Return("mockImage").Maybe()
	return mock
}

func (m *mockArtistImageAgent) AgentName() string {
	args := m.Called()
	return args.String(0)
}

func (m *mockArtistImageAgent) GetArtistImages(ctx context.Context, id, artistName, mbid string) ([]agents.ExternalImage, error) {
	args := m.Called(ctx, id, artistName, mbid)
	// Need careful type assertion for potentially nil slice
	var res []agents.ExternalImage
	if args.Get(0) != nil {
		res = args.Get(0).([]agents.ExternalImage)
	}
	return res, args.Error(1)
}

// Ensure mockAgent implements the interface
var _ agents.ArtistImageRetriever = (*mockArtistImageAgent)(nil)
198
core/external/provider_similarsongs_test.go
vendored
198
core/external/provider_similarsongs_test.go
vendored
@@ -1,198 +0,0 @@
package external_test

import (
	"context"
	"errors"

	"github.com/navidrome/navidrome/core/agents"
	. "github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
	"github.com/stretchr/testify/mock"
)

var _ = Describe("Provider - SimilarSongs", func() {
	var ds model.DataStore
	var provider Provider
	var mockAgent *mockSimilarArtistAgent
	var mockTopAgent agents.ArtistTopSongsRetriever
	var mockSimilarAgent agents.ArtistSimilarRetriever
	var agentsCombined Agents
	var artistRepo *mockArtistRepo
	var mediaFileRepo *mockMediaFileRepo
	var ctx context.Context

	BeforeEach(func() {
		ctx = GinkgoT().Context()

		artistRepo = newMockArtistRepo()
		mediaFileRepo = newMockMediaFileRepo()

		ds = &tests.MockDataStore{
			MockedArtist:    artistRepo,
			MockedMediaFile: mediaFileRepo,
		}

		mockAgent = &mockSimilarArtistAgent{}
		mockTopAgent = mockAgent
		mockSimilarAgent = mockAgent

		agentsCombined = &mockAgents{
			topSongsAgent: mockTopAgent,
			similarAgent:  mockSimilarAgent,
		}

		provider = NewProvider(ds, agentsCombined)
	})

	It("returns similar songs from main artist and similar artists", func() {
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
		similarArtist := model.Artist{ID: "artist-3", Name: "Similar Artist"}
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1"}
		song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1"}
		song3 := model.MediaFile{ID: "song-3", Title: "Song Three", ArtistID: "artist-3"}

		artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
		artistRepo.On("Get", "artist-3").Return(&similarArtist, nil).Maybe()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{artist1}, nil).Once()

		similarAgentsResp := []agents.Artist{
			{Name: "Similar Artist", MBID: "similar-mbid"},
		}
		mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
			Return(similarAgentsResp, nil).Once()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 0 && opt.Filters != nil
		})).Return(model.Artists{similarArtist}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
			Return([]agents.Song{
				{Name: "Song One", MBID: "mbid-1"},
				{Name: "Song Two", MBID: "mbid-2"},
			}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-3", "Similar Artist", "", mock.Anything).
			Return([]agents.Song{
				{Name: "Song Three", MBID: "mbid-3"},
			}, nil).Once()

		mediaFileRepo.FindByMBID("mbid-1", song1)
		mediaFileRepo.FindByMBID("mbid-2", song2)
		mediaFileRepo.FindByMBID("mbid-3", song3)

		songs, err := provider.SimilarSongs(ctx, "artist-1", 3)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(3))
		for _, song := range songs {
			Expect(song.ID).To(BeElementOf("song-1", "song-2", "song-3"))
		}
	})

	It("returns ErrNotFound when artist is not found", func() {
		artistRepo.On("Get", "artist-unknown-artist").Return(nil, model.ErrNotFound)
		mediaFileRepo.On("Get", "artist-unknown-artist").Return(nil, model.ErrNotFound)

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{}, nil).Maybe()

		songs, err := provider.SimilarSongs(ctx, "artist-unknown-artist", 5)

		Expect(err).To(Equal(model.ErrNotFound))
		Expect(songs).To(BeNil())
	})

	It("returns songs from main artist when GetSimilarArtists returns error", func() {
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1"}

		artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{artist1}, nil).Maybe()

		mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
			Return(nil, errors.New("error getting similar artists")).Once()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 0 && opt.Filters != nil
		})).Return(model.Artists{}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
			Return([]agents.Song{
				{Name: "Song One", MBID: "mbid-1"},
			}, nil).Once()

		mediaFileRepo.FindByMBID("mbid-1", song1)

		songs, err := provider.SimilarSongs(ctx, "artist-1", 5)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(1))
		Expect(songs[0].ID).To(Equal("song-1"))
	})

	It("returns empty list when GetArtistTopSongs returns error", func() {
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}

		artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{artist1}, nil).Maybe()

		mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
			Return([]agents.Artist{}, nil).Once()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 0 && opt.Filters != nil
		})).Return(model.Artists{}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
			Return(nil, errors.New("error getting top songs")).Once()

		songs, err := provider.SimilarSongs(ctx, "artist-1", 5)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(BeEmpty())
	})

	It("respects count parameter", func() {
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One"}
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1"}
		song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1"}

		artistRepo.On("Get", "artist-1").Return(&artist1, nil).Maybe()
		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 1 && opt.Filters != nil
		})).Return(model.Artists{artist1}, nil).Maybe()

		mockAgent.On("GetSimilarArtists", mock.Anything, "artist-1", "Artist One", "", 15).
			Return([]agents.Artist{}, nil).Once()

		artistRepo.On("GetAll", mock.MatchedBy(func(opt model.QueryOptions) bool {
			return opt.Max == 0 && opt.Filters != nil
		})).Return(model.Artists{}, nil).Once()

		mockAgent.On("GetArtistTopSongs", mock.Anything, "artist-1", "Artist One", "", mock.Anything).
			Return([]agents.Song{
				{Name: "Song One", MBID: "mbid-1"},
				{Name: "Song Two", MBID: "mbid-2"},
			}, nil).Once()

		mediaFileRepo.FindByMBID("mbid-1", song1)
		mediaFileRepo.FindByMBID("mbid-2", song2)

		songs, err := provider.SimilarSongs(ctx, "artist-1", 1)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(1))
		Expect(songs[0].ID).To(BeElementOf("song-1", "song-2"))
	})
})
193
core/external/provider_topsongs_test.go
vendored
193
core/external/provider_topsongs_test.go
vendored
@@ -1,193 +0,0 @@
package external_test

import (
	"context"
	"errors"

	"github.com/navidrome/navidrome/core/agents"
	_ "github.com/navidrome/navidrome/core/agents/lastfm"
	_ "github.com/navidrome/navidrome/core/agents/listenbrainz"
	_ "github.com/navidrome/navidrome/core/agents/spotify"
	. "github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
	"github.com/stretchr/testify/mock"
)

var _ = Describe("Provider - TopSongs", func() {
	var (
		p             Provider
		artistRepo    *mockArtistRepo    // From provider_helper_test.go
		mediaFileRepo *mockMediaFileRepo // From provider_helper_test.go
		ag            *mockAgents        // Consolidated mock from export_test.go
		ctx           context.Context
	)

	BeforeEach(func() {
		ctx = GinkgoT().Context()

		artistRepo = newMockArtistRepo()         // Use helper mock
		mediaFileRepo = newMockMediaFileRepo()   // Use helper mock

		// Configure tests.MockDataStore to use the testify/mock-based repos
		ds := &tests.MockDataStore{
			MockedArtist:    artistRepo,
			MockedMediaFile: mediaFileRepo,
		}

		ag = new(mockAgents)

		p = NewProvider(ds, ag)
	})

	BeforeEach(func() {
		// Setup expectations in individual tests
	})

	It("returns top songs for a known artist", func() {
		// Mock finding the artist
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
		artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()

		// Mock agent response
		agentSongs := []agents.Song{
			{Name: "Song One", MBID: "mbid-song-1"},
			{Name: "Song Two", MBID: "mbid-song-2"},
		}
		ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 2).Return(agentSongs, nil).Once()

		// Mock finding matching tracks
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
		song2 := model.MediaFile{ID: "song-2", Title: "Song Two", ArtistID: "artist-1", MbzRecordingID: "mbid-song-2"}
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song2}, nil).Once()

		songs, err := p.TopSongs(ctx, "Artist One", 2)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(2))
		Expect(songs[0].ID).To(Equal("song-1"))
		Expect(songs[1].ID).To(Equal("song-2"))
		artistRepo.AssertExpectations(GinkgoT())
		ag.AssertExpectations(GinkgoT())
		mediaFileRepo.AssertExpectations(GinkgoT())
	})

	It("returns nil for an unknown artist", func() {
		// Mock artist not found
		artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{}, nil).Once()

		songs, err := p.TopSongs(ctx, "Unknown Artist", 5)

		Expect(err).ToNot(HaveOccurred()) // TopSongs returns nil error if artist not found
		Expect(songs).To(BeNil())
		artistRepo.AssertExpectations(GinkgoT())
		ag.AssertNotCalled(GinkgoT(), "GetArtistTopSongs", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("returns error when the agent returns an error", func() {
		// Mock finding the artist
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
		artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()

		// Mock agent error
		agentErr := errors.New("agent error")
		ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 5).Return(nil, agentErr).Once()

		songs, err := p.TopSongs(ctx, "Artist One", 5)

		Expect(err).To(MatchError(agentErr))
		Expect(songs).To(BeNil())
		artistRepo.AssertExpectations(GinkgoT())
		ag.AssertExpectations(GinkgoT())
	})

	It("returns ErrNotFound when the agent returns ErrNotFound", func() {
		// Mock finding the artist
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
		artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()

		// Mock agent ErrNotFound
		ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 5).Return(nil, agents.ErrNotFound).Once()

		songs, err := p.TopSongs(ctx, "Artist One", 5)

		Expect(err).To(MatchError(model.ErrNotFound))
		Expect(songs).To(BeNil())
		artistRepo.AssertExpectations(GinkgoT())
		ag.AssertExpectations(GinkgoT())
	})

	It("returns fewer songs if count is less than available top songs", func() {
		// Mock finding the artist
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
		artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()

		// Mock agent response (only need 1 for the test)
		agentSongs := []agents.Song{{Name: "Song One", MBID: "mbid-song-1"}}
		ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 1).Return(agentSongs, nil).Once()

		// Mock finding matching track
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()

		songs, err := p.TopSongs(ctx, "Artist One", 1)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(1))
		Expect(songs[0].ID).To(Equal("song-1"))
		artistRepo.AssertExpectations(GinkgoT())
		ag.AssertExpectations(GinkgoT())
		mediaFileRepo.AssertExpectations(GinkgoT())
	})

	It("returns fewer songs if fewer matching tracks are found", func() {
		// Mock finding the artist
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
		artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()

		// Mock agent response
		agentSongs := []agents.Song{
			{Name: "Song One", MBID: "mbid-song-1"},
			{Name: "Song Two", MBID: "mbid-song-2"},
		}
		ag.On("GetArtistTopSongs", ctx, "artist-1", "Artist One", "mbid-artist-1", 2).Return(agentSongs, nil).Once()

		// Mock finding matching tracks (only find song 1)
		song1 := model.MediaFile{ID: "song-1", Title: "Song One", ArtistID: "artist-1", MbzRecordingID: "mbid-song-1"}
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{song1}, nil).Once()
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{}, nil).Once() // For mbid-song-2 (fails)
		mediaFileRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.MediaFiles{}, nil).Once() // For title fallback (fails)

		songs, err := p.TopSongs(ctx, "Artist One", 2)

		Expect(err).ToNot(HaveOccurred())
		Expect(songs).To(HaveLen(1))
		Expect(songs[0].ID).To(Equal("song-1"))
		artistRepo.AssertExpectations(GinkgoT())
		ag.AssertExpectations(GinkgoT())
		mediaFileRepo.AssertExpectations(GinkgoT())
	})

	It("returns error when context is canceled during agent call", func() {
		// Mock finding the artist
		artist1 := model.Artist{ID: "artist-1", Name: "Artist One", MbzArtistID: "mbid-artist-1"}
		artistRepo.On("GetAll", mock.AnythingOfType("model.QueryOptions")).Return(model.Artists{artist1}, nil).Once()

		// Setup context that will be canceled
		canceledCtx, cancel := context.WithCancel(ctx)

		// Mock agent call to return context canceled error
		ag.On("GetArtistTopSongs", canceledCtx, "artist-1", "Artist One", "mbid-artist-1", 5).Return(nil, context.Canceled).Once()

		cancel() // Cancel the context before calling
		songs, err := p.TopSongs(canceledCtx, "Artist One", 5)

		Expect(err).To(MatchError(context.Canceled))
		Expect(songs).To(BeNil())
		artistRepo.AssertExpectations(GinkgoT())
		ag.AssertExpectations(GinkgoT())
	})
})
170
core/external/provider_updatealbuminfo_test.go
vendored
170
core/external/provider_updatealbuminfo_test.go
vendored
@@ -1,170 +0,0 @@
package external_test

import (
	"context"
	"errors"
	"time"

	"github.com/navidrome/navidrome/conf"
	"github.com/navidrome/navidrome/core/agents"
	"github.com/navidrome/navidrome/core/external"
	"github.com/navidrome/navidrome/log"
	"github.com/navidrome/navidrome/model"
	"github.com/navidrome/navidrome/tests"
	"github.com/navidrome/navidrome/utils/gg"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
	"github.com/stretchr/testify/mock"
)

func init() {
	log.SetLevel(log.LevelDebug)
}

var _ = Describe("Provider - UpdateAlbumInfo", func() {
	var (
		ctx           context.Context
		p             external.Provider
		ds            *tests.MockDataStore
		ag            *mockAgents
		mockAlbumRepo *tests.MockAlbumRepo
	)

	BeforeEach(func() {
		ctx = GinkgoT().Context()
		ds = new(tests.MockDataStore)
		ag = new(mockAgents)
		p = external.NewProvider(ds, ag)
		mockAlbumRepo = ds.Album(ctx).(*tests.MockAlbumRepo)
		conf.Server.DevAlbumInfoTimeToLive = 1 * time.Hour
	})

	It("returns error when album is not found", func() {
		album, err := p.UpdateAlbumInfo(ctx, "al-not-found")

		Expect(err).To(MatchError(model.ErrNotFound))
		Expect(album).To(BeNil())
		ag.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("populates info when album exists but has no external info", func() {
		originalAlbum := &model.Album{
			ID:          "al-existing",
			Name:        "Test Album",
			AlbumArtist: "Test Artist",
			MbzAlbumID:  "mbid-album",
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		expectedInfo := &agents.AlbumInfo{
			URL:         "http://example.com/album",
			Description: "Album Description",
			Images: []agents.ExternalImage{
				{URL: "http://example.com/large.jpg", Size: 300},
				{URL: "http://example.com/medium.jpg", Size: 200},
				{URL: "http://example.com/small.jpg", Size: 100},
			},
		}
		ag.On("GetAlbumInfo", ctx, "Test Album", "Test Artist", "mbid-album").Return(expectedInfo, nil)

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-existing")

		Expect(err).NotTo(HaveOccurred())
		Expect(updatedAlbum).NotTo(BeNil())
		Expect(updatedAlbum.ID).To(Equal("al-existing"))
		Expect(updatedAlbum.ExternalUrl).To(Equal("http://example.com/album"))
		Expect(updatedAlbum.Description).To(Equal("Album Description"))
		Expect(updatedAlbum.LargeImageUrl).To(Equal("http://example.com/large.jpg"))
		Expect(updatedAlbum.MediumImageUrl).To(Equal("http://example.com/medium.jpg"))
		Expect(updatedAlbum.SmallImageUrl).To(Equal("http://example.com/small.jpg"))
		Expect(updatedAlbum.ExternalInfoUpdatedAt).NotTo(BeNil())
		Expect(*updatedAlbum.ExternalInfoUpdatedAt).To(BeTemporally("~", time.Now(), time.Second))

		ag.AssertExpectations(GinkgoT())
	})

	It("returns cached info when album exists and info is not expired", func() {
		now := time.Now()
		originalAlbum := &model.Album{
			ID:                    "al-cached",
			Name:                  "Cached Album",
			AlbumArtist:           "Cached Artist",
			ExternalUrl:           "http://cached.com/album",
			Description:           "Cached Desc",
			LargeImageUrl:         "http://cached.com/large.jpg",
			ExternalInfoUpdatedAt: gg.P(now.Add(-conf.Server.DevAlbumInfoTimeToLive / 2)),
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-cached")

		Expect(err).NotTo(HaveOccurred())
		Expect(updatedAlbum).NotTo(BeNil())
		Expect(*updatedAlbum).To(Equal(*originalAlbum))

		ag.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("returns cached info and triggers background refresh when info is expired", func() {
		now := time.Now()
		expiredTime := now.Add(-conf.Server.DevAlbumInfoTimeToLive * 2)
		originalAlbum := &model.Album{
			ID:                    "al-expired",
			Name:                  "Expired Album",
			AlbumArtist:           "Expired Artist",
			ExternalUrl:           "http://expired.com/album",
			Description:           "Expired Desc",
			LargeImageUrl:         "http://expired.com/large.jpg",
			ExternalInfoUpdatedAt: gg.P(expiredTime),
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-expired")

		Expect(err).NotTo(HaveOccurred())
		Expect(updatedAlbum).NotTo(BeNil())
		Expect(*updatedAlbum).To(Equal(*originalAlbum))

		ag.AssertNotCalled(GinkgoT(), "GetAlbumInfo", mock.Anything, mock.Anything, mock.Anything, mock.Anything)
	})

	It("returns error when agent fails to get album info", func() {
		originalAlbum := &model.Album{
			ID:          "al-agent-error",
			Name:        "Agent Error Album",
			AlbumArtist: "Agent Error Artist",
			MbzAlbumID:  "mbid-agent-error",
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		expectedErr := errors.New("agent communication failed")
		ag.On("GetAlbumInfo", ctx, "Agent Error Album", "Agent Error Artist", "mbid-agent-error").Return(nil, expectedErr)

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-agent-error")

		Expect(err).To(MatchError(expectedErr))
		Expect(updatedAlbum).To(BeNil())
		ag.AssertExpectations(GinkgoT())
	})

	It("returns original album when agent returns ErrNotFound", func() {
		originalAlbum := &model.Album{
			ID:          "al-agent-notfound",
			Name:        "Agent NotFound Album",
			AlbumArtist: "Agent NotFound Artist",
			MbzAlbumID:  "mbid-agent-notfound",
		}
		mockAlbumRepo.SetData(model.Albums{*originalAlbum})

		ag.On("GetAlbumInfo", ctx, "Agent NotFound Album", "Agent NotFound Artist", "mbid-agent-notfound").Return(nil, agents.ErrNotFound)

		updatedAlbum, err := p.UpdateAlbumInfo(ctx, "al-agent-notfound")

		Expect(err).NotTo(HaveOccurred())
		Expect(updatedAlbum).NotTo(BeNil())
		Expect(*updatedAlbum).To(Equal(*originalAlbum))
		Expect(updatedAlbum.ExternalInfoUpdatedAt).To(BeNil())

		ag.AssertExpectations(GinkgoT())
	})
})
229
core/external/provider_updateartistinfo_test.go
vendored
229
core/external/provider_updateartistinfo_test.go
vendored
@@ -1,229 +0,0 @@
package external_test

import (
"context"
"errors"
"time"

"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/conf/configtest"
"github.com/navidrome/navidrome/core/agents"
"github.com/navidrome/navidrome/core/external"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/tests"
"github.com/navidrome/navidrome/utils/gg"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
"github.com/stretchr/testify/mock"
)

func init() {
log.SetLevel(log.LevelDebug)
}

var _ = Describe("Provider - UpdateArtistInfo", func() {
var (
ctx context.Context
p external.Provider
ds *tests.MockDataStore
ag *mockAgents
mockArtistRepo *tests.MockArtistRepo
)

BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
conf.Server.DevArtistInfoTimeToLive = 1 * time.Hour
ctx = GinkgoT().Context()
ds = new(tests.MockDataStore)
ag = new(mockAgents)
p = external.NewProvider(ds, ag)
mockArtistRepo = ds.Artist(ctx).(*tests.MockArtistRepo)
})

It("returns error when artist is not found", func() {
artist, err := p.UpdateArtistInfo(ctx, "ar-not-found", 10, false)

Expect(err).To(MatchError(model.ErrNotFound))
Expect(artist).To(BeNil())
ag.AssertNotCalled(GinkgoT(), "GetArtistMBID")
ag.AssertNotCalled(GinkgoT(), "GetArtistImages")
ag.AssertNotCalled(GinkgoT(), "GetArtistBiography")
ag.AssertNotCalled(GinkgoT(), "GetArtistURL")
ag.AssertNotCalled(GinkgoT(), "GetSimilarArtists")
})

It("populates info when artist exists but has no external info", func() {
originalArtist := &model.Artist{
ID: "ar-existing",
Name: "Test Artist",
}
mockArtistRepo.SetData(model.Artists{*originalArtist})

expectedMBID := "mbid-artist-123"
expectedBio := "Artist Bio"
expectedURL := "http://artist.url"
expectedImages := []agents.ExternalImage{
{URL: "http://large.jpg", Size: 300},
{URL: "http://medium.jpg", Size: 200},
{URL: "http://small.jpg", Size: 100},
}
rawSimilar := []agents.Artist{
{Name: "Similar Artist 1", MBID: "mbid-similar-1"},
{Name: "Similar Artist 2", MBID: "mbid-similar-2"},
{Name: "Similar Artist 3", MBID: "mbid-similar-3"},
}
similarInDS := model.Artist{ID: "ar-similar-2", Name: "Similar Artist 2"}

ag.On("GetArtistMBID", ctx, "ar-existing", "Test Artist").Return(expectedMBID, nil).Once()
ag.On("GetArtistImages", ctx, "ar-existing", "Test Artist", expectedMBID).Return(expectedImages, nil).Once()
ag.On("GetArtistBiography", ctx, "ar-existing", "Test Artist", expectedMBID).Return(expectedBio, nil).Once()
ag.On("GetArtistURL", ctx, "ar-existing", "Test Artist", expectedMBID).Return(expectedURL, nil).Once()
ag.On("GetSimilarArtists", ctx, "ar-existing", "Test Artist", expectedMBID, 100).Return(rawSimilar, nil).Once()

mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})

updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-existing", 10, false)

Expect(err).NotTo(HaveOccurred())
Expect(updatedArtist).NotTo(BeNil())
Expect(updatedArtist.ID).To(Equal("ar-existing"))
Expect(updatedArtist.MbzArtistID).To(Equal(expectedMBID))
Expect(updatedArtist.Biography).To(Equal("Artist Bio"))
Expect(updatedArtist.ExternalUrl).To(Equal(expectedURL))
Expect(updatedArtist.LargeImageUrl).To(Equal("http://large.jpg"))
Expect(updatedArtist.MediumImageUrl).To(Equal("http://medium.jpg"))
Expect(updatedArtist.SmallImageUrl).To(Equal("http://small.jpg"))
Expect(updatedArtist.ExternalInfoUpdatedAt).NotTo(BeNil())
Expect(*updatedArtist.ExternalInfoUpdatedAt).To(BeTemporally("~", time.Now(), time.Second))

Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal("ar-similar-2"))
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal("Similar Artist 2"))

ag.AssertExpectations(GinkgoT())
})

It("returns cached info when artist exists and info is not expired", func() {
now := time.Now()
originalArtist := &model.Artist{
ID: "ar-cached",
Name: "Cached Artist",
MbzArtistID: "mbid-cached",
ExternalUrl: "http://cached.url",
Biography: "Cached Bio",
LargeImageUrl: "http://cached_large.jpg",
ExternalInfoUpdatedAt: gg.P(now.Add(-conf.Server.DevArtistInfoTimeToLive / 2)),
SimilarArtists: model.Artists{
{ID: "ar-similar-present", Name: "Similar Present"},
{ID: "ar-similar-absent", Name: "Similar Absent"},
},
}
similarInDS := model.Artist{ID: "ar-similar-present", Name: "Similar Present Updated"}
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})

updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-cached", 5, false)

Expect(err).NotTo(HaveOccurred())
Expect(updatedArtist).NotTo(BeNil())
Expect(updatedArtist.ID).To(Equal(originalArtist.ID))
Expect(updatedArtist.Name).To(Equal(originalArtist.Name))
Expect(updatedArtist.MbzArtistID).To(Equal(originalArtist.MbzArtistID))
Expect(updatedArtist.ExternalUrl).To(Equal(originalArtist.ExternalUrl))
Expect(updatedArtist.Biography).To(Equal(originalArtist.Biography))
Expect(updatedArtist.LargeImageUrl).To(Equal(originalArtist.LargeImageUrl))
Expect(updatedArtist.ExternalInfoUpdatedAt).To(Equal(originalArtist.ExternalInfoUpdatedAt))

Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal(similarInDS.ID))
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal(similarInDS.Name))

ag.AssertNotCalled(GinkgoT(), "GetArtistMBID")
ag.AssertNotCalled(GinkgoT(), "GetArtistImages")
ag.AssertNotCalled(GinkgoT(), "GetArtistBiography")
ag.AssertNotCalled(GinkgoT(), "GetArtistURL")
})

It("returns cached info and triggers background refresh when info is expired", func() {
now := time.Now()
expiredTime := now.Add(-conf.Server.DevArtistInfoTimeToLive * 2)
originalArtist := &model.Artist{
ID: "ar-expired",
Name: "Expired Artist",
ExternalInfoUpdatedAt: gg.P(expiredTime),
SimilarArtists: model.Artists{
{ID: "ar-exp-similar", Name: "Expired Similar"},
},
}
similarInDS := model.Artist{ID: "ar-exp-similar", Name: "Expired Similar Updated"}
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})

updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-expired", 5, false)

Expect(err).NotTo(HaveOccurred())
Expect(updatedArtist).NotTo(BeNil())
Expect(updatedArtist.ID).To(Equal(originalArtist.ID))
Expect(updatedArtist.Name).To(Equal(originalArtist.Name))
Expect(updatedArtist.ExternalInfoUpdatedAt).To(Equal(originalArtist.ExternalInfoUpdatedAt))

Expect(updatedArtist.SimilarArtists).To(HaveLen(1))
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal(similarInDS.ID))
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal(similarInDS.Name))

ag.AssertNotCalled(GinkgoT(), "GetArtistMBID")
ag.AssertNotCalled(GinkgoT(), "GetArtistImages")
ag.AssertNotCalled(GinkgoT(), "GetArtistBiography")
ag.AssertNotCalled(GinkgoT(), "GetArtistURL")
})

It("includes non-present similar artists when includeNotPresent is true", func() {
now := time.Now()
originalArtist := &model.Artist{
ID: "ar-similar-test",
Name: "Similar Test Artist",
ExternalInfoUpdatedAt: gg.P(now.Add(-conf.Server.DevArtistInfoTimeToLive / 2)),
SimilarArtists: model.Artists{
{ID: "ar-sim-present", Name: "Similar Present"},
{ID: "", Name: "Similar Absent Raw"},
{ID: "ar-sim-absent-lookup", Name: "Similar Absent Lookup"},
},
}
similarInDS := model.Artist{ID: "ar-sim-present", Name: "Similar Present Updated"}
mockArtistRepo.SetData(model.Artists{*originalArtist, similarInDS})

updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-similar-test", 5, true)

Expect(err).NotTo(HaveOccurred())
Expect(updatedArtist).NotTo(BeNil())

Expect(updatedArtist.SimilarArtists).To(HaveLen(3))
Expect(updatedArtist.SimilarArtists[0].ID).To(Equal(similarInDS.ID))
Expect(updatedArtist.SimilarArtists[0].Name).To(Equal(similarInDS.Name))
Expect(updatedArtist.SimilarArtists[1].ID).To(BeEmpty())
Expect(updatedArtist.SimilarArtists[1].Name).To(Equal("Similar Absent Raw"))
Expect(updatedArtist.SimilarArtists[2].ID).To(BeEmpty())
Expect(updatedArtist.SimilarArtists[2].Name).To(Equal("Similar Absent Lookup"))
})

It("updates ArtistInfo even if an optional agent call fails", func() {
originalArtist := &model.Artist{
ID: "ar-agent-fail",
Name: "Agent Fail Artist",
}
mockArtistRepo.SetData(model.Artists{*originalArtist})

expectedErr := errors.New("agent MBID failed")
ag.On("GetArtistMBID", ctx, "ar-agent-fail", "Agent Fail Artist").Return("", expectedErr).Once()
ag.On("GetArtistImages", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything).Return(nil, nil).Maybe()
ag.On("GetArtistBiography", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything).Return("", nil).Maybe()
ag.On("GetArtistURL", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything).Return("", nil).Maybe()
ag.On("GetSimilarArtists", ctx, "ar-agent-fail", "Agent Fail Artist", mock.Anything, 100).Return(nil, nil).Maybe()

updatedArtist, err := p.UpdateArtistInfo(ctx, "ar-agent-fail", 10, false)

Expect(err).NotTo(HaveOccurred())
Expect(updatedArtist).NotTo(BeNil())
Expect(updatedArtist.ID).To(Equal("ar-agent-fail"))
ag.AssertExpectations(GinkgoT())
})
})
@@ -1,4 +1,4 @@
package external
package core

import (
"context"
@@ -19,19 +19,19 @@ import (
"github.com/navidrome/navidrome/utils"
. "github.com/navidrome/navidrome/utils/gg"
"github.com/navidrome/navidrome/utils/random"
"github.com/navidrome/navidrome/utils/slice"
"github.com/navidrome/navidrome/utils/str"
"golang.org/x/sync/errgroup"
)

const (
maxSimilarArtists = 100
refreshDelay = 5 * time.Second
refreshTimeout = 15 * time.Second
refreshQueueLength = 2000
unavailableArtistID = "-1"
maxSimilarArtists = 100
refreshDelay = 5 * time.Second
refreshTimeout = 15 * time.Second
refreshQueueLength = 2000
)

type Provider interface {
type ExternalMetadata interface {
UpdateAlbumInfo(ctx context.Context, id string) (*model.Album, error)
UpdateArtistInfo(ctx context.Context, id string, count int, includeNotPresent bool) (*model.Artist, error)
SimilarSongs(ctx context.Context, id string, count int) (model.MediaFiles, error)
@@ -40,9 +40,9 @@ type Provider interface {
AlbumImage(ctx context.Context, id string) (*url.URL, error)
}

type provider struct {
type externalMetadata struct {
ds model.DataStore
ag Agents
ag *agents.Agents
artistQueue refreshQueue[auxArtist]
albumQueue refreshQueue[auxAlbum]
}
@@ -57,24 +57,14 @@ type auxArtist struct {
Name string
}

type Agents interface {
agents.AlbumInfoRetriever
agents.ArtistBiographyRetriever
agents.ArtistMBIDRetriever
agents.ArtistImageRetriever
agents.ArtistSimilarRetriever
agents.ArtistTopSongsRetriever
agents.ArtistURLRetriever
}

func NewProvider(ds model.DataStore, agents Agents) Provider {
e := &provider{ds: ds, ag: agents}
func NewExternalMetadata(ds model.DataStore, agents *agents.Agents) ExternalMetadata {
e := &externalMetadata{ds: ds, ag: agents}
e.artistQueue = newRefreshQueue(context.TODO(), e.populateArtistInfo)
e.albumQueue = newRefreshQueue(context.TODO(), e.populateAlbumInfo)
return e
}

func (e *provider) getAlbum(ctx context.Context, id string) (auxAlbum, error) {
func (e *externalMetadata) getAlbum(ctx context.Context, id string) (auxAlbum, error) {
var entity interface{}
entity, err := model.GetEntityByID(ctx, e.ds, id)
if err != nil {
||||
@@ -91,11 +81,10 @@ func (e *provider) getAlbum(ctx context.Context, id string) (auxAlbum, error) {
|
||||
default:
|
||||
return auxAlbum{}, model.ErrNotFound
|
||||
}
|
||||
|
||||
return album, nil
|
||||
}
|
||||
|
||||
func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album, error) {
|
||||
func (e *externalMetadata) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album, error) {
|
||||
album, err := e.getAlbum(ctx, id)
|
||||
if err != nil {
|
||||
log.Info(ctx, "Not found", "id", id)
|
||||
@@ -120,7 +109,7 @@ func (e *provider) UpdateAlbumInfo(ctx context.Context, id string) (*model.Album
|
||||
return &album.Album, nil
|
||||
}
|
||||
|
||||
func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAlbum, error) {
|
||||
func (e *externalMetadata) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAlbum, error) {
|
||||
start := time.Now()
|
||||
info, err := e.ag.GetAlbumInfo(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
@@ -155,7 +144,7 @@ func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAl
|
||||
}
|
||||
}
|
||||
|
||||
err = e.ds.Album(ctx).UpdateExternalInfo(&album.Album)
|
||||
err = e.ds.Album(ctx).Put(&album.Album)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error trying to update album external information", "id", album.ID, "name", album.Name,
|
||||
"elapsed", time.Since(start), err)
|
||||
@@ -166,7 +155,7 @@ func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAl
|
||||
return album, nil
|
||||
}
|
||||
|
||||
func (e *provider) getArtist(ctx context.Context, id string) (auxArtist, error) {
|
||||
func (e *externalMetadata) getArtist(ctx context.Context, id string) (auxArtist, error) {
|
||||
var entity interface{}
|
||||
entity, err := model.GetEntityByID(ctx, e.ds, id)
|
||||
if err != nil {
|
||||
@@ -188,7 +177,7 @@ func (e *provider) getArtist(ctx context.Context, id string) (auxArtist, error)
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *provider) UpdateArtistInfo(ctx context.Context, id string, similarCount int, includeNotPresent bool) (*model.Artist, error) {
|
||||
func (e *externalMetadata) UpdateArtistInfo(ctx context.Context, id string, similarCount int, includeNotPresent bool) (*model.Artist, error) {
|
||||
artist, err := e.refreshArtistInfo(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -198,7 +187,7 @@ func (e *provider) UpdateArtistInfo(ctx context.Context, id string, similarCount
|
||||
return &artist.Artist, err
|
||||
}
|
||||
|
||||
func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist, error) {
|
||||
func (e *externalMetadata) refreshArtistInfo(ctx context.Context, id string) (auxArtist, error) {
|
||||
artist, err := e.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return auxArtist{}, err
|
||||
@@ -222,7 +211,7 @@ func (e *provider) refreshArtistInfo(ctx context.Context, id string) (auxArtist,
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *provider) populateArtistInfo(ctx context.Context, artist auxArtist) (auxArtist, error) {
|
||||
func (e *externalMetadata) populateArtistInfo(ctx context.Context, artist auxArtist) (auxArtist, error) {
|
||||
start := time.Now()
|
||||
// Get MBID first, if it is not yet available
|
||||
if artist.MbzArtistID == "" {
|
||||
@@ -247,7 +236,7 @@ func (e *provider) populateArtistInfo(ctx context.Context, artist auxArtist) (au
|
||||
}
|
||||
|
||||
artist.ExternalInfoUpdatedAt = P(time.Now())
|
||||
err := e.ds.Artist(ctx).UpdateExternalInfo(&artist.Artist)
|
||||
err := e.ds.Artist(ctx).Put(&artist.Artist)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error trying to update artist external information", "id", artist.ID, "name", artist.Name,
|
||||
"elapsed", time.Since(start), err)
|
||||
@@ -257,7 +246,7 @@ func (e *provider) populateArtistInfo(ctx context.Context, artist auxArtist) (au
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *provider) SimilarSongs(ctx context.Context, id string, count int) (model.MediaFiles, error) {
|
||||
func (e *externalMetadata) SimilarSongs(ctx context.Context, id string, count int) (model.MediaFiles, error) {
|
||||
artist, err := e.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -315,7 +304,7 @@ func (e *provider) SimilarSongs(ctx context.Context, id string, count int) (mode
|
||||
return similarSongs, nil
|
||||
}
|
||||
|
||||
func (e *provider) ArtistImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
func (e *externalMetadata) ArtistImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
artist, err := e.getArtist(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -329,35 +318,24 @@ func (e *provider) ArtistImage(ctx context.Context, id string) (*url.URL, error)
|
||||
|
||||
imageUrl := artist.ArtistImageUrl()
|
||||
if imageUrl == "" {
|
||||
return nil, model.ErrNotFound
|
||||
return nil, agents.ErrNotFound
|
||||
}
|
||||
return url.Parse(imageUrl)
|
||||
}
|
||||
|
||||
func (e *provider) AlbumImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
func (e *externalMetadata) AlbumImage(ctx context.Context, id string) (*url.URL, error) {
|
||||
album, err := e.getAlbum(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info, err := e.ag.GetAlbumInfo(ctx, album.Name, album.AlbumArtist, album.MbzAlbumID)
|
||||
if err != nil {
|
||||
switch {
|
||||
case errors.Is(err, agents.ErrNotFound):
|
||||
log.Trace(ctx, "Album not found in agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
|
||||
return nil, model.ErrNotFound
|
||||
case errors.Is(err, context.Canceled):
|
||||
log.Debug(ctx, "GetAlbumInfo call canceled", err)
|
||||
default:
|
||||
log.Warn(ctx, "Error getting album info from agent", "albumID", id, "name", album.Name, "artist", album.AlbumArtist, err)
|
||||
}
|
||||
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if info == nil {
|
||||
log.Warn(ctx, "Agent returned nil info without error", "albumID", id, "name", album.Name, "artist", album.AlbumArtist)
|
||||
return nil, model.ErrNotFound
|
||||
if utils.IsCtxDone(ctx) {
|
||||
log.Warn(ctx, "AlbumImage call canceled", ctx.Err())
|
||||
return nil, ctx.Err()
|
||||
}
|
||||
|
||||
// Return the biggest image
|
||||
@@ -368,37 +346,26 @@ func (e *provider) AlbumImage(ctx context.Context, id string) (*url.URL, error)
|
||||
}
|
||||
}
|
||||
if img.URL == "" {
|
||||
return nil, model.ErrNotFound
|
||||
return nil, agents.ErrNotFound
|
||||
}
|
||||
return url.Parse(img.URL)
|
||||
}
|
||||
|
||||
func (e *provider) TopSongs(ctx context.Context, artistName string, count int) (model.MediaFiles, error) {
|
||||
func (e *externalMetadata) TopSongs(ctx context.Context, artistName string, count int) (model.MediaFiles, error) {
|
||||
artist, err := e.findArtistByName(ctx, artistName)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Artist not found", "name", artistName, err)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
songs, err := e.getMatchingTopSongs(ctx, e.ag, artist, count)
|
||||
if err != nil {
|
||||
switch {
|
||||
case errors.Is(err, agents.ErrNotFound):
|
||||
log.Trace(ctx, "TopSongs not found", "name", artistName)
|
||||
return nil, model.ErrNotFound
|
||||
case errors.Is(err, context.Canceled):
|
||||
log.Debug(ctx, "TopSongs call canceled", err)
|
||||
default:
|
||||
log.Warn(ctx, "Error getting top songs from agent", "artist", artistName, err)
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
return songs, nil
|
||||
return e.getMatchingTopSongs(ctx, e.ag, artist, count)
|
||||
}
|
||||
|
||||
func (e *provider) getMatchingTopSongs(ctx context.Context, agent agents.ArtistTopSongsRetriever, artist *auxArtist, count int) (model.MediaFiles, error) {
|
||||
func (e *externalMetadata) getMatchingTopSongs(ctx context.Context, agent agents.ArtistTopSongsRetriever, artist *auxArtist, count int) (model.MediaFiles, error) {
|
||||
songs, err := agent.GetArtistTopSongs(ctx, artist.ID, artist.Name, artist.MbzArtistID, count)
|
||||
if errors.Is(err, agents.ErrNotFound) {
|
||||
return nil, nil
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -419,17 +386,13 @@ func (e *provider) getMatchingTopSongs(ctx context.Context, agent agents.ArtistT
|
||||
} else {
|
||||
log.Debug(ctx, "Found matching top songs", "name", artist.Name, "numSongs", len(mfs))
|
||||
}
|
||||
|
||||
return mfs, nil
|
||||
}
|
||||
|
||||
func (e *provider) findMatchingTrack(ctx context.Context, mbid string, artistID, title string) (*model.MediaFile, error) {
|
||||
func (e *externalMetadata) findMatchingTrack(ctx context.Context, mbid string, artistID, title string) (*model.MediaFile, error) {
|
||||
if mbid != "" {
|
||||
mfs, err := e.ds.MediaFile(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.And{
|
||||
squirrel.Eq{"mbz_recording_id": mbid},
|
||||
squirrel.Eq{"missing": false},
|
||||
},
|
||||
Filters: squirrel.Eq{"mbz_recording_id": mbid},
|
||||
})
|
||||
if err == nil && len(mfs) > 0 {
|
||||
return &mfs[0], nil
|
||||
@@ -443,7 +406,6 @@ func (e *provider) findMatchingTrack(ctx context.Context, mbid string, artistID,
|
||||
squirrel.Eq{"album_artist_id": artistID},
|
||||
},
|
||||
squirrel.Like{"order_title": str.SanitizeFieldForSorting(title)},
|
||||
squirrel.Eq{"missing": false},
|
||||
},
|
||||
Sort: "starred desc, rating desc, year asc, compilation asc ",
|
||||
Max: 1,
|
||||
@@ -454,7 +416,7 @@ func (e *provider) findMatchingTrack(ctx context.Context, mbid string, artistID,
|
||||
return &mfs[0], nil
|
||||
}
|
||||
|
||||
func (e *provider) callGetURL(ctx context.Context, agent agents.ArtistURLRetriever, artist *auxArtist) {
|
||||
func (e *externalMetadata) callGetURL(ctx context.Context, agent agents.ArtistURLRetriever, artist *auxArtist) {
|
||||
artisURL, err := agent.GetArtistURL(ctx, artist.ID, artist.Name, artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
@@ -462,7 +424,7 @@ func (e *provider) callGetURL(ctx context.Context, agent agents.ArtistURLRetriev
|
||||
artist.ExternalUrl = artisURL
|
||||
}
|
||||
|
||||
func (e *provider) callGetBiography(ctx context.Context, agent agents.ArtistBiographyRetriever, artist *auxArtist) {
|
||||
func (e *externalMetadata) callGetBiography(ctx context.Context, agent agents.ArtistBiographyRetriever, artist *auxArtist) {
|
||||
bio, err := agent.GetArtistBiography(ctx, artist.ID, str.Clear(artist.Name), artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
@@ -472,7 +434,7 @@ func (e *provider) callGetBiography(ctx context.Context, agent agents.ArtistBiog
|
||||
artist.Biography = strings.ReplaceAll(bio, "<a ", "<a target='_blank' ")
|
||||
}
|
||||
|
||||
func (e *provider) callGetImage(ctx context.Context, agent agents.ArtistImageRetriever, artist *auxArtist) {
|
||||
func (e *externalMetadata) callGetImage(ctx context.Context, agent agents.ArtistImageRetriever, artist *auxArtist) {
|
||||
images, err := agent.GetArtistImages(ctx, artist.ID, artist.Name, artist.MbzArtistID)
|
||||
if err != nil {
|
||||
return
|
||||
@@ -490,7 +452,7 @@ func (e *provider) callGetImage(ctx context.Context, agent agents.ArtistImageRet
|
||||
}
|
||||
}
|
||||
|
||||
func (e *provider) callGetSimilar(ctx context.Context, agent agents.ArtistSimilarRetriever, artist *auxArtist,
|
||||
func (e *externalMetadata) callGetSimilar(ctx context.Context, agent agents.ArtistSimilarRetriever, artist *auxArtist,
|
||||
limit int, includeNotPresent bool) {
|
||||
similar, err := agent.GetSimilarArtists(ctx, artist.ID, artist.Name, artist.MbzArtistID, limit)
|
||||
if len(similar) == 0 || err != nil {
|
||||
@@ -505,43 +467,24 @@ func (e *provider) callGetSimilar(ctx context.Context, agent agents.ArtistSimila
|
||||
artist.SimilarArtists = sa
|
||||
}
|
||||
|
||||
func (e *provider) mapSimilarArtists(ctx context.Context, similar []agents.Artist, includeNotPresent bool) (model.Artists, error) {
|
||||
func (e *externalMetadata) mapSimilarArtists(ctx context.Context, similar []agents.Artist, includeNotPresent bool) (model.Artists, error) {
|
||||
var result model.Artists
|
||||
var notPresent []string
|
||||
|
||||
artistNames := slice.Map(similar, func(artist agents.Artist) string { return artist.Name })
|
||||
|
||||
// Query all artists at once
|
||||
clauses := slice.Map(artistNames, func(name string) squirrel.Sqlizer {
|
||||
return squirrel.Like{"artist.name": name}
|
||||
})
|
||||
artists, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Or(clauses),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Create a map for quick lookup
|
||||
artistMap := make(map[string]model.Artist)
|
||||
for _, artist := range artists {
|
||||
artistMap[artist.Name] = artist
|
||||
}
|
||||
|
||||
// Process the similar artists
|
||||
// First select artists that are present.
|
||||
for _, s := range similar {
|
||||
if artist, found := artistMap[s.Name]; found {
|
||||
result = append(result, artist)
|
||||
} else {
|
||||
sa, err := e.findArtistByName(ctx, s.Name)
|
||||
if err != nil {
|
||||
notPresent = append(notPresent, s.Name)
|
||||
continue
|
||||
}
|
||||
result = append(result, sa.Artist)
|
||||
}
|
||||
|
||||
// Then fill up with non-present artists
|
||||
if includeNotPresent {
|
||||
for _, s := range notPresent {
|
||||
// Let the ID empty to indicate that the artist is not present in the DB
|
||||
sa := model.Artist{Name: s}
|
||||
sa := model.Artist{ID: unavailableArtistID, Name: s}
|
||||
result = append(result, sa)
|
||||
}
|
||||
}
|
||||
@@ -549,7 +492,7 @@ func (e *provider) mapSimilarArtists(ctx context.Context, similar []agents.Artis
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (e *provider) findArtistByName(ctx context.Context, artistName string) (*auxArtist, error) {
|
||||
func (e *externalMetadata) findArtistByName(ctx context.Context, artistName string) (*auxArtist, error) {
|
||||
artists, err := e.ds.Artist(ctx).GetAll(model.QueryOptions{
|
||||
Filters: squirrel.Like{"artist.name": artistName},
|
||||
Max: 1,
|
||||
@@ -567,10 +510,10 @@ func (e *provider) findArtistByName(ctx context.Context, artistName string) (*au
|
||||
return artist, nil
|
||||
}
|
||||
|
||||
func (e *provider) loadSimilar(ctx context.Context, artist *auxArtist, count int, includeNotPresent bool) error {
|
||||
func (e *externalMetadata) loadSimilar(ctx context.Context, artist *auxArtist, count int, includeNotPresent bool) error {
|
||||
var ids []string
|
||||
for _, sa := range artist.SimilarArtists {
|
||||
if sa.ID == "" {
|
||||
if sa.ID == unavailableArtistID {
|
||||
continue
|
||||
}
|
||||
ids = append(ids, sa.ID)
|
||||
@@ -601,7 +544,7 @@ func (e *provider) loadSimilar(ctx context.Context, artist *auxArtist, count int
|
||||
continue
|
||||
}
|
||||
la = sa
|
||||
la.ID = ""
|
||||
la.ID = unavailableArtistID
|
||||
}
|
||||
loaded = append(loaded, la)
|
||||
}
|
||||
@@ -29,7 +29,7 @@ func New() FFmpeg {
}

const (
extractImageCmd = "ffmpeg -i %s -map 0:v -map -0:V -vcodec copy -f image2pipe -"
extractImageCmd = "ffmpeg -i %s -an -vcodec copy -f image2pipe -"
probeCmd = "ffmpeg %s -f ffmetadata"
)

@@ -39,10 +39,6 @@ func (e *ffmpeg) Transcode(ctx context.Context, command, path string, maxBitRate
if _, err := ffmpegCmd(); err != nil {
return nil, err
}
// First make sure the file exists
if err := fileExists(path); err != nil {
return nil, err
}
args := createFFmpegCommand(command, path, maxBitRate, offset)
return e.start(ctx, args)
}
@@ -51,25 +47,10 @@ func (e *ffmpeg) ExtractImage(ctx context.Context, path string) (io.ReadCloser,
if _, err := ffmpegCmd(); err != nil {
return nil, err
}
// First make sure the file exists
if err := fileExists(path); err != nil {
return nil, err
}
args := createFFmpegCommand(extractImageCmd, path, 0, 0)
return e.start(ctx, args)
}

func fileExists(path string) error {
s, err := os.Stat(path)
if err != nil {
return err
}
if s.IsDir() {
return fmt.Errorf("'%s' is a directory", path)
}
return nil
}

func (e *ffmpeg) Probe(ctx context.Context, files []string) (string, error) {
if _, err := ffmpegCmd(); err != nil {
return "", err
@@ -1,51 +0,0 @@
package core

import (
"path/filepath"

"github.com/navidrome/navidrome/core/storage"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/metadata"
. "github.com/navidrome/navidrome/utils/gg"
)

type InspectOutput struct {
File string `json:"file"`
RawTags model.RawTags `json:"rawTags"`
MappedTags *model.MediaFile `json:"mappedTags,omitempty"`
}

func Inspect(filePath string, libraryId int, folderId string) (*InspectOutput, error) {
path, file := filepath.Split(filePath)

s, err := storage.For(path)
if err != nil {
return nil, err
}

fs, err := s.FS()
if err != nil {
return nil, err
}

tags, err := fs.ReadTags(file)
if err != nil {
return nil, err
}

tag, ok := tags[file]
if !ok {
log.Error("Could not get tags for path", "path", filePath)
return nil, model.ErrNotFound
}

md := metadata.New(path, tag)
result := &InspectOutput{
File: filePath,
RawTags: tags[file].Tags,
MappedTags: P(md.ToMediaFile(libraryId, folderId)),
}

return result, nil
}
||||
@@ -36,12 +36,11 @@ type mediaStreamer struct {
|
||||
}
|
||||
|
||||
type streamJob struct {
|
||||
ms *mediaStreamer
|
||||
mf *model.MediaFile
|
||||
filePath string
|
||||
format string
|
||||
bitRate int
|
||||
offset int
|
||||
ms *mediaStreamer
|
||||
mf *model.MediaFile
|
||||
format string
|
||||
bitRate int
|
||||
offset int
|
||||
}
|
||||
|
||||
func (j *streamJob) Key() string {
|
||||
@@ -69,14 +68,13 @@ func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqF
|
||||
|
||||
format, bitRate = selectTranscodingOptions(ctx, ms.ds, mf, reqFormat, reqBitRate)
|
||||
s := &Stream{ctx: ctx, mf: mf, format: format, bitRate: bitRate}
|
||||
filePath := mf.AbsolutePath()
|
||||
|
||||
if format == "raw" {
|
||||
log.Debug(ctx, "Streaming RAW file", "id", mf.ID, "path", filePath,
|
||||
log.Debug(ctx, "Streaming RAW file", "id", mf.ID, "path", mf.Path,
|
||||
"requestBitrate", reqBitRate, "requestFormat", reqFormat, "requestOffset", reqOffset,
|
||||
"originalBitrate", mf.BitRate, "originalFormat", mf.Suffix,
|
||||
"selectedBitrate", bitRate, "selectedFormat", format)
|
||||
f, err := os.Open(filePath)
|
||||
f, err := os.Open(mf.Path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -87,12 +85,11 @@ func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqF
|
||||
}
|
||||
|
||||
job := &streamJob{
|
||||
ms: ms,
|
||||
mf: mf,
|
||||
filePath: filePath,
|
||||
format: format,
|
||||
bitRate: bitRate,
|
||||
offset: reqOffset,
|
||||
ms: ms,
|
||||
mf: mf,
|
||||
format: format,
|
||||
bitRate: bitRate,
|
||||
offset: reqOffset,
|
||||
}
|
||||
r, err := ms.cache.Get(ctx, job)
|
||||
if err != nil {
|
||||
@@ -104,7 +101,7 @@ func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqF
|
||||
s.ReadCloser = r
|
||||
s.Seeker = r.Seeker
|
||||
|
||||
log.Debug(ctx, "Streaming TRANSCODED file", "id", mf.ID, "path", filePath,
|
||||
log.Debug(ctx, "Streaming TRANSCODED file", "id", mf.ID, "path", mf.Path,
|
||||
"requestBitrate", reqBitRate, "requestFormat", reqFormat, "requestOffset", reqOffset,
|
||||
"originalBitrate", mf.BitRate, "originalFormat", mf.Suffix,
|
||||
"selectedBitrate", bitRate, "selectedFormat", format, "cached", cached, "seekable", s.Seekable())
|
||||
@@ -204,7 +201,7 @@ func NewTranscodingCache() TranscodingCache {
|
||||
log.Error(ctx, "Error loading transcoding command", "format", job.format, err)
|
||||
return nil, os.ErrInvalid
|
||||
}
|
||||
out, err := job.ms.transcoder.Transcode(ctx, t.Command, job.filePath, job.bitRate, job.offset)
|
||||
out, err := job.ms.transcoder.Transcode(ctx, t.Command, job.mf.Path, job.bitRate, job.offset)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error starting transcoder", "id", job.mf.ID, err)
|
||||
return nil, os.ErrInvalid
|
@@ -187,6 +187,7 @@ var staticData = sync.OnceValue(func() insights.Data {
data.Config.EnablePrometheus = conf.Server.Prometheus.Enabled
data.Config.TranscodingCacheSize = conf.Server.TranscodingCacheSize
data.Config.ImageCacheSize = conf.Server.ImageCacheSize
data.Config.ScanSchedule = conf.Server.ScanSchedule
data.Config.SessionTimeout = uint64(math.Trunc(conf.Server.SessionTimeout.Seconds()))
data.Config.SearchFullString = conf.Server.SearchFullString
data.Config.RecentlyAddedByModTime = conf.Server.RecentlyAddedByModTime
@@ -194,10 +195,6 @@ var staticData = sync.OnceValue(func() insights.Data {
data.Config.BackupSchedule = conf.Server.Backup.Schedule
data.Config.BackupCount = conf.Server.Backup.Count
data.Config.DevActivityPanel = conf.Server.DevActivityPanel
data.Config.ScannerEnabled = conf.Server.Scanner.Enabled
data.Config.ScanSchedule = conf.Server.Scanner.Schedule
data.Config.ScanWatcherWait = uint64(math.Trunc(conf.Server.Scanner.WatcherWait.Seconds()))
data.Config.ScanOnStartup = conf.Server.Scanner.ScanOnStartup

return data
})

@@ -43,10 +43,7 @@ type Data struct {
LogLevel string `json:"logLevel,omitempty"`
LogFileConfigured bool `json:"logFileConfigured,omitempty"`
TLSConfigured bool `json:"tlsConfigured,omitempty"`
ScannerEnabled bool `json:"scannerEnabled,omitempty"`
ScanSchedule string `json:"scanSchedule,omitempty"`
ScanWatcherWait uint64 `json:"scanWatcherWait,omitempty"`
ScanOnStartup bool `json:"scanOnStartup,omitempty"`
TranscodingCacheSize string `json:"transcodingCacheSize,omitempty"`
ImageCacheSize string `json:"imageCacheSize,omitempty"`
EnableArtworkPrecache bool `json:"enableArtworkPrecache,omitempty"`

@@ -28,14 +28,7 @@ type metrics struct {
}

func NewPrometheusInstance(ds model.DataStore) Metrics {
if conf.Server.Prometheus.Enabled {
return &metrics{ds: ds}
}
return noopMetrics{}
}

func NewNoopInstance() Metrics {
return noopMetrics{}
return &metrics{ds: ds}
}

func (m *metrics) WriteInitialMetrics(ctx context.Context) {
@@ -151,12 +144,3 @@ func processSqlAggregateMetrics(ctx context.Context, ds model.DataStore, targetG
}
targetGauge.With(prometheus.Labels{"model": "user"}).Set(float64(usersCount))
}

type noopMetrics struct {
}

func (n noopMetrics) WriteInitialMetrics(context.Context) {}

func (n noopMetrics) WriteAfterScanMetrics(context.Context, bool) {}

func (n noopMetrics) GetHandler() http.Handler { return nil }

@@ -5,13 +5,13 @@ package mpv
import (
"path/filepath"

"github.com/navidrome/navidrome/model/id"
"github.com/google/uuid"
)

func socketName(prefix, suffix string) string {
// Windows needs to use a named pipe for the socket
// see https://mpv.io/manual/master#using-mpv-from-other-programs-or-scripts
return filepath.Join(`\\.\pipe\mpvsocket`, prefix+id.NewRandom()+suffix)
return filepath.Join(`\\.\pipe\mpvsocket`, prefix+uuid.NewString()+suffix)
}

func removeSocket(string) {
||||
|
||||
@@ -5,13 +5,10 @@ import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/google/uuid"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/id"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
"github.com/navidrome/navidrome/utils"
|
||||
)
|
||||
|
||||
type Players interface {
|
||||
@@ -20,57 +17,46 @@ type Players interface {
|
||||
}
|
||||
|
||||
func NewPlayers(ds model.DataStore) Players {
|
||||
return &players{
|
||||
ds: ds,
|
||||
limiter: utils.Limiter{Interval: consts.UpdatePlayerFrequency},
|
||||
}
|
||||
return &players{ds}
|
||||
}
|
||||
|
||||
type players struct {
|
||||
ds model.DataStore
|
||||
limiter utils.Limiter
|
||||
ds model.DataStore
|
||||
}
|
||||
|
||||
func (p *players) Register(ctx context.Context, playerID, client, userAgent, ip string) (*model.Player, *model.Transcoding, error) {
|
||||
func (p *players) Register(ctx context.Context, id, client, userAgent, ip string) (*model.Player, *model.Transcoding, error) {
|
||||
var plr *model.Player
|
||||
var trc *model.Transcoding
|
||||
var err error
|
||||
user, _ := request.UserFrom(ctx)
|
||||
if playerID != "" {
|
||||
plr, err = p.ds.Player(ctx).Get(playerID)
|
||||
if id != "" {
|
||||
plr, err = p.ds.Player(ctx).Get(id)
|
||||
if err == nil && plr.Client != client {
|
||||
playerID = ""
|
||||
id = ""
|
||||
}
|
||||
}
|
||||
username := userName(ctx)
|
||||
if err != nil || playerID == "" {
|
||||
if err != nil || id == "" {
|
||||
plr, err = p.ds.Player(ctx).FindMatch(user.ID, client, userAgent)
|
||||
if err == nil {
|
||||
log.Debug(ctx, "Found matching player", "id", plr.ID, "client", client, "username", username, "type", userAgent)
|
||||
log.Debug(ctx, "Found matching player", "id", plr.ID, "client", client, "username", userName(ctx), "type", userAgent)
|
||||
} else {
|
||||
plr = &model.Player{
|
||||
ID: id.NewRandom(),
|
||||
ID: uuid.NewString(),
|
||||
UserId: user.ID,
|
||||
Client: client,
|
||||
ScrobbleEnabled: true,
|
||||
ReportRealPath: conf.Server.Subsonic.DefaultReportRealPath,
|
||||
}
|
||||
log.Info(ctx, "Registering new player", "id", plr.ID, "client", client, "username", username, "type", userAgent)
|
||||
log.Info(ctx, "Registering new player", "id", plr.ID, "client", client, "username", userName(ctx), "type", userAgent)
|
||||
}
|
||||
}
|
||||
plr.Name = fmt.Sprintf("%s [%s]", client, userAgent)
|
||||
plr.UserAgent = userAgent
|
||||
plr.IP = ip
|
||||
plr.LastSeen = time.Now()
|
||||
p.limiter.Do(plr.ID, func() {
|
||||
ctx, cancel := context.WithTimeout(ctx, time.Second)
|
||||
defer cancel()
|
||||
|
||||
err = p.ds.Player(ctx).Put(plr)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Could not save player", "id", plr.ID, "client", client, "username", username, "type", userAgent, err)
|
||||
}
|
||||
})
|
||||
err = p.ds.Player(ctx).Put(plr)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
if plr.TranscodingId != "" {
|
||||
trc, err = p.ds.Transcoding(ctx).Get(plr.TranscodingId)
|
||||
}
|
||||
|
||||
@@ -9,12 +9,10 @@ import (
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/RaveNoX/go-jsoncommentstrip"
|
||||
"github.com/bmatcuk/doublestar/v4"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
@@ -24,7 +22,7 @@ import (
|
||||
)
|
||||
|
||||
type Playlists interface {
|
||||
ImportFile(ctx context.Context, folder *model.Folder, filename string) (*model.Playlist, error)
|
||||
ImportFile(ctx context.Context, dir string, fname string) (*model.Playlist, error)
|
||||
Update(ctx context.Context, playlistID string, name *string, comment *string, public *bool, idsToAdd []string, idxToRemove []int) error
|
||||
ImportM3U(ctx context.Context, reader io.Reader) (*model.Playlist, error)
|
||||
}
|
||||
@@ -37,29 +35,16 @@ func NewPlaylists(ds model.DataStore) Playlists {
|
||||
return &playlists{ds: ds}
|
||||
}
|
||||
|
||||
func InPlaylistsPath(folder model.Folder) bool {
|
||||
if conf.Server.PlaylistsPath == "" {
|
||||
return true
|
||||
}
|
||||
rel, _ := filepath.Rel(folder.LibraryPath, folder.AbsolutePath())
|
||||
for _, path := range strings.Split(conf.Server.PlaylistsPath, string(filepath.ListSeparator)) {
|
||||
if match, _ := doublestar.Match(path, rel); match {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (s *playlists) ImportFile(ctx context.Context, folder *model.Folder, filename string) (*model.Playlist, error) {
|
||||
pls, err := s.parsePlaylist(ctx, filename, folder)
|
||||
func (s *playlists) ImportFile(ctx context.Context, dir string, fname string) (*model.Playlist, error) {
|
||||
pls, err := s.parsePlaylist(ctx, fname, dir)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error parsing playlist", "path", filepath.Join(folder.AbsolutePath(), filename), err)
|
||||
log.Error(ctx, "Error parsing playlist", "path", filepath.Join(dir, fname), err)
|
||||
return nil, err
|
||||
}
|
||||
log.Debug("Found playlist", "name", pls.Name, "lastUpdated", pls.UpdatedAt, "path", pls.Path, "numTracks", len(pls.Tracks))
|
||||
err = s.updatePlaylist(ctx, pls)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error updating playlist", "path", filepath.Join(folder.AbsolutePath(), filename), err)
|
||||
log.Error(ctx, "Error updating playlist", "path", filepath.Join(dir, fname), err)
|
||||
}
|
||||
return pls, err
|
||||
}
|
||||
@@ -71,7 +56,7 @@ func (s *playlists) ImportM3U(ctx context.Context, reader io.Reader) (*model.Pla
|
||||
Public: false,
|
||||
Sync: false,
|
||||
}
|
||||
err := s.parseM3U(ctx, pls, nil, reader)
|
||||
err := s.parseM3U(ctx, pls, "", reader)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error parsing playlist", err)
|
||||
return nil, err
|
||||
@@ -84,8 +69,8 @@ func (s *playlists) ImportM3U(ctx context.Context, reader io.Reader) (*model.Pla
|
||||
return pls, nil
|
||||
}
|
||||
|
||||
func (s *playlists) parsePlaylist(ctx context.Context, playlistFile string, folder *model.Folder) (*model.Playlist, error) {
|
||||
pls, err := s.newSyncedPlaylist(folder.AbsolutePath(), playlistFile)
|
||||
func (s *playlists) parsePlaylist(ctx context.Context, playlistFile string, baseDir string) (*model.Playlist, error) {
|
||||
pls, err := s.newSyncedPlaylist(baseDir, playlistFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -101,7 +86,7 @@ func (s *playlists) parsePlaylist(ctx context.Context, playlistFile string, fold
|
||||
case ".nsp":
|
||||
err = s.parseNSP(ctx, pls, file)
|
||||
default:
|
||||
err = s.parseM3U(ctx, pls, folder, file)
|
||||
err = s.parseM3U(ctx, pls, baseDir, file)
|
||||
}
|
||||
return pls, err
|
||||
}
|
||||
@@ -127,35 +112,14 @@ func (s *playlists) newSyncedPlaylist(baseDir string, playlistFile string) (*mod
|
||||
return pls, nil
|
||||
}
|
||||
|
||||
func getPositionFromOffset(data []byte, offset int64) (line, column int) {
|
||||
line = 1
|
||||
for _, b := range data[:offset] {
|
||||
if b == '\n' {
|
||||
line++
|
||||
column = 1
|
||||
} else {
|
||||
column++
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (s *playlists) parseNSP(_ context.Context, pls *model.Playlist, reader io.Reader) error {
|
||||
func (s *playlists) parseNSP(ctx context.Context, pls *model.Playlist, file io.Reader) error {
|
||||
nsp := &nspFile{}
|
||||
reader = io.LimitReader(reader, 100*1024) // Limit to 100KB
|
||||
reader = jsoncommentstrip.NewReader(reader)
|
||||
input, err := io.ReadAll(reader)
|
||||
reader := jsoncommentstrip.NewReader(file)
|
||||
dec := json.NewDecoder(reader)
|
||||
err := dec.Decode(nsp)
|
||||
if err != nil {
|
||||
return fmt.Errorf("reading SmartPlaylist: %w", err)
|
||||
}
|
||||
err = json.Unmarshal(input, nsp)
|
||||
if err != nil {
|
||||
var syntaxErr *json.SyntaxError
|
||||
if errors.As(err, &syntaxErr) {
|
||||
line, col := getPositionFromOffset(input, syntaxErr.Offset)
|
||||
return fmt.Errorf("JSON syntax error in SmartPlaylist at line %d, column %d: %w", line, col, err)
|
||||
}
|
||||
return fmt.Errorf("JSON parsing error in SmartPlaylist: %w", err)
|
||||
log.Error(ctx, "Error parsing SmartPlaylist", "playlist", pls.Name, err)
|
||||
return err
|
||||
}
|
||||
pls.Rules = &nsp.Criteria
|
||||
if nsp.Name != "" {
|
||||
@@ -167,7 +131,7 @@ func (s *playlists) parseNSP(_ context.Context, pls *model.Playlist, reader io.R
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *model.Folder, reader io.Reader) error {
|
||||
func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, baseDir string, reader io.Reader) error {
|
||||
mediaFileRepository := s.ds.MediaFile(ctx)
|
||||
var mfs model.MediaFiles
|
||||
for lines := range slice.CollectChunks(slice.LinesFrom(reader), 400) {
|
||||
@@ -186,17 +150,12 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
|
||||
line = strings.TrimPrefix(line, "file://")
|
||||
line, _ = url.QueryUnescape(line)
|
||||
}
|
||||
if !model.IsAudioFile(line) {
|
||||
continue
|
||||
if baseDir != "" && !filepath.IsAbs(line) {
|
||||
line = filepath.Join(baseDir, line)
|
||||
}
|
||||
filteredLines = append(filteredLines, line)
|
||||
}
|
||||
paths, err := s.normalizePaths(ctx, pls, folder, filteredLines)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Error normalizing paths in playlist", "playlist", pls.Name, err)
|
||||
continue
|
||||
}
|
||||
found, err := mediaFileRepository.FindByPaths(paths)
|
||||
found, err := mediaFileRepository.FindByPaths(filteredLines)
|
||||
if err != nil {
|
||||
log.Warn(ctx, "Error reading files from DB", "playlist", pls.Name, err)
|
||||
continue
|
||||
@@ -205,7 +164,7 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
|
||||
for idx := range found {
|
||||
existing[strings.ToLower(found[idx].Path)] = idx
|
||||
}
|
||||
for _, path := range paths {
|
||||
for _, path := range filteredLines {
|
||||
idx, ok := existing[strings.ToLower(path)]
|
||||
if ok {
|
||||
mfs = append(mfs, found[idx])
|
||||
@@ -223,64 +182,6 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO This won't work for multiple libraries
|
||||
func (s *playlists) normalizePaths(ctx context.Context, pls *model.Playlist, folder *model.Folder, lines []string) ([]string, error) {
|
||||
libRegex, err := s.compileLibraryPaths(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
res := make([]string, 0, len(lines))
|
||||
for idx, line := range lines {
|
||||
var libPath string
|
||||
var filePath string
|
||||
|
||||
if folder != nil && !filepath.IsAbs(line) {
|
||||
libPath = folder.LibraryPath
|
||||
filePath = filepath.Join(folder.AbsolutePath(), line)
|
||||
} else {
|
||||
cleanLine := filepath.Clean(line)
|
||||
if libPath = libRegex.FindString(cleanLine); libPath != "" {
|
||||
filePath = cleanLine
|
||||
}
|
||||
}
|
||||
|
||||
if libPath != "" {
|
||||
if rel, err := filepath.Rel(libPath, filePath); err == nil {
|
||||
res = append(res, rel)
|
||||
} else {
|
||||
log.Debug(ctx, "Error getting relative path", "playlist", pls.Name, "path", line, "libPath", libPath,
|
||||
"filePath", filePath, err)
|
||||
}
|
||||
} else {
|
||||
log.Warn(ctx, "Path in playlist not found in any library", "path", line, "line", idx)
|
||||
}
|
||||
}
|
||||
return slice.Map(res, filepath.ToSlash), nil
|
||||
}
|
||||
|
||||
func (s *playlists) compileLibraryPaths(ctx context.Context) (*regexp.Regexp, error) {
|
||||
libs, err := s.ds.Library(ctx).GetAll()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Create regex patterns for each library path
|
||||
patterns := make([]string, len(libs))
|
||||
for i, lib := range libs {
|
||||
cleanPath := filepath.Clean(lib.Path)
|
||||
escapedPath := regexp.QuoteMeta(cleanPath)
|
||||
patterns[i] = fmt.Sprintf("^%s(?:/|$)", escapedPath)
|
||||
}
|
||||
// Combine all patterns into a single regex
|
||||
combinedPattern := strings.Join(patterns, "|")
|
||||
re, err := regexp.Compile(combinedPattern)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("compiling library paths `%s`: %w", combinedPattern, err)
|
||||
}
|
||||
return re, nil
|
||||
}
|
||||
|
||||
func (s *playlists) updatePlaylist(ctx context.Context, newPls *model.Playlist) error {
|
||||
owner, _ := request.UserFrom(ctx)
|
||||
|
||||
@@ -315,7 +216,7 @@ func (s *playlists) Update(ctx context.Context, playlistID string,
|
||||
needsInfoUpdate := name != nil || comment != nil || public != nil
|
||||
needsTrackRefresh := len(idxToRemove) > 0
|
||||
|
||||
return s.ds.WithTxImmediate(func(tx model.DataStore) error {
|
||||
return s.ds.WithTx(func(tx model.DataStore) error {
|
||||
var pls *model.Playlist
|
||||
var err error
|
||||
repo := tx.Playlist(ctx)
|
||||
@@ -324,7 +225,7 @@ func (s *playlists) Update(ctx context.Context, playlistID string,
|
||||
return fmt.Errorf("%w: playlist '%s'", model.ErrNotFound, playlistID)
|
||||
}
|
||||
if needsTrackRefresh {
|
||||
pls, err = repo.GetWithTracks(playlistID, true, false)
|
||||
pls, err = repo.GetWithTracks(playlistID, true)
|
||||
pls.RemoveTracks(idxToRemove)
|
||||
pls.AddTracks(idsToAdd)
|
||||
} else {
|
||||
|
||||
@@ -7,8 +7,6 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
"github.com/navidrome/navidrome/model"
|
||||
"github.com/navidrome/navidrome/model/criteria"
|
||||
"github.com/navidrome/navidrome/model/request"
|
||||
@@ -20,56 +18,43 @@ import (
|
||||
var _ = Describe("Playlists", func() {
|
||||
var ds *tests.MockDataStore
|
||||
var ps Playlists
|
||||
var mockPlsRepo mockedPlaylistRepo
|
||||
var mockLibRepo *tests.MockLibraryRepo
|
||||
var mp mockedPlaylist
|
||||
ctx := context.Background()
|
||||
|
||||
BeforeEach(func() {
|
||||
mockPlsRepo = mockedPlaylistRepo{}
|
||||
mockLibRepo = &tests.MockLibraryRepo{}
|
||||
mp = mockedPlaylist{}
|
||||
ds = &tests.MockDataStore{
|
||||
MockedPlaylist: &mockPlsRepo,
|
||||
MockedLibrary: mockLibRepo,
|
||||
MockedPlaylist: &mp,
|
||||
}
|
||||
ctx = request.WithUser(ctx, model.User{ID: "123"})
|
||||
// Path should be libPath, but we want to match the root folder referenced in the m3u, which is `/`
|
||||
mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/"}})
|
||||
})
|
||||
|
||||
Describe("ImportFile", func() {
|
||||
var folder *model.Folder
|
||||
BeforeEach(func() {
|
||||
ps = NewPlaylists(ds)
|
||||
ds.MockedMediaFile = &mockedMediaFileRepo{}
|
||||
libPath, _ := os.Getwd()
|
||||
folder = &model.Folder{
|
||||
ID: "1",
|
||||
LibraryID: 1,
|
||||
LibraryPath: libPath,
|
||||
Path: "tests/fixtures",
|
||||
Name: "playlists",
|
||||
}
|
||||
})
|
||||
|
||||
Describe("M3U", func() {
|
||||
It("parses well-formed playlists", func() {
|
||||
pls, err := ps.ImportFile(ctx, folder, "pls1.m3u")
|
||||
pls, err := ps.ImportFile(ctx, "tests/fixtures", "playlists/pls1.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.OwnerID).To(Equal("123"))
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("tests/fixtures/playlists/test.mp3"))
|
||||
Expect(pls.Tracks[1].Path).To(Equal("tests/fixtures/playlists/test.ogg"))
|
||||
Expect(mockPlsRepo.last).To(Equal(pls))
|
||||
Expect(pls.Tracks).To(HaveLen(3))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("tests/fixtures/test.mp3"))
|
||||
Expect(pls.Tracks[1].Path).To(Equal("tests/fixtures/test.ogg"))
|
||||
Expect(pls.Tracks[2].Path).To(Equal("/tests/fixtures/01 Invisible (RED) Edit Version.mp3"))
|
||||
Expect(mp.last).To(Equal(pls))
|
||||
})
|
||||
|
||||
It("parses playlists using LF ending", func() {
|
||||
pls, err := ps.ImportFile(ctx, folder, "lf-ended.m3u")
|
||||
pls, err := ps.ImportFile(ctx, "tests/fixtures/playlists", "lf-ended.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
})
|
||||
|
||||
It("parses playlists using CR ending (old Mac format)", func() {
|
||||
pls, err := ps.ImportFile(ctx, folder, "cr-ended.m3u")
|
||||
pls, err := ps.ImportFile(ctx, "tests/fixtures/playlists", "cr-ended.m3u")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
})
|
||||
@@ -77,9 +62,9 @@ var _ = Describe("Playlists", func() {
|
||||
|
||||
Describe("NSP", func() {
|
||||
It("parses well-formed playlists", func() {
|
||||
pls, err := ps.ImportFile(ctx, folder, "recently_played.nsp")
|
||||
pls, err := ps.ImportFile(ctx, "tests/fixtures", "playlists/recently_played.nsp")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(mockPlsRepo.last).To(Equal(pls))
|
||||
Expect(mp.last).To(Equal(pls))
|
||||
Expect(pls.OwnerID).To(Equal("123"))
|
||||
Expect(pls.Name).To(Equal("Recently Played"))
|
||||
Expect(pls.Comment).To(Equal("Recently played tracks"))
|
||||
@@ -88,10 +73,6 @@ var _ = Describe("Playlists", func() {
|
||||
Expect(pls.Rules.Limit).To(Equal(100))
|
||||
Expect(pls.Rules.Expression).To(BeAssignableToTypeOf(criteria.All{}))
|
||||
})
|
||||
It("returns an error if the playlist is not well-formed", func() {
|
||||
_, err := ps.ImportFile(ctx, folder, "invalid_json.nsp")
|
||||
Expect(err.Error()).To(ContainSubstring("line 19, column 1: invalid character '\\n'"))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -101,136 +82,79 @@ var _ = Describe("Playlists", func() {
|
||||
repo = &mockedMediaFileFromListRepo{}
|
||||
ds.MockedMediaFile = repo
|
||||
ps = NewPlaylists(ds)
|
||||
mockLibRepo.SetData([]model.Library{{ID: 1, Path: "/music"}, {ID: 2, Path: "/new"}})
|
||||
ctx = request.WithUser(ctx, model.User{ID: "123"})
|
||||
})
|
||||
|
||||
It("parses well-formed playlists", func() {
|
||||
repo.data = []string{
|
||||
"tests/test.mp3",
|
||||
"tests/test.ogg",
|
||||
"tests/01 Invisible (RED) Edit Version.mp3",
|
||||
"downloads/newfile.flac",
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
"/tests/fixtures/01 Invisible (RED) Edit Version.mp3",
|
||||
}
|
||||
m3u := strings.Join([]string{
|
||||
"#PLAYLIST:playlist 1",
|
||||
"/music/tests/test.mp3",
|
||||
"/music/tests/test.ogg",
|
||||
"/new/downloads/newfile.flac",
|
||||
"file:///music/tests/01%20Invisible%20(RED)%20Edit%20Version.mp3",
|
||||
}, "\n")
|
||||
f := strings.NewReader(m3u)
|
||||
|
||||
f, _ := os.Open("tests/fixtures/playlists/pls-with-name.m3u")
|
||||
defer f.Close()
|
||||
pls, err := ps.ImportM3U(ctx, f)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.OwnerID).To(Equal("123"))
|
||||
Expect(pls.Name).To(Equal("playlist 1"))
|
||||
Expect(pls.Sync).To(BeFalse())
|
||||
Expect(pls.Tracks).To(HaveLen(4))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("tests/test.mp3"))
|
||||
Expect(pls.Tracks[1].Path).To(Equal("tests/test.ogg"))
|
||||
Expect(pls.Tracks[2].Path).To(Equal("downloads/newfile.flac"))
|
||||
Expect(pls.Tracks[3].Path).To(Equal("tests/01 Invisible (RED) Edit Version.mp3"))
|
||||
Expect(mockPlsRepo.last).To(Equal(pls))
|
||||
Expect(pls.Tracks).To(HaveLen(3))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("tests/fixtures/test.mp3"))
|
||||
Expect(pls.Tracks[1].Path).To(Equal("tests/fixtures/test.ogg"))
|
||||
Expect(pls.Tracks[2].Path).To(Equal("/tests/fixtures/01 Invisible (RED) Edit Version.mp3"))
|
||||
Expect(mp.last).To(Equal(pls))
|
||||
f.Close()
|
||||
|
||||
})
|
||||
|
||||
It("sets the playlist name as a timestamp if the #PLAYLIST directive is not present", func() {
|
||||
repo.data = []string{
|
||||
"tests/test.mp3",
|
||||
"tests/test.ogg",
|
||||
"/tests/01 Invisible (RED) Edit Version.mp3",
|
||||
"tests/fixtures/test.mp3",
|
||||
"tests/fixtures/test.ogg",
|
||||
"/tests/fixtures/01 Invisible (RED) Edit Version.mp3",
|
||||
}
|
||||
m3u := strings.Join([]string{
|
||||
"/music/tests/test.mp3",
|
||||
"/music/tests/test.ogg",
|
||||
}, "\n")
|
||||
f := strings.NewReader(m3u)
|
||||
f, _ := os.Open("tests/fixtures/playlists/pls-without-name.m3u")
|
||||
defer f.Close()
|
||||
pls, err := ps.ImportM3U(ctx, f)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
_, err = time.Parse(time.RFC3339, pls.Name)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(2))
|
||||
Expect(pls.Tracks).To(HaveLen(3))
|
||||
})
|
||||
|
||||
It("returns only tracks that exist in the database and in the same other as the m3u", func() {
|
||||
repo.data = []string{
|
||||
"album1/test1.mp3",
|
||||
"album2/test2.mp3",
|
||||
"album3/test3.mp3",
|
||||
"test1.mp3",
|
||||
"test2.mp3",
|
||||
"test3.mp3",
|
||||
}
|
||||
m3u := strings.Join([]string{
|
||||
"/music/album3/test3.mp3",
|
||||
"/music/album1/test1.mp3",
|
||||
"/music/album4/test4.mp3",
|
||||
"/music/album2/test2.mp3",
|
||||
"test3.mp3",
|
||||
"test1.mp3",
|
||||
"test4.mp3",
|
||||
"test2.mp3",
|
||||
}, "\n")
|
||||
f := strings.NewReader(m3u)
|
||||
pls, err := ps.ImportM3U(ctx, f)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(3))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("album3/test3.mp3"))
|
||||
Expect(pls.Tracks[1].Path).To(Equal("album1/test1.mp3"))
|
||||
Expect(pls.Tracks[2].Path).To(Equal("album2/test2.mp3"))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("test3.mp3"))
|
||||
Expect(pls.Tracks[1].Path).To(Equal("test1.mp3"))
|
||||
Expect(pls.Tracks[2].Path).To(Equal("test2.mp3"))
|
||||
})
|
||||
|
||||
It("is case-insensitive when comparing paths", func() {
|
||||
repo.data = []string{
|
||||
"abc/tEsT1.Mp3",
|
||||
"tEsT1.Mp3",
|
||||
}
|
||||
m3u := strings.Join([]string{
|
||||
"/music/ABC/TeSt1.mP3",
|
||||
"TeSt1.mP3",
|
||||
}, "\n")
|
||||
f := strings.NewReader(m3u)
|
||||
pls, err := ps.ImportM3U(ctx, f)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(pls.Tracks).To(HaveLen(1))
|
||||
Expect(pls.Tracks[0].Path).To(Equal("abc/tEsT1.Mp3"))
|
||||
})
|
||||
})
|
||||
|
||||
Describe("InPlaylistsPath", func() {
|
||||
var folder model.Folder
|
||||
|
||||
BeforeEach(func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
folder = model.Folder{
|
||||
LibraryPath: "/music",
|
||||
Path: "playlists/abc",
|
||||
Name: "folder1",
|
||||
}
|
||||
})
|
||||
|
||||
It("returns true if PlaylistsPath is empty", func() {
|
||||
conf.Server.PlaylistsPath = ""
|
||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns true if PlaylistsPath is any (**/**)", func() {
|
||||
conf.Server.PlaylistsPath = "**/**"
|
||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns true if folder is in PlaylistsPath", func() {
|
||||
conf.Server.PlaylistsPath = "other/**:playlists/**"
|
||||
Expect(InPlaylistsPath(folder)).To(BeTrue())
|
||||
})
|
||||
|
||||
It("returns false if folder is not in PlaylistsPath", func() {
|
||||
conf.Server.PlaylistsPath = "other"
|
||||
Expect(InPlaylistsPath(folder)).To(BeFalse())
|
||||
})
|
||||
|
||||
It("returns true if for a playlist in root of MusicFolder if PlaylistsPath is '.'", func() {
|
||||
conf.Server.PlaylistsPath = "."
|
||||
Expect(InPlaylistsPath(folder)).To(BeFalse())
|
||||
|
||||
folder2 := model.Folder{
|
||||
LibraryPath: "/music",
|
||||
Path: "",
|
||||
Name: ".",
|
||||
}
|
||||
|
||||
Expect(InPlaylistsPath(folder2)).To(BeTrue())
|
||||
Expect(pls.Tracks[0].Path).To(Equal("tEsT1.Mp3"))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -268,16 +192,16 @@ func (r *mockedMediaFileFromListRepo) FindByPaths([]string) (model.MediaFiles, e
|
||||
return mfs, nil
|
||||
}
|
||||
|
||||
type mockedPlaylistRepo struct {
|
||||
type mockedPlaylist struct {
|
||||
last *model.Playlist
|
||||
model.PlaylistRepository
|
||||
}
|
||||
|
||||
func (r *mockedPlaylistRepo) FindByPath(string) (*model.Playlist, error) {
|
||||
func (r *mockedPlaylist) FindByPath(string) (*model.Playlist, error) {
|
||||
return nil, model.ErrNotFound
|
||||
}
|
||||
|
||||
func (r *mockedPlaylistRepo) Put(pls *model.Playlist) error {
|
||||
func (r *mockedPlaylist) Put(pls *model.Playlist) error {
|
||||
r.last = pls
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -53,25 +53,18 @@ func newPlayTracker(ds model.DataStore, broker events.Broker) *playTracker {
|
||||
m := cache.NewSimpleCache[string, NowPlayingInfo]()
|
||||
p := &playTracker{ds: ds, playMap: m, broker: broker}
|
||||
p.scrobblers = make(map[string]Scrobbler)
|
||||
var enabled []string
|
||||
for name, constructor := range constructors {
|
||||
s := constructor(ds)
|
||||
if s == nil {
|
||||
log.Debug("Scrobbler not available. Missing configuration?", "name", name)
|
||||
continue
|
||||
}
|
||||
enabled = append(enabled, name)
|
||||
if conf.Server.DevEnableBufferedScrobble {
|
||||
s = newBufferedScrobbler(ds, s, name)
|
||||
}
|
||||
p.scrobblers[name] = s
|
||||
}
|
||||
log.Debug("List of scrobblers enabled", "names", enabled)
|
||||
return p
|
||||
}
|
||||
|
||||
func (p *playTracker) NowPlaying(ctx context.Context, playerId string, playerName string, trackId string) error {
|
||||
mf, err := p.ds.MediaFile(ctx).GetWithParticipants(trackId)
|
||||
mf, err := p.ds.MediaFile(ctx).Get(trackId)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error retrieving mediaFile", "id", trackId, err)
|
||||
return err
|
||||
@@ -131,7 +124,7 @@ func (p *playTracker) Submit(ctx context.Context, submissions []Submission) erro
|
||||
success := 0
|
||||
|
||||
for _, s := range submissions {
|
||||
mf, err := p.ds.MediaFile(ctx).GetWithParticipants(s.TrackID)
|
||||
mf, err := p.ds.MediaFile(ctx).Get(s.TrackID)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Cannot find track for scrobbling", "id", s.TrackID, "user", username, err)
|
||||
continue
|
||||
@@ -165,9 +158,7 @@ func (p *playTracker) incPlay(ctx context.Context, track *model.MediaFile, times
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, artist := range track.Participants[model.RoleArtist] {
|
||||
err = tx.Artist(ctx).IncPlayCount(artist.ID, timestamp)
|
||||
}
|
||||
err = tx.Artist(ctx).IncPlayCount(track.ArtistID, timestamp)
|
||||
return err
|
||||
})
|
||||
}
|
||||
|
||||
@@ -22,8 +22,7 @@ var _ = Describe("PlayTracker", func() {
|
||||
var tracker PlayTracker
|
||||
var track model.MediaFile
|
||||
var album model.Album
|
||||
var artist1 model.Artist
|
||||
var artist2 model.Artist
|
||||
var artist model.Artist
|
||||
var fake fakeScrobbler
|
||||
|
||||
BeforeEach(func() {
|
||||
@@ -35,12 +34,9 @@ var _ = Describe("PlayTracker", func() {
|
||||
ctx = request.WithPlayer(ctx, model.Player{ScrobbleEnabled: true})
|
||||
ds = &tests.MockDataStore{}
|
||||
fake = fakeScrobbler{Authorized: true}
|
||||
Register("fake", func(model.DataStore) Scrobbler {
|
||||
Register("fake", func(ds model.DataStore) Scrobbler {
|
||||
return &fake
|
||||
})
|
||||
Register("disabled", func(model.DataStore) Scrobbler {
|
||||
return nil
|
||||
})
|
||||
tracker = newPlayTracker(ds, events.GetBroker())
|
||||
|
||||
track = model.MediaFile{
|
||||
@@ -48,27 +44,20 @@ var _ = Describe("PlayTracker", func() {
|
||||
Title: "Track Title",
|
||||
Album: "Track Album",
|
||||
AlbumID: "al-1",
|
||||
Artist: "Track Artist",
|
||||
ArtistID: "ar-1",
|
||||
AlbumArtist: "Track AlbumArtist",
|
||||
TrackNumber: 1,
|
||||
Duration: 180,
|
||||
MbzRecordingID: "mbz-123",
|
||||
Participants: map[model.Role]model.ParticipantList{
|
||||
model.RoleArtist: []model.Participant{_p("ar-1", "Artist 1"), _p("ar-2", "Artist 2")},
|
||||
},
|
||||
}
|
||||
_ = ds.MediaFile(ctx).Put(&track)
|
||||
artist1 = model.Artist{ID: "ar-1"}
|
||||
_ = ds.Artist(ctx).Put(&artist1)
|
||||
artist2 = model.Artist{ID: "ar-2"}
|
||||
_ = ds.Artist(ctx).Put(&artist2)
|
||||
artist = model.Artist{ID: "ar-1"}
|
||||
_ = ds.Artist(ctx).Put(&artist)
|
||||
album = model.Album{ID: "al-1"}
|
||||
_ = ds.Album(ctx).(*tests.MockAlbumRepo).Put(&album)
|
||||
})
|
||||
|
||||
It("does not register disabled scrobblers", func() {
|
||||
Expect(tracker.(*playTracker).scrobblers).To(HaveKey("fake"))
|
||||
Expect(tracker.(*playTracker).scrobblers).ToNot(HaveKey("disabled"))
|
||||
})
|
||||
|
||||
Describe("NowPlaying", func() {
|
||||
It("sends track to agent", func() {
|
||||
err := tracker.NowPlaying(ctx, "player-1", "player-one", "123")
|
||||
@@ -76,7 +65,6 @@ var _ = Describe("PlayTracker", func() {
|
||||
Expect(fake.NowPlayingCalled).To(BeTrue())
|
||||
Expect(fake.UserID).To(Equal("u-1"))
|
||||
Expect(fake.Track.ID).To(Equal("123"))
|
||||
Expect(fake.Track.Participants).To(Equal(track.Participants))
|
||||
})
|
||||
It("does not send track to agent if user has not authorized", func() {
|
||||
fake.Authorized = false
|
||||
@@ -141,7 +129,6 @@ var _ = Describe("PlayTracker", func() {
|
||||
Expect(fake.ScrobbleCalled).To(BeTrue())
|
||||
Expect(fake.UserID).To(Equal("u-1"))
|
||||
Expect(fake.LastScrobble.ID).To(Equal("123"))
|
||||
Expect(fake.LastScrobble.Participants).To(Equal(track.Participants))
|
||||
})
|
||||
|
||||
It("increments play counts in the DB", func() {
|
||||
@@ -153,10 +140,7 @@ var _ = Describe("PlayTracker", func() {
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(track.PlayCount).To(Equal(int64(1)))
|
||||
Expect(album.PlayCount).To(Equal(int64(1)))
|
||||
|
||||
// It should increment play counts for all artists
|
||||
Expect(artist1.PlayCount).To(Equal(int64(1)))
|
||||
Expect(artist2.PlayCount).To(Equal(int64(1)))
|
||||
Expect(artist.PlayCount).To(Equal(int64(1)))
|
||||
})
|
||||
|
||||
It("does not send track to agent if user has not authorized", func() {
|
||||
@@ -196,11 +180,9 @@ var _ = Describe("PlayTracker", func() {
|
||||
|
||||
Expect(track.PlayCount).To(Equal(int64(1)))
|
||||
Expect(album.PlayCount).To(Equal(int64(1)))
|
||||
|
||||
// It should increment play counts for all artists
|
||||
Expect(artist1.PlayCount).To(Equal(int64(1)))
|
||||
Expect(artist2.PlayCount).To(Equal(int64(1)))
|
||||
Expect(artist.PlayCount).To(Equal(int64(1)))
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
})
|
||||
@@ -238,11 +220,3 @@ func (f *fakeScrobbler) Scrobble(ctx context.Context, userId string, s Scrobble)
|
||||
f.LastScrobble = s
|
||||
return nil
|
||||
}
|
||||
|
||||
func _p(id, name string, sortName ...string) model.Participant {
|
||||
p := model.Participant{Artist: model.Artist{ID: id, Name: name}}
|
||||
if len(sortName) > 0 {
|
||||
p.Artist.SortArtistName = sortName[0]
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
@@ -167,10 +167,7 @@ func (r *shareRepositoryWrapper) contentsLabelFromPlaylist(shareID string, id st
|
||||
|
||||
func (r *shareRepositoryWrapper) contentsLabelFromMediaFiles(shareID string, ids string) string {
|
||||
idList := strings.Split(ids, ",")
|
||||
mfs, err := r.ds.MediaFile(r.ctx).GetAll(model.QueryOptions{Filters: squirrel.And{
|
||||
squirrel.Eq{"media_file.id": idList},
|
||||
squirrel.Eq{"missing": false},
|
||||
}})
|
||||
mfs, err := r.ds.MediaFile(r.ctx).GetAll(model.QueryOptions{Filters: squirrel.Eq{"id": idList}})
|
||||
if err != nil {
|
||||
log.Error(r.ctx, "Error retrieving media files for share", "share", shareID, err)
|
||||
return ""
|
||||
|
||||
@@ -1,25 +0,0 @@
package storage

import (
	"context"
	"io/fs"

	"github.com/navidrome/navidrome/model/metadata"
)

type Storage interface {
	FS() (MusicFS, error)
}

// MusicFS is an interface that extends the fs.FS interface with the ability to read tags from files
type MusicFS interface {
	fs.FS
	ReadTags(path ...string) (map[string]metadata.Info, error)
}

// Watcher is a storage with the ability to watch the FS and notify changes
type Watcher interface {
	// Start starts a watcher on the whole FS and returns a channel to send detected changes.
	// The watcher must be stopped when the context is done.
	Start(context.Context) (<-chan string, error)
}
|
||||
@@ -1,29 +0,0 @@
package local

import (
	"io/fs"
	"sync"

	"github.com/navidrome/navidrome/model/metadata"
)

// Extractor is an interface that defines the methods that a tag/metadata extractor must implement
type Extractor interface {
	Parse(files ...string) (map[string]metadata.Info, error)
	Version() string
}

type extractorConstructor func(fs.FS, string) Extractor

var (
	extractors = map[string]extractorConstructor{}
	lock       sync.RWMutex
)

// RegisterExtractor registers a new extractor, so it can be used by the local storage. The one to be used is
// defined with the configuration option Scanner.Extractor.
func RegisterExtractor(id string, f extractorConstructor) {
	lock.Lock()
	defer lock.Unlock()
	extractors[id] = f
}
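As the comment above describes, tag readers plug into this registry and are selected via Scanner.Extractor. A minimal sketch of what an implementation and its registration could look like; the `nullExtractor` name is invented purely for illustration, the real extractors register themselves the same way from their own packages:

```go
package main

import (
	"io/fs"

	"github.com/navidrome/navidrome/core/storage/local"
	"github.com/navidrome/navidrome/model/metadata"
)

// nullExtractor is a hypothetical Extractor that reports no tags; it only
// illustrates the shape of the interface defined above.
type nullExtractor struct{}

func (nullExtractor) Parse(files ...string) (map[string]metadata.Info, error) {
	return map[string]metadata.Info{}, nil
}

func (nullExtractor) Version() string { return "0" }

func main() {
	// Register the extractor under an id; the one actually used by the local
	// storage is selected through the Scanner.Extractor configuration option.
	local.RegisterExtractor("null", func(_ fs.FS, _ string) local.Extractor { return nullExtractor{} })
}
```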
|
||||
@@ -1,91 +0,0 @@
|
||||
package local
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/djherbis/times"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core/storage"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
)
|
||||
|
||||
// localStorage implements a Storage that reads the files from the local filesystem and uses registered extractors
|
||||
// to extract the metadata and tags from the files.
|
||||
type localStorage struct {
|
||||
u url.URL
|
||||
extractor Extractor
|
||||
resolvedPath string
|
||||
watching atomic.Bool
|
||||
}
|
||||
|
||||
func newLocalStorage(u url.URL) storage.Storage {
|
||||
newExtractor, ok := extractors[conf.Server.Scanner.Extractor]
|
||||
if !ok || newExtractor == nil {
|
||||
log.Fatal("Extractor not found", "path", conf.Server.Scanner.Extractor)
|
||||
}
|
||||
isWindowsPath := filepath.VolumeName(u.Host) != ""
|
||||
if u.Scheme == storage.LocalSchemaID && isWindowsPath {
|
||||
u.Path = filepath.Join(u.Host, u.Path)
|
||||
}
|
||||
resolvedPath, err := filepath.EvalSymlinks(u.Path)
|
||||
if err != nil {
|
||||
log.Warn("Error resolving path", "path", u.Path, "err", err)
|
||||
resolvedPath = u.Path
|
||||
}
|
||||
return &localStorage{u: u, extractor: newExtractor(os.DirFS(u.Path), u.Path), resolvedPath: resolvedPath}
|
||||
}
|
||||
|
||||
func (s *localStorage) FS() (storage.MusicFS, error) {
|
||||
path := s.u.Path
|
||||
if _, err := os.Stat(path); err != nil {
|
||||
return nil, fmt.Errorf("%w: %s", err, path)
|
||||
}
|
||||
return &localFS{FS: os.DirFS(path), extractor: s.extractor}, nil
|
||||
}
|
||||
|
||||
type localFS struct {
|
||||
fs.FS
|
||||
extractor Extractor
|
||||
}
|
||||
|
||||
func (lfs *localFS) ReadTags(path ...string) (map[string]metadata.Info, error) {
|
||||
res, err := lfs.extractor.Parse(path...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for path, v := range res {
|
||||
if v.FileInfo == nil {
|
||||
info, err := fs.Stat(lfs, path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
v.FileInfo = localFileInfo{info}
|
||||
res[path] = v
|
||||
}
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// localFileInfo is a wrapper around fs.FileInfo that adds a BirthTime method, to make it compatible
|
||||
// with metadata.FileInfo
|
||||
type localFileInfo struct {
|
||||
fs.FileInfo
|
||||
}
|
||||
|
||||
func (lfi localFileInfo) BirthTime() time.Time {
|
||||
if ts := times.Get(lfi.FileInfo); ts.HasBirthTime() {
|
||||
return ts.BirthTime()
|
||||
}
|
||||
return time.Now()
|
||||
}
|
||||
|
||||
func init() {
|
||||
storage.Register(storage.LocalSchemaID, newLocalStorage)
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
package local
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
func TestLocal(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Local Storage Test Suite")
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
package local
|
||||
|
||||
import "github.com/rjeczalik/notify"
|
||||
|
||||
const WatchEvents = notify.All | notify.FSEventsInodeMetaMod
|
||||
@@ -1,7 +0,0 @@
|
||||
//go:build !linux && !darwin && !windows
|
||||
|
||||
package local
|
||||
|
||||
import "github.com/rjeczalik/notify"
|
||||
|
||||
const WatchEvents = notify.All
|
||||
@@ -1,5 +0,0 @@
|
||||
package local
|
||||
|
||||
import "github.com/rjeczalik/notify"
|
||||
|
||||
const WatchEvents = notify.All | notify.InModify | notify.InAttrib
|
||||
@@ -1,5 +0,0 @@
|
||||
package local
|
||||
|
||||
import "github.com/rjeczalik/notify"
|
||||
|
||||
const WatchEvents = notify.All | notify.FileNotifyChangeAttributes
|
||||
@@ -1,57 +0,0 @@
|
||||
package local
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/rjeczalik/notify"
|
||||
)
|
||||
|
||||
// Start starts a watcher on the whole FS and returns a channel to send detected changes.
|
||||
// It uses `notify` to detect changes in the filesystem, so it may not work on all platforms/use-cases.
|
||||
// Notoriously, it does not work on some networked mounts and Windows with WSL2.
|
||||
func (s *localStorage) Start(ctx context.Context) (<-chan string, error) {
|
||||
if !s.watching.CompareAndSwap(false, true) {
|
||||
return nil, errors.New("watcher already started")
|
||||
}
|
||||
input := make(chan notify.EventInfo, 1)
|
||||
output := make(chan string, 1)
|
||||
|
||||
started := make(chan struct{})
|
||||
go func() {
|
||||
defer close(input)
|
||||
defer close(output)
|
||||
|
||||
libPath := filepath.Join(s.u.Path, "...")
|
||||
log.Debug(ctx, "Starting watcher", "lib", libPath)
|
||||
err := notify.Watch(libPath, input, WatchEvents)
|
||||
if err != nil {
|
||||
log.Error("Error starting watcher", "lib", libPath, err)
|
||||
return
|
||||
}
|
||||
defer notify.Stop(input)
|
||||
close(started) // signals the main goroutine we have started
|
||||
|
||||
for {
|
||||
select {
|
||||
case event := <-input:
|
||||
log.Trace(ctx, "Detected change", "event", event, "lib", s.u.Path)
|
||||
name := event.Path()
|
||||
name = strings.Replace(name, s.resolvedPath, s.u.Path, 1)
|
||||
output <- name
|
||||
case <-ctx.Done():
|
||||
log.Debug(ctx, "Stopping watcher", "path", s.u.Path)
|
||||
s.watching.Store(false)
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
select {
|
||||
case <-started:
|
||||
case <-ctx.Done():
|
||||
}
|
||||
return output, nil
|
||||
}
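As the comment above notes, `Start` delivers changed paths on a channel that is closed when the context is cancelled. A minimal sketch of a consumer; it assumes the `file://` scheme is registered (the local storage's `init` above does this) and that a tag extractor has been configured, which the full application handles at startup:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/navidrome/navidrome/core/storage"
	// Importing the local package registers the file:// scheme via its init.
	_ "github.com/navidrome/navidrome/core/storage/local"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	s, err := storage.For("/music") // plain paths resolve to the local storage
	if err != nil {
		panic(err)
	}
	w, ok := s.(storage.Watcher) // not every Storage implementation can watch
	if !ok {
		panic("storage does not support watching")
	}

	changes, err := w.Start(ctx)
	if err != nil {
		panic(err)
	}
	for p := range changes { // the channel is closed when ctx is done
		fmt.Println("changed:", p)
	}
}
```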
|
||||
@@ -1,139 +0,0 @@
|
||||
package local_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/core/storage"
|
||||
"github.com/navidrome/navidrome/core/storage/local"
|
||||
_ "github.com/navidrome/navidrome/core/storage/local"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
var _ = XDescribe("Watcher", func() {
|
||||
var lsw storage.Watcher
|
||||
var tmpFolder string
|
||||
|
||||
BeforeEach(func() {
|
||||
tmpFolder = GinkgoT().TempDir()
|
||||
|
||||
local.RegisterExtractor("noop", func(fs fs.FS, path string) local.Extractor { return noopExtractor{} })
|
||||
conf.Server.Scanner.Extractor = "noop"
|
||||
|
||||
ls, err := storage.For(tmpFolder)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
// It should implement Watcher
|
||||
var ok bool
|
||||
lsw, ok = ls.(storage.Watcher)
|
||||
Expect(ok).To(BeTrue())
|
||||
|
||||
// Make sure temp folder is created
|
||||
Eventually(func() error {
|
||||
_, err := os.Stat(tmpFolder)
|
||||
return err
|
||||
}).Should(Succeed())
|
||||
})
|
||||
|
||||
It("should start and stop watcher", func() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
w, err := lsw.Start(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
cancel()
|
||||
Eventually(w).Should(BeClosed())
|
||||
})
|
||||
|
||||
It("should return error if watcher is already started", func() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
_, err := lsw.Start(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
_, err = lsw.Start(ctx)
|
||||
Expect(err).To(HaveOccurred())
|
||||
})
|
||||
|
||||
It("should detect new files", func() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
changes, err := lsw.Start(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
_, err = os.Create(filepath.Join(tmpFolder, "test.txt"))
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
Eventually(changes).WithTimeout(2 * time.Second).Should(Receive(Equal(tmpFolder)))
|
||||
})
|
||||
|
||||
It("should detect new subfolders", func() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
changes, err := lsw.Start(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
Expect(os.Mkdir(filepath.Join(tmpFolder, "subfolder"), 0755)).To(Succeed())
|
||||
|
||||
Eventually(changes).WithTimeout(2 * time.Second).Should(Receive(Equal(filepath.Join(tmpFolder, "subfolder"))))
|
||||
})
|
||||
|
||||
It("should detect changes in subfolders recursively", func() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
subfolder := filepath.Join(tmpFolder, "subfolder1/subfolder2")
|
||||
Expect(os.MkdirAll(subfolder, 0755)).To(Succeed())
|
||||
|
||||
changes, err := lsw.Start(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
filePath := filepath.Join(subfolder, "test.txt")
|
||||
Expect(os.WriteFile(filePath, []byte("test"), 0600)).To(Succeed())
|
||||
|
||||
Eventually(changes).WithTimeout(2 * time.Second).Should(Receive(Equal(filePath)))
|
||||
})
|
||||
|
||||
It("should detect removed in files", func() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
changes, err := lsw.Start(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
filePath := filepath.Join(tmpFolder, "test.txt")
|
||||
Expect(os.WriteFile(filePath, []byte("test"), 0600)).To(Succeed())
|
||||
|
||||
Eventually(changes).WithTimeout(2 * time.Second).Should(Receive(Equal(filePath)))
|
||||
|
||||
Expect(os.Remove(filePath)).To(Succeed())
|
||||
Eventually(changes).WithTimeout(2 * time.Second).Should(Receive(Equal(filePath)))
|
||||
})
|
||||
|
||||
It("should detect file moves", func() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
filePath := filepath.Join(tmpFolder, "test.txt")
|
||||
Expect(os.WriteFile(filePath, []byte("test"), 0600)).To(Succeed())
|
||||
|
||||
changes, err := lsw.Start(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
newPath := filepath.Join(tmpFolder, "test2.txt")
|
||||
Expect(os.Rename(filePath, newPath)).To(Succeed())
|
||||
Eventually(changes).WithTimeout(2 * time.Second).Should(Receive(Equal(newPath)))
|
||||
})
|
||||
})
|
||||
|
||||
type noopExtractor struct{}
|
||||
|
||||
func (s noopExtractor) Parse(files ...string) (map[string]metadata.Info, error) { return nil, nil }
|
||||
func (s noopExtractor) Version() string { return "0" }
|
||||
@@ -1,51 +0,0 @@
package storage

import (
	"errors"
	"net/url"
	"path/filepath"
	"strings"
	"sync"
)

const LocalSchemaID = "file"

type constructor func(url.URL) Storage

var (
	registry = map[string]constructor{}
	lock     sync.RWMutex
)

func Register(schema string, c constructor) {
	lock.Lock()
	defer lock.Unlock()
	registry[schema] = c
}

// For returns a Storage implementation for the given URI.
// It uses the schema part of the URI to find the correct registered
// Storage constructor.
// If the URI does not contain a schema, it is treated as a file:// URI.
func For(uri string) (Storage, error) {
	lock.RLock()
	defer lock.RUnlock()
	parts := strings.Split(uri, "://")

	// Paths without schema are treated as file:// and use the default LocalStorage implementation
	if len(parts) < 2 {
		uri, _ = filepath.Abs(uri)
		uri = filepath.ToSlash(uri)
		uri = LocalSchemaID + "://" + uri
	}

	u, err := url.Parse(uri)
	if err != nil {
		return nil, err
	}
	c, ok := registry[u.Scheme]
	if !ok {
		return nil, errors.New("schema '" + u.Scheme + "' not registered")
	}
	return c(*u), nil
}
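The `For` helper above dispatches on the URI scheme through the package-level registry. A minimal sketch of wiring a custom scheme into it; the `demo` scheme and `demoStorage` type are invented for illustration and are not part of the codebase:

```go
package main

import (
	"fmt"
	"net/url"

	"github.com/navidrome/navidrome/core/storage"
)

// demoStorage is a hypothetical Storage used only in this sketch.
type demoStorage struct{ u url.URL }

func (d *demoStorage) FS() (storage.MusicFS, error) {
	return nil, fmt.Errorf("demo storage has no filesystem")
}

func main() {
	// Register a constructor for the "demo" scheme.
	storage.Register("demo", func(u url.URL) storage.Storage { return &demoStorage{u: u} })

	// Scheme-qualified URIs are dispatched to the matching constructor;
	// a plain path would instead be normalized to file:// and use the local storage.
	s, err := storage.For("demo:///library")
	fmt.Printf("%T %v\n", s, err)
}
```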
|
||||
@@ -1,78 +0,0 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
func TestApp(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Storage Test Suite")
|
||||
}
|
||||
|
||||
var _ = Describe("Storage", func() {
|
||||
When("schema is not registered", func() {
|
||||
BeforeEach(func() {
|
||||
registry = map[string]constructor{}
|
||||
})
|
||||
|
||||
It("should return error", func() {
|
||||
_, err := For("file:///tmp")
|
||||
Expect(err).To(HaveOccurred())
|
||||
})
|
||||
})
|
||||
When("schema is registered", func() {
|
||||
BeforeEach(func() {
|
||||
registry = map[string]constructor{}
|
||||
Register("file", func(url url.URL) Storage { return &fakeLocalStorage{u: url} })
|
||||
Register("s3", func(url url.URL) Storage { return &fakeS3Storage{u: url} })
|
||||
})
|
||||
|
||||
It("should return correct implementation", func() {
|
||||
s, err := For("file:///tmp")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(s).To(BeAssignableToTypeOf(&fakeLocalStorage{}))
|
||||
Expect(s.(*fakeLocalStorage).u.Scheme).To(Equal("file"))
|
||||
Expect(s.(*fakeLocalStorage).u.Path).To(Equal("/tmp"))
|
||||
|
||||
s, err = For("s3:///bucket")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(s).To(BeAssignableToTypeOf(&fakeS3Storage{}))
|
||||
Expect(s.(*fakeS3Storage).u.Scheme).To(Equal("s3"))
|
||||
Expect(s.(*fakeS3Storage).u.Path).To(Equal("/bucket"))
|
||||
})
|
||||
It("should return a file implementation when schema is not specified", func() {
|
||||
s, err := For("/tmp")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(s).To(BeAssignableToTypeOf(&fakeLocalStorage{}))
|
||||
Expect(s.(*fakeLocalStorage).u.Scheme).To(Equal("file"))
|
||||
Expect(s.(*fakeLocalStorage).u.Path).To(Equal("/tmp"))
|
||||
})
|
||||
It("should return a file implementation for a relative folder", func() {
|
||||
s, err := For("tmp")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
cwd, _ := os.Getwd()
|
||||
Expect(s).To(BeAssignableToTypeOf(&fakeLocalStorage{}))
|
||||
Expect(s.(*fakeLocalStorage).u.Scheme).To(Equal("file"))
|
||||
Expect(s.(*fakeLocalStorage).u.Path).To(Equal(filepath.Join(cwd, "tmp")))
|
||||
})
|
||||
It("should return error if schema is unregistered", func() {
|
||||
_, err := For("webdav:///tmp")
|
||||
Expect(err).To(HaveOccurred())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
type fakeLocalStorage struct {
|
||||
Storage
|
||||
u url.URL
|
||||
}
|
||||
type fakeS3Storage struct {
|
||||
Storage
|
||||
u url.URL
|
||||
}
|
||||
@@ -1,323 +0,0 @@
|
||||
//nolint:unused
|
||||
package storagetest
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"net/url"
|
||||
"path"
|
||||
"testing/fstest"
|
||||
"time"
|
||||
|
||||
"github.com/navidrome/navidrome/core/storage"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/model/metadata"
|
||||
"github.com/navidrome/navidrome/utils/random"
|
||||
)
|
||||
|
||||
// FakeStorage is a fake storage that provides a FakeFS.
|
||||
// It is used for testing purposes.
|
||||
type FakeStorage struct{ fs *FakeFS }
|
||||
|
||||
// Register registers the FakeStorage for the given scheme. To use it, set the model.Library's Path to "fake:///music",
|
||||
// and register a FakeFS with schema = "fake". The storage registered will always return the same FakeFS instance.
|
||||
func Register(schema string, fs *FakeFS) {
|
||||
storage.Register(schema, func(url url.URL) storage.Storage { return &FakeStorage{fs: fs} })
|
||||
}
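Following the comment above, a test can assemble an in-memory library and expose it through the "fake" scheme roughly like this; a sketch, with the package name, file paths and tags purely illustrative:

```go
package scanner_test

import (
	"testing/fstest"

	"github.com/navidrome/navidrome/core/storage/storagetest"
)

// setupFakeLibrary registers an in-memory library under the "fake" scheme.
// A model.Library used by the test would then point its Path at "fake:///music".
func setupFakeLibrary() *storagetest.FakeFS {
	ffs := &storagetest.FakeFS{}
	ffs.SetFiles(fstest.MapFS{
		"Artist/Album/01 - Song.mp3": storagetest.MP3(storagetest.Track(1, "Song")),
	})
	storagetest.Register("fake", ffs)
	return ffs
}
```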
|
||||
|
||||
func (s FakeStorage) FS() (storage.MusicFS, error) {
|
||||
return s.fs, nil
|
||||
}
|
||||
|
||||
// FakeFS is a fake filesystem that can be used for testing purposes.
|
||||
// It implements the storage.MusicFS interface and keeps all files in memory, by using a fstest.MapFS internally.
|
||||
// You must NOT add files directly in the MapFS property, but use SetFiles and its other methods instead.
|
||||
// This is because the FakeFS keeps track of the latest modification time of directories, simulating the
|
||||
// behavior of a real filesystem, and you should not bypass this logic.
|
||||
type FakeFS struct {
|
||||
fstest.MapFS
|
||||
properInit bool
|
||||
}
|
||||
|
||||
func (ffs *FakeFS) SetFiles(files fstest.MapFS) {
|
||||
ffs.properInit = true
|
||||
ffs.MapFS = files
|
||||
ffs.createDirTimestamps()
|
||||
}
|
||||
|
||||
func (ffs *FakeFS) Add(filePath string, file *fstest.MapFile, when ...time.Time) {
|
||||
if len(when) == 0 {
|
||||
when = append(when, time.Now())
|
||||
}
|
||||
ffs.MapFS[filePath] = file
|
||||
ffs.touchContainingFolder(filePath, when[0])
|
||||
ffs.createDirTimestamps()
|
||||
}
|
||||
|
||||
func (ffs *FakeFS) Remove(filePath string, when ...time.Time) *fstest.MapFile {
|
||||
filePath = path.Clean(filePath)
|
||||
if len(when) == 0 {
|
||||
when = append(when, time.Now())
|
||||
}
|
||||
if f, ok := ffs.MapFS[filePath]; ok {
|
||||
ffs.touchContainingFolder(filePath, when[0])
|
||||
delete(ffs.MapFS, filePath)
|
||||
return f
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ffs *FakeFS) Move(srcPath string, destPath string, when ...time.Time) {
|
||||
if len(when) == 0 {
|
||||
when = append(when, time.Now())
|
||||
}
|
||||
srcPath = path.Clean(srcPath)
|
||||
destPath = path.Clean(destPath)
|
||||
ffs.MapFS[destPath] = ffs.MapFS[srcPath]
|
||||
ffs.touchContainingFolder(destPath, when[0])
|
||||
ffs.Remove(srcPath, when...)
|
||||
}
|
||||
|
||||
// Touch sets the modification time of a file.
|
||||
func (ffs *FakeFS) Touch(filePath string, when ...time.Time) {
|
||||
if len(when) == 0 {
|
||||
when = append(when, time.Now())
|
||||
}
|
||||
filePath = path.Clean(filePath)
|
||||
file, ok := ffs.MapFS[filePath]
|
||||
if ok {
|
||||
file.ModTime = when[0]
|
||||
} else {
|
||||
ffs.MapFS[filePath] = &fstest.MapFile{ModTime: when[0]}
|
||||
}
|
||||
ffs.touchContainingFolder(filePath, file.ModTime)
|
||||
}
|
||||
|
||||
func (ffs *FakeFS) touchContainingFolder(filePath string, ts time.Time) {
|
||||
dir := path.Dir(filePath)
|
||||
dirFile, ok := ffs.MapFS[dir]
|
||||
if !ok {
|
||||
log.Fatal("Directory not found. Forgot to call SetFiles?", "file", filePath)
|
||||
}
|
||||
if dirFile.ModTime.Before(ts) {
|
||||
dirFile.ModTime = ts
|
||||
}
|
||||
}
|
||||
|
||||
// SetError sets an error that will be returned when trying to read the file.
|
||||
func (ffs *FakeFS) SetError(filePath string, err error) {
|
||||
filePath = path.Clean(filePath)
|
||||
if ffs.MapFS[filePath] == nil {
|
||||
ffs.MapFS[filePath] = &fstest.MapFile{Data: []byte{}}
|
||||
}
|
||||
ffs.MapFS[filePath].Sys = err
|
||||
ffs.Touch(filePath)
|
||||
}
|
||||
|
||||
// ClearError clears the error set by SetError.
|
||||
func (ffs *FakeFS) ClearError(filePath string) {
|
||||
filePath = path.Clean(filePath)
|
||||
if file := ffs.MapFS[filePath]; file != nil {
|
||||
file.Sys = nil
|
||||
}
|
||||
ffs.Touch(filePath)
|
||||
}
|
||||
|
||||
func (ffs *FakeFS) UpdateTags(filePath string, newTags map[string]any, when ...time.Time) {
|
||||
f, ok := ffs.MapFS[filePath]
|
||||
if !ok {
|
||||
panic(fmt.Errorf("file %s not found", filePath))
|
||||
}
|
||||
var tags map[string]any
|
||||
err := json.Unmarshal(f.Data, &tags)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
for k, v := range newTags {
|
||||
tags[k] = v
|
||||
}
|
||||
data, _ := json.Marshal(tags)
|
||||
f.Data = data
|
||||
ffs.Touch(filePath, when...)
|
||||
}
|
||||
|
||||
// createDirTimestamps loops through all entries and creates/updates directory entries in the map with the
|
||||
// latest ModTime from any children of that directory.
|
||||
func (ffs *FakeFS) createDirTimestamps() bool {
|
||||
var changed bool
|
||||
for filePath, file := range ffs.MapFS {
|
||||
dir := path.Dir(filePath)
|
||||
dirFile, ok := ffs.MapFS[dir]
|
||||
if !ok {
|
||||
dirFile = &fstest.MapFile{Mode: fs.ModeDir}
|
||||
ffs.MapFS[dir] = dirFile
|
||||
}
|
||||
if dirFile.ModTime.IsZero() {
|
||||
dirFile.ModTime = file.ModTime
|
||||
changed = true
|
||||
}
|
||||
}
|
||||
if changed {
|
||||
// If we updated any directory, we need to re-run the loop to create any parent directories
|
||||
ffs.createDirTimestamps()
|
||||
}
|
||||
return changed
|
||||
}
|
||||
|
||||
func ModTime(ts string) map[string]any { return map[string]any{fakeFileInfoModTime: ts} }
|
||||
func BirthTime(ts string) map[string]any { return map[string]any{fakeFileInfoBirthTime: ts} }
|
||||
|
||||
func Template(t ...map[string]any) func(...map[string]any) *fstest.MapFile {
|
||||
return func(tags ...map[string]any) *fstest.MapFile {
|
||||
return MP3(append(t, tags...)...)
|
||||
}
|
||||
}
|
||||
|
||||
func Track(num int, title string, tags ...map[string]any) map[string]any {
|
||||
ts := audioProperties("mp3", 320)
|
||||
ts["title"] = title
|
||||
ts["track"] = num
|
||||
for _, t := range tags {
|
||||
for k, v := range t {
|
||||
ts[k] = v
|
||||
}
|
||||
}
|
||||
return ts
|
||||
}
|
||||
|
||||
func MP3(tags ...map[string]any) *fstest.MapFile {
|
||||
ts := audioProperties("mp3", 320)
|
||||
if _, ok := ts[fakeFileInfoSize]; !ok {
|
||||
duration := ts["duration"].(int64)
|
||||
bitrate := ts["bitrate"].(int)
|
||||
ts[fakeFileInfoSize] = duration * int64(bitrate) / 8 * 1000
|
||||
}
|
||||
return File(append([]map[string]any{ts}, tags...)...)
|
||||
}
|
||||
|
||||
func File(tags ...map[string]any) *fstest.MapFile {
|
||||
ts := map[string]any{}
|
||||
for _, t := range tags {
|
||||
for k, v := range t {
|
||||
ts[k] = v
|
||||
}
|
||||
}
|
||||
modTime := time.Now()
|
||||
if mt, ok := ts[fakeFileInfoModTime]; !ok {
|
||||
ts[fakeFileInfoModTime] = time.Now().Format(time.RFC3339)
|
||||
} else {
|
||||
modTime, _ = time.Parse(time.RFC3339, mt.(string))
|
||||
}
|
||||
if _, ok := ts[fakeFileInfoBirthTime]; !ok {
|
||||
ts[fakeFileInfoBirthTime] = time.Now().Format(time.RFC3339)
|
||||
}
|
||||
if _, ok := ts[fakeFileInfoMode]; !ok {
|
||||
ts[fakeFileInfoMode] = fs.ModePerm
|
||||
}
|
||||
data, _ := json.Marshal(ts)
|
||||
if _, ok := ts[fakeFileInfoSize]; !ok {
|
||||
ts[fakeFileInfoSize] = int64(len(data))
|
||||
}
|
||||
return &fstest.MapFile{Data: data, ModTime: modTime, Mode: ts[fakeFileInfoMode].(fs.FileMode)}
|
||||
}
|
||||
|
||||
func audioProperties(suffix string, bitrate int) map[string]any {
|
||||
duration := random.Int64N(300) + 120
|
||||
return map[string]any{
|
||||
"suffix": suffix,
|
||||
"bitrate": bitrate,
|
||||
"duration": duration,
|
||||
"samplerate": 44100,
|
||||
"bitdepth": 16,
|
||||
"channels": 2,
|
||||
}
|
||||
}
|
||||
|
||||
func (ffs *FakeFS) ReadTags(paths ...string) (map[string]metadata.Info, error) {
|
||||
if !ffs.properInit {
|
||||
log.Fatal("FakeFS not initialized properly. Use SetFiles")
|
||||
}
|
||||
result := make(map[string]metadata.Info)
|
||||
var errs []error
|
||||
for _, file := range paths {
|
||||
p, err := ffs.parseFile(file)
|
||||
if err != nil {
|
||||
log.Warn("Error reading metadata from file", "file", file, "err", err)
|
||||
errs = append(errs, err)
|
||||
} else {
|
||||
result[file] = *p
|
||||
}
|
||||
}
|
||||
if len(errs) > 0 {
|
||||
return result, fmt.Errorf("errors reading metadata: %w", errors.Join(errs...))
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (ffs *FakeFS) parseFile(filePath string) (*metadata.Info, error) {
|
||||
// Check if it should throw an error when reading this file
|
||||
stat, err := ffs.Stat(filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if stat.Sys() != nil {
|
||||
return nil, stat.Sys().(error)
|
||||
}
|
||||
|
||||
// Read the file contents and parse the tags
|
||||
contents, err := fs.ReadFile(ffs, filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
data := map[string]any{}
|
||||
err = json.Unmarshal(contents, &data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p := metadata.Info{
|
||||
Tags: map[string][]string{},
|
||||
AudioProperties: metadata.AudioProperties{},
|
||||
HasPicture: data["has_picture"] == "true",
|
||||
}
|
||||
if d, ok := data["duration"].(float64); ok {
|
||||
p.AudioProperties.Duration = time.Duration(d) * time.Second
|
||||
}
|
||||
getInt := func(key string) int { v, _ := data[key].(float64); return int(v) }
|
||||
p.AudioProperties.BitRate = getInt("bitrate")
|
||||
p.AudioProperties.BitDepth = getInt("bitdepth")
|
||||
p.AudioProperties.SampleRate = getInt("samplerate")
|
||||
p.AudioProperties.Channels = getInt("channels")
|
||||
for k, v := range data {
|
||||
p.Tags[k] = []string{fmt.Sprintf("%v", v)}
|
||||
}
|
||||
file := ffs.MapFS[filePath]
|
||||
p.FileInfo = &fakeFileInfo{path: filePath, tags: data, file: file}
|
||||
return &p, nil
|
||||
}
|
||||
|
||||
const (
|
||||
fakeFileInfoMode = "_mode"
|
||||
fakeFileInfoSize = "_size"
|
||||
fakeFileInfoModTime = "_modtime"
|
||||
fakeFileInfoBirthTime = "_birthtime"
|
||||
)
|
||||
|
||||
type fakeFileInfo struct {
|
||||
path string
|
||||
file *fstest.MapFile
|
||||
tags map[string]any
|
||||
}
|
||||
|
||||
func (ffi *fakeFileInfo) Name() string { return path.Base(ffi.path) }
|
||||
func (ffi *fakeFileInfo) Size() int64 { v, _ := ffi.tags[fakeFileInfoSize].(float64); return int64(v) }
|
||||
func (ffi *fakeFileInfo) Mode() fs.FileMode { return ffi.file.Mode }
|
||||
func (ffi *fakeFileInfo) IsDir() bool { return false }
|
||||
func (ffi *fakeFileInfo) Sys() any { return nil }
|
||||
func (ffi *fakeFileInfo) ModTime() time.Time { return ffi.file.ModTime }
|
||||
func (ffi *fakeFileInfo) BirthTime() time.Time { return ffi.parseTime(fakeFileInfoBirthTime) }
|
||||
func (ffi *fakeFileInfo) parseTime(key string) time.Time {
|
||||
t, _ := time.Parse(time.RFC3339, ffi.tags[key].(string))
|
||||
return t
|
||||
}
|
||||
@@ -1,139 +0,0 @@
|
||||
//nolint:unused
|
||||
package storagetest_test
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"testing"
|
||||
"testing/fstest"
|
||||
"time"
|
||||
|
||||
. "github.com/navidrome/navidrome/core/storage/storagetest"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
|
||||
type _t = map[string]any
|
||||
|
||||
func TestFakeStorage(t *testing.T) {
|
||||
RegisterFailHandler(Fail)
|
||||
RunSpecs(t, "Fake Storage Test Suite")
|
||||
}
|
||||
|
||||
var _ = Describe("FakeFS", func() {
|
||||
var ffs FakeFS
|
||||
var startTime time.Time
|
||||
|
||||
BeforeEach(func() {
|
||||
startTime = time.Now().Add(-time.Hour)
|
||||
boy := Template(_t{"albumartist": "U2", "album": "Boy", "year": 1980, "genre": "Rock"})
|
||||
files := fstest.MapFS{
|
||||
"U2/Boy/I Will Follow.mp3": boy(Track(1, "I Will Follow")),
|
||||
"U2/Boy/Twilight.mp3": boy(Track(2, "Twilight")),
|
||||
"U2/Boy/An Cat Dubh.mp3": boy(Track(3, "An Cat Dubh")),
|
||||
}
|
||||
ffs.SetFiles(files)
|
||||
})
|
||||
|
||||
It("should implement a fs.FS", func() {
|
||||
Expect(fstest.TestFS(ffs, "U2/Boy/I Will Follow.mp3")).To(Succeed())
|
||||
})
|
||||
|
||||
It("should read file info", func() {
|
||||
props, err := ffs.ReadTags("U2/Boy/I Will Follow.mp3", "U2/Boy/Twilight.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
prop := props["U2/Boy/Twilight.mp3"]
|
||||
Expect(prop).ToNot(BeNil())
|
||||
Expect(prop.AudioProperties.Channels).To(Equal(2))
|
||||
Expect(prop.AudioProperties.BitRate).To(Equal(320))
|
||||
Expect(prop.FileInfo.Name()).To(Equal("Twilight.mp3"))
|
||||
Expect(prop.Tags["albumartist"]).To(ConsistOf("U2"))
|
||||
Expect(prop.FileInfo.ModTime()).To(BeTemporally(">=", startTime))
|
||||
|
||||
prop = props["U2/Boy/I Will Follow.mp3"]
|
||||
Expect(prop).ToNot(BeNil())
|
||||
Expect(prop.FileInfo.Name()).To(Equal("I Will Follow.mp3"))
|
||||
})
|
||||
|
||||
It("should return ModTime for directories", func() {
|
||||
root := ffs.MapFS["."]
|
||||
dirInfo1, err := ffs.Stat("U2")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
dirInfo2, err := ffs.Stat("U2/Boy")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(dirInfo1.ModTime()).To(Equal(root.ModTime))
|
||||
Expect(dirInfo1.ModTime()).To(BeTemporally(">=", startTime))
|
||||
Expect(dirInfo1.ModTime()).To(Equal(dirInfo2.ModTime()))
|
||||
})
|
||||
|
||||
When("the file is touched", func() {
|
||||
It("should only update the file and the file's directory ModTime", func() {
|
||||
root, _ := ffs.Stat(".")
|
||||
u2Dir, _ := ffs.Stat("U2")
|
||||
boyDir, _ := ffs.Stat("U2/Boy")
|
||||
previousTime := root.ModTime()
|
||||
|
||||
aTimeStamp := previousTime.Add(time.Hour)
|
||||
ffs.Touch("U2/./Boy/Twilight.mp3", aTimeStamp)
|
||||
|
||||
twilightFile, err := ffs.Stat("U2/Boy/Twilight.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(twilightFile.ModTime()).To(Equal(aTimeStamp))
|
||||
|
||||
Expect(root.ModTime()).To(Equal(previousTime))
|
||||
Expect(u2Dir.ModTime()).To(Equal(previousTime))
|
||||
Expect(boyDir.ModTime()).To(Equal(aTimeStamp))
|
||||
})
|
||||
})
|
||||
|
||||
When("adding/removing files", func() {
|
||||
It("should keep the timestamps correct", func() {
|
||||
root, _ := ffs.Stat(".")
|
||||
u2Dir, _ := ffs.Stat("U2")
|
||||
boyDir, _ := ffs.Stat("U2/Boy")
|
||||
previousTime := root.ModTime()
|
||||
aTimeStamp := previousTime.Add(time.Hour)
|
||||
|
||||
ffs.Add("U2/Boy/../Boy/Another.mp3", &fstest.MapFile{ModTime: aTimeStamp}, aTimeStamp)
|
||||
Expect(u2Dir.ModTime()).To(Equal(previousTime))
|
||||
Expect(boyDir.ModTime()).To(Equal(aTimeStamp))
|
||||
|
||||
aTimeStamp = aTimeStamp.Add(time.Hour)
|
||||
ffs.Remove("U2/./Boy/Twilight.mp3", aTimeStamp)
|
||||
|
||||
_, err := ffs.Stat("U2/Boy/Twilight.mp3")
|
||||
Expect(err).To(MatchError(fs.ErrNotExist))
|
||||
Expect(u2Dir.ModTime()).To(Equal(previousTime))
|
||||
Expect(boyDir.ModTime()).To(Equal(aTimeStamp))
|
||||
})
|
||||
})
|
||||
|
||||
When("moving files", func() {
|
||||
It("should allow relative paths", func() {
|
||||
ffs.Move("U2/../U2/Boy/Twilight.mp3", "./Twilight.mp3")
|
||||
Expect(ffs.MapFS).To(HaveKey("Twilight.mp3"))
|
||||
file, err := ffs.Stat("Twilight.mp3")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(file.Name()).To(Equal("Twilight.mp3"))
|
||||
})
|
||||
It("should keep the timestamps correct", func() {
|
||||
root, _ := ffs.Stat(".")
|
||||
u2Dir, _ := ffs.Stat("U2")
|
||||
boyDir, _ := ffs.Stat("U2/Boy")
|
||||
previousTime := root.ModTime()
|
||||
twilightFile, _ := ffs.Stat("U2/Boy/Twilight.mp3")
|
||||
filePreviousTime := twilightFile.ModTime()
|
||||
aTimeStamp := previousTime.Add(time.Hour)
|
||||
|
||||
ffs.Move("U2/Boy/Twilight.mp3", "Twilight.mp3", aTimeStamp)
|
||||
|
||||
Expect(root.ModTime()).To(Equal(aTimeStamp))
|
||||
Expect(u2Dir.ModTime()).To(Equal(previousTime))
|
||||
Expect(boyDir.ModTime()).To(Equal(aTimeStamp))
|
||||
|
||||
Expect(ffs.MapFS).ToNot(HaveKey("U2/Boy/Twilight.mp3"))
|
||||
twilight := ffs.MapFS["Twilight.mp3"]
|
||||
Expect(twilight.ModTime).To(Equal(filePreviousTime))
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -3,7 +3,6 @@ package core
|
||||
import (
|
||||
"github.com/google/wire"
|
||||
"github.com/navidrome/navidrome/core/agents"
|
||||
"github.com/navidrome/navidrome/core/external"
|
||||
"github.com/navidrome/navidrome/core/ffmpeg"
|
||||
"github.com/navidrome/navidrome/core/metrics"
|
||||
"github.com/navidrome/navidrome/core/playback"
|
||||
@@ -14,12 +13,11 @@ var Set = wire.NewSet(
|
||||
NewMediaStreamer,
|
||||
GetTranscodingCache,
|
||||
NewArchiver,
|
||||
NewExternalMetadata,
|
||||
NewPlayers,
|
||||
NewShare,
|
||||
NewPlaylists,
|
||||
agents.GetAgents,
|
||||
external.NewProvider,
|
||||
wire.Bind(new(external.Agents), new(*agents.Agents)),
|
||||
agents.New,
|
||||
ffmpeg.New,
|
||||
scrobbler.GetPlayTracker,
|
||||
playback.GetInstance,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package db_test
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -9,8 +9,6 @@ import (
|
||||
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
"github.com/navidrome/navidrome/conf/configtest"
|
||||
. "github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
)
|
||||
@@ -73,7 +71,7 @@ var _ = Describe("database backups", func() {
|
||||
})
|
||||
|
||||
for _, time := range timesShuffled {
|
||||
path := BackupPath(time)
|
||||
path := backupPath(time)
|
||||
file, err := os.Create(path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
_ = file.Close()
|
||||
@@ -87,7 +85,7 @@ var _ = Describe("database backups", func() {
|
||||
pruneCount, err := Prune(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
for idx, time := range timesDecreasingChronologically {
|
||||
_, err := os.Stat(BackupPath(time))
|
||||
_, err := os.Stat(backupPath(time))
|
||||
shouldExist := idx < conf.Server.Backup.Count
|
||||
if shouldExist {
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
@@ -112,7 +110,7 @@ var _ = Describe("database backups", func() {
|
||||
DeferCleanup(configtest.SetupConfig())
|
||||
|
||||
conf.Server.DbPath = "file::memory:?cache=shared&_foreign_keys=on"
|
||||
DeferCleanup(Init(ctx))
|
||||
DeferCleanup(Init())
|
||||
})
|
||||
|
||||
BeforeEach(func() {
|
||||
@@ -131,20 +129,25 @@ var _ = Describe("database backups", func() {
|
||||
|
||||
backup, err := sql.Open(Driver, path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(IsSchemaEmpty(ctx, backup)).To(BeFalse())
|
||||
Expect(isSchemaEmpty(backup)).To(BeFalse())
|
||||
})
|
||||
|
||||
It("successfully restores the database", func() {
|
||||
path, err := Backup(ctx)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
err = tests.ClearDB()
|
||||
// https://stackoverflow.com/questions/525512/drop-all-tables-command
|
||||
_, err = Db().ExecContext(ctx, `
|
||||
PRAGMA writable_schema = 1;
|
||||
DELETE FROM sqlite_master WHERE type in ('table', 'index', 'trigger');
|
||||
PRAGMA writable_schema = 0;
|
||||
`)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(IsSchemaEmpty(ctx, Db())).To(BeTrue())
|
||||
Expect(isSchemaEmpty(Db())).To(BeTrue())
|
||||
|
||||
err = Restore(ctx, path)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(IsSchemaEmpty(ctx, Db())).To(BeFalse())
|
||||
Expect(isSchemaEmpty(Db())).To(BeFalse())
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
116
db/db.go
@@ -1,11 +1,9 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"embed"
|
||||
"fmt"
|
||||
"runtime"
|
||||
|
||||
"github.com/mattn/go-sqlite3"
|
||||
"github.com/navidrome/navidrome/conf"
|
||||
@@ -34,110 +32,61 @@ func Db() *sql.DB {
|
||||
return conn.RegisterFunc("SEEDEDRAND", hasher.HashFunc(), false)
|
||||
},
|
||||
})
|
||||
|
||||
Path = conf.Server.DbPath
|
||||
if Path == ":memory:" {
|
||||
Path = "file::memory:?cache=shared&_foreign_keys=on"
|
||||
conf.Server.DbPath = Path
|
||||
}
|
||||
log.Debug("Opening DataBase", "dbPath", Path, "driver", Driver)
|
||||
db, err := sql.Open(Driver, Path)
|
||||
db.SetMaxOpenConns(max(4, runtime.NumCPU()))
|
||||
instance, err := sql.Open(Driver, Path)
|
||||
if err != nil {
|
||||
log.Fatal("Error opening database", err)
|
||||
panic(err)
|
||||
}
|
||||
_, err = db.Exec("PRAGMA optimize=0x10002")
|
||||
if err != nil {
|
||||
log.Error("Error applying PRAGMA optimize", err)
|
||||
return nil
|
||||
}
|
||||
return db
|
||||
return instance
|
||||
})
|
||||
}
|
||||
|
||||
func Close(ctx context.Context) {
|
||||
// Ignore cancellations when closing the DB
|
||||
ctx = context.WithoutCancel(ctx)
|
||||
|
||||
// Run optimize before closing
|
||||
Optimize(ctx)
|
||||
|
||||
log.Info(ctx, "Closing Database")
|
||||
func Close() {
|
||||
log.Info("Closing Database")
|
||||
err := Db().Close()
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error closing Database", err)
|
||||
log.Error("Error closing Database", err)
|
||||
}
|
||||
}
|
||||
|
||||
func Init(ctx context.Context) func() {
|
||||
func Init() func() {
|
||||
db := Db()
|
||||
|
||||
// Disable foreign_keys to allow re-creating tables in migrations
|
||||
_, err := db.ExecContext(ctx, "PRAGMA foreign_keys=off")
|
||||
_, err := db.Exec("PRAGMA foreign_keys=off")
|
||||
defer func() {
|
||||
_, err := db.ExecContext(ctx, "PRAGMA foreign_keys=on")
|
||||
_, err := db.Exec("PRAGMA foreign_keys=on")
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error re-enabling foreign_keys", err)
|
||||
log.Error("Error re-enabling foreign_keys", err)
|
||||
}
|
||||
}()
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error disabling foreign_keys", err)
|
||||
log.Error("Error disabling foreign_keys", err)
|
||||
}
|
||||
|
||||
gooseLogger := &logAdapter{silent: isSchemaEmpty(db)}
|
||||
goose.SetBaseFS(embedMigrations)
|
||||
|
||||
err = goose.SetDialect(Dialect)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "Invalid DB driver", "driver", Driver, err)
|
||||
log.Fatal("Invalid DB driver", "driver", Driver, err)
|
||||
}
|
||||
schemaEmpty := isSchemaEmpty(ctx, db)
|
||||
hasSchemaChanges := hasPendingMigrations(ctx, db, migrationsFolder)
|
||||
if !schemaEmpty && hasSchemaChanges {
|
||||
log.Info(ctx, "Upgrading DB Schema to latest version")
|
||||
if !isSchemaEmpty(db) && hasPendingMigrations(db, migrationsFolder) {
|
||||
log.Info("Upgrading DB Schema to latest version")
|
||||
}
|
||||
goose.SetLogger(&logAdapter{ctx: ctx, silent: schemaEmpty})
|
||||
err = goose.UpContext(ctx, db, migrationsFolder)
|
||||
goose.SetLogger(gooseLogger)
|
||||
err = goose.Up(db, migrationsFolder)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "Failed to apply new migrations", err)
|
||||
log.Fatal("Failed to apply new migrations", err)
|
||||
}
|
||||
|
||||
if hasSchemaChanges {
|
||||
log.Debug(ctx, "Applying PRAGMA optimize after schema changes")
|
||||
_, err = db.ExecContext(ctx, "PRAGMA optimize")
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error applying PRAGMA optimize", err)
|
||||
}
|
||||
}
|
||||
|
||||
return func() {
|
||||
Close(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
// Optimize runs PRAGMA optimize on each connection in the pool
|
||||
func Optimize(ctx context.Context) {
|
||||
numConns := Db().Stats().OpenConnections
|
||||
if numConns == 0 {
|
||||
log.Debug(ctx, "No open connections to optimize")
|
||||
return
|
||||
}
|
||||
log.Debug(ctx, "Optimizing open connections", "numConns", numConns)
|
||||
var conns []*sql.Conn
|
||||
for i := 0; i < numConns; i++ {
|
||||
conn, err := Db().Conn(ctx)
|
||||
conns = append(conns, conn)
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error getting connection from pool", err)
|
||||
continue
|
||||
}
|
||||
_, err = conn.ExecContext(ctx, "PRAGMA optimize;")
|
||||
if err != nil {
|
||||
log.Error(ctx, "Error running PRAGMA optimize", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Return all connections to the Connection Pool
|
||||
for _, conn := range conns {
|
||||
conn.Close()
|
||||
}
|
||||
return Close
|
||||
}
|
||||
|
||||
type statusLogger struct{ numPending int }
|
||||
@@ -154,52 +103,51 @@ func (l *statusLogger) Printf(format string, v ...interface{}) {
|
||||
}
|
||||
}
|
||||
|
||||
func hasPendingMigrations(ctx context.Context, db *sql.DB, folder string) bool {
|
||||
func hasPendingMigrations(db *sql.DB, folder string) bool {
|
||||
l := &statusLogger{}
|
||||
goose.SetLogger(l)
|
||||
err := goose.StatusContext(ctx, db, folder)
|
||||
err := goose.Status(db, folder)
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "Failed to check for pending migrations", err)
|
||||
log.Fatal("Failed to check for pending migrations", err)
|
||||
}
|
||||
return l.numPending > 0
|
||||
}
|
||||
|
||||
func isSchemaEmpty(ctx context.Context, db *sql.DB) bool {
|
||||
rows, err := db.QueryContext(ctx, "SELECT name FROM sqlite_master WHERE type='table' AND name='goose_db_version';") // nolint:rowserrcheck
|
||||
func isSchemaEmpty(db *sql.DB) bool {
|
||||
rows, err := db.Query("SELECT name FROM sqlite_master WHERE type='table' AND name='goose_db_version';") // nolint:rowserrcheck
|
||||
if err != nil {
|
||||
log.Fatal(ctx, "Database could not be opened!", err)
|
||||
log.Fatal("Database could not be opened!", err)
|
||||
}
|
||||
defer rows.Close()
|
||||
return !rows.Next()
|
||||
}
|
||||
|
||||
type logAdapter struct {
|
||||
ctx context.Context
|
||||
silent bool
|
||||
}
|
||||
|
||||
func (l *logAdapter) Fatal(v ...interface{}) {
|
||||
log.Fatal(l.ctx, fmt.Sprint(v...))
|
||||
log.Fatal(fmt.Sprint(v...))
|
||||
}
|
||||
|
||||
func (l *logAdapter) Fatalf(format string, v ...interface{}) {
|
||||
log.Fatal(l.ctx, fmt.Sprintf(format, v...))
|
||||
log.Fatal(fmt.Sprintf(format, v...))
|
||||
}
|
||||
|
||||
func (l *logAdapter) Print(v ...interface{}) {
|
||||
if !l.silent {
|
||||
log.Info(l.ctx, fmt.Sprint(v...))
|
||||
log.Info(fmt.Sprint(v...))
|
||||
}
|
||||
}
|
||||
|
||||
func (l *logAdapter) Println(v ...interface{}) {
|
||||
if !l.silent {
|
||||
log.Info(l.ctx, fmt.Sprintln(v...))
|
||||
log.Info(fmt.Sprintln(v...))
|
||||
}
|
||||
}
|
||||
|
||||
func (l *logAdapter) Printf(format string, v ...interface{}) {
|
||||
if !l.silent {
|
||||
log.Info(l.ctx, fmt.Sprintf(format, v...))
|
||||
log.Info(fmt.Sprintf(format, v...))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
package db_test
|
||||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"testing"
|
||||
|
||||
"github.com/navidrome/navidrome/db"
|
||||
"github.com/navidrome/navidrome/log"
|
||||
"github.com/navidrome/navidrome/tests"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
@@ -19,22 +17,20 @@ func TestDB(t *testing.T) {
|
||||
RunSpecs(t, "DB Suite")
|
||||
}
|
||||
|
||||
var _ = Describe("IsSchemaEmpty", func() {
|
||||
var database *sql.DB
|
||||
var ctx context.Context
|
||||
var _ = Describe("isSchemaEmpty", func() {
|
||||
var db *sql.DB
|
||||
BeforeEach(func() {
|
||||
ctx = context.Background()
|
||||
path := "file::memory:"
|
||||
database, _ = sql.Open(db.Dialect, path)
|
||||
db, _ = sql.Open(Dialect, path)
|
||||
})
|
||||
|
||||
It("returns false if the goose metadata table is found", func() {
|
||||
_, err := database.Exec("create table goose_db_version (id primary key);")
|
||||
_, err := db.Exec("create table goose_db_version (id primary key);")
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(db.IsSchemaEmpty(ctx, database)).To(BeFalse())
|
||||
Expect(isSchemaEmpty(db)).To(BeFalse())
|
||||
})
|
||||
|
||||
It("returns true if the schema is brand new", func() {
|
||||
Expect(db.IsSchemaEmpty(ctx, database)).To(BeTrue())
|
||||
Expect(isSchemaEmpty(db)).To(BeTrue())
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
package db
|
||||
|
||||
// Definitions for testing private methods
|
||||
var (
|
||||
IsSchemaEmpty = isSchemaEmpty
|
||||
BackupPath = backupPath
|
||||
)
|
||||
@@ -4,8 +4,8 @@ import (
|
||||
"context"
|
||||
"database/sql"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/navidrome/navidrome/consts"
|
||||
"github.com/navidrome/navidrome/model/id"
|
||||
"github.com/pressly/goose/v3"
|
||||
)
|
||||
|
||||
@@ -30,7 +30,7 @@ func upAddDefaultTranscodings(_ context.Context, tx *sql.Tx) error {
|
||||
}
|
||||
|
||||
for _, t := range consts.DefaultTranscodings {
|
||||
_, err := stmt.Exec(id.NewRandom(), t.Name, t.TargetFormat, t.DefaultBitRate, t.Command)
|
||||
_, err := stmt.Exec(uuid.NewString(), t.Name, t.TargetFormat, t.DefaultBitRate, t.Command)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@ func upAddLibraryTable(ctx context.Context, tx *sql.Tx) error {
|
||||
}
|
||||
|
||||
_, err = tx.ExecContext(ctx, fmt.Sprintf(`
|
||||
insert into library(id, name, path) values(1, 'Music Library', '%s');
|
||||
insert into library(id, name, path, last_scan_at) values(1, 'Music Library', '%s', current_timestamp);
|
||||
delete from property where id like 'LastScan-%%';
|
||||
`, conf.Server.MusicFolder))
|
||||
if err != nil {

@@ -1,319 +0,0 @@
package migrations

import (
"context"
"database/sql"
"fmt"
"io/fs"
"os"
"path/filepath"
"strings"
"testing/fstest"
"unicode/utf8"

"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/utils/chain"
"github.com/pressly/goose/v3"
)

func init() {
goose.AddMigrationContext(upSupportNewScanner, downSupportNewScanner)
}

func upSupportNewScanner(ctx context.Context, tx *sql.Tx) error {
execute := createExecuteFunc(ctx, tx)
addColumn := createAddColumnFunc(ctx, tx)

return chain.RunSequentially(
upSupportNewScanner_CreateTableFolder(ctx, execute),
upSupportNewScanner_PopulateTableFolder(ctx, tx),
upSupportNewScanner_UpdateTableMediaFile(ctx, execute, addColumn),
upSupportNewScanner_UpdateTableAlbum(ctx, execute),
upSupportNewScanner_UpdateTableArtist(ctx, execute, addColumn),
execute(`
alter table library
add column last_scan_started_at datetime default '0000-00-00 00:00:00' not null;
alter table library
add column full_scan_in_progress boolean default false not null;

create table if not exists media_file_artists(
media_file_id varchar not null
references media_file (id)
on delete cascade,
artist_id varchar not null
references artist (id)
on delete cascade,
role varchar default '' not null,
sub_role varchar default '' not null,
constraint artist_tracks
unique (artist_id, media_file_id, role, sub_role)
);
create index if not exists media_file_artists_media_file_id
on media_file_artists (media_file_id);
create index if not exists media_file_artists_role
on media_file_artists (role);

create table if not exists album_artists(
album_id varchar not null
references album (id)
on delete cascade,
artist_id varchar not null
references artist (id)
on delete cascade,
role varchar default '' not null,
sub_role varchar default '' not null,
constraint album_artists
unique (album_id, artist_id, role, sub_role)
);
create index if not exists album_artists_album_id
on album_artists (album_id);
create index if not exists album_artists_role
on album_artists (role);

create table if not exists tag(
id varchar not null primary key,
tag_name varchar default '' not null,
tag_value varchar default '' not null,
album_count integer default 0 not null,
media_file_count integer default 0 not null,
constraint tags_name_value
unique (tag_name, tag_value)
);

-- Genres are now stored in the tag table
drop table if exists media_file_genres;
drop table if exists album_genres;
drop table if exists artist_genres;
drop table if exists genre;

-- Drop full_text indexes, as they are not being used by SQLite
drop index if exists media_file_full_text;
drop index if exists album_full_text;
drop index if exists artist_full_text;

-- Add PID config to properties
insert into property (id, value) values ('PIDTrack', 'track_legacy') on conflict do nothing;
insert into property (id, value) values ('PIDAlbum', 'album_legacy') on conflict do nothing;
`),
func() error {
notice(tx, "A full scan will be triggered to populate the new tables. This may take a while.")
return forceFullRescan(tx)
},
)
}

func upSupportNewScanner_CreateTableFolder(_ context.Context, execute execStmtFunc) execFunc {
return execute(`
create table if not exists folder(
id varchar not null
primary key,
library_id integer not null
references library (id)
on delete cascade,
path varchar default '' not null,
name varchar default '' not null,
missing boolean default false not null,
parent_id varchar default '' not null,
num_audio_files integer default 0 not null,
num_playlists integer default 0 not null,
image_files jsonb default '[]' not null,
images_updated_at datetime default '0000-00-00 00:00:00' not null,
updated_at datetime default (datetime(current_timestamp, 'localtime')) not null,
created_at datetime default (datetime(current_timestamp, 'localtime')) not null
);
create index folder_parent_id on folder(parent_id);
`)
}

// Use paths from `media_file` table to populate `folder` table. The `folder` table must contain all paths, including
// the ones that do not contain any media_file. We can get all paths from the media_file table to populate a
// fstest.MapFS{}, and then walk the filesystem to insert all folders into the DB, including empty parent ones.
func upSupportNewScanner_PopulateTableFolder(ctx context.Context, tx *sql.Tx) execFunc {
return func() error {
// First, get all folder paths from media_file table
rows, err := tx.QueryContext(ctx, fmt.Sprintf(`
select distinct rtrim(media_file.path, replace(media_file.path, '%s', '')), library_id, library.path
from media_file
join library on media_file.library_id = library.id`, string(os.PathSeparator)))
if err != nil {
return err
}
defer rows.Close()

// Then create an in-memory filesystem with all paths
var path string
var lib model.Library
fsys := fstest.MapFS{}

for rows.Next() {
err = rows.Scan(&path, &lib.ID, &lib.Path)
if err != nil {
return err
}

path = strings.TrimPrefix(path, filepath.Clean(lib.Path))
path = strings.TrimPrefix(path, string(os.PathSeparator))
path = filepath.Clean(path)
fsys[path] = &fstest.MapFile{Mode: fs.ModeDir}
}
if err = rows.Err(); err != nil {
return fmt.Errorf("error loading folders from media_file table: %w", err)
}
if len(fsys) == 0 {
return nil
}

stmt, err := tx.PrepareContext(ctx,
"insert into folder (id, library_id, path, name, parent_id, updated_at) values (?, ?, ?, ?, ?, '0000-00-00 00:00:00')",
)
if err != nil {
return err
}

// Finally, walk the in-mem filesystem and insert all folders into the DB.
err = fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error {
if err != nil {
// Don't abort the walk, just log the error
log.Error("error walking folder to DB", "path", path, err)
return nil
}
// Skip entries that are not directories
if !d.IsDir() {
return nil
}

// Create a folder in the DB
f := model.NewFolder(lib, path)
_, err = stmt.ExecContext(ctx, f.ID, lib.ID, f.Path, f.Name, f.ParentID)
if err != nil {
log.Error("error writing folder to DB", "path", path, err)
}
return err
})
if err != nil {
return fmt.Errorf("error populating folder table: %w", err)
}

// Count the number of characters in the library path
libPath := filepath.Clean(lib.Path)
libPathLen := utf8.RuneCountInString(libPath)

// In one go, update all paths in the media_file table, removing the library path prefix
// and replacing any backslashes with slashes (the path separator used by the io/fs package)
_, err = tx.ExecContext(ctx, fmt.Sprintf(`
update media_file set path = replace(substr(path, %d), '\', '/');`, libPathLen+2))
if err != nil {
return fmt.Errorf("error updating media_file path: %w", err)
}

return nil
}
}
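
The comment at the top of this function describes the trick it relies on: leaf folder paths taken from media_file are dropped into an in-memory fstest.MapFS, and fs.WalkDir then visits every intermediate directory as well, so empty parent folders also make it into the folder table. A stripped-down, illustrative sketch of just that idea (not the migration itself; the paths are made up):

package main

import (
	"fmt"
	"io/fs"
	"testing/fstest"
)

func main() {
	// Pretend these leaf paths came out of the media_file query above.
	fsys := fstest.MapFS{
		"Artist/Album/CD1": &fstest.MapFile{Mode: fs.ModeDir},
		"Artist/Album/CD2": &fstest.MapFile{Mode: fs.ModeDir},
	}
	// WalkDir also synthesizes ".", "Artist" and "Artist/Album",
	// even though only the two leaves were added explicitly.
	_ = fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error {
		if err != nil || !d.IsDir() {
			return err
		}
		fmt.Println(path)
		return nil
	})
}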

func upSupportNewScanner_UpdateTableMediaFile(_ context.Context, execute execStmtFunc, addColumn addColumnFunc) execFunc {
return func() error {
return chain.RunSequentially(
execute(`
alter table media_file
add column folder_id varchar default '' not null;
alter table media_file
add column pid varchar default '' not null;
alter table media_file
add column missing boolean default false not null;
alter table media_file
add column mbz_release_group_id varchar default '' not null;
alter table media_file
add column tags jsonb default '{}' not null;
alter table media_file
add column participants jsonb default '{}' not null;
alter table media_file
add column bit_depth integer default 0 not null;
alter table media_file
add column explicit_status varchar default '' not null;
`),
addColumn("media_file", "birth_time", "datetime", "current_timestamp", "created_at"),
execute(`
update media_file
set pid = id where pid = '';
create index if not exists media_file_birth_time
on media_file (birth_time);
create index if not exists media_file_folder_id
on media_file (folder_id);
create index if not exists media_file_pid
on media_file (pid);
create index if not exists media_file_missing
on media_file (missing);
`),
)
}
}

func upSupportNewScanner_UpdateTableAlbum(_ context.Context, execute execStmtFunc) execFunc {
return execute(`
drop index if exists album_all_artist_ids;
alter table album
drop column all_artist_ids;
drop index if exists album_artist;
drop index if exists album_artist_album;
alter table album
drop column artist;
drop index if exists album_artist_id;
alter table album
drop column artist_id;
alter table album
add column imported_at datetime default '0000-00-00 00:00:00' not null;
alter table album
add column missing boolean default false not null;
alter table album
add column mbz_release_group_id varchar default '' not null;
alter table album
add column tags jsonb default '{}' not null;
alter table album
add column participants jsonb default '{}' not null;
alter table album
drop column paths;
alter table album
drop column image_files;
alter table album
add column folder_ids jsonb default '[]' not null;
alter table album
add column explicit_status varchar default '' not null;
create index if not exists album_imported_at
on album (imported_at);
create index if not exists album_mbz_release_group_id
on album (mbz_release_group_id);
`)
}

func upSupportNewScanner_UpdateTableArtist(_ context.Context, execute execStmtFunc, addColumn addColumnFunc) execFunc {
return func() error {
return chain.RunSequentially(
execute(`
alter table artist
drop column album_count;
alter table artist
drop column song_count;
drop index if exists artist_size;
alter table artist
drop column size;
alter table artist
add column missing boolean default false not null;
alter table artist
add column stats jsonb default '{"albumartist":{}}' not null;
alter table artist
drop column similar_artists;
alter table artist
add column similar_artists jsonb default '[]' not null;
`),
addColumn("artist", "updated_at", "datetime", "current_time", "(select min(album.updated_at) from album where album_artist_id = artist.id)"),
addColumn("artist", "created_at", "datetime", "current_time", "(select min(album.created_at) from album where album_artist_id = artist.id)"),
execute(`create index if not exists artist_updated_at on artist (updated_at);`),
execute(`update artist set external_info_updated_at = '0000-00-00 00:00:00';`),
)
}
}

func downSupportNewScanner(context.Context, *sql.Tx) error {
return nil
}
@@ -1,10 +1,8 @@
package migrations

import (
"context"
"database/sql"
"fmt"
"strings"
"sync"

"github.com/navidrome/navidrome/consts"
@@ -13,29 +11,24 @@ import (
// Use this in migrations that need to communicate something important (breaking changes, forced reindexes, etc...)
func notice(tx *sql.Tx, msg string) {
if isDBInitialized(tx) {
line := strings.Repeat("*", len(msg)+8)
fmt.Printf("\n%s\nNOTICE: %s\n%s\n\n", line, msg, line)
fmt.Printf(`
*************************************************************************************
NOTICE: %s
*************************************************************************************

`, msg)
}
}

// Call this in migrations that require a full rescan
func forceFullRescan(tx *sql.Tx) error {
// If a full scan is required, the query optimizer statistics are most probably outdated, so we run `analyze`.
_, err := tx.Exec(`ANALYZE;`)
if err != nil {
return err
}
_, err = tx.Exec(fmt.Sprintf(`
INSERT OR REPLACE into property (id, value) values ('%s', '1');
`, consts.FullScanAfterMigrationFlagKey))
_, err := tx.Exec(`
delete from property where id like 'LastScan%';
update media_file set updated_at = '0001-01-01';
`)
return err
}

// sq := Update(r.tableName).
// Set("last_scan_started_at", time.Now()).
// Set("full_scan_in_progress", fullScan).
// Where(Eq{"id": id})

var (
once sync.Once
initialized bool
@@ -63,58 +56,3 @@ func checkErr(err error) {
panic(err)
}
}

type (
execFunc func() error
execStmtFunc func(stmt string) execFunc
addColumnFunc func(tableName, columnName, columnType, defaultValue, initialValue string) execFunc
)

func createExecuteFunc(ctx context.Context, tx *sql.Tx) execStmtFunc {
return func(stmt string) execFunc {
return func() error {
_, err := tx.ExecContext(ctx, stmt)
return err
}
}
}
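
These small closure types are what let the scanner migration earlier in this diff be written as one flat chain.RunSequentially(...) call: execute(stmt) and addColumn(...) each return a deferred step (an execFunc) that only touches the transaction when the chain reaches it. The sketch below mimics that pattern with a local runSequentially helper instead of the real utils/chain package, so everything in it is illustrative rather than the project's code:

package main

import (
	"errors"
	"fmt"
)

type execFunc func() error

// runSequentially mirrors how chain.RunSequentially is used above:
// run each deferred step in order and stop at the first error.
func runSequentially(steps ...execFunc) error {
	for _, step := range steps {
		if err := step(); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	execute := func(stmt string) execFunc {
		return func() error {
			fmt.Println("exec:", stmt) // the real version calls tx.ExecContext(ctx, stmt)
			return nil
		}
	}
	err := runSequentially(
		execute("create table demo(id);"),
		execute("create index demo_id on demo(id);"),
		func() error { return errors.New("boom: the chain stops here") },
		execute("never reached"),
	)
	fmt.Println("result:", err)
}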

// Hacky way to add a new `not null` column to a table, setting the initial value for existing rows based on a
// SQL expression. It is done in 3 steps:
// 1. Add the column as nullable. Due to the way SQLite manipulates the DDL in memory, we need to add extra padding
// to the default value to avoid truncating it when changing the column to not null
// 2. Update the column with the initial value
// 3. Change the column to not null with the default value
//
// Based on https://stackoverflow.com/a/25917323
func createAddColumnFunc(ctx context.Context, tx *sql.Tx) addColumnFunc {
return func(tableName, columnName, columnType, defaultValue, initialValue string) execFunc {
return func() error {
// Format the `default null` value to have the same length as the final defaultValue
finalLen := len(fmt.Sprintf(`%s not`, defaultValue))
tempDefault := fmt.Sprintf(`default %s null`, strings.Repeat(" ", finalLen))
_, err := tx.ExecContext(ctx, fmt.Sprintf(`
alter table %s add column %s %s %s;`, tableName, columnName, columnType, tempDefault))
if err != nil {
return err
}
_, err = tx.ExecContext(ctx, fmt.Sprintf(`
update %s set %s = %s where %[2]s is null;`, tableName, columnName, initialValue))
if err != nil {
return err
}
_, err = tx.ExecContext(ctx, fmt.Sprintf(`
PRAGMA writable_schema = on;
UPDATE sqlite_master
SET sql = replace(sql, '%[1]s %[2]s %[5]s', '%[1]s %[2]s default %[3]s not null')
WHERE type = 'table'
AND name = '%[4]s';
PRAGMA writable_schema = off;
`, columnName, columnType, defaultValue, tableName, tempDefault))
if err != nil {
return err
}
return err
}
}
}
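
To make the three-step trick above concrete, here is an illustrative trace of what a single call expands to, assuming an invocation like the addColumn("artist", "updated_at", "datetime", "current_time", ...) seen earlier in this diff. It reuses the same Sprintf formatting as the function, so the padded temporary default comes out with exactly the length of the final "default <value> not null" clause; that padding is what keeps the in-place sqlite_master rewrite from truncating the stored DDL. This is a sketch, not the project's code:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical arguments, matching the usage earlier in this diff.
	table, column, colType, defaultValue := "artist", "updated_at", "datetime", "current_time"
	initialValue := "(select min(album.updated_at) from album where album_artist_id = artist.id)"

	// Step 1: add the column as nullable, with the default padded to the same
	// length as the final "default current_time not null" clause.
	finalLen := len(fmt.Sprintf(`%s not`, defaultValue))
	tempDefault := fmt.Sprintf(`default %s null`, strings.Repeat(" ", finalLen))
	fmt.Printf("alter table %s add column %s %s %s;\n", table, column, colType, tempDefault)

	// Step 2: backfill existing rows from the initial-value expression.
	fmt.Printf("update %s set %s = %s where %s is null;\n", table, column, initialValue, column)

	// Step 3: rewrite the stored DDL in sqlite_master, swapping the padded nullable
	// clause for the real "not null" one, without rebuilding the table.
	fmt.Printf("update sqlite_master set sql = replace(sql, '%s %s %s', '%s %s default %s not null') where type = 'table' and name = '%s';\n",
		column, colType, tempDefault, column, colType, defaultValue, table)
}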

1037
docs/hld-plugins.md
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.