Mirror of https://github.com/navidrome/navidrome.git (synced 2026-02-18 23:25:30 -05:00)

Compare commits: claude/cre ... custom-col

40 Commits
| SHA1 |
|---|
| b4b1830513 |
| eca4c5acf0 |
| e766a5d780 |
| 90d6cd5f47 |
| 24ab04581a |
| 8e647a0e41 |
| 86c326bd4a |
| 5fa8356b31 |
| 929e7193b4 |
| 9bcefea0ca |
| b0cb40b029 |
| cad9cdc53e |
| b774133cd1 |
| a20d56c137 |
| b64d8ad334 |
| f00af7f983 |
| 875ffc2b78 |
| 885334c819 |
| ff86b9f2b9 |
| 13d3d510f5 |
| 656009e5f8 |
| 06b3a1f33e |
| 0f4e8376cb |
| 199cde4109 |
| 897de02a84 |
| 7ee56fe3bf |
| 34c6f12aee |
| eb9ebc3fba |
| e05a7e230f |
| 62f9c3a458 |
| fd09ca103f |
| ed79a8897b |
| 302d99aa8b |
| bee0305831 |
| c280dd67a4 |
| 8319905d2c |
| c80ef8ae41 |
| 0a4722802a |
| a704e86ac1 |
| 408aa78ed5 |
.github/workflows/create-release.yml (vendored, 52 lines changed)

@@ -1,52 +0,0 @@
```yaml
name: Create Release
on:
  workflow_dispatch:
    inputs:
      version:
        description: "Release version (e.g. 0.53.0)"
        required: true
        type: string

jobs:
  create-release:
    name: Create Release
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Validate version format
        run: |
          if [[ ! "${{ inputs.version }}" =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$ ]]; then
            echo "::error::Invalid version format '${{ inputs.version }}'. Expected X.X.X"
            exit 1
          fi

      - name: Check if tag already exists
        run: |
          if git rev-parse "v${{ inputs.version }}" >/dev/null 2>&1; then
            echo "::error::Tag v${{ inputs.version }} already exists"
            exit 1
          fi

      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version-file: go.mod

      - name: Run go mod tidy
        run: go mod tidy

      - name: Check for pending changes
        run: |
          if [ -n "$(git status -s)" ]; then
            echo "::error::There are pending changes after 'go mod tidy'. Please commit them first."
            git status -s
            exit 1
          fi

      - name: Create and push tag
        run: |
          git tag v${{ inputs.version }}
          git push origin v${{ inputs.version }}
```
.github/workflows/push-translations.sh (vendored, executable file, 138 lines changed)

@@ -0,0 +1,138 @@
```sh
#!/bin/sh

set -e

I18N_DIR=resources/i18n

# Normalize JSON for deterministic comparison:
# remove empty/null attributes, sort keys alphabetically
process_json() {
  jq 'walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)' "$1"
}

# Get list of all languages configured in the POEditor project
get_language_list() {
  curl -s -X POST https://api.poeditor.com/v2/languages/list \
    -d api_token="${POEDITOR_APIKEY}" \
    -d id="${POEDITOR_PROJECTID}"
}

# Extract language name from the language list JSON given a language code
get_language_name() {
  lang_code="$1"
  lang_list="$2"
  echo "$lang_list" | jq -r ".result.languages[] | select(.code == \"$lang_code\") | .name"
}

# Extract language code from a file path (e.g., "resources/i18n/fr.json" -> "fr")
get_lang_code() {
  filepath="$1"
  filename=$(basename "$filepath")
  echo "${filename%.*}"
}

# Export the current translation for a language from POEditor (v2 API)
export_language() {
  lang_code="$1"
  response=$(curl -s -X POST https://api.poeditor.com/v2/projects/export \
    -d api_token="${POEDITOR_APIKEY}" \
    -d id="${POEDITOR_PROJECTID}" \
    -d language="$lang_code" \
    -d type="key_value_json")

  url=$(echo "$response" | jq -r '.result.url')
  if [ -z "$url" ] || [ "$url" = "null" ]; then
    echo "Failed to export $lang_code: $response" >&2
    return 1
  fi
  echo "$url"
}

# Flatten nested JSON to POEditor languages/update format.
# POEditor uses term + context pairs, where:
#   term = the leaf key name
#   context = the parent path as "key1"."key2"."key3" (empty for root keys)
flatten_to_poeditor() {
  jq -c '[paths(scalars) as $p |
    {
      "term": ($p | last | tostring),
      "context": (if ($p | length) > 1 then ($p[:-1] | map("\"" + tostring + "\"") | join(".")) else "" end),
      "translation": {"content": getpath($p)}
    }
  ]' "$1"
}

# Update translations for a language in POEditor via languages/update API
update_language() {
  lang_code="$1"
  file="$2"

  flatten_to_poeditor "$file" > /tmp/poeditor_data.json
  response=$(curl -s -X POST https://api.poeditor.com/v2/languages/update \
    -d api_token="${POEDITOR_APIKEY}" \
    -d id="${POEDITOR_PROJECTID}" \
    -d language="$lang_code" \
    --data-urlencode data@/tmp/poeditor_data.json)
  rm -f /tmp/poeditor_data.json

  status=$(echo "$response" | jq -r '.response.status')
  if [ "$status" != "success" ]; then
    echo "Failed to update $lang_code: $response" >&2
    return 1
  fi

  parsed=$(echo "$response" | jq -r '.result.translations.parsed')
  added=$(echo "$response" | jq -r '.result.translations.added')
  updated=$(echo "$response" | jq -r '.result.translations.updated')
  echo " Translations - parsed: $parsed, added: $added, updated: $updated"
}

# --- Main ---

if [ $# -eq 0 ]; then
  echo "Usage: $0 <file1> [file2] ..."
  echo "No files specified. Nothing to do."
  exit 0
fi

lang_list=$(get_language_list)
upload_count=0

for file in "$@"; do
  if [ ! -f "$file" ]; then
    echo "Warning: File not found: $file, skipping"
    continue
  fi

  lang_code=$(get_lang_code "$file")
  lang_name=$(get_language_name "$lang_code" "$lang_list")

  if [ -z "$lang_name" ]; then
    echo "Warning: Language code '$lang_code' not found in POEditor, skipping $file"
    continue
  fi

  echo "Processing $lang_name ($lang_code)..."

  # Export current state from POEditor
  url=$(export_language "$lang_code")
  curl -sSL "$url" -o poeditor_export.json

  # Normalize both files for comparison
  process_json "$file" > local_normalized.json
  process_json poeditor_export.json > remote_normalized.json

  # Compare normalized versions
  if diff -q local_normalized.json remote_normalized.json > /dev/null 2>&1; then
    echo " No differences, skipping"
  else
    echo " Differences found, updating POEditor..."
    update_language "$lang_code" "$file"
    upload_count=$((upload_count + 1))
  fi

  rm -f poeditor_export.json local_normalized.json remote_normalized.json
done

echo ""
echo "Done. Updated $upload_count translation(s) in POEditor."
```
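For illustration only, here is a rough Go sketch of the same term/context flattening that the jq filter in `flatten_to_poeditor` performs. The `poTerm` type and the sample input are assumptions made for this sketch, not project code:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// poTerm mirrors the objects the jq filter emits (hypothetical type, for illustration).
type poTerm struct {
	Term        string            `json:"term"`
	Context     string            `json:"context"`
	Translation map[string]string `json:"translation"`
}

// flatten walks a nested map and emits one entry per leaf value:
// term = leaf key, context = the quoted parent path joined with dots.
func flatten(prefix []string, node map[string]any, out *[]poTerm) {
	for k, v := range node {
		if child, ok := v.(map[string]any); ok {
			// Copy the prefix so sibling branches do not share a backing array.
			next := append(append([]string{}, prefix...), k)
			flatten(next, child, out)
			continue
		}
		ctx := ""
		if len(prefix) > 0 {
			quoted := make([]string, len(prefix))
			for i, p := range prefix {
				quoted[i] = `"` + p + `"`
			}
			ctx = strings.Join(quoted, ".")
		}
		*out = append(*out, poTerm{Term: k, Context: ctx, Translation: map[string]string{"content": fmt.Sprint(v)}})
	}
}

func main() {
	src := map[string]any{"menu": map[string]any{"albumList": "Albums"}, "ok": "OK"}
	var terms []poTerm
	flatten(nil, src, &terms)
	data, _ := json.Marshal(terms)
	fmt.Println(string(data))
}
```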
.github/workflows/push-translations.yml (vendored, normal file, 32 lines changed)

@@ -0,0 +1,32 @@
```yaml
name: POEditor export

on:
  push:
    branches:
      - master
    paths:
      - 'resources/i18n/*.json'

jobs:
  push-translations:
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'navidrome' }}
    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 2

      - name: Detect changed translation files
        id: changed
        run: |
          CHANGED_FILES=$(git diff --name-only HEAD~1 HEAD -- 'resources/i18n/*.json' | tr '\n' ' ')
          echo "files=$CHANGED_FILES" >> $GITHUB_OUTPUT
          echo "Changed translation files: $CHANGED_FILES"

      - name: Push translations to POEditor
        if: ${{ steps.changed.outputs.files != '' }}
        env:
          POEDITOR_APIKEY: ${{ secrets.POEDITOR_APIKEY }}
          POEDITOR_PROJECTID: ${{ secrets.POEDITOR_PROJECTID }}
        run: |
          .github/workflows/push-translations.sh ${{ steps.changed.outputs.files }}
```
Makefile (8 lines changed)

```diff
@@ -20,7 +20,7 @@ DOCKER_TAG ?= deluan/navidrome:develop

# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
CROSS_TAGLIB_VERSION ?= 2.1.1-2
-GOLANGCI_LINT_VERSION ?= v2.8.0
+GOLANGCI_LINT_VERSION ?= v2.10.0

UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")

@@ -242,7 +242,11 @@ clean:

release:
    @if [[ ! "${V}" =~ ^[0-9]+\.[0-9]+\.[0-9]+.*$$ ]]; then echo "Usage: make release V=X.X.X"; exit 1; fi
-   gh workflow run create-release.yml -f version=${V}
+   go mod tidy
+   @if [ -n "`git status -s`" ]; then echo "\n\nThere are pending changes. Please commit or stash first"; exit 1; fi
+   make pre-push
+   git tag v${V}
+   git push origin v${V} --no-verify
.PHONY: release

download-deps:
```
```diff
@@ -65,7 +65,7 @@ func (c *client) getJWT(ctx context.Context) (string, error) {
    }

    type authResponse struct {
-       JWT string `json:"jwt"`
+       JWT string `json:"jwt"` //nolint:gosec
    }

    var result authResponse

@@ -252,7 +252,7 @@ var _ = Describe("JWT Authentication", func() {

    // Writer goroutine
    wg.Go(func() {
-       for i := 0; i < 100; i++ {
+       for i := range 100 {
            cache.set(fmt.Sprintf("token-%d", i), 1*time.Hour)
            time.Sleep(1 * time.Millisecond)
        }

@@ -260,7 +260,7 @@ var _ = Describe("JWT Authentication", func() {

    // Reader goroutine
    wg.Go(func() {
-       for i := 0; i < 100; i++ {
+       for range 100 {
            cache.get()
            time.Sleep(1 * time.Millisecond)
        }
```
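The loop rewrites above rely on Go 1.22's ability to range over an integer. A minimal standalone illustration of the two forms used in the test (not project code):

```go
package main

import "fmt"

func main() {
	// With an index; equivalent to: for i := 0; i < 3; i++ { ... }
	for i := range 3 {
		fmt.Println("write", i)
	}
	// Without an index, when only the iteration count matters.
	for range 3 {
		fmt.Println("read")
	}
}
```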
```diff
@@ -49,6 +49,7 @@ func (e extractor) Version() string {
func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
    f, close, err := e.openFile(filePath)
    if err != nil {
+       log.Warn("gotaglib: Error reading metadata from file. Skipping", "filePath", filePath, err)
        return nil, err
    }
    defer close()

@@ -118,7 +119,12 @@ func (e extractor) openFile(filePath string) (f *taglib.File, closeFunc func(),
        file.Close()
        return nil, nil, errors.New("file is not seekable")
    }
-   f, err = taglib.OpenStream(rs, taglib.WithReadStyle(taglib.ReadStyleFast))
+   // WithFilename provides a format detection hint via the file extension,
+   // since OpenStream alone relies on content-sniffing which fails for some files.
+   f, err = taglib.OpenStream(rs,
+       taglib.WithReadStyle(taglib.ReadStyleFast),
+       taglib.WithFilename(filePath),
+   )
    if err != nil {
        file.Close()
        return nil, nil, err

@@ -254,7 +260,7 @@ func parseTIPL(tags map[string][]string) {
    }
    var currentRole string
    var currentValue []string
-   for _, part := range strings.Split(tipl[0], " ") {
+   for part := range strings.SplitSeq(tipl[0], " ") {
        if _, ok := tiplMapping[part]; ok {
            addRole(currentRole, currentValue)
            currentRole = part

@@ -173,6 +173,9 @@ var _ = Describe("Extractor", func() {
    Entry("correctly parses m4a (aac) gain tags (uppercase)", "test.m4a", "1.04s", 2, 44100, 16, "0.37", "0.48", "0.37", "0.48", false, true),
    Entry("correctly parses ogg (vorbis) tags", "test.ogg", "1.04s", 2, 8000, 0, "+7.64 dB", "0.11772506", "+7.64 dB", "0.11772506", false, true),

+   // ffmpeg -f lavfi -i "sine=frequency=1100:duration=1" -c:a libopus test.opus (tags added via mutagen)
+   Entry("correctly parses opus tags (#4998)", "test.opus", "1s", 1, 48000, 0, "+5.12 dB", "0.11345678", "+5.12 dB", "0.11345678", false, true),
+
    // ffmpeg -f lavfi -i "sine=frequency=900:duration=1" test.wma
    // Weird note: for the tag parsing to work, the lyrics are actually stored in the reverse order
    Entry("correctly parses wma/asf tags", "test.wma", "1.02s", 1, 44100, 16, "3.27 dB", "0.132914", "3.27 dB", "0.132914", false, true),

@@ -65,7 +65,7 @@ func (s *Router) routes() http.Handler {
}

func (s *Router) getLinkStatus(w http.ResponseWriter, r *http.Request) {
-   resp := map[string]interface{}{
+   resp := map[string]any{
        "apiKey": s.apiKey,
    }
    u, _ := request.UserFrom(r.Context())

@@ -110,7 +110,7 @@ func (s *Router) callback(w http.ResponseWriter, r *http.Request) {
    if err != nil {
        w.Header().Set("Content-Type", "text/plain; charset=utf-8")
        w.WriteHeader(http.StatusBadRequest)
-       _, _ = w.Write([]byte("An error occurred while authorizing with Last.fm. \n\nRequest ID: " + middleware.GetReqID(ctx)))
+       _, _ = w.Write([]byte("An error occurred while authorizing with Last.fm. \n\nRequest ID: " + middleware.GetReqID(ctx))) //nolint:gosec
        return
    }

@@ -60,7 +60,7 @@ func (s *Router) routes() http.Handler {
}

func (s *Router) getLinkStatus(w http.ResponseWriter, r *http.Request) {
-   resp := map[string]interface{}{}
+   resp := map[string]any{}
    u, _ := request.UserFrom(r.Context())
    key, err := s.sessionKeys.Get(r.Context(), u.ID)
    if err != nil && !errors.Is(err, model.ErrNotFound) {

@@ -107,7 +107,7 @@ func (s *Router) link(w http.ResponseWriter, r *http.Request) {
        return
    }

-   _ = rest.RespondWithJSON(w, http.StatusOK, map[string]interface{}{"status": resp.Valid, "user": resp.UserName})
+   _ = rest.RespondWithJSON(w, http.StatusOK, map[string]any{"status": resp.Valid, "user": resp.UserName})
}

func (s *Router) unlink(w http.ResponseWriter, r *http.Request) {

@@ -37,7 +37,7 @@ var _ = Describe("ListenBrainz Auth Router", func() {
    req = httptest.NewRequest("GET", "/listenbrainz/link", nil)
    r.getLinkStatus(resp, req)
    Expect(resp.Code).To(Equal(http.StatusOK))
-   var parsed map[string]interface{}
+   var parsed map[string]any
    Expect(json.Unmarshal(resp.Body.Bytes(), &parsed)).To(BeNil())
    Expect(parsed["status"]).To(Equal(false))
})

@@ -47,7 +47,7 @@ var _ = Describe("ListenBrainz Auth Router", func() {
    req = httptest.NewRequest("GET", "/listenbrainz/link", nil)
    r.getLinkStatus(resp, req)
    Expect(resp.Code).To(Equal(http.StatusOK))
-   var parsed map[string]interface{}
+   var parsed map[string]any
    Expect(json.Unmarshal(resp.Body.Bytes(), &parsed)).To(BeNil())
    Expect(parsed["status"]).To(Equal(true))
})

@@ -80,7 +80,7 @@ var _ = Describe("ListenBrainz Auth Router", func() {
    req = httptest.NewRequest("PUT", "/listenbrainz/link", strings.NewReader(`{"token": "tok-1"}`))
    r.link(resp, req)
    Expect(resp.Code).To(Equal(http.StatusOK))
-   var parsed map[string]interface{}
+   var parsed map[string]any
    Expect(json.Unmarshal(resp.Body.Bytes(), &parsed)).To(BeNil())
    Expect(parsed["status"]).To(Equal(true))
    Expect(parsed["user"]).To(Equal("ListenBrainzUser"))

@@ -57,7 +57,7 @@ type listenBrainzResponse struct {
}

type listenBrainzRequest struct {
-   ApiKey string
+   ApiKey string //nolint:gosec
    Body listenBrainzRequestBody
}

@@ -75,14 +75,14 @@ const (

type listenInfo struct {
    ListenedAt int `json:"listened_at,omitempty"`
-   TrackMetadata trackMetadata `json:"track_metadata,omitempty"`
+   TrackMetadata trackMetadata `json:"track_metadata"`
}

type trackMetadata struct {
    ArtistName string `json:"artist_name,omitempty"`
    TrackName string `json:"track_name,omitempty"`
    ReleaseName string `json:"release_name,omitempty"`
-   AdditionalInfo additionalInfo `json:"additional_info,omitempty"`
+   AdditionalInfo additionalInfo `json:"additional_info"`
}

type additionalInfo struct {

@@ -73,7 +73,7 @@ func (c *client) authorize(ctx context.Context) (string, error) {
    auth := c.id + ":" + c.secret
    req.Header.Add("Authorization", "Basic "+base64.StdEncoding.EncodeToString([]byte(auth)))

-   response := map[string]interface{}{}
+   response := map[string]any{}
    err := c.makeRequest(req, &response)
    if err != nil {
        return "", err

@@ -86,7 +86,7 @@ func (c *client) authorize(ctx context.Context) (string, error) {
    return "", errors.New("invalid response")
}

-func (c *client) makeRequest(req *http.Request, response interface{}) error {
+func (c *client) makeRequest(req *http.Request, response any) error {
    log.Trace(req.Context(), fmt.Sprintf("Sending Spotify %s request", req.Method), "url", req.URL)
    resp, err := c.hc.Do(req)
    if err != nil {

@@ -7,6 +7,7 @@ import (
    "os"
    "path/filepath"
    "runtime"
+   "slices"
    "strings"
    "time"

@@ -171,8 +172,8 @@ type TagConf struct {

type lastfmOptions struct {
    Enabled bool
-   ApiKey string
-   Secret string
+   ApiKey string //nolint:gosec
+   Secret string //nolint:gosec
    Language string
    ScrobbleFirstArtistOnly bool

@@ -182,7 +183,7 @@ type lastfmOptions struct {

type spotifyOptions struct {
    ID string
-   Secret string
+   Secret string //nolint:gosec
}

type deezerOptions struct {

@@ -207,7 +208,7 @@ type httpHeaderOptions struct {
type prometheusOptions struct {
    Enabled bool
    MetricsPath string
-   Password string
+   Password string //nolint:gosec
}

type AudioDeviceDefinition []string

@@ -433,7 +434,7 @@ func mapDeprecatedOption(legacyName, newName string) {
func parseIniFileConfiguration() {
    cfgFile := viper.ConfigFileUsed()
    if strings.ToLower(filepath.Ext(cfgFile)) == ".ini" {
-       var iniConfig map[string]interface{}
+       var iniConfig map[string]any
        err := viper.Unmarshal(&iniConfig)
        if err != nil {
            _, _ = fmt.Fprintln(os.Stderr, "FATAL: Error parsing config:", err)

@@ -466,7 +467,7 @@ func disableExternalServices() {
}

func validatePlaylistsPath() error {
-   for _, path := range strings.Split(Server.PlaylistsPath, string(filepath.ListSeparator)) {
+   for path := range strings.SplitSeq(Server.PlaylistsPath, string(filepath.ListSeparator)) {
        _, err := doublestar.Match(path, "")
        if err != nil {
            log.Error("Invalid PlaylistsPath", "path", path, err)

@@ -480,7 +481,7 @@ func validatePlaylistsPath() error {
// It trims whitespace from each entry and ensures at least [DefaultInfoLanguage] is returned.
func parseLanguages(lang string) []string {
    var languages []string
-   for _, l := range strings.Split(lang, ",") {
+   for l := range strings.SplitSeq(lang, ",") {
        l = strings.TrimSpace(l)
        if l != "" {
            languages = append(languages, l)

@@ -494,13 +495,7 @@ func parseLanguages(lang string) []string {

func validatePurgeMissingOption() error {
    allowedValues := []string{consts.PurgeMissingNever, consts.PurgeMissingAlways, consts.PurgeMissingFull}
-   valid := false
-   for _, v := range allowedValues {
-       if v == Server.Scanner.PurgeMissing {
-           valid = true
-           break
-       }
-   }
+   valid := slices.Contains(allowedValues, Server.Scanner.PurgeMissing)
    if !valid {
        err := fmt.Errorf("invalid Scanner.PurgeMissing value: '%s'. Must be one of: %v", Server.Scanner.PurgeMissing, allowedValues)
        log.Error(err.Error())

@@ -753,7 +748,7 @@ func getConfigFile(cfgFile string) string {
    }
    cfgFile = os.Getenv("ND_CONFIGFILE")
    if cfgFile != "" {
-       if _, err := os.Stat(cfgFile); err == nil {
+       if _, err := os.Stat(cfgFile); err == nil { //nolint:gosec
            return cfgFile
        }
    }
```
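The config changes above swap `strings.Split` for the iterator-based `strings.SplitSeq` and replace a hand-rolled membership loop with `slices.Contains`. A small standalone sketch of both, with made-up values rather than the project's real config:

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

func main() {
	// SplitSeq yields substrings lazily, without allocating an intermediate slice.
	var langs []string
	for l := range strings.SplitSeq("en, pt-BR, ,fr", ",") {
		if l = strings.TrimSpace(l); l != "" {
			langs = append(langs, l)
		}
	}
	fmt.Println(langs) // [en pt-BR fr]

	// slices.Contains replaces the manual "loop and set a flag" membership check.
	allowed := []string{"never", "always", "full"}
	fmt.Println(slices.Contains(allowed, "always")) // true
}
```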
```diff
@@ -365,7 +365,7 @@ var _ = Describe("Agents", func() {
})

type mockAgent struct {
-   Args []interface{}
+   Args []any
    Err error
}

@@ -374,7 +374,7 @@ func (a *mockAgent) AgentName() string {
}

func (a *mockAgent) GetArtistMBID(_ context.Context, id string, name string) (string, error) {
-   a.Args = []interface{}{id, name}
+   a.Args = []any{id, name}
    if a.Err != nil {
        return "", a.Err
    }

@@ -382,7 +382,7 @@ func (a *mockAgent) GetArtistMBID(_ context.Context, id string, name string) (st
}

func (a *mockAgent) GetArtistURL(_ context.Context, id, name, mbid string) (string, error) {
-   a.Args = []interface{}{id, name, mbid}
+   a.Args = []any{id, name, mbid}
    if a.Err != nil {
        return "", a.Err
    }

@@ -390,7 +390,7 @@ func (a *mockAgent) GetArtistURL(_ context.Context, id, name, mbid string) (stri
}

func (a *mockAgent) GetArtistBiography(_ context.Context, id, name, mbid string) (string, error) {
-   a.Args = []interface{}{id, name, mbid}
+   a.Args = []any{id, name, mbid}
    if a.Err != nil {
        return "", a.Err
    }

@@ -398,7 +398,7 @@ func (a *mockAgent) GetArtistBiography(_ context.Context, id, name, mbid string)
}

func (a *mockAgent) GetArtistImages(_ context.Context, id, name, mbid string) ([]ExternalImage, error) {
-   a.Args = []interface{}{id, name, mbid}
+   a.Args = []any{id, name, mbid}
    if a.Err != nil {
        return nil, a.Err
    }

@@ -409,7 +409,7 @@ func (a *mockAgent) GetArtistImages(_ context.Context, id, name, mbid string) ([
}

func (a *mockAgent) GetSimilarArtists(_ context.Context, id, name, mbid string, limit int) ([]Artist, error) {
-   a.Args = []interface{}{id, name, mbid, limit}
+   a.Args = []any{id, name, mbid, limit}
    if a.Err != nil {
        return nil, a.Err
    }

@@ -420,7 +420,7 @@ func (a *mockAgent) GetSimilarArtists(_ context.Context, id, name, mbid string,
}

func (a *mockAgent) GetArtistTopSongs(_ context.Context, id, artistName, mbid string, count int) ([]Song, error) {
-   a.Args = []interface{}{id, artistName, mbid, count}
+   a.Args = []any{id, artistName, mbid, count}
    if a.Err != nil {
        return nil, a.Err
    }

@@ -431,7 +431,7 @@ func (a *mockAgent) GetArtistTopSongs(_ context.Context, id, artistName, mbid st
}

func (a *mockAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string) (*AlbumInfo, error) {
-   a.Args = []interface{}{name, artist, mbid}
+   a.Args = []any{name, artist, mbid}
    if a.Err != nil {
        return nil, a.Err
    }

@@ -444,7 +444,7 @@ func (a *mockAgent) GetAlbumInfo(ctx context.Context, name, artist, mbid string)
}

func (a *mockAgent) GetSimilarSongsByTrack(_ context.Context, id, name, artist, mbid string, count int) ([]Song, error) {
-   a.Args = []interface{}{id, name, artist, mbid, count}
+   a.Args = []any{id, name, artist, mbid, count}
    if a.Err != nil {
        return nil, a.Err
    }

@@ -455,7 +455,7 @@ func (a *mockAgent) GetSimilarSongsByTrack(_ context.Context, id, name, artist,
}

func (a *mockAgent) GetSimilarSongsByAlbum(_ context.Context, id, name, artist, mbid string, count int) ([]Song, error) {
-   a.Args = []interface{}{id, name, artist, mbid, count}
+   a.Args = []any{id, name, artist, mbid, count}
    if a.Err != nil {
        return nil, a.Err
    }

@@ -466,7 +466,7 @@ func (a *mockAgent) GetSimilarSongsByAlbum(_ context.Context, id, name, artist,
}

func (a *mockAgent) GetSimilarSongsByArtist(_ context.Context, id, name, mbid string, count int) ([]Song, error) {
-   a.Args = []interface{}{id, name, mbid, count}
+   a.Args = []any{id, name, mbid, count}
    if a.Err != nil {
        return nil, a.Err
    }

@@ -488,12 +488,12 @@ type testImageAgent struct {
    Name string
    Images []ExternalImage
    Err error
-   Args []interface{}
+   Args []any
}

func (t *testImageAgent) AgentName() string { return t.Name }

func (t *testImageAgent) GetArtistImages(_ context.Context, id, name, mbid string) ([]ExternalImage, error) {
-   t.Args = []interface{}{id, name, mbid}
+   t.Args = []any{id, name, mbid}
    return t.Images, t.Err
}

@@ -143,7 +143,7 @@ var _ = Describe("CacheWarmer", func() {

    It("processes items in batches", func() {
        cw := NewCacheWarmer(aw, fc).(*cacheWarmer)
-       for i := 0; i < 5; i++ {
+       for i := range 5 {
            cw.PreCache(model.MustParseArtworkID(fmt.Sprintf("al-%d", i)))
        }

@@ -79,7 +79,7 @@ func (a *albumArtworkReader) Reader(ctx context.Context) (io.ReadCloser, string,

func (a *albumArtworkReader) fromCoverArtPriority(ctx context.Context, ffmpeg ffmpeg.FFmpeg, priority string) []sourceFunc {
    var ff []sourceFunc
-   for _, pattern := range strings.Split(strings.ToLower(priority), ",") {
+   for pattern := range strings.SplitSeq(strings.ToLower(priority), ",") {
        pattern = strings.TrimSpace(pattern)
        switch {
        case pattern == "embedded":

@@ -99,7 +99,7 @@ func (a *artistReader) Reader(ctx context.Context) (io.ReadCloser, string, error

func (a *artistReader) fromArtistArtPriority(ctx context.Context, priority string) []sourceFunc {
    var ff []sourceFunc
-   for _, pattern := range strings.Split(strings.ToLower(priority), ",") {
+   for pattern := range strings.SplitSeq(strings.ToLower(priority), ",") {
        pattern = strings.TrimSpace(pattern)
        switch {
        case pattern == "external":

@@ -116,7 +116,7 @@ func (a *artistReader) fromArtistArtPriority(ctx context.Context, priority strin
func fromArtistFolder(ctx context.Context, artistFolder string, pattern string) sourceFunc {
    return func() (io.ReadCloser, string, error) {
        current := artistFolder
-       for i := 0; i < maxArtistFolderTraversalDepth; i++ {
+       for range maxArtistFolderTraversalDepth {
            if reader, path, err := findImageInFolder(ctx, current, pattern); err == nil {
                return reader, path, nil
            }

@@ -230,7 +230,7 @@ func fromURL(ctx context.Context, imageUrl *url.URL) (io.ReadCloser, string, err
    hc := http.Client{Timeout: 5 * time.Second}
    req, _ := http.NewRequestWithContext(ctx, http.MethodGet, imageUrl.String(), nil)
    req.Header.Set("User-Agent", consts.HTTPUserAgent)
-   resp, err := hc.Do(req)
+   resp, err := hc.Do(req) //nolint:gosec
    if err != nil {
        return nil, "", err
    }

@@ -4,6 +4,7 @@ import (
    "cmp"
    "context"
    "crypto/sha256"
+   "maps"
    "sync"
    "time"

@@ -53,9 +54,7 @@ func createBaseClaims() map[string]any {

func CreatePublicToken(claims map[string]any) (string, error) {
    tokenClaims := createBaseClaims()
-   for k, v := range claims {
-       tokenClaims[k] = v
-   }
+   maps.Copy(tokenClaims, claims)
    _, token, err := TokenAuth.Encode(tokenClaims)

    return token, err

@@ -66,9 +65,7 @@ func CreateExpiringPublicToken(exp time.Time, claims map[string]any) (string, er
    if !exp.IsZero() {
        tokenClaims[jwt.ExpirationKey] = exp.UTC().Unix()
    }
-   for k, v := range claims {
-       tokenClaims[k] = v
-   }
+   maps.Copy(tokenClaims, claims)
    _, token, err := TokenAuth.Encode(tokenClaims)

    return token, err
```
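The token-claim changes above use `maps.Copy`, which copies every key/value pair from the source map into the destination, overwriting keys that already exist. A minimal standalone example with invented claim values:

```go
package main

import (
	"fmt"
	"maps"
)

func main() {
	claims := map[string]any{"iss": "navidrome", "sub": "base"}
	extra := map[string]any{"sub": "user-1", "exp": 1700000000}

	// Equivalent to: for k, v := range extra { claims[k] = v }
	maps.Copy(claims, extra)

	fmt.Println(claims["sub"], claims["exp"]) // user-1 1700000000
}
```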
```diff
@@ -100,7 +97,7 @@ func TouchToken(token jwt.Token) (string, error) {
    return newToken, err
}

-func Validate(tokenStr string) (map[string]interface{}, error) {
+func Validate(tokenStr string) (map[string]any, error) {
    token, err := jwtauth.VerifyToken(TokenAuth, tokenStr)
    if err != nil {
        return nil, err

@@ -45,7 +45,7 @@ var _ = Describe("Auth", func() {
    })

    It("returns the claims from a valid JWT token", func() {
-       claims := map[string]interface{}{}
+       claims := map[string]any{}
        claims["iss"] = "issuer"
        claims["iat"] = time.Now().Unix()
        claims["exp"] = time.Now().Add(1 * time.Minute).Unix()

@@ -58,7 +58,7 @@ var _ = Describe("Auth", func() {
    })

    It("returns ErrExpired if the `exp` field is in the past", func() {
-       claims := map[string]interface{}{}
+       claims := map[string]any{}
        claims["iss"] = "issuer"
        claims["exp"] = time.Now().Add(-1 * time.Minute).Unix()
        _, tokenStr, err := auth.TokenAuth.Encode(claims)

@@ -93,7 +93,7 @@ var _ = Describe("Auth", func() {
    Describe("TouchToken", func() {
        It("updates the expiration time", func() {
            yesterday := time.Now().Add(-oneDay)
-           claims := map[string]interface{}{}
+           claims := map[string]any{}
            claims["iss"] = "issuer"
            claims["exp"] = yesterday.Unix()
            token, _, err := auth.TokenAuth.Encode(claims)
```

core/external/extdata_helper_test.go (vendored, 6 lines changed)

```diff
@@ -40,7 +40,7 @@ func (m *mockArtistRepo) Get(id string) (*model.Artist, error) {

// GetAll implements model.ArtistRepository.
func (m *mockArtistRepo) GetAll(options ...model.QueryOptions) (model.Artists, error) {
-   argsSlice := make([]interface{}, len(options))
+   argsSlice := make([]any, len(options))
    for i, v := range options {
        argsSlice[i] = v
    }

@@ -99,7 +99,7 @@ func (m *mockMediaFileRepo) GetAllByTags(_ model.TagName, _ []string, options ..

// GetAll implements model.MediaFileRepository.
func (m *mockMediaFileRepo) GetAll(options ...model.QueryOptions) (model.MediaFiles, error) {
-   argsSlice := make([]interface{}, len(options))
+   argsSlice := make([]any, len(options))
    for i, v := range options {
        argsSlice[i] = v
    }

@@ -152,7 +152,7 @@ func (m *mockAlbumRepo) Get(id string) (*model.Album, error) {

// GetAll implements model.AlbumRepository.
func (m *mockAlbumRepo) GetAll(options ...model.QueryOptions) (model.Albums, error) {
-   argsSlice := make([]interface{}, len(options))
+   argsSlice := make([]any, len(options))
    for i, v := range options {
        argsSlice[i] = v
    }
```

core/external/provider.go (vendored, 4 lines changed)

```diff
@@ -93,7 +93,7 @@ func NewProvider(ds model.DataStore, agents Agents) Provider {
}

func (e *provider) getAlbum(ctx context.Context, id string) (auxAlbum, error) {
-   var entity interface{}
+   var entity any
    entity, err := model.GetEntityByID(ctx, e.ds, id)
    if err != nil {
        return auxAlbum{}, err

@@ -187,7 +187,7 @@ func (e *provider) populateAlbumInfo(ctx context.Context, album auxAlbum) (auxAl
}

func (e *provider) getArtist(ctx context.Context, id string) (auxArtist, error) {
-   var entity interface{}
+   var entity any
    entity, err := model.GetEntityByID(ctx, e.ds, id)
    if err != nil {
        return auxArtist{}, err
```

```diff
@@ -159,7 +159,7 @@ type libraryRepositoryWrapper struct {
    pluginManager PluginUnloader
}

-func (r *libraryRepositoryWrapper) Save(entity interface{}) (string, error) {
+func (r *libraryRepositoryWrapper) Save(entity any) (string, error) {
    lib := entity.(*model.Library)
    if err := r.validateLibrary(lib); err != nil {
        return "", err

@@ -191,7 +191,7 @@ func (r *libraryRepositoryWrapper) Save(entity interface{}) (string, error) {
    return strconv.Itoa(lib.ID), nil
}

-func (r *libraryRepositoryWrapper) Update(id string, entity interface{}, _ ...string) error {
+func (r *libraryRepositoryWrapper) Update(id string, entity any, _ ...string) error {
    lib := entity.(*model.Library)
    libID, err := strconv.Atoi(id)
    if err != nil {

@@ -196,9 +196,7 @@ func (s *maintenanceService) getAffectedAlbumIDs(ctx context.Context, ids []stri
// refreshStatsAsync refreshes artist and album statistics in background goroutines
func (s *maintenanceService) refreshStatsAsync(ctx context.Context, affectedAlbumIDs []string) {
    // Refresh artist stats in background
-   s.wg.Add(1)
-   go func() {
-       defer s.wg.Done()
+   s.wg.Go(func() {
        bgCtx := request.AddValues(context.Background(), ctx)
        if _, err := s.ds.Artist(bgCtx).RefreshStats(true); err != nil {
            log.Error(bgCtx, "Error refreshing artist stats after deleting missing files", err)

@@ -214,7 +212,7 @@ func (s *maintenanceService) refreshStatsAsync(ctx context.Context, affectedAlbu
                log.Debug(bgCtx, "Successfully refreshed album stats after deleting missing files", "count", len(affectedAlbumIDs))
            }
        }
-   }()
+   })
}

// Wait waits for all background goroutines to complete.
```
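The `refreshStatsAsync` rewrite above uses the `WaitGroup.Go` helper added in recent Go releases, which starts the function in its own goroutine and handles the `Add(1)`/`Done()` bookkeeping internally. A standalone sketch of the pattern, not the project's service code:

```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	var wg sync.WaitGroup

	// wg.Go replaces the manual wg.Add(1) / go func() { defer wg.Done(); ... }() idiom.
	for i := range 3 {
		wg.Go(func() {
			fmt.Println("background task", i)
		})
	}
	wg.Wait()
}
```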
```diff
@@ -108,7 +108,7 @@ func (c *insightsCollector) sendInsights(ctx context.Context) {
        return
    }
    req.Header.Set("Content-Type", "application/json")
-   resp, err := hc.Do(req)
+   resp, err := hc.Do(req) //nolint:gosec
    if err != nil {
        log.Trace(ctx, "Could not send Insights data", err)
        return
    }

@@ -220,7 +220,7 @@ var staticData = sync.OnceValue(func() insights.Data {
    data.Config.ScanWatcherWait = uint64(math.Trunc(conf.Server.Scanner.WatcherWait.Seconds()))
    data.Config.ScanOnStartup = conf.Server.Scanner.ScanOnStartup
    data.Config.ReverseProxyConfigured = conf.Server.ExtAuth.TrustedSources != ""
-   data.Config.HasCustomPID = conf.Server.PID.Track != "" || conf.Server.PID.Album != ""
+   data.Config.HasCustomPID = conf.Server.PID.Track != consts.DefaultTrackPID || conf.Server.PID.Album != consts.DefaultAlbumPID
    data.Config.HasCustomTags = len(conf.Server.Tags) > 0

    return data

@@ -3,6 +3,7 @@ package playback
import (
    "fmt"
    "math/rand"
+   "strings"

    "github.com/navidrome/navidrome/log"
    "github.com/navidrome/navidrome/model"

@@ -21,11 +22,11 @@ func NewQueue() *Queue {
}

func (pd *Queue) String() string {
-   filenames := ""
+   var filenames strings.Builder
    for idx, item := range pd.Items {
-       filenames += fmt.Sprint(idx) + ":" + item.Path + " "
+       filenames.WriteString(fmt.Sprint(idx) + ":" + item.Path + " ")
    }
-   return fmt.Sprintf("#Items: %d, idx: %d, files: %s", len(pd.Items), pd.Index, filenames)
+   return fmt.Sprintf("#Items: %d, idx: %d, files: %s", len(pd.Items), pd.Index, filenames.String())
}

// returns the current mediafile or nil

@@ -45,7 +45,7 @@ func InPlaylistsPath(folder model.Folder) bool {
        return true
    }
    rel, _ := filepath.Rel(folder.LibraryPath, folder.AbsolutePath())
-   for _, path := range strings.Split(conf.Server.PlaylistsPath, string(filepath.ListSeparator)) {
+   for path := range strings.SplitSeq(conf.Server.PlaylistsPath, string(filepath.ListSeparator)) {
        if match, _ := doublestar.Match(path, rel); match {
            return true
        }

@@ -193,8 +193,8 @@ func (s *playlists) parseM3U(ctx context.Context, pls *model.Playlist, folder *m
        if line == "" || strings.HasPrefix(line, "#") {
            continue
        }
-       if strings.HasPrefix(line, "file://") {
-           line = strings.TrimPrefix(line, "file://")
+       if after, ok := strings.CutPrefix(line, "file://"); ok {
+           line = after
            line, _ = url.QueryUnescape(line)
        }
        if !model.IsAudioFile(line) {

@@ -533,7 +533,7 @@ type nspFile struct {
}

func (i *nspFile) UnmarshalJSON(data []byte) error {
-   m := map[string]interface{}{}
+   m := map[string]any{}
    err := json.Unmarshal(data, &m)
    if err != nil {
        return err
```
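The M3U parsing change above uses `strings.CutPrefix`, which both tests for and strips the prefix in a single call. A minimal standalone example with a made-up playlist line:

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

func main() {
	line := "file:///music/My%20Album/track.flac"

	// CutPrefix replaces the HasPrefix + TrimPrefix pair: ok reports whether
	// the prefix was present, and after is the string with it removed.
	if after, ok := strings.CutPrefix(line, "file://"); ok {
		path, _ := url.QueryUnescape(after)
		fmt.Println(path) // /music/My Album/track.flac
	}
}
```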
```diff
@@ -212,10 +212,7 @@ func (p *playTracker) NowPlaying(ctx context.Context, playerId string, playerNam

    // Calculate TTL based on remaining track duration. If position exceeds track duration,
    // remaining is set to 0 to avoid negative TTL.
-   remaining := int(mf.Duration) - position
-   if remaining < 0 {
-       remaining = 0
-   }
+   remaining := max(int(mf.Duration)-position, 0)
    // Add 5 seconds buffer to ensure the NowPlaying info is available slightly longer than the track duration.
    ttl := time.Duration(remaining+5) * time.Second
    _ = p.playMap.AddWithTTL(playerId, info, ttl)

@@ -87,7 +87,7 @@ func (r *shareRepositoryWrapper) newId() (string, error) {
    }
}

-func (r *shareRepositoryWrapper) Save(entity interface{}) (string, error) {
+func (r *shareRepositoryWrapper) Save(entity any) (string, error) {
    s := entity.(*model.Share)
    id, err := r.newId()
    if err != nil {

@@ -127,7 +127,7 @@ func (r *shareRepositoryWrapper) Save(entity interface{}) (string, error) {
    return id, err
}

-func (r *shareRepositoryWrapper) Update(id string, entity interface{}, _ ...string) error {
+func (r *shareRepositoryWrapper) Update(id string, entity any, _ ...string) error {
    cols := []string{"description", "downloadable"}

    // TODO Better handling of Share expiration

@@ -44,7 +44,7 @@ func newLocalStorage(u url.URL) storage.Storage {

func (s *localStorage) FS() (storage.MusicFS, error) {
    path := s.u.Path
-   if _, err := os.Stat(path); err != nil {
+   if _, err := os.Stat(path); err != nil { //nolint:gosec
        return nil, fmt.Errorf("%w: %s", err, path)
    }
    return &localFS{FS: os.DirFS(path), extractor: s.extractor}, nil

@@ -6,6 +6,7 @@ import (
    "errors"
    "fmt"
    "io/fs"
+   "maps"
    "net/url"
    "path"
    "testing/fstest"

@@ -135,9 +136,7 @@ func (ffs *FakeFS) UpdateTags(filePath string, newTags map[string]any, when ...t
    if err != nil {
        panic(err)
    }
-   for k, v := range newTags {
-       tags[k] = v
-   }
+   maps.Copy(tags, newTags)
    data, _ := json.Marshal(tags)
    f.Data = data
    ffs.Touch(filePath, when...)

@@ -180,9 +179,7 @@ func Track(num int, title string, tags ...map[string]any) map[string]any {
    ts["title"] = title
    ts["track"] = num
    for _, t := range tags {
-       for k, v := range t {
-           ts[k] = v
-       }
+       maps.Copy(ts, t)
    }
    return ts
}

@@ -200,9 +197,7 @@ func MP3(tags ...map[string]any) *fstest.MapFile {
func File(tags ...map[string]any) *fstest.MapFile {
    ts := map[string]any{}
    for _, t := range tags {
-       for k, v := range t {
-           ts[k] = v
-       }
+       maps.Copy(ts, t)
    }
    modTime := time.Now()
    if mt, ok := ts[fakeFileInfoModTime]; !ok {

@@ -50,12 +50,12 @@ type userRepositoryWrapper struct {
}

// Save implements rest.Persistable by delegating to the underlying repository.
-func (r *userRepositoryWrapper) Save(entity interface{}) (string, error) {
+func (r *userRepositoryWrapper) Save(entity any) (string, error) {
    return r.UserRepository.(rest.Persistable).Save(entity)
}

// Update implements rest.Persistable by delegating to the underlying repository.
-func (r *userRepositoryWrapper) Update(id string, entity interface{}, cols ...string) error {
+func (r *userRepositoryWrapper) Update(id string, entity any, cols ...string) error {
    return r.UserRepository.(rest.Persistable).Update(id, entity, cols...)
}
```
db/db.go (25 lines changed)

```diff
@@ -6,7 +6,9 @@ import (
    "embed"
    "fmt"
    "runtime"
+   "strings"

+   "github.com/maruel/natural"
    "github.com/mattn/go-sqlite3"
    "github.com/navidrome/navidrome/conf"
    _ "github.com/navidrome/navidrome/db/migrations"

@@ -31,7 +33,12 @@ func Db() *sql.DB {
    return singleton.GetInstance(func() *sql.DB {
        sql.Register(Driver, &sqlite3.SQLiteDriver{
            ConnectHook: func(conn *sqlite3.SQLiteConn) error {
-               return conn.RegisterFunc("SEEDEDRAND", hasher.HashFunc(), false)
+               if err := conn.RegisterFunc("SEEDEDRAND", hasher.HashFunc(), false); err != nil {
+                   return err
+               }
+               return conn.RegisterCollation("NATURALSORT", func(a, b string) int {
+                   return natural.Compare(strings.ToLower(a), strings.ToLower(b))
+               })
            },
        })
        Path = conf.Server.DbPath

@@ -126,7 +133,7 @@ func Optimize(ctx context.Context) {
    }
    log.Debug(ctx, "Optimizing open connections", "numConns", numConns)
    var conns []*sql.Conn
-   for i := 0; i < numConns; i++ {
+   for range numConns {
        conn, err := Db().Conn(ctx)
        conns = append(conns, conn)
        if err != nil {

@@ -147,8 +154,8 @@ func Optimize(ctx context.Context) {

type statusLogger struct{ numPending int }

-func (*statusLogger) Fatalf(format string, v ...interface{}) { log.Fatal(fmt.Sprintf(format, v...)) }
-func (l *statusLogger) Printf(format string, v ...interface{}) {
+func (*statusLogger) Fatalf(format string, v ...any) { log.Fatal(fmt.Sprintf(format, v...)) }
+func (l *statusLogger) Printf(format string, v ...any) {
    if len(v) < 1 {
        return
    }

@@ -183,27 +190,27 @@ type logAdapter struct {
    silent bool
}

-func (l *logAdapter) Fatal(v ...interface{}) {
+func (l *logAdapter) Fatal(v ...any) {
    log.Fatal(l.ctx, fmt.Sprint(v...))
}

-func (l *logAdapter) Fatalf(format string, v ...interface{}) {
+func (l *logAdapter) Fatalf(format string, v ...any) {
    log.Fatal(l.ctx, fmt.Sprintf(format, v...))
}

-func (l *logAdapter) Print(v ...interface{}) {
+func (l *logAdapter) Print(v ...any) {
    if !l.silent {
        log.Info(l.ctx, fmt.Sprint(v...))
    }
}

-func (l *logAdapter) Println(v ...interface{}) {
+func (l *logAdapter) Println(v ...any) {
    if !l.silent {
        log.Info(l.ctx, fmt.Sprintln(v...))
    }
}

-func (l *logAdapter) Printf(format string, v ...interface{}) {
+func (l *logAdapter) Printf(format string, v ...any) {
    if !l.silent {
        log.Info(l.ctx, fmt.Sprintf(format, v...))
    }
```
@@ -0,0 +1,152 @@
```sql
-- +goose Up

-- Change order_*/sort_* column collation from NOCASE to NATURALSORT.
-- This way bare ORDER BY on these columns automatically uses natural sorting,
-- without needing explicit COLLATE NATURALSORT in every query.
PRAGMA writable_schema = ON;
UPDATE sqlite_master
SET sql = replace(sql, 'collate NOCASE', 'collate NATURALSORT')
WHERE type = 'table' AND name IN ('artist', 'album', 'media_file', 'playlist', 'radio');
PRAGMA writable_schema = OFF;

-- Recreate indexes on order_* and sort expression fields to use NATURALSORT collation.
-- This enables natural number ordering (e.g., "Album 2" before "Album 10").

-- Artist indexes
drop index if exists artist_order_artist_name;
create index artist_order_artist_name
    on artist (order_artist_name collate NATURALSORT);

drop index if exists artist_sort_name;
create index artist_sort_name
    on artist (coalesce(nullif(sort_artist_name,''),order_artist_name) collate NATURALSORT);

-- Album indexes
drop index if exists album_order_album_name;
create index album_order_album_name
    on album (order_album_name collate NATURALSORT);

drop index if exists album_order_album_artist_name;
create index album_order_album_artist_name
    on album (order_album_artist_name collate NATURALSORT);

drop index if exists album_alphabetical_by_artist;
create index album_alphabetical_by_artist
    on album (compilation, order_album_artist_name collate NATURALSORT, order_album_name collate NATURALSORT);

drop index if exists album_sort_name;
create index album_sort_name
    on album (coalesce(nullif(sort_album_name,''),order_album_name) collate NATURALSORT);

drop index if exists album_sort_album_artist_name;
create index album_sort_album_artist_name
    on album (coalesce(nullif(sort_album_artist_name,''),order_album_artist_name) collate NATURALSORT);

-- Media file indexes
drop index if exists media_file_order_title;
create index media_file_order_title
    on media_file (order_title collate NATURALSORT);

drop index if exists media_file_order_album_name;
create index media_file_order_album_name
    on media_file (order_album_name collate NATURALSORT);

drop index if exists media_file_order_artist_name;
create index media_file_order_artist_name
    on media_file (order_artist_name collate NATURALSORT);

drop index if exists media_file_sort_title;
create index media_file_sort_title
    on media_file (coalesce(nullif(sort_title,''),order_title) collate NATURALSORT);

drop index if exists media_file_sort_artist_name;
create index media_file_sort_artist_name
    on media_file (coalesce(nullif(sort_artist_name,''),order_artist_name) collate NATURALSORT);

drop index if exists media_file_sort_album_name;
create index media_file_sort_album_name
    on media_file (coalesce(nullif(sort_album_name,''),order_album_name) collate NATURALSORT);

-- Playlist and radio indexes: recreate to match new NATURALSORT column collation
drop index if exists playlist_name;
create index playlist_name
    on playlist (name collate NATURALSORT);

drop index if exists radio_name;
create index radio_name
    on radio (name collate NATURALSORT);

-- +goose Down

-- Restore NOCASE column collation
PRAGMA writable_schema = ON;
UPDATE sqlite_master
SET sql = replace(sql, 'collate NATURALSORT', 'collate NOCASE')
WHERE type = 'table' AND name IN ('artist', 'album', 'media_file', 'playlist', 'radio');
PRAGMA writable_schema = OFF;

-- Restore NOCASE collation indexes

-- Artist indexes
drop index if exists artist_order_artist_name;
create index artist_order_artist_name
    on artist (order_artist_name);

drop index if exists artist_sort_name;
create index artist_sort_name
    on artist (coalesce(nullif(sort_artist_name,''),order_artist_name) collate NOCASE);

-- Album indexes
drop index if exists album_order_album_name;
create index album_order_album_name
    on album (order_album_name);

drop index if exists album_order_album_artist_name;
create index album_order_album_artist_name
    on album (order_album_artist_name);

drop index if exists album_alphabetical_by_artist;
create index album_alphabetical_by_artist
    on album (compilation, order_album_artist_name, order_album_name);

drop index if exists album_sort_name;
create index album_sort_name
    on album (coalesce(nullif(sort_album_name,''),order_album_name) collate NOCASE);

drop index if exists album_sort_album_artist_name;
create index album_sort_album_artist_name
    on album (coalesce(nullif(sort_album_artist_name,''),order_album_artist_name) collate NOCASE);

-- Media file indexes
drop index if exists media_file_order_title;
create index media_file_order_title
    on media_file (order_title);

drop index if exists media_file_order_album_name;
create index media_file_order_album_name
    on media_file (order_album_name);

drop index if exists media_file_order_artist_name;
create index media_file_order_artist_name
    on media_file (order_artist_name);

drop index if exists media_file_sort_title;
create index media_file_sort_title
    on media_file (coalesce(nullif(sort_title,''),order_title) collate NOCASE);

drop index if exists media_file_sort_artist_name;
create index media_file_sort_artist_name
    on media_file (coalesce(nullif(sort_artist_name,''),order_artist_name) collate NOCASE);

drop index if exists media_file_sort_album_name;
create index media_file_sort_album_name
    on media_file (coalesce(nullif(sort_album_name,''),order_album_name) collate NOCASE);

-- Restore playlist and radio indexes
drop index if exists playlist_name;
create index playlist_name
    on playlist (name);

drop index if exists radio_name;
create index radio_name
    on radio (name);
```
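The NATURALSORT collation used by these indexes is registered in db.go above via `github.com/maruel/natural`. A standalone sketch of the comparison it provides, sorting names the way the migration comment describes ("Album 2" before "Album 10"); the sample data is invented:

```go
package main

import (
	"fmt"
	"slices"
	"strings"

	"github.com/maruel/natural"
)

func main() {
	albums := []string{"Album 10", "Album 2", "album 1"}

	// Same comparison the SQLite collation hook registers: case-insensitive,
	// with embedded numbers compared by value rather than character by character.
	slices.SortFunc(albums, func(a, b string) int {
		return natural.Compare(strings.ToLower(a), strings.ToLower(b))
	})

	fmt.Println(albums) // [album 1 Album 2 Album 10]
}
```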
go.mod (24 lines changed)

```diff
@@ -7,7 +7,7 @@ replace (
    github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 => github.com/deluan/tag v0.0.0-20241002021117-dfe5e6ea396d

    // Fork to implement raw tags support
-   go.senan.xyz/taglib => github.com/deluan/go-taglib v0.0.0-20260119020817-8753c7531798
+   go.senan.xyz/taglib => github.com/deluan/go-taglib v0.0.0-20260212150743-3f1b97cb0d1e
)

require (

@@ -46,13 +46,13 @@ require (
    github.com/lestrrat-go/jwx/v2 v2.1.6
    github.com/maruel/natural v1.3.0
    github.com/matoous/go-nanoid/v2 v2.1.0
-   github.com/mattn/go-sqlite3 v1.14.33
+   github.com/mattn/go-sqlite3 v1.14.34
    github.com/microcosm-cc/bluemonday v1.0.27
    github.com/mileusna/useragent v1.3.5
    github.com/onsi/ginkgo/v2 v2.28.1
    github.com/onsi/gomega v1.39.1
    github.com/pelletier/go-toml/v2 v2.2.4
-   github.com/pocketbase/dbx v1.11.0
+   github.com/pocketbase/dbx v1.12.0
    github.com/pressly/goose/v3 v3.26.0
    github.com/prometheus/client_golang v1.23.2
    github.com/rjeczalik/notify v0.9.3

@@ -68,12 +68,12 @@ require (
    github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342
    go.senan.xyz/taglib v0.11.1
    go.uber.org/goleak v1.3.0
-   golang.org/x/image v0.35.0
-   golang.org/x/net v0.49.0
+   golang.org/x/image v0.36.0
+   golang.org/x/net v0.50.0
    golang.org/x/sync v0.19.0
-   golang.org/x/sys v0.40.0
-   golang.org/x/term v0.39.0
-   golang.org/x/text v0.33.0
+   golang.org/x/sys v0.41.0
+   golang.org/x/term v0.40.0
+   golang.org/x/text v0.34.0
    golang.org/x/time v0.14.0
    gopkg.in/yaml.v3 v3.0.1
)

@@ -139,11 +139,11 @@ require (
    go.uber.org/multierr v1.11.0 // indirect
    go.yaml.in/yaml/v2 v2.4.3 // indirect
    go.yaml.in/yaml/v3 v3.0.4 // indirect
-   golang.org/x/crypto v0.47.0 // indirect
+   golang.org/x/crypto v0.48.0 // indirect
    golang.org/x/exp v0.0.0-20260112195511-716be5621a96 // indirect
-   golang.org/x/mod v0.32.0 // indirect
-   golang.org/x/telemetry v0.0.0-20260109210033-bd525da824e2 // indirect
-   golang.org/x/tools v0.41.0 // indirect
+   golang.org/x/mod v0.33.0 // indirect
+   golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4 // indirect
+   golang.org/x/tools v0.42.0 // indirect
    google.golang.org/protobuf v1.36.11 // indirect
    gopkg.in/ini.v1 v1.67.1 // indirect
    gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
```
go.sum (48 lines changed)

```diff
@@ -36,8 +36,8 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40=
-github.com/deluan/go-taglib v0.0.0-20260119020817-8753c7531798 h1:q4fvcIK/LxElpyQILCejG6WPYjVb2F/4P93+k017ANk=
-github.com/deluan/go-taglib v0.0.0-20260119020817-8753c7531798/go.mod h1:sKDN0U4qXDlq6LFK+aOAkDH4Me5nDV1V/A4B+B69xBA=
+github.com/deluan/go-taglib v0.0.0-20260212150743-3f1b97cb0d1e h1:pwx3kmHzl1N28coJV2C1zfm2ZF0qkQcGX+Z6BvXteB4=
+github.com/deluan/go-taglib v0.0.0-20260212150743-3f1b97cb0d1e/go.mod h1:sKDN0U4qXDlq6LFK+aOAkDH4Me5nDV1V/A4B+B69xBA=
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf h1:tb246l2Zmpt/GpF9EcHCKTtwzrd0HGfEmoODFA/qnk4=
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf/go.mod h1:tSgDythFsl0QgS/PFWfIZqcJKnkADWneY80jaVRlqK8=
github.com/deluan/sanitize v0.0.0-20241120162836-fdfd8fdfaa55 h1:wSCnggTs2f2ji6nFwQmfwgINcmSMj0xF0oHnoyRSPe4=

@@ -179,8 +179,8 @@ github.com/matoous/go-nanoid/v2 v2.1.0 h1:P64+dmq21hhWdtvZfEAofnvJULaRR1Yib0+PnU
github.com/matoous/go-nanoid/v2 v2.1.0/go.mod h1:KlbGNQ+FhrUNIHUxZdL63t7tl4LaPkZNpUULS8H4uVM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-sqlite3 v1.14.33 h1:A5blZ5ulQo2AtayQ9/limgHEkFreKj1Dv226a1K73s0=
-github.com/mattn/go-sqlite3 v1.14.33/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
+github.com/mattn/go-sqlite3 v1.14.34 h1:3NtcvcUnFBPsuRcno8pUtupspG/GM+9nZ88zgJcp6Zk=
+github.com/mattn/go-sqlite3 v1.14.34/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
github.com/mfridman/tparse v0.18.0 h1:wh6dzOKaIwkUGyKgOntDW4liXSo37qg5AXbIhkMV3vE=

@@ -210,8 +210,8 @@ github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/pocketbase/dbx v1.11.0 h1:LpZezioMfT3K4tLrqA55wWFw1EtH1pM4tzSVa7kgszU=
-github.com/pocketbase/dbx v1.11.0/go.mod h1:xXRCIAKTHMgUCyCKZm55pUOdvFziJjQfXaWKhu2vhMs=
+github.com/pocketbase/dbx v1.12.0 h1:/oLErM+A0b4xI0PWTGPqSDVjzix48PqI/bng2l0PzoA=
+github.com/pocketbase/dbx v1.12.0/go.mod h1:xXRCIAKTHMgUCyCKZm55pUOdvFziJjQfXaWKhu2vhMs=
github.com/pressly/goose/v3 v3.26.0 h1:KJakav68jdH0WDvoAcj8+n61WqOIaPGgH0bJWS6jpmM=
github.com/pressly/goose/v3 v3.26.0/go.mod h1:4hC1KrritdCxtuFsqgs1R4AU5bWtTAf+cnWvfhf2DNY=
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=

@@ -319,20 +319,20 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
-golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
-golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
+golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
+golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 h1:Z/6YuSHTLOHfNFdb8zVZomZr7cqNgTJvA8+Qz75D8gU=
golang.org/x/exp v0.0.0-20260112195511-716be5621a96/go.mod h1:nzimsREAkjBCIEFtHiYkrJyT+2uy9YZJB7H1k68CXZU=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
-golang.org/x/image v0.35.0 h1:LKjiHdgMtO8z7Fh18nGY6KDcoEtVfsgLDPeLyguqb7I=
-golang.org/x/image v0.35.0/go.mod h1:MwPLTVgvxSASsxdLzKrl8BRFuyqMyGhLwmC+TO1Sybk=
+golang.org/x/image v0.36.0 h1:Iknbfm1afbgtwPTmHnS2gTM/6PPZfH+z2EFuOkSbqwc=
+golang.org/x/image v0.36.0/go.mod h1:YsWD2TyyGKiIX1kZlu9QfKIsQ4nAAK9bdgdrIsE7xy4=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
-golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
-golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
+golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
+golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=

@@ -344,8 +344,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
-golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
-golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
+golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
+golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=

@@ -370,11 +370,11 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
```
|
||||
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
|
||||
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/telemetry v0.0.0-20260109210033-bd525da824e2 h1:O1cMQHRfwNpDfDJerqRoE2oD+AFlyid87D40L/OkkJo=
|
||||
golang.org/x/telemetry v0.0.0-20260109210033-bd525da824e2/go.mod h1:b7fPSJ0pKZ3ccUh8gnTONJxhn3c/PS6tyzQvyqw4iA8=
|
||||
golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4 h1:bTLqdHv7xrGlFbvf5/TXNxy/iUwwdkjhqQTJDjW7aj0=
|
||||
golang.org/x/telemetry v0.0.0-20260209163413-e7419c687ee4/go.mod h1:g5NllXBEermZrmR51cJDQxmJUHUOfRAaNyWBM+R+548=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
@@ -383,8 +383,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
|
||||
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
|
||||
golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
|
||||
golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
@@ -395,8 +395,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
|
||||
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
@@ -406,8 +406,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
|
||||
golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
||||
|
||||
24
log/log.go
24
log/log.go
@@ -19,7 +19,7 @@ import (

type Level uint32

type LevelFunc = func(ctx interface{}, msg interface{}, keyValuePairs ...interface{})
type LevelFunc = func(ctx any, msg any, keyValuePairs ...any)

var redacted = &Hook{
AcceptedLevels: logrus.AllLevels,
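Most of the Go hunks in this comparison, starting with the log package hunk above, mechanically replace interface{} with any. Since Go 1.18, `any` is a built-in alias for `interface{}` (type any = interface{}), so the two spellings denote the identical type and the rewrite cannot change behaviour. A minimal sketch, using hypothetical alias names, showing the two signatures are interchangeable:

```go
package main

import "fmt"

// `any` is an alias for `interface{}`, so these two aliases name the same
// function type; the names oldLevelFunc/newLevelFunc are just for illustration.
type oldLevelFunc = func(ctx interface{}, msg interface{}, keyValuePairs ...interface{})
type newLevelFunc = func(ctx any, msg any, keyValuePairs ...any)

func main() {
	var f oldLevelFunc = func(ctx any, msg any, kv ...any) {
		fmt.Println(ctx, msg, kv)
	}
	var g newLevelFunc = f // identical types: no conversion needed
	g("ctx", "hello", "key", "value")
}
```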
@@ -152,7 +152,7 @@ func Redact(msg string) string {
|
||||
return r
|
||||
}
|
||||
|
||||
func NewContext(ctx context.Context, keyValuePairs ...interface{}) context.Context {
|
||||
func NewContext(ctx context.Context, keyValuePairs ...any) context.Context {
|
||||
if ctx == nil {
|
||||
ctx = context.Background()
|
||||
}
|
||||
@@ -184,32 +184,32 @@ func IsGreaterOrEqualTo(level Level) bool {
|
||||
return shouldLog(level, 2)
|
||||
}
|
||||
|
||||
func Fatal(args ...interface{}) {
|
||||
func Fatal(args ...any) {
|
||||
Log(LevelFatal, args...)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func Error(args ...interface{}) {
|
||||
func Error(args ...any) {
|
||||
Log(LevelError, args...)
|
||||
}
|
||||
|
||||
func Warn(args ...interface{}) {
|
||||
func Warn(args ...any) {
|
||||
Log(LevelWarn, args...)
|
||||
}
|
||||
|
||||
func Info(args ...interface{}) {
|
||||
func Info(args ...any) {
|
||||
Log(LevelInfo, args...)
|
||||
}
|
||||
|
||||
func Debug(args ...interface{}) {
|
||||
func Debug(args ...any) {
|
||||
Log(LevelDebug, args...)
|
||||
}
|
||||
|
||||
func Trace(args ...interface{}) {
|
||||
func Trace(args ...any) {
|
||||
Log(LevelTrace, args...)
|
||||
}
|
||||
|
||||
func Log(level Level, args ...interface{}) {
|
||||
func Log(level Level, args ...any) {
|
||||
if !shouldLog(level, 3) {
|
||||
return
|
||||
}
|
||||
@@ -250,7 +250,7 @@ func shouldLog(requiredLevel Level, skip int) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func parseArgs(args []interface{}) (*logrus.Entry, string) {
|
||||
func parseArgs(args []any) (*logrus.Entry, string) {
|
||||
var l *logrus.Entry
|
||||
var err error
|
||||
if args[0] == nil {
|
||||
@@ -289,7 +289,7 @@ func parseArgs(args []interface{}) (*logrus.Entry, string) {
|
||||
return l, ""
|
||||
}
|
||||
|
||||
func addFields(logger *logrus.Entry, keyValuePairs []interface{}) *logrus.Entry {
|
||||
func addFields(logger *logrus.Entry, keyValuePairs []any) *logrus.Entry {
|
||||
for i := 0; i < len(keyValuePairs); i += 2 {
|
||||
switch name := keyValuePairs[i].(type) {
|
||||
case error:
|
||||
@@ -316,7 +316,7 @@ func addFields(logger *logrus.Entry, keyValuePairs []interface{}) *logrus.Entry
|
||||
return logger
|
||||
}
|
||||
|
||||
func extractLogger(ctx interface{}) (*logrus.Entry, error) {
|
||||
func extractLogger(ctx any) (*logrus.Entry, error) {
|
||||
switch ctx := ctx.(type) {
|
||||
case *logrus.Entry:
|
||||
return ctx, nil
|
||||
|
||||
@@ -23,6 +23,7 @@ var fieldMap = map[string]*mappedField{
|
||||
"releasedate": {field: "media_file.release_date"},
|
||||
"size": {field: "media_file.size"},
|
||||
"compilation": {field: "media_file.compilation"},
|
||||
"explicitstatus": {field: "media_file.explicit_status"},
|
||||
"dateadded": {field: "media_file.created_at"},
|
||||
"datemodified": {field: "media_file.updated_at"},
|
||||
"discsubtitle": {field: "media_file.disc_subtitle"},
|
||||
|
||||
@@ -41,7 +41,7 @@ type DataStore interface {
|
||||
Scrobble(ctx context.Context) ScrobbleRepository
|
||||
Plugin(ctx context.Context) PluginRepository
|
||||
|
||||
Resource(ctx context.Context, model interface{}) ResourceRepository
|
||||
Resource(ctx context.Context, model any) ResourceRepository
|
||||
|
||||
WithTx(block func(tx DataStore) error, scope ...string) error
|
||||
WithTxImmediate(block func(tx DataStore) error, scope ...string) error
|
||||
|
||||
@@ -5,7 +5,7 @@ import (
|
||||
)
|
||||
|
||||
// TODO: Should the type be encoded in the ID?
|
||||
func GetEntityByID(ctx context.Context, ds DataStore, id string) (interface{}, error) {
|
||||
func GetEntityByID(ctx context.Context, ds DataStore, id string) (any, error) {
|
||||
ar, err := ds.Artist(ctx).Get(id)
|
||||
if err == nil {
|
||||
return ar, nil
|
||||
|
||||
@@ -38,7 +38,7 @@ type MediaFile struct {
AlbumArtistID string `structs:"album_artist_id" json:"albumArtistId"` // Deprecated: Use Participants instead
// AlbumArtist is the display name used for the album artist.
AlbumArtist string `structs:"album_artist" json:"albumArtist"`
AlbumID string `structs:"album_id" json:"albumId"`
AlbumID string `structs:"album_id" json:"albumId" hash:"ignore"`
HasCoverArt bool `structs:"has_cover_art" json:"hasCoverArt"`
TrackNumber int `structs:"track_number" json:"trackNumber"`
DiscNumber int `structs:"disc_number" json:"discNumber"`
@@ -140,7 +140,7 @@ func (mf MediaFile) Hash() string {
}
hash, _ := hashstructure.Hash(mf, opts)
sum := md5.New()
sum.Write([]byte(fmt.Sprintf("%d", hash)))
sum.Write(fmt.Appendf(nil, "%d", hash))
sum.Write(mf.Tags.Hash())
sum.Write(mf.Participants.Hash())
return fmt.Sprintf("%x", sum.Sum(nil))

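The Hash() hunk above swaps []byte(fmt.Sprintf(...)) for fmt.Appendf (Go 1.19+), which formats straight into a byte slice and skips the intermediate string allocation. A small sketch showing both forms produce the same bytes:

```go
package main

import (
	"bytes"
	"fmt"
)

func main() {
	var hash uint64 = 1234567890 // stand-in for the hashstructure result

	// Old: format into a string, then copy it into a fresh []byte.
	formatted := []byte(fmt.Sprintf("%d", hash))
	// New (Go 1.19+): fmt.Appendf formats directly into a byte slice.
	appended := fmt.Appendf(nil, "%d", hash)

	fmt.Println(bytes.Equal(formatted, appended)) // true: same bytes, one less allocation
}
```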
@@ -268,8 +268,8 @@ func parseID3Pairs(name model.TagName, lowered model.Tags) []string {
prefix := string(name) + ":"
for tagKey, tagValues := range lowered {
keyStr := string(tagKey)
if strings.HasPrefix(keyStr, prefix) {
keyPart := strings.TrimPrefix(keyStr, prefix)
if after, ok := strings.CutPrefix(keyStr, prefix); ok {
keyPart := after
if keyPart == string(name) {
keyPart = ""
}

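The parseID3Pairs hunk above collapses a HasPrefix check followed by TrimPrefix into a single strings.CutPrefix call (Go 1.20+). A minimal sketch with a made-up prefix and tag key:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	prefix := "performer:"         // hypothetical tag prefix
	key := "performer:lead vocals" // hypothetical tag key

	// Old shape: test the prefix, then trim it in a second pass.
	if strings.HasPrefix(key, prefix) {
		fmt.Println(strings.TrimPrefix(key, prefix))
	}

	// New shape: CutPrefix reports whether the prefix was present and
	// returns the remainder in one call.
	if after, ok := strings.CutPrefix(key, prefix); ok {
		fmt.Println(after)
	}
}
```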
@@ -49,8 +49,8 @@ func createGetPID(hash hashFunc) getPIDFunc {
}
getPID = func(mf model.MediaFile, md Metadata, spec string, prependLibId bool) string {
pid := ""
fields := strings.Split(spec, "|")
for _, field := range fields {
fields := strings.SplitSeq(spec, "|")
for field := range fields {
attributes := strings.Split(field, ",")
hasValue := false
values := slice.Map(attributes, func(attr string) string {

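Here (and again in the play-queue hunk further down) strings.Split plus a slice loop becomes strings.SplitSeq (Go 1.24+), which yields the pieces as an iterator instead of allocating a []string. A sketch with an illustrative PID spec string:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	spec := "musicbrainz_trackid|albumid,discnumber,tracknumber,title" // illustrative spec

	// strings.Split builds the whole slice up front...
	for _, field := range strings.Split(spec, "|") {
		fmt.Println("slice:", field)
	}

	// ...while SplitSeq (Go 1.24+) produces the same fields lazily,
	// one at a time, via an iter.Seq[string].
	for field := range strings.SplitSeq(spec, "|") {
		fmt.Println("seq:  ", field)
	}
}
```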
@@ -51,13 +51,13 @@ func ParseTargets(libFolders []string) ([]ScanTarget, error) {
}

// Split by the first colon
colonIdx := strings.Index(part, ":")
if colonIdx == -1 {
before, after, ok := strings.Cut(part, ":")
if !ok {
return nil, fmt.Errorf("invalid target format: %q (expected libraryID:folderPath)", part)
}

libIDStr := part[:colonIdx]
folderPath := part[colonIdx+1:]
libIDStr := before
folderPath := after

libID, err := strconv.Atoi(libIDStr)
if err != nil {

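The ParseTargets hunk above replaces manual strings.Index slicing with strings.Cut (Go 1.18+), which splits around the first separator and reports whether it was found. A sketch parsing a hypothetical libraryID:folderPath target:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	part := "1:/music/library" // hypothetical target string

	// strings.Cut splits on the first ":" and tells us whether one existed,
	// replacing the Index check and manual slicing from the old code.
	before, after, ok := strings.Cut(part, ":")
	if !ok {
		fmt.Printf("invalid target format: %q (expected libraryID:folderPath)\n", part)
		return
	}

	libID, err := strconv.Atoi(before)
	if err != nil {
		fmt.Println("bad library id:", err)
		return
	}
	fmt.Println(libID, after) // 1 /music/library
}
```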
@@ -22,8 +22,8 @@ type Share struct {
|
||||
Format string `structs:"format" json:"format,omitempty"`
|
||||
MaxBitRate int `structs:"max_bit_rate" json:"maxBitRate,omitempty"`
|
||||
VisitCount int `structs:"visit_count" json:"visitCount,omitempty"`
|
||||
CreatedAt time.Time `structs:"created_at" json:"createdAt,omitempty"`
|
||||
UpdatedAt time.Time `structs:"updated_at" json:"updatedAt,omitempty"`
|
||||
CreatedAt time.Time `structs:"created_at" json:"createdAt"`
|
||||
UpdatedAt time.Time `structs:"updated_at" json:"updatedAt"`
|
||||
Tracks MediaFiles `structs:"-" json:"tracks,omitempty"`
|
||||
Albums Albums `structs:"-" json:"albums,omitempty"`
|
||||
URL string `structs:"-" json:"-"`
|
||||
|
||||
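The Share hunk above drops ,omitempty from the CreatedAt/UpdatedAt JSON tags. That option only ever omits false, 0, nil, and empty strings/slices/maps; a struct value such as time.Time is never considered empty, so the tag was a no-op there. A small sketch (hypothetical type names) showing that both forms marshal the zero time identically:

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type withOmitempty struct {
	CreatedAt time.Time `json:"createdAt,omitempty"`
}

type withoutOmitempty struct {
	CreatedAt time.Time `json:"createdAt"`
}

func main() {
	a, _ := json.Marshal(withOmitempty{})
	b, _ := json.Marshal(withoutOmitempty{})
	// encoding/json never treats struct values as "empty", so omitempty
	// changes nothing: both print {"createdAt":"0001-01-01T00:00:00Z"}
	fmt.Println(string(a))
	fmt.Println(string(b))
}
```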
@@ -144,10 +144,8 @@ func (t Tags) Merge(tags Tags) {
}

func (t Tags) Add(name TagName, v string) {
for _, existing := range t[name] {
if existing == v {
return
}
if slices.Contains(t[name], v) {
return
}
t[name] = append(t[name], v)
}

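Tags.Add above swaps a hand-written membership loop for slices.Contains from the standard slices package (Go 1.21+). A minimal before/after sketch:

```go
package main

import (
	"fmt"
	"slices"
)

func main() {
	values := []string{"rock", "pop"} // illustrative tag values
	v := "rock"

	// Old: explicit loop over the existing values.
	found := false
	for _, existing := range values {
		if existing == v {
			found = true
			break
		}
	}

	// New: slices.Contains (Go 1.21+) expresses the same check in one call.
	fmt.Println(found, slices.Contains(values, v)) // true true
}
```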
@@ -22,7 +22,7 @@ type User struct {
|
||||
Password string `structs:"-" json:"-"`
|
||||
// This is used to set or change a password when calling Put. If it is empty, the password is not changed.
|
||||
// It is received from the UI with the name "password"
|
||||
NewPassword string `structs:"password,omitempty" json:"password,omitempty"`
|
||||
NewPassword string `structs:"password,omitempty" json:"password,omitempty"` //nolint:gosec
|
||||
// If changing the password, this is also required
|
||||
CurrentPassword string `structs:"current_password,omitempty" json:"currentPassword,omitempty"`
|
||||
}
|
||||
|
||||
@@ -145,11 +145,11 @@ func recentlyAddedSort() string {
|
||||
return "created_at"
|
||||
}
|
||||
|
||||
func recentlyPlayedFilter(string, interface{}) Sqlizer {
|
||||
func recentlyPlayedFilter(string, any) Sqlizer {
|
||||
return Gt{"play_count": 0}
|
||||
}
|
||||
|
||||
func yearFilter(_ string, value interface{}) Sqlizer {
|
||||
func yearFilter(_ string, value any) Sqlizer {
|
||||
return Or{
|
||||
And{
|
||||
Gt{"min_year": 0},
|
||||
@@ -160,14 +160,14 @@ func yearFilter(_ string, value interface{}) Sqlizer {
|
||||
}
|
||||
}
|
||||
|
||||
func artistFilter(_ string, value interface{}) Sqlizer {
|
||||
func artistFilter(_ string, value any) Sqlizer {
|
||||
return Or{
|
||||
Exists("json_tree(participants, '$.albumartist')", Eq{"value": value}),
|
||||
Exists("json_tree(participants, '$.artist')", Eq{"value": value}),
|
||||
}
|
||||
}
|
||||
|
||||
func artistRoleFilter(name string, value interface{}) Sqlizer {
|
||||
func artistRoleFilter(name string, value any) Sqlizer {
|
||||
roleName := strings.TrimSuffix(strings.TrimPrefix(name, "role_"), "_id")
|
||||
|
||||
// Check if the role name is valid. If not, return an invalid filter
|
||||
@@ -177,7 +177,7 @@ func artistRoleFilter(name string, value interface{}) Sqlizer {
|
||||
return Exists(fmt.Sprintf("json_tree(participants, '$.%s')", roleName), Eq{"value": value})
|
||||
}
|
||||
|
||||
func allRolesFilter(_ string, value interface{}) Sqlizer {
|
||||
func allRolesFilter(_ string, value any) Sqlizer {
|
||||
return Like{"participants": fmt.Sprintf(`%%"%s"%%`, value)}
|
||||
}
|
||||
|
||||
@@ -248,7 +248,7 @@ func (r *albumRepository) CopyAttributes(fromID, toID string, columns ...string)
|
||||
if err != nil {
|
||||
return fmt.Errorf("getting album to copy fields from: %w", err)
|
||||
}
|
||||
to := make(map[string]interface{})
|
||||
to := make(map[string]any)
|
||||
for _, col := range columns {
|
||||
to[col] = from[col]
|
||||
}
|
||||
@@ -370,11 +370,11 @@ func (r *albumRepository) Count(options ...rest.QueryOptions) (int64, error) {
|
||||
return r.CountAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
func (r *albumRepository) Read(id string) (interface{}, error) {
|
||||
func (r *albumRepository) Read(id string) (any, error) {
|
||||
return r.Get(id)
|
||||
}
|
||||
|
||||
func (r *albumRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *albumRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
return r.GetAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
@@ -382,7 +382,7 @@ func (r *albumRepository) EntityName() string {
|
||||
return "album"
|
||||
}
|
||||
|
||||
func (r *albumRepository) NewInstance() interface{} {
|
||||
func (r *albumRepository) NewInstance() any {
|
||||
return &model.Album{}
|
||||
}
|
||||
|
||||
|
||||
@@ -162,7 +162,7 @@ var _ = Describe("AlbumRepository", func() {
|
||||
|
||||
newID := id.NewRandom()
|
||||
Expect(albumRepo.Put(&model.Album{LibraryID: 1, ID: newID, Name: "name", SongCount: songCount})).To(Succeed())
|
||||
for i := 0; i < playCount; i++ {
|
||||
for range playCount {
|
||||
Expect(albumRepo.IncPlayCount(newID, time.Now())).To(Succeed())
|
||||
}
|
||||
|
||||
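The test hunks above and below replace the classic counted loop with Go 1.22's range-over-int form, which runs the body exactly playCount times without declaring an unused index. A tiny sketch:

```go
package main

import "fmt"

func main() {
	playCount := 3

	// Pre-Go 1.22 style: counted loop with an index nobody reads.
	for i := 0; i < playCount; i++ {
		fmt.Println("inc (old style)")
	}

	// Go 1.22+: ranging over an int runs the body playCount times.
	for range playCount {
		fmt.Println("inc (new style)")
	}
}
```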
@@ -185,7 +185,7 @@ var _ = Describe("AlbumRepository", func() {
|
||||
|
||||
newID := id.NewRandom()
|
||||
Expect(albumRepo.Put(&model.Album{LibraryID: 1, ID: newID, Name: "name", SongCount: songCount})).To(Succeed())
|
||||
for i := 0; i < playCount; i++ {
|
||||
for range playCount {
|
||||
Expect(albumRepo.IncPlayCount(newID, time.Now())).To(Succeed())
|
||||
}
|
||||
|
||||
@@ -406,7 +406,7 @@ var _ = Describe("AlbumRepository", func() {
|
||||
sql, args, err := sqlizer.ToSql()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(sql).To(Equal(expectedSQL))
|
||||
Expect(args).To(Equal([]interface{}{artistID}))
|
||||
Expect(args).To(Equal([]any{artistID}))
|
||||
},
|
||||
Entry("artist role", "role_artist_id", "123",
|
||||
"exists (select 1 from json_tree(participants, '$.artist') where value = ?)"),
|
||||
@@ -428,7 +428,7 @@ var _ = Describe("AlbumRepository", func() {
|
||||
sql, args, err := sqlizer.ToSql()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(sql).To(Equal(fmt.Sprintf("exists (select 1 from json_tree(participants, '$.%s') where value = ?)", roleName)))
|
||||
Expect(args).To(Equal([]interface{}{"test-id"}))
|
||||
Expect(args).To(Equal([]any{"test-id"}))
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@@ -138,7 +138,7 @@ func NewArtistRepository(ctx context.Context, db dbx.Builder) model.ArtistReposi
|
||||
"missing": booleanFilter,
|
||||
"library_id": artistLibraryIdFilter,
|
||||
})
|
||||
r.setSortMappings(map[string]string{
|
||||
r.setSortMappings(map[string]string{ //nolint:gosec
|
||||
"name": "order_artist_name",
|
||||
"starred_at": "starred, starred_at",
|
||||
"rated_at": "rating, rated_at",
|
||||
@@ -164,7 +164,7 @@ func roleFilter(_ string, role any) Sqlizer {
|
||||
}
|
||||
|
||||
// artistLibraryIdFilter filters artists based on library access through the library_artist table
|
||||
func artistLibraryIdFilter(_ string, value interface{}) Sqlizer {
|
||||
func artistLibraryIdFilter(_ string, value any) Sqlizer {
|
||||
return Eq{"library_artist.library_id": value}
|
||||
}
|
||||
|
||||
@@ -534,11 +534,11 @@ func (r *artistRepository) Count(options ...rest.QueryOptions) (int64, error) {
|
||||
return r.CountAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
func (r *artistRepository) Read(id string) (interface{}, error) {
|
||||
func (r *artistRepository) Read(id string) (any, error) {
|
||||
return r.Get(id)
|
||||
}
|
||||
|
||||
func (r *artistRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *artistRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
role := "total"
|
||||
if len(options) > 0 {
|
||||
if v, ok := options[0].Filters["role"].(string); ok {
|
||||
@@ -555,7 +555,7 @@ func (r *artistRepository) EntityName() string {
|
||||
return "artist"
|
||||
}
|
||||
|
||||
func (r *artistRepository) NewInstance() interface{} {
|
||||
func (r *artistRepository) NewInstance() any {
|
||||
return &model.Artist{}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,45 +17,45 @@ import (
|
||||
var _ = Describe("Collation", func() {
|
||||
conn := db.Db()
|
||||
DescribeTable("Column collation",
|
||||
func(table, column string) {
|
||||
Expect(checkCollation(conn, table, column)).To(Succeed())
|
||||
func(table, column, expectedCollation string) {
|
||||
Expect(checkCollation(conn, table, column, expectedCollation)).To(Succeed())
|
||||
},
|
||||
Entry("artist.order_artist_name", "artist", "order_artist_name"),
|
||||
Entry("artist.sort_artist_name", "artist", "sort_artist_name"),
|
||||
Entry("album.order_album_name", "album", "order_album_name"),
|
||||
Entry("album.order_album_artist_name", "album", "order_album_artist_name"),
|
||||
Entry("album.sort_album_name", "album", "sort_album_name"),
|
||||
Entry("album.sort_album_artist_name", "album", "sort_album_artist_name"),
|
||||
Entry("media_file.order_title", "media_file", "order_title"),
|
||||
Entry("media_file.order_album_name", "media_file", "order_album_name"),
|
||||
Entry("media_file.order_artist_name", "media_file", "order_artist_name"),
|
||||
Entry("media_file.sort_title", "media_file", "sort_title"),
|
||||
Entry("media_file.sort_album_name", "media_file", "sort_album_name"),
|
||||
Entry("media_file.sort_artist_name", "media_file", "sort_artist_name"),
|
||||
Entry("playlist.name", "playlist", "name"),
|
||||
Entry("radio.name", "radio", "name"),
|
||||
Entry("user.name", "user", "name"),
|
||||
Entry("artist.order_artist_name", "artist", "order_artist_name", "NATURALSORT"),
|
||||
Entry("artist.sort_artist_name", "artist", "sort_artist_name", "NATURALSORT"),
|
||||
Entry("album.order_album_name", "album", "order_album_name", "NATURALSORT"),
|
||||
Entry("album.order_album_artist_name", "album", "order_album_artist_name", "NATURALSORT"),
|
||||
Entry("album.sort_album_name", "album", "sort_album_name", "NATURALSORT"),
|
||||
Entry("album.sort_album_artist_name", "album", "sort_album_artist_name", "NATURALSORT"),
|
||||
Entry("media_file.order_title", "media_file", "order_title", "NATURALSORT"),
|
||||
Entry("media_file.order_album_name", "media_file", "order_album_name", "NATURALSORT"),
|
||||
Entry("media_file.order_artist_name", "media_file", "order_artist_name", "NATURALSORT"),
|
||||
Entry("media_file.sort_title", "media_file", "sort_title", "NATURALSORT"),
|
||||
Entry("media_file.sort_album_name", "media_file", "sort_album_name", "NATURALSORT"),
|
||||
Entry("media_file.sort_artist_name", "media_file", "sort_artist_name", "NATURALSORT"),
|
||||
Entry("playlist.name", "playlist", "name", "NATURALSORT"),
|
||||
Entry("radio.name", "radio", "name", "NATURALSORT"),
|
||||
Entry("user.name", "user", "name", "NOCASE"),
|
||||
)
|
||||
|
||||
DescribeTable("Index collation",
|
||||
func(table, column string) {
|
||||
Expect(checkIndexUsage(conn, table, column)).To(Succeed())
|
||||
},
|
||||
Entry("artist.order_artist_name", "artist", "order_artist_name collate nocase"),
|
||||
Entry("artist.sort_artist_name", "artist", "coalesce(nullif(sort_artist_name,''),order_artist_name) collate nocase"),
|
||||
Entry("album.order_album_name", "album", "order_album_name collate nocase"),
|
||||
Entry("album.order_album_artist_name", "album", "order_album_artist_name collate nocase"),
|
||||
Entry("album.sort_album_name", "album", "coalesce(nullif(sort_album_name,''),order_album_name) collate nocase"),
|
||||
Entry("album.sort_album_artist_name", "album", "coalesce(nullif(sort_album_artist_name,''),order_album_artist_name) collate nocase"),
|
||||
Entry("media_file.order_title", "media_file", "order_title collate nocase"),
|
||||
Entry("media_file.order_album_name", "media_file", "order_album_name collate nocase"),
|
||||
Entry("media_file.order_artist_name", "media_file", "order_artist_name collate nocase"),
|
||||
Entry("media_file.sort_title", "media_file", "coalesce(nullif(sort_title,''),order_title) collate nocase"),
|
||||
Entry("media_file.sort_album_name", "media_file", "coalesce(nullif(sort_album_name,''),order_album_name) collate nocase"),
|
||||
Entry("media_file.sort_artist_name", "media_file", "coalesce(nullif(sort_artist_name,''),order_artist_name) collate nocase"),
|
||||
Entry("artist.order_artist_name", "artist", "order_artist_name collate NATURALSORT"),
|
||||
Entry("artist.sort_artist_name", "artist", "coalesce(nullif(sort_artist_name,''),order_artist_name) collate NATURALSORT"),
|
||||
Entry("album.order_album_name", "album", "order_album_name collate NATURALSORT"),
|
||||
Entry("album.order_album_artist_name", "album", "order_album_artist_name collate NATURALSORT"),
|
||||
Entry("album.sort_album_name", "album", "coalesce(nullif(sort_album_name,''),order_album_name) collate NATURALSORT"),
|
||||
Entry("album.sort_album_artist_name", "album", "coalesce(nullif(sort_album_artist_name,''),order_album_artist_name) collate NATURALSORT"),
|
||||
Entry("media_file.order_title", "media_file", "order_title collate NATURALSORT"),
|
||||
Entry("media_file.order_album_name", "media_file", "order_album_name collate NATURALSORT"),
|
||||
Entry("media_file.order_artist_name", "media_file", "order_artist_name collate NATURALSORT"),
|
||||
Entry("media_file.sort_title", "media_file", "coalesce(nullif(sort_title,''),order_title) collate NATURALSORT"),
|
||||
Entry("media_file.sort_album_name", "media_file", "coalesce(nullif(sort_album_name,''),order_album_name) collate NATURALSORT"),
|
||||
Entry("media_file.sort_artist_name", "media_file", "coalesce(nullif(sort_artist_name,''),order_artist_name) collate NATURALSORT"),
|
||||
Entry("media_file.path", "media_file", "path collate nocase"),
|
||||
Entry("playlist.name", "playlist", "name collate nocase"),
|
||||
Entry("radio.name", "radio", "name collate nocase"),
|
||||
Entry("playlist.name", "playlist", "name collate NATURALSORT"),
|
||||
Entry("radio.name", "radio", "name collate NATURALSORT"),
|
||||
Entry("user.user_name", "user", "user_name collate nocase"),
|
||||
)
|
||||
})
|
||||
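The collation tests above now expect a NATURALSORT collation instead of NOCASE on most name and sort columns. NATURALSORT is not a built-in SQLite collation, so it has to be registered on each connection; the exact mechanism Navidrome uses is not part of this diff, but with the mattn/go-sqlite3 driver listed in go.sum a custom collation can be registered roughly as sketched below (the comparator here is a simple case-insensitive stand-in, not the real natural-sort implementation):

```go
package main

import (
	"database/sql"
	"strings"

	"github.com/mattn/go-sqlite3"
)

func init() {
	// Register a driver variant whose connections know about NATURALSORT.
	sql.Register("sqlite3_naturalsort", &sqlite3.SQLiteDriver{
		ConnectHook: func(conn *sqlite3.SQLiteConn) error {
			// Placeholder comparator: case-insensitive lexical order only.
			return conn.RegisterCollation("NATURALSORT", func(a, b string) int {
				return strings.Compare(strings.ToLower(a), strings.ToLower(b))
			})
		},
	})
}

func main() {
	db, err := sql.Open("sqlite3_naturalsort", ":memory:")
	if err != nil {
		panic(err)
	}
	defer db.Close()

	// Once registered, the collation can be referenced in schema and queries.
	if _, err := db.Exec(`CREATE TABLE t (name TEXT COLLATE NATURALSORT)`); err != nil {
		panic(err)
	}
}
```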
@@ -91,7 +91,7 @@ order by %[2]s`, table, column))
|
||||
return errors.New("no rows returned")
|
||||
}
|
||||
|
||||
func checkCollation(conn *sql.DB, table string, column string) error {
|
||||
func checkCollation(conn *sql.DB, table, column, expectedCollation string) error {
|
||||
rows, err := conn.Query(fmt.Sprintf("SELECT sql FROM sqlite_master WHERE type='table' AND tbl_name='%s'", table))
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -113,12 +113,12 @@ func checkCollation(conn *sql.DB, table string, column string) error {
|
||||
if !re.MatchString(res) {
|
||||
return fmt.Errorf("column '%s' not found in table '%s'", column, table)
|
||||
}
|
||||
re = regexp.MustCompile(fmt.Sprintf(`(?i)\b%s\b.*collate\s+NOCASE`, column))
|
||||
re = regexp.MustCompile(fmt.Sprintf(`(?i)\b%s\b.*collate\s+%s`, column, expectedCollation))
|
||||
if re.MatchString(res) {
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
return fmt.Errorf("table '%s' not found", table)
|
||||
}
|
||||
return fmt.Errorf("column '%s' in table '%s' does not have NOCASE collation", column, table)
|
||||
return fmt.Errorf("column '%s' in table '%s' does not have %s collation", column, table, expectedCollation)
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"maps"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
@@ -117,9 +118,7 @@ func (r folderRepository) GetFolderUpdateInfo(lib model.Library, targetPaths ...
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for id, info := range batchResult {
|
||||
result[id] = info
|
||||
}
|
||||
maps.Copy(result, batchResult)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
|
||||
@@ -33,18 +33,18 @@ func (r *genreRepository) GetAll(opt ...model.QueryOptions) (model.Genres, error
|
||||
|
||||
// Override ResourceRepository methods to return Genre objects instead of Tag objects
|
||||
|
||||
func (r *genreRepository) Read(id string) (interface{}, error) {
|
||||
func (r *genreRepository) Read(id string) (any, error) {
|
||||
sel := r.selectGenre().Where(Eq{"tag.id": id})
|
||||
var res model.Genre
|
||||
err := r.queryOne(sel, &res)
|
||||
return &res, err
|
||||
}
|
||||
|
||||
func (r *genreRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *genreRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
return r.GetAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
func (r *genreRepository) NewInstance() interface{} {
|
||||
func (r *genreRepository) NewInstance() any {
|
||||
return &model.Genre{}
|
||||
}
|
||||
|
||||
|
||||
@@ -182,7 +182,7 @@ var _ = Describe("GenreRepository", func() {
|
||||
It("should filter by name using like match", func() {
|
||||
// Test filtering by partial name match using the "name" filter which maps to containsFilter("tag_value")
|
||||
options := rest.QueryOptions{
|
||||
Filters: map[string]interface{}{"name": "%rock%"},
|
||||
Filters: map[string]any{"name": "%rock%"},
|
||||
}
|
||||
count, err := restRepo.Count(options)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
@@ -289,7 +289,7 @@ var _ = Describe("GenreRepository", func() {
|
||||
It("should allow headless processes to apply explicit library_id filters", func() {
|
||||
// Filter by specific library
|
||||
genres, err := headlessRestRepo.ReadAll(rest.QueryOptions{
|
||||
Filters: map[string]interface{}{"library_id": 2},
|
||||
Filters: map[string]any{"library_id": 2},
|
||||
})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ type PostMapper interface {
|
||||
PostMapArgs(map[string]any) error
|
||||
}
|
||||
|
||||
func toSQLArgs(rec interface{}) (map[string]interface{}, error) {
|
||||
func toSQLArgs(rec any) (map[string]any, error) {
|
||||
m := structs.Map(rec)
|
||||
for k, v := range m {
|
||||
switch t := v.(type) {
|
||||
@@ -71,7 +71,7 @@ type existsCond struct {
|
||||
not bool
|
||||
}
|
||||
|
||||
func (e existsCond) ToSql() (string, []interface{}, error) {
|
||||
func (e existsCond) ToSql() (string, []any, error) {
|
||||
sql, args, err := e.cond.ToSql()
|
||||
sql = fmt.Sprintf("exists (select 1 from %s where %s)", e.subTable, sql)
|
||||
if e.not {
|
||||
@@ -82,11 +82,11 @@ func (e existsCond) ToSql() (string, []interface{}, error) {

var sortOrderRegex = regexp.MustCompile(`order_([a-z_]+)`)

// Convert the order_* columns to an expression using sort_* columns. Example:
// sort_album_name -> (coalesce(nullif(sort_album_name,''),order_album_name) collate nocase)
// mapSortOrder converts order_* columns to an expression using sort_* columns with NATURALSORT collation. Example:
// order_album_name -> (coalesce(nullif(sort_album_name,''),order_album_name) collate NATURALSORT)
// It finds order column names anywhere in the substring
func mapSortOrder(tableName, order string) string {
order = strings.ToLower(order)
repl := fmt.Sprintf("(coalesce(nullif(%[1]s.sort_$1,''),%[1]s.order_$1) collate nocase)", tableName)
repl := fmt.Sprintf("(coalesce(nullif(%[1]s.sort_$1,''),%[1]s.order_$1) collate NATURALSORT)", tableName)
return sortOrderRegex.ReplaceAllString(order, repl)
}

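For reference, the rewritten mapSortOrder above is self-contained enough to run on its own; pairing it with the expectation from the helper test below makes the regex capture-group substitution easy to see:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

var sortOrderRegex = regexp.MustCompile(`order_([a-z_]+)`)

// A stripped-down copy of the mapSortOrder shown in the hunk above.
func mapSortOrder(tableName, order string) string {
	order = strings.ToLower(order)
	repl := fmt.Sprintf("(coalesce(nullif(%[1]s.sort_$1,''),%[1]s.order_$1) collate NATURALSORT)", tableName)
	return sortOrderRegex.ReplaceAllString(order, repl)
}

func main() {
	// Every order_<name> token is rewritten; everything else is left alone.
	fmt.Println(mapSortOrder("album", "ORDER_ALBUM_NAME asc"))
	// (coalesce(nullif(album.sort_album_name,''),album.order_album_name) collate NATURALSORT) asc
}
```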
@@ -94,13 +94,13 @@ var _ = Describe("Helpers", func() {
|
||||
sort := "ORDER_ALBUM_NAME asc"
|
||||
mapped := mapSortOrder("album", sort)
|
||||
Expect(mapped).To(Equal(`(coalesce(nullif(album.sort_album_name,''),album.order_album_name)` +
|
||||
` collate nocase) asc`))
|
||||
` collate NATURALSORT) asc`))
|
||||
})
|
||||
It("changes multiple order columns to sort expressions", func() {
|
||||
sort := "compilation, order_title asc, order_album_artist_name desc, year desc"
|
||||
mapped := mapSortOrder("album", sort)
|
||||
Expect(mapped).To(Equal(`compilation, (coalesce(nullif(album.sort_title,''),album.order_title) collate nocase) asc,` +
|
||||
` (coalesce(nullif(album.sort_album_artist_name,''),album.order_album_artist_name) collate nocase) desc, year desc`))
|
||||
Expect(mapped).To(Equal(`compilation, (coalesce(nullif(album.sort_title,''),album.order_title) collate NATURALSORT) asc,` +
|
||||
` (coalesce(nullif(album.sort_album_artist_name,''),album.order_album_artist_name) collate NATURALSORT) desc, year desc`))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -305,7 +305,7 @@ func (r *libraryRepository) Count(options ...rest.QueryOptions) (int64, error) {
|
||||
return r.CountAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
func (r *libraryRepository) Read(id string) (interface{}, error) {
|
||||
func (r *libraryRepository) Read(id string) (any, error) {
|
||||
idInt, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
log.Trace(r.ctx, "invalid library id: %s", id, err)
|
||||
@@ -314,7 +314,7 @@ func (r *libraryRepository) Read(id string) (interface{}, error) {
|
||||
return r.Get(idInt)
|
||||
}
|
||||
|
||||
func (r *libraryRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *libraryRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
return r.GetAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
@@ -322,11 +322,11 @@ func (r *libraryRepository) EntityName() string {
|
||||
return "library"
|
||||
}
|
||||
|
||||
func (r *libraryRepository) NewInstance() interface{} {
|
||||
func (r *libraryRepository) NewInstance() any {
|
||||
return &model.Library{}
|
||||
}
|
||||
|
||||
func (r *libraryRepository) Save(entity interface{}) (string, error) {
|
||||
func (r *libraryRepository) Save(entity any) (string, error) {
|
||||
lib := entity.(*model.Library)
|
||||
lib.ID = 0 // Reset ID to ensure we create a new library
|
||||
err := r.Put(lib)
|
||||
@@ -336,7 +336,7 @@ func (r *libraryRepository) Save(entity interface{}) (string, error) {
|
||||
return strconv.Itoa(lib.ID), nil
|
||||
}
|
||||
|
||||
func (r *libraryRepository) Update(id string, entity interface{}, cols ...string) error {
|
||||
func (r *libraryRepository) Update(id string, entity any, cols ...string) error {
|
||||
lib := entity.(*model.Library)
|
||||
idInt, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
|
||||
@@ -148,7 +148,9 @@ func (r *mediaFileRepository) Exists(id string) (bool, error) {
}

func (r *mediaFileRepository) Put(m *model.MediaFile) error {
m.CreatedAt = time.Now()
if m.CreatedAt.IsZero() {
m.CreatedAt = time.Now()
}
id, err := r.putByMatch(Eq{"path": m.Path, "library_id": m.LibraryID}, m.ID, &dbMediaFile{MediaFile: m})
if err != nil {
return err
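The Put hunk above stops unconditionally stamping CreatedAt and only sets it when the caller left it zero, which is what lets the #5050 tests further down keep the original creation time across updates. A stripped-down sketch of just the guard (put here is a hypothetical stand-in for the repository method):

```go
package main

import (
	"fmt"
	"time"
)

// put mirrors only the CreatedAt guard from the hunk above: the timestamp is
// stamped once, and only when the caller did not supply one.
func put(createdAt time.Time) time.Time {
	if createdAt.IsZero() {
		return time.Now()
	}
	return createdAt
}

func main() {
	fmt.Println(put(time.Time{}))                                    // zero value: stamped with "now"
	fmt.Println(put(time.Date(2020, 3, 15, 10, 30, 0, 0, time.UTC))) // preserved as-is
}
```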
@@ -443,11 +445,11 @@ func (r *mediaFileRepository) Count(options ...rest.QueryOptions) (int64, error)
|
||||
return r.CountAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
func (r *mediaFileRepository) Read(id string) (interface{}, error) {
|
||||
func (r *mediaFileRepository) Read(id string) (any, error) {
|
||||
return r.Get(id)
|
||||
}
|
||||
|
||||
func (r *mediaFileRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *mediaFileRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
return r.GetAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
@@ -455,7 +457,7 @@ func (r *mediaFileRepository) EntityName() string {
|
||||
return "mediafile"
|
||||
}
|
||||
|
||||
func (r *mediaFileRepository) NewInstance() interface{} {
|
||||
func (r *mediaFileRepository) NewInstance() any {
|
||||
return &model.MediaFile{}
|
||||
}
|
||||
|
||||
|
||||
@@ -104,6 +104,68 @@ var _ = Describe("MediaRepository", func() {
|
||||
}
|
||||
})
|
||||
|
||||
Describe("Put CreatedAt behavior (#5050)", func() {
|
||||
It("sets CreatedAt to now when inserting a new file with zero CreatedAt", func() {
|
||||
before := time.Now().Add(-time.Second)
|
||||
newFile := model.MediaFile{ID: id.NewRandom(), LibraryID: 1, Path: "/test/created-at-zero.mp3"}
|
||||
Expect(mr.Put(&newFile)).To(Succeed())
|
||||
|
||||
retrieved, err := mr.Get(newFile.ID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(retrieved.CreatedAt).To(BeTemporally(">", before))
|
||||
|
||||
_ = mr.Delete(newFile.ID)
|
||||
})
|
||||
|
||||
It("preserves CreatedAt when inserting a new file with non-zero CreatedAt", func() {
|
||||
originalTime := time.Date(2020, 3, 15, 10, 30, 0, 0, time.UTC)
|
||||
newFile := model.MediaFile{
|
||||
ID: id.NewRandom(),
|
||||
LibraryID: 1,
|
||||
Path: "/test/created-at-preserved.mp3",
|
||||
CreatedAt: originalTime,
|
||||
}
|
||||
Expect(mr.Put(&newFile)).To(Succeed())
|
||||
|
||||
retrieved, err := mr.Get(newFile.ID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(retrieved.CreatedAt).To(BeTemporally("~", originalTime, time.Second))
|
||||
|
||||
_ = mr.Delete(newFile.ID)
|
||||
})
|
||||
|
||||
It("does not reset CreatedAt when updating an existing file", func() {
|
||||
originalTime := time.Date(2019, 6, 1, 12, 0, 0, 0, time.UTC)
|
||||
fileID := id.NewRandom()
|
||||
newFile := model.MediaFile{
|
||||
ID: fileID,
|
||||
LibraryID: 1,
|
||||
Path: "/test/created-at-update.mp3",
|
||||
Title: "Original Title",
|
||||
CreatedAt: originalTime,
|
||||
}
|
||||
Expect(mr.Put(&newFile)).To(Succeed())
|
||||
|
||||
// Update the file with a new title but zero CreatedAt
|
||||
updatedFile := model.MediaFile{
|
||||
ID: fileID,
|
||||
LibraryID: 1,
|
||||
Path: "/test/created-at-update.mp3",
|
||||
Title: "Updated Title",
|
||||
// CreatedAt is zero - should NOT overwrite the stored value
|
||||
}
|
||||
Expect(mr.Put(&updatedFile)).To(Succeed())
|
||||
|
||||
retrieved, err := mr.Get(fileID)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(retrieved.Title).To(Equal("Updated Title"))
|
||||
// CreatedAt should still be the original time (not reset)
|
||||
Expect(retrieved.CreatedAt).To(BeTemporally("~", originalTime, time.Second))
|
||||
|
||||
_ = mr.Delete(fileID)
|
||||
})
|
||||
})
|
||||
|
||||
It("checks existence of mediafiles in the DB", func() {
|
||||
Expect(mr.Exists(songAntenna.ID)).To(BeTrue())
|
||||
Expect(mr.Exists("666")).To(BeFalse())
|
||||
@@ -310,7 +372,7 @@ var _ = Describe("MediaRepository", func() {
|
||||
|
||||
// Update "Old Song": created long ago, updated recently
|
||||
_, err := db.Update("media_file",
|
||||
map[string]interface{}{
|
||||
map[string]any{
|
||||
"created_at": oldTime,
|
||||
"updated_at": newTime,
|
||||
},
|
||||
@@ -319,7 +381,7 @@ var _ = Describe("MediaRepository", func() {
|
||||
|
||||
// Update "Middle Song": created and updated at the same middle time
|
||||
_, err = db.Update("media_file",
|
||||
map[string]interface{}{
|
||||
map[string]any{
|
||||
"created_at": middleTime,
|
||||
"updated_at": middleTime,
|
||||
},
|
||||
@@ -328,7 +390,7 @@ var _ = Describe("MediaRepository", func() {
|
||||
|
||||
// Update "New Song": created recently, updated long ago
|
||||
_, err = db.Update("media_file",
|
||||
map[string]interface{}{
|
||||
map[string]any{
|
||||
"created_at": newTime,
|
||||
"updated_at": oldTime,
|
||||
},
|
||||
|
||||
@@ -97,7 +97,7 @@ func (s *SQLStore) Plugin(ctx context.Context) model.PluginRepository {
|
||||
return NewPluginRepository(ctx, s.getDBXBuilder())
|
||||
}
|
||||
|
||||
func (s *SQLStore) Resource(ctx context.Context, m interface{}) model.ResourceRepository {
|
||||
func (s *SQLStore) Resource(ctx context.Context, m any) model.ResourceRepository {
|
||||
switch m.(type) {
|
||||
case model.User:
|
||||
return s.User(ctx).(model.ResourceRepository)
|
||||
|
||||
@@ -103,14 +103,14 @@ func (r *playerRepository) Count(options ...rest.QueryOptions) (int64, error) {
|
||||
return r.CountAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
func (r *playerRepository) Read(id string) (interface{}, error) {
|
||||
func (r *playerRepository) Read(id string) (any, error) {
|
||||
sel := r.newRestSelect().Where(Eq{"player.id": id})
|
||||
var res model.Player
|
||||
err := r.queryOne(sel, &res)
|
||||
return &res, err
|
||||
}
|
||||
|
||||
func (r *playerRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *playerRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
sel := r.newRestSelect(r.parseRestOptions(r.ctx, options...))
|
||||
res := model.Players{}
|
||||
err := r.queryAll(sel, &res)
|
||||
@@ -121,7 +121,7 @@ func (r *playerRepository) EntityName() string {
|
||||
return "player"
|
||||
}
|
||||
|
||||
func (r *playerRepository) NewInstance() interface{} {
|
||||
func (r *playerRepository) NewInstance() any {
|
||||
return &model.Player{}
|
||||
}
|
||||
|
||||
@@ -130,7 +130,7 @@ func (r *playerRepository) isPermitted(p *model.Player) bool {
|
||||
return u.IsAdmin || p.UserId == u.ID
|
||||
}
|
||||
|
||||
func (r *playerRepository) Save(entity interface{}) (string, error) {
|
||||
func (r *playerRepository) Save(entity any) (string, error) {
|
||||
t := entity.(*model.Player)
|
||||
if !r.isPermitted(t) {
|
||||
return "", rest.ErrPermissionDenied
|
||||
@@ -142,7 +142,7 @@ func (r *playerRepository) Save(entity interface{}) (string, error) {
|
||||
return id, err
|
||||
}
|
||||
|
||||
func (r *playerRepository) Update(id string, entity interface{}, cols ...string) error {
|
||||
func (r *playerRepository) Update(id string, entity any, cols ...string) error {
|
||||
t := entity.(*model.Player)
|
||||
t.ID = id
|
||||
if !r.isPermitted(t) {
|
||||
|
||||
@@ -61,14 +61,14 @@ func NewPlaylistRepository(ctx context.Context, db dbx.Builder) model.PlaylistRe
|
||||
return r
|
||||
}
|
||||
|
||||
func playlistFilter(_ string, value interface{}) Sqlizer {
|
||||
func playlistFilter(_ string, value any) Sqlizer {
|
||||
return Or{
|
||||
substringFilter("playlist.name", value),
|
||||
substringFilter("playlist.comment", value),
|
||||
}
|
||||
}
|
||||
|
||||
func smartPlaylistFilter(string, interface{}) Sqlizer {
|
||||
func smartPlaylistFilter(string, any) Sqlizer {
|
||||
return Or{
|
||||
Eq{"rules": ""},
|
||||
Eq{"rules": nil},
|
||||
@@ -421,11 +421,11 @@ func (r *playlistRepository) Count(options ...rest.QueryOptions) (int64, error)
|
||||
return r.CountAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
func (r *playlistRepository) Read(id string) (interface{}, error) {
|
||||
func (r *playlistRepository) Read(id string) (any, error) {
|
||||
return r.Get(id)
|
||||
}
|
||||
|
||||
func (r *playlistRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *playlistRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
return r.GetAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
@@ -433,11 +433,11 @@ func (r *playlistRepository) EntityName() string {
|
||||
return "playlist"
|
||||
}
|
||||
|
||||
func (r *playlistRepository) NewInstance() interface{} {
|
||||
func (r *playlistRepository) NewInstance() any {
|
||||
return &model.Playlist{}
|
||||
}
|
||||
|
||||
func (r *playlistRepository) Save(entity interface{}) (string, error) {
|
||||
func (r *playlistRepository) Save(entity any) (string, error) {
|
||||
pls := entity.(*model.Playlist)
|
||||
pls.OwnerID = loggedUser(r.ctx).ID
|
||||
pls.ID = "" // Make sure we don't override an existing playlist
|
||||
@@ -448,7 +448,7 @@ func (r *playlistRepository) Save(entity interface{}) (string, error) {
|
||||
return pls.ID, err
|
||||
}
|
||||
|
||||
func (r *playlistRepository) Update(id string, entity interface{}, cols ...string) error {
|
||||
func (r *playlistRepository) Update(id string, entity any, cols ...string) error {
|
||||
pls := dbPlaylist{Playlist: *entity.(*model.Playlist)}
|
||||
current, err := r.Get(id)
|
||||
if err != nil {
|
||||
|
||||
@@ -84,7 +84,7 @@ func (r *playlistTrackRepository) Count(options ...rest.QueryOptions) (int64, er
|
||||
return r.count(query, r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
func (r *playlistTrackRepository) Read(id string) (interface{}, error) {
|
||||
func (r *playlistTrackRepository) Read(id string) (any, error) {
|
||||
userID := loggedUser(r.ctx).ID
|
||||
sel := r.newSelect().
|
||||
LeftJoin("annotation on ("+
|
||||
@@ -128,7 +128,7 @@ func (r *playlistTrackRepository) GetAlbumIDs(options ...model.QueryOptions) ([]
|
||||
return ids, nil
|
||||
}
|
||||
|
||||
func (r *playlistTrackRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *playlistTrackRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
return r.GetAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
@@ -136,7 +136,7 @@ func (r *playlistTrackRepository) EntityName() string {
|
||||
return "playlist_tracks"
|
||||
}
|
||||
|
||||
func (r *playlistTrackRepository) NewInstance() interface{} {
|
||||
func (r *playlistTrackRepository) NewInstance() any {
|
||||
return &model.PlaylistTrack{}
|
||||
}
|
||||
|
||||
|
||||
@@ -122,8 +122,8 @@ func (r *playQueueRepository) toModel(pq *playQueue) model.PlayQueue {
UpdatedAt: pq.UpdatedAt,
}
if strings.TrimSpace(pq.Items) != "" {
tracks := strings.Split(pq.Items, ",")
for _, t := range tracks {
tracks := strings.SplitSeq(pq.Items, ",")
for t := range tracks {
q.Items = append(q.Items, model.MediaFile{ID: t})
}
}

@@ -63,7 +63,7 @@ func (r *radioRepository) Put(radio *model.Radio) error {
|
||||
return rest.ErrPermissionDenied
|
||||
}
|
||||
|
||||
var values map[string]interface{}
|
||||
var values map[string]any
|
||||
|
||||
radio.UpdatedAt = time.Now()
|
||||
|
||||
@@ -97,19 +97,19 @@ func (r *radioRepository) EntityName() string {
|
||||
return "radio"
|
||||
}
|
||||
|
||||
func (r *radioRepository) NewInstance() interface{} {
|
||||
func (r *radioRepository) NewInstance() any {
|
||||
return &model.Radio{}
|
||||
}
|
||||
|
||||
func (r *radioRepository) Read(id string) (interface{}, error) {
|
||||
func (r *radioRepository) Read(id string) (any, error) {
|
||||
return r.Get(id)
|
||||
}
|
||||
|
||||
func (r *radioRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *radioRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
return r.GetAll(r.parseRestOptions(r.ctx, options...))
|
||||
}
|
||||
|
||||
func (r *radioRepository) Save(entity interface{}) (string, error) {
|
||||
func (r *radioRepository) Save(entity any) (string, error) {
|
||||
t := entity.(*model.Radio)
|
||||
if !r.isPermitted() {
|
||||
return "", rest.ErrPermissionDenied
|
||||
@@ -121,7 +121,7 @@ func (r *radioRepository) Save(entity interface{}) (string, error) {
|
||||
return t.ID, err
|
||||
}
|
||||
|
||||
func (r *radioRepository) Update(id string, entity interface{}, cols ...string) error {
|
||||
func (r *radioRepository) Update(id string, entity any, cols ...string) error {
|
||||
t := entity.(*model.Radio)
|
||||
t.ID = id
|
||||
if !r.isPermitted() {
|
||||
|
||||
@@ -51,7 +51,7 @@ func (r *scrobbleBufferRepository) UserIDs(service string) ([]string, error) {
|
||||
}
|
||||
|
||||
func (r *scrobbleBufferRepository) Enqueue(service, userId, mediaFileId string, playTime time.Time) error {
|
||||
ins := Insert(r.tableName).SetMap(map[string]interface{}{
|
||||
ins := Insert(r.tableName).SetMap(map[string]any{
|
||||
"id": id.NewRandom(),
|
||||
"user_id": userId,
|
||||
"service": service,
|
||||
|
||||
@@ -24,7 +24,7 @@ var _ = Describe("ScrobbleBufferRepository", func() {
|
||||
id := id.NewRandom()
|
||||
ids = append(ids, id)
|
||||
|
||||
ins := squirrel.Insert("scrobble_buffer").SetMap(map[string]interface{}{
|
||||
ins := squirrel.Insert("scrobble_buffer").SetMap(map[string]any{
|
||||
"id": id,
|
||||
"user_id": userId,
|
||||
"service": service,
|
||||
|
||||
@@ -23,7 +23,7 @@ func NewScrobbleRepository(ctx context.Context, db dbx.Builder) model.ScrobbleRe
|
||||
|
||||
func (r *scrobbleRepository) RecordScrobble(mediaFileID string, submissionTime time.Time) error {
|
||||
userID := loggedUser(r.ctx).ID
|
||||
values := map[string]interface{}{
|
||||
values := map[string]any{
|
||||
"media_file_id": mediaFileID,
|
||||
"user_id": userID,
|
||||
"submission_time": submissionTime.Unix(),
|
||||
|
||||
@@ -138,7 +138,7 @@ func sortByIdPosition(mfs model.MediaFiles, ids []string) model.MediaFiles {
|
||||
return sorted
|
||||
}
|
||||
|
||||
func (r *shareRepository) Update(id string, entity interface{}, cols ...string) error {
|
||||
func (r *shareRepository) Update(id string, entity any, cols ...string) error {
|
||||
s := entity.(*model.Share)
|
||||
// TODO Validate record
|
||||
s.ID = id
|
||||
@@ -151,7 +151,7 @@ func (r *shareRepository) Update(id string, entity interface{}, cols ...string)
|
||||
return err
|
||||
}
|
||||
|
||||
func (r *shareRepository) Save(entity interface{}) (string, error) {
|
||||
func (r *shareRepository) Save(entity any) (string, error) {
|
||||
s := entity.(*model.Share)
|
||||
// TODO Validate record
|
||||
u := loggedUser(r.ctx)
|
||||
@@ -179,18 +179,18 @@ func (r *shareRepository) EntityName() string {
|
||||
return "share"
|
||||
}
|
||||
|
||||
func (r *shareRepository) NewInstance() interface{} {
|
||||
func (r *shareRepository) NewInstance() any {
|
||||
return &model.Share{}
|
||||
}
|
||||
|
||||
func (r *shareRepository) Read(id string) (interface{}, error) {
|
||||
func (r *shareRepository) Read(id string) (any, error) {
|
||||
sel := r.selectShare().Where(Eq{"share.id": id})
|
||||
var res model.Share
|
||||
err := r.queryOne(sel, &res)
|
||||
return &res, err
|
||||
}
|
||||
|
||||
func (r *shareRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
|
||||
func (r *shareRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
|
||||
sq := r.selectShare(r.parseRestOptions(r.ctx, options...))
|
||||
res := model.Shares{}
|
||||
err := r.queryAll(sq, &res)
|
||||
|
||||
@@ -47,7 +47,7 @@ var _ = Describe("ShareRepository", func() {
|
||||
_, err := GetDBXBuilder().NewQuery(`
|
||||
INSERT INTO share (id, user_id, description, resource_type, resource_ids, created_at, updated_at)
|
||||
VALUES ({:id}, {:user}, {:desc}, {:type}, {:ids}, {:created}, {:updated})
|
||||
`).Bind(map[string]interface{}{
|
||||
`).Bind(map[string]any{
|
||||
"id": shareID,
|
||||
"user": adminUser.ID,
|
||||
"desc": "Headless Test Share",
|
||||
@@ -79,7 +79,7 @@ var _ = Describe("ShareRepository", func() {
|
||||
_, err := GetDBXBuilder().NewQuery(`
|
||||
INSERT INTO share (id, user_id, description, resource_type, resource_ids, created_at, updated_at)
|
||||
VALUES ({:id}, {:user}, {:desc}, {:type}, {:ids}, {:created}, {:updated})
|
||||
`).Bind(map[string]interface{}{
|
||||
`).Bind(map[string]any{
|
||||
"id": shareID,
|
||||
"user": adminUser.ID,
|
||||
"desc": "Headless Get Share",
|
||||
@@ -110,7 +110,7 @@ var _ = Describe("ShareRepository", func() {
|
||||
_, err := GetDBXBuilder().NewQuery(`
|
||||
INSERT INTO share (id, user_id, description, resource_type, resource_ids, created_at, updated_at)
|
||||
VALUES ({:id}, {:user}, {:desc}, {:type}, {:ids}, {:created}, {:updated})
|
||||
`).Bind(map[string]interface{}{
|
||||
`).Bind(map[string]any{
|
||||
"id": shareID,
|
||||
"user": adminUser.ID,
|
||||
"desc": "SQL Test Share",
|
||||
|
||||
@@ -66,7 +66,7 @@ func (r sqlRepository) annId(itemID ...string) And {
|
||||
}
|
||||
}
|
||||
|
||||
func (r sqlRepository) annUpsert(values map[string]interface{}, itemIDs ...string) error {
|
||||
func (r sqlRepository) annUpsert(values map[string]any, itemIDs ...string) error {
|
||||
upd := Update(annotationTable).Where(r.annId(itemIDs...))
|
||||
for f, v := range values {
|
||||
upd = upd.Set(f, v)
|
||||
@@ -90,12 +90,12 @@ func (r sqlRepository) annUpsert(values map[string]interface{}, itemIDs ...strin
|
||||
|
||||
func (r sqlRepository) SetStar(starred bool, ids ...string) error {
|
||||
starredAt := time.Now()
|
||||
return r.annUpsert(map[string]interface{}{"starred": starred, "starred_at": starredAt}, ids...)
|
||||
return r.annUpsert(map[string]any{"starred": starred, "starred_at": starredAt}, ids...)
|
||||
}
|
||||
|
||||
func (r sqlRepository) SetRating(rating int, itemID string) error {
|
||||
ratedAt := time.Now()
|
||||
err := r.annUpsert(map[string]interface{}{"rating": rating, "rated_at": ratedAt}, itemID)
|
||||
err := r.annUpsert(map[string]any{"rating": rating, "rated_at": ratedAt}, itemID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -121,7 +121,7 @@ func (r sqlRepository) IncPlayCount(itemID string, ts time.Time) error {
|
||||
|
||||
if c == 0 || errors.Is(err, sql.ErrNoRows) {
|
||||
userID := loggedUser(r.ctx).ID
|
||||
values := map[string]interface{}{}
|
||||
values := map[string]any{}
|
||||
values["user_id"] = userID
|
||||
values["item_type"] = r.tableName
|
||||
values["item_id"] = itemID
|
||||
|
||||
@@ -32,17 +32,17 @@ var _ = Describe("Annotation Filters", func() {
|
||||
|
||||
Describe("annotationBoolFilter", func() {
|
||||
DescribeTable("creates correct SQL expressions",
|
||||
func(field, value string, expectedSQL string, expectedArgs []interface{}) {
|
||||
func(field, value string, expectedSQL string, expectedArgs []any) {
|
||||
sqlizer := annotationBoolFilter(field)(field, value)
|
||||
sql, args, err := sqlizer.ToSql()
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(sql).To(Equal(expectedSQL))
|
||||
Expect(args).To(Equal(expectedArgs))
|
||||
},
|
||||
Entry("starred=true", "starred", "true", "COALESCE(starred, 0) > 0", []interface{}(nil)),
|
||||
Entry("starred=false", "starred", "false", "COALESCE(starred, 0) = 0", []interface{}(nil)),
|
||||
Entry("starred=True (case insensitive)", "starred", "True", "COALESCE(starred, 0) > 0", []interface{}(nil)),
|
||||
Entry("rating=true", "rating", "true", "COALESCE(rating, 0) > 0", []interface{}(nil)),
|
||||
Entry("starred=true", "starred", "true", "COALESCE(starred, 0) > 0", []any(nil)),
|
||||
Entry("starred=false", "starred", "false", "COALESCE(starred, 0) = 0", []any(nil)),
|
||||
Entry("starred=True (case insensitive)", "starred", "True", "COALESCE(starred, 0) > 0", []any(nil)),
|
||||
Entry("rating=true", "rating", "true", "COALESCE(rating, 0) > 0", []any(nil)),
|
||||
)
|
||||
|
||||
It("returns nil if value is not a string", func() {
|
||||
|
||||
@@ -71,8 +71,8 @@ func (r *sqlRepository) registerModel(instance any, filters map[string]filterFun
//
// If PreferSortTags is enabled, it will map the order fields to the corresponding sort expression,
// which gives precedence to sort tags.
-// Ex: order_title => (coalesce(nullif(sort_title,''),order_title) collate nocase)
-// To avoid performance issues, indexes should be created for these sort expressions
+// Ex: order_title => (coalesce(nullif(sort_title,""), order_title) collate NATURALSORT)
+// To avoid performance issues, indexes should be created for these sort expressions.
//
// NOTE: if an individual item has spaces, it should be wrapped in parentheses. For example,
// you should write "(lyrics != '[]')". This prevents the item being split unexpectedly.
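The comment above explains that order fields are mapped to sort expressions that prefer sort tags, and recommends creating indexes matching those expressions. A rough, hypothetical sketch of what such an expression index could look like; only the expression mirrors the example in the comment, while the table name, index name, and the SQL statement itself are invented for illustration:

package main

import "fmt"

func main() {
	// Expression taken from the comment above; NATURALSORT is the collation it references.
	expr := "(coalesce(nullif(sort_title,''), order_title) collate NATURALSORT)"
	// Hypothetical index over that expression (names are made up, not from the migrations).
	fmt.Println("CREATE INDEX IF NOT EXISTS media_file_order_title ON media_file " + expr + ";")
}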
@@ -196,7 +196,7 @@ func (r *sqlRepository) withTableName(filter filterFunc) filterFunc {
}

// libraryIdFilter is a filter function to be added to resources that have a library_id column.
-func libraryIdFilter(_ string, value interface{}) Sqlizer {
+func libraryIdFilter(_ string, value any) Sqlizer {
return Eq{"library_id": value}
}

@@ -281,7 +281,7 @@ func (r sqlRepository) toSQL(sq Sqlizer) (string, dbx.Params, error) {
return result, params, nil
}

-func (r sqlRepository) queryOne(sq Sqlizer, response interface{}) error {
+func (r sqlRepository) queryOne(sq Sqlizer, response any) error {
query, args, err := r.toSQL(sq)
if err != nil {
return err
@@ -328,7 +328,7 @@ func queryWithStableResults[T any](r sqlRepository, sq SelectBuilder, options ..
}, nil
}

-func (r sqlRepository) queryAll(sq SelectBuilder, response interface{}, options ...model.QueryOptions) error {
+func (r sqlRepository) queryAll(sq SelectBuilder, response any, options ...model.QueryOptions) error {
if len(options) > 0 && options[0].Offset > 0 {
sq = r.optimizePagination(sq, options[0])
}
@@ -347,7 +347,7 @@ func (r sqlRepository) queryAll(sq SelectBuilder, response interface{}, options
}

// queryAllSlice is a helper function to query a single column and return the result in a slice
-func (r sqlRepository) queryAllSlice(sq SelectBuilder, response interface{}) error {
+func (r sqlRepository) queryAllSlice(sq SelectBuilder, response any) error {
query, args, err := r.toSQL(sq)
if err != nil {
return err
@@ -394,7 +394,7 @@ func (r sqlRepository) count(countQuery SelectBuilder, options ...model.QueryOpt
return res.Count, err
}

-func (r sqlRepository) putByMatch(filter Sqlizer, id string, m interface{}, colsToUpdate ...string) (string, error) {
+func (r sqlRepository) putByMatch(filter Sqlizer, id string, m any, colsToUpdate ...string) (string, error) {
if id != "" {
return r.put(id, m, colsToUpdate...)
}
@@ -408,14 +408,14 @@ func (r sqlRepository) putByMatch(filter Sqlizer, id string, m interface{}, cols
return r.put(res.ID, m, colsToUpdate...)
}

-func (r sqlRepository) put(id string, m interface{}, colsToUpdate ...string) (newId string, err error) {
+func (r sqlRepository) put(id string, m any, colsToUpdate ...string) (newId string, err error) {
values, err := toSQLArgs(m)
if err != nil {
return "", fmt.Errorf("error preparing values to write to DB: %w", err)
}
// If there's an ID, try to update first
if id != "" {
-updateValues := map[string]interface{}{}
+updateValues := map[string]any{}

// This is a map of the columns that need to be updated, if specified
c2upd := slice.ToMap(colsToUpdate, func(s string) (string, struct{}) {

@@ -37,7 +37,7 @@ func (r sqlRepository) bmkID(itemID ...string) And {
func (r sqlRepository) bmkUpsert(itemID, comment string, position int64) error {
client, _ := request.ClientFrom(r.ctx)
user, _ := request.UserFrom(r.ctx)
-values := map[string]interface{}{
+values := map[string]any{
"comment": comment,
"position": position,
"updated_at": time.Now(),

@@ -30,7 +30,7 @@ var _ = Describe("sqlRestful", func() {
r.filterMappings = map[string]filterFunc{
"name": fullTextFilter("table"),
}
-options.Filters = map[string]interface{}{"name": "'"}
+options.Filters = map[string]any{"name": "'"}
Expect(r.parseRestFilters(context.Background(), options)).To(BeEmpty())
})

@@ -40,32 +40,32 @@ var _ = Describe("sqlRestful", func() {
return nil
},
}
-options.Filters = map[string]interface{}{"name": "joe"}
+options.Filters = map[string]any{"name": "joe"}
Expect(r.parseRestFilters(context.Background(), options)).To(BeEmpty())
})

It("returns a '=' condition for 'id' filter", func() {
-options.Filters = map[string]interface{}{"id": "123"}
+options.Filters = map[string]any{"id": "123"}
Expect(r.parseRestFilters(context.Background(), options)).To(Equal(squirrel.And{squirrel.Eq{"id": "123"}}))
})

It("returns a 'in' condition for multiples 'id' filters", func() {
-options.Filters = map[string]interface{}{"id": []string{"123", "456"}}
+options.Filters = map[string]any{"id": []string{"123", "456"}}
Expect(r.parseRestFilters(context.Background(), options)).To(Equal(squirrel.And{squirrel.Eq{"id": []string{"123", "456"}}}))
})

It("returns a 'like' condition for other filters", func() {
-options.Filters = map[string]interface{}{"name": "joe"}
+options.Filters = map[string]any{"name": "joe"}
Expect(r.parseRestFilters(context.Background(), options)).To(Equal(squirrel.And{squirrel.Like{"name": "joe%"}}))
})

It("uses the custom filter", func() {
r.filterMappings = map[string]filterFunc{
-"test": func(field string, value interface{}) squirrel.Sqlizer {
+"test": func(field string, value any) squirrel.Sqlizer {
return squirrel.Gt{field: value}
},
}
-options.Filters = map[string]interface{}{"test": 100}
+options.Filters = map[string]any{"test": 100}
Expect(r.parseRestFilters(context.Background(), options)).To(Equal(squirrel.And{squirrel.Gt{"test": 100}}))
})
})

@@ -60,7 +60,7 @@ func tagIDFilter(name string, idValue any) Sqlizer {
}

// tagLibraryIdFilter filters tags based on library access through the library_tag table
-func tagLibraryIdFilter(_ string, value interface{}) Sqlizer {
+func tagLibraryIdFilter(_ string, value any) Sqlizer {
return Eq{"library_tag.library_id": value}
}

@@ -142,14 +142,14 @@ func (r *baseTagRepository) Count(options ...rest.QueryOptions) (int64, error) {
return r.count(sq, r.parseRestOptions(r.ctx, options...))
}

-func (r *baseTagRepository) Read(id string) (interface{}, error) {
+func (r *baseTagRepository) Read(id string) (any, error) {
query := r.newSelect().Where(Eq{"id": id})
var res model.Tag
err := r.queryOne(query, &res)
return &res, err
}

-func (r *baseTagRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
+func (r *baseTagRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
query := r.newSelect(r.parseRestOptions(r.ctx, options...))
var res model.TagList
err := r.queryAll(query, &res)
@@ -160,7 +160,7 @@ func (r *baseTagRepository) EntityName() string {
return "tag"
}

-func (r *baseTagRepository) NewInstance() interface{} {
+func (r *baseTagRepository) NewInstance() any {
return model.Tag{}
}

@@ -165,7 +165,7 @@ var _ = Describe("Tag Library Filtering", func() {

It("should respect explicit library_id filters within accessible libraries", func() {
tags := readAllTags(&regularUser, rest.QueryOptions{
-Filters: map[string]interface{}{"library_id": libraryID2},
+Filters: map[string]any{"library_id": libraryID2},
})
// Should see only tags from library 2: pop and rock(lib2)
Expect(tags).To(HaveLen(2))
@@ -174,7 +174,7 @@ var _ = Describe("Tag Library Filtering", func() {

It("should not return tags when filtering by inaccessible library", func() {
tags := readAllTags(&regularUser, rest.QueryOptions{
-Filters: map[string]interface{}{"library_id": libraryID3},
+Filters: map[string]any{"library_id": libraryID3},
})
// Should return no tags since user can't access library 3
Expect(tags).To(HaveLen(0))
@@ -182,7 +182,7 @@ var _ = Describe("Tag Library Filtering", func() {

It("should filter by library 1 correctly", func() {
tags := readAllTags(&regularUser, rest.QueryOptions{
-Filters: map[string]interface{}{"library_id": libraryID1},
+Filters: map[string]any{"library_id": libraryID1},
})
// Should see only rock from library 1
Expect(tags).To(HaveLen(1))
@@ -227,7 +227,7 @@ var _ = Describe("Tag Library Filtering", func() {

It("should allow headless processes to apply explicit library_id filters", func() {
tags := readAllTags(nil, rest.QueryOptions{
-Filters: map[string]interface{}{"library_id": libraryID3},
+Filters: map[string]any{"library_id": libraryID3},
})
// Should see only jazz from library 3
Expect(tags).To(HaveLen(1))
@@ -243,7 +243,7 @@ var _ = Describe("Tag Library Filtering", func() {

It("should respect explicit library_id filters", func() {
tags := readAllTags(&adminUser, rest.QueryOptions{
-Filters: map[string]interface{}{"library_id": libraryID3},
+Filters: map[string]any{"library_id": libraryID3},
})
// Should see only jazz from library 3
Expect(tags).To(HaveLen(1))
@@ -252,7 +252,7 @@ var _ = Describe("Tag Library Filtering", func() {

It("should filter by library 2 correctly", func() {
tags := readAllTags(&adminUser, rest.QueryOptions{
-Filters: map[string]interface{}{"library_id": libraryID2},
+Filters: map[string]any{"library_id": libraryID2},
})
// Should see pop and rock from library 2
Expect(tags).To(HaveLen(2))

@@ -234,7 +234,7 @@ var _ = Describe("TagRepository", func() {

It("should filter tags by partial value correctly", func() {
options := rest.QueryOptions{
-Filters: map[string]interface{}{"name": "%rock%"}, // Tags containing 'rock'
+Filters: map[string]any{"name": "%rock%"}, // Tags containing 'rock'
}
result, err := restRepo.ReadAll(options)
Expect(err).ToNot(HaveOccurred())
@@ -249,7 +249,7 @@ var _ = Describe("TagRepository", func() {

It("should filter tags by partial value using LIKE", func() {
options := rest.QueryOptions{
-Filters: map[string]interface{}{"name": "%e%"}, // Tags containing 'e'
+Filters: map[string]any{"name": "%e%"}, // Tags containing 'e'
}
result, err := restRepo.ReadAll(options)
Expect(err).ToNot(HaveOccurred())
@@ -264,7 +264,7 @@ var _ = Describe("TagRepository", func() {

It("should sort tags by value ascending", func() {
options := rest.QueryOptions{
-Filters: map[string]interface{}{"name": "%r%"}, // Tags containing 'r'
+Filters: map[string]any{"name": "%r%"}, // Tags containing 'r'
Sort: "name",
Order: "asc",
}
@@ -280,7 +280,7 @@ var _ = Describe("TagRepository", func() {

It("should sort tags by value descending", func() {
options := rest.QueryOptions{
-Filters: map[string]interface{}{"name": "%r%"}, // Tags containing 'r'
+Filters: map[string]any{"name": "%r%"}, // Tags containing 'r'
Sort: "name",
Order: "desc",
}

@@ -52,11 +52,11 @@ func (r *transcodingRepository) Count(options ...rest.QueryOptions) (int64, erro
return r.count(Select(), r.parseRestOptions(r.ctx, options...))
}

-func (r *transcodingRepository) Read(id string) (interface{}, error) {
+func (r *transcodingRepository) Read(id string) (any, error) {
return r.Get(id)
}

-func (r *transcodingRepository) ReadAll(options ...rest.QueryOptions) (interface{}, error) {
+func (r *transcodingRepository) ReadAll(options ...rest.QueryOptions) (any, error) {
sel := r.newSelect(r.parseRestOptions(r.ctx, options...)).Columns("*")
res := model.Transcodings{}
err := r.queryAll(sel, &res)
@@ -67,11 +67,11 @@ func (r *transcodingRepository) EntityName() string {
return "transcoding"
}

-func (r *transcodingRepository) NewInstance() interface{} {
+func (r *transcodingRepository) NewInstance() any {
return &model.Transcoding{}
}

-func (r *transcodingRepository) Save(entity interface{}) (string, error) {
+func (r *transcodingRepository) Save(entity any) (string, error) {
if !loggedUser(r.ctx).IsAdmin {
return "", rest.ErrPermissionDenied
}
@@ -83,7 +83,7 @@ func (r *transcodingRepository) Save(entity interface{}) (string, error) {
return id, err
}

-func (r *transcodingRepository) Update(id string, entity interface{}, cols ...string) error {
+func (r *transcodingRepository) Update(id string, entity any, cols ...string) error {
if !loggedUser(r.ctx).IsAdmin {
return rest.ErrPermissionDenied
}
@@ -1,5 +1,7 @@
package plugins

+import "slices"

// Capability represents a plugin capability type.
// Capabilities are detected by checking which functions a plugin exports.
type Capability string
@@ -25,11 +27,8 @@ func detectCapabilities(plugin functionExistsChecker) []Capability {
var capabilities []Capability

for cap, functions := range capabilityFunctions {
-for _, fn := range functions {
-if plugin.FunctionExists(fn) {
-capabilities = append(capabilities, cap)
-break // Found at least one function, plugin has this capability
-}
+if slices.ContainsFunc(functions, plugin.FunctionExists) {
+capabilities = append(capabilities, cap) // Found at least one function, plugin has this capability
}
}

@@ -38,10 +37,5 @@ func detectCapabilities(plugin functionExistsChecker) []Capability {

// hasCapability checks if the given capabilities slice contains a specific capability.
func hasCapability(capabilities []Capability, cap Capability) bool {
-for _, c := range capabilities {
-if c == cap {
-return true
-}
-}
-return false
+return slices.Contains(capabilities, cap)
}
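The refactor above leans on the slices package added to the Go standard library in 1.21: slices.ContainsFunc reports whether any element satisfies a predicate, replacing the loop-with-break pattern, and slices.Contains is the plain equality check. A small standalone sketch, unrelated to the plugin types in the diff:

package main

import (
	"fmt"
	"slices"
	"strings"
)

func main() {
	functions := []string{"NowPlaying", "Scrobble"}

	// True if at least one element satisfies the predicate.
	hasHandler := slices.ContainsFunc(functions, func(fn string) bool {
		return strings.HasPrefix(fn, "Now")
	})

	// Equality-only variant.
	fmt.Println(hasHandler, slices.Contains(functions, "Scrobble"))
}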
@@ -5,6 +5,7 @@ import (
"encoding/base64"
"errors"
"fmt"
+"maps"
"net/http"
"net/url"
"strings"
@@ -200,9 +201,7 @@ func (s *webSocketServiceImpl) CloseConnection(ctx context.Context, connectionID
func (s *webSocketServiceImpl) Close() error {
s.mu.Lock()
connections := make(map[string]*wsConnection, len(s.connections))
-for k, v := range s.connections {
-connections[k] = v
-}
+maps.Copy(connections, s.connections)
s.connections = make(map[string]*wsConnection)
s.mu.Unlock()

@@ -7,6 +7,7 @@ import (
"crypto/sha256"
"encoding/base64"
"encoding/hex"
+"maps"
"net/http"
"net/http/httptest"
"os"
@@ -594,9 +595,7 @@ func (t *testableWebSocketService) getConnectionCount() int {
func (t *testableWebSocketService) closeAllConnections() {
t.mu.Lock()
conns := make(map[string]*wsConnection, len(t.connections))
-for k, v := range t.connections {
-conns[k] = v
-}
+maps.Copy(conns, t.connections)
t.connections = make(map[string]*wsConnection)
t.mu.Unlock()
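Both hunks above replace a manual copy loop with maps.Copy from the maps package (standard library since Go 1.21), which copies every key/value pair from the source map into the destination. A minimal standalone example with made-up data:

package main

import (
	"fmt"
	"maps"
)

func main() {
	src := map[string]int{"a": 1, "b": 2}

	// Pre-size the destination, then copy all entries from src into it.
	dst := make(map[string]int, len(src))
	maps.Copy(dst, src)

	fmt.Println(len(dst), dst["a"], dst["b"])
}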
@@ -142,7 +142,7 @@ var _ = Describe("purgeCacheBySize", func() {
now := time.Now()

// Create 5 files, 1MiB each (total 5MiB)
-for i := 0; i < 5; i++ {
+for i := range 5 {
path := filepath.Join(cacheDir, filepath.Join("dir", "file"+string(rune('0'+i))+".bin"))
createFileWithSize(path, 1*1024*1024, now.Add(-time.Duration(5-i)*time.Hour))
}
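The loop rewrite above uses Go 1.22's range-over-int form: for i := range 5 iterates i = 0, 1, 2, 3, 4, exactly like the classic three-clause loop it replaces. For example:

package main

import "fmt"

func main() {
	// Equivalent to: for i := 0; i < 5; i++
	for i := range 5 {
		fmt.Println("file" + string(rune('0'+i)) + ".bin")
	}
}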
@@ -1 +1 @@
--s -r "(\.go$$|\.cpp$$|\.h$$|navidrome.toml|resources|token_received.html)" -R "(^ui|^data|^db/migrations)" -- go run -race -tags netgo .
+-s -r "(\.go$$|\.cpp$$|\.h$$|navidrome.toml|resources|token_received.html)" -R "(^ui|^data|^db/migrations)" -R "_test\.go$$" -- go run -race -tags netgo .

@@ -23,6 +23,8 @@ if [ ! -f "$postinstall_flag" ]; then
# and not by root
chown navidrome:navidrome /var/lib/navidrome/cache
touch "$postinstall_flag"
+else
+navidrome service stop --configfile /etc/navidrome/navidrome.toml && navidrome service start --configfile /etc/navidrome/navidrome.toml
fi
@@ -36,7 +36,8 @@
"bitDepth": "Bitdybde",
"sampleRate": "Samplingfrekvens",
"missing": "Manglende",
-"libraryName": "Bibliotek"
+"libraryName": "Bibliotek",
+"composer": "Komponist"
},
"actions": {
"addToQueue": "Afspil senere",
@@ -46,7 +47,8 @@
"download": "Download",
"playNext": "Afspil næste",
"info": "Hent info",
-"showInPlaylist": "Vis i afspilningsliste"
+"showInPlaylist": "Vis i afspilningsliste",
+"instantMix": "Instant Mix"
}
},
"album": {
@@ -328,6 +330,80 @@
"scanInProgress": "Scanning i gang...",
"noLibrariesAssigned": "Ingen biblioteker tildelt denne bruger"
}
},
+"plugin": {
+"name": "Plugin |||| Plugins",
+"fields": {
+"id": "ID",
+"name": "Navn",
+"description": "Beskrivelse",
+"version": "Version",
+"author": "Forfatter",
+"website": "Hjemmeside",
+"permissions": "Tilladelser",
+"enabled": "Aktiveret",
+"status": "Status",
+"path": "Sti",
+"lastError": "Fejl",
+"hasError": "Fejl",
+"updatedAt": "Opdateret",
+"createdAt": "Installeret",
+"configKey": "Nøgle",
+"configValue": "Værdi",
+"allUsers": "Tillad alle brugere",
+"selectedUsers": "Valgte brugere",
+"allLibraries": "Tillad alle biblioteker",
+"selectedLibraries": "Valgte biblioteker"
+},
+"sections": {
+"status": "Status",
+"info": "Pluginoplysninger",
+"configuration": "Konfiguration",
+"manifest": "Manifest",
+"usersPermission": "Brugertilladelse",
+"libraryPermission": "Bibliotekstilladelse"
+},
+"status": {
+"enabled": "Aktiveret",
+"disabled": "Deaktiveret"
+},
+"actions": {
+"enable": "Aktivér",
+"disable": "Deaktivér",
+"disabledDueToError": "Ret fejlen før aktivering",
+"disabledUsersRequired": "Vælg brugere før aktivering",
+"disabledLibrariesRequired": "Vælg biblioteker før aktivering",
+"addConfig": "Tilføj konfiguration",
+"rescan": "Genskan"
+},
+"notifications": {
+"enabled": "Plugin aktiveret",
+"disabled": "Plugin deaktiveret",
+"updated": "Plugin opdateret",
+"error": "Fejl ved opdatering af plugin"
+},
+"validation": {
+"invalidJson": "Konfigurationen skal være gyldig JSON"
+},
+"messages": {
+"configHelp": "Konfigurér pluginet med nøgle-værdi-par. Lad stå tomt, hvis pluginet ikke kræver konfiguration.",
+"clickPermissions": "Klik på en tilladelse for detaljer",
+"noConfig": "Ingen konfiguration angivet",
+"allUsersHelp": "Når aktiveret, vil pluginet have adgang til alle brugere, inklusiv dem der oprettes i fremtiden.",
+"noUsers": "Ingen brugere valgt",
+"permissionReason": "Årsag",
+"usersRequired": "Dette plugin kræver adgang til brugeroplysninger. Vælg hvilke brugere pluginet kan tilgå, eller aktivér 'Tillad alle brugere'.",
+"allLibrariesHelp": "Når aktiveret, vil pluginet have adgang til alle biblioteker, inklusiv dem der oprettes i fremtiden.",
+"noLibraries": "Ingen biblioteker valgt",
+"librariesRequired": "Dette plugin kræver adgang til biblioteksoplysninger. Vælg hvilke biblioteker pluginet kan tilgå, eller aktivér 'Tillad alle biblioteker'.",
+"requiredHosts": "Påkrævede hosts",
+"configValidationError": "Konfigurationsvalidering mislykkedes:",
+"schemaRenderError": "Kan ikke vise konfigurationsformularen. Pluginets skema er muligvis ugyldigt."
+},
+"placeholders": {
+"configKey": "nøgle",
+"configValue": "værdi"
+}
+}
},
"ra": {
@@ -511,7 +587,8 @@
"remove_all_missing_title": "Fjern alle manglende filer",
"remove_all_missing_content": "Er du sikker på, at du vil fjerne alle manglende filer fra databasen? Dét vil permanent fjerne alle referencer til dem, inklusive deres afspilningstællere og vurderinger.",
"noSimilarSongsFound": "Ingen lignende sange fundet",
-"noTopSongsFound": "Ingen topsange fundet"
+"noTopSongsFound": "Ingen topsange fundet",
+"startingInstantMix": "Indlæser Instant Mix..."
},
"menu": {
"library": "Bibliotek",
@@ -597,7 +674,8 @@
"exportSuccess": "Konfigurationen eksporteret til udklipsholder i TOML-format",
"exportFailed": "Kunne ikke kopiere konfigurationen",
"devFlagsHeader": "Udviklingsflagget (med forbehold for ændring/fjernelse)",
-"devFlagsComment": "Disse er eksperimental-indstillinger og kan blive fjernet i fremtidige udgaver"
+"devFlagsComment": "Disse er eksperimental-indstillinger og kan blive fjernet i fremtidige udgaver",
+"downloadToml": ""
}
},
"activity": {

@@ -2,7 +2,7 @@
"languageName": "Euskara",
"resources": {
"song": {
-"name": "Abestia |||| Abestiak",
+"name": "Abestia |||| Abesti",
"fields": {
"albumArtist": "Albumaren artista",
"duration": "Iraupena",
@@ -10,6 +10,7 @@
"playCount": "Erreprodukzioak",
"title": "Titulua",
"artist": "Artista",
+"composer": "Konpositorea",
"album": "Albuma",
"path": "Fitxategiaren bidea",
"libraryName": "Liburutegia",
@@ -33,9 +34,9 @@
"grouping": "Multzokatzea",
"mood": "Aldartea",
"participants": "Partaide gehiago",
-"tags": "Traola gehiago",
-"mappedTags": "Esleitutako traolak",
-"rawTags": "Traola gordinak",
+"tags": "Etiketa gehiago",
+"mappedTags": "Esleitutako etiketak",
+"rawTags": "Etiketa gordinak",
"missing": "Ez da aurkitu"
},
"actions": {
@@ -46,11 +47,12 @@
"shuffleAll": "Erreprodukzio aleatorioa",
"download": "Deskargatu",
"playNext": "Hurrengoa",
-"info": "Erakutsi informazioa"
+"info": "Erakutsi informazioa",
+"instantMix": "Berehalako nahastea"
}
},
"album": {
-"name": "Albuma |||| Albumak",
+"name": "Albuma |||| Album",
"fields": {
"albumArtist": "Albumaren artista",
"artist": "Artista",
@@ -66,7 +68,7 @@
"date": "Recording Date",
"originalDate": "Jatorrizkoa",
"releaseDate": "Argitaratze-data",
-"releases": "Argitaratzea |||| Argitaratzeak",
+"releases": "Argitaratzea |||| Argitaratze",
"released": "Argitaratua",
"updatedAt": "Aktualizatze-data:",
"comment": "Iruzkina",
@@ -101,7 +103,7 @@
}
},
"artist": {
-"name": "Artista |||| Artistak",
+"name": "Artista |||| Artista",
"fields": {
"name": "Izena",
"albumCount": "Album kopurua",
@@ -330,6 +332,80 @@
"scanInProgress": "Araketa abian da…",
"noLibrariesAssigned": "Ez da liburutegirik egokitu erabiltzaile honentzat"
}
},
+"plugin": {
+"name": "Plugina |||| Plugin",
+"fields": {
+"id": "IDa",
+"name": "Izena",
+"description": "Deskribapena",
+"version": "Bertsioa",
+"author": "Autorea",
+"website": "Webgunea",
+"permissions": "Baimenak",
+"enabled": "Gaituta",
+"status": "Egoera",
+"path": "Bidea",
+"lastError": "Errorea",
+"hasError": "Errorea",
+"updatedAt": "Eguneratuta",
+"createdAt": "Instalatuta",
+"configKey": "Gakoa",
+"configValue": "Balioa",
+"allUsers": "Baimendu erabiltzaile guztiak",
+"selectedUsers": "Hautatutako erabiltzaileak",
+"allLibraries": "Baimendu liburutegi guztiak",
+"selectedLibraries": "Hautatutako liburutegiak"
+},
+"sections": {
+"status": "Egoera",
+"info": "Pluginaren informazioa",
+"configuration": "Konfigurazioa",
+"manifest": "Manifestua",
+"usersPermission": "Erabiltzaileen baimenak",
+"libraryPermission": "Liburutegien baimenak"
+},
+"status": {
+"enabled": "Gaituta",
+"disabled": "Ezgaituta"
+},
+"actions": {
+"enable": "Gaitu",
+"disable": "Ezgaitu",
+"disabledDueToError": "Konpondu errorea gaitu baino lehen",
+"disabledUsersRequired": "Hautatu erabiltzaileak gaitu baino lehen",
+"disabledLibrariesRequired": "Hautatu liburutegiak gaitu baino lehen",
+"addConfig": "Gehitu konfigurazioa",
+"rescan": "Arakatu berriro"
+},
+"notifications": {
+"enabled": "Plugina gaituta",
+"disabled": "Plugina ezgaituta",
+"updated": "Plugina eguneratuta",
+"error": "Errorea plugina eguneratzean"
+},
+"validation": {
+"invalidJson": "Konfigurazioa baliozko JSON-a izan behar da"
+},
+"messages": {
+"configHelp": "Konfiguratu plugina gako-balio bikoteak erabiliz. Utzi hutsik pluginak konfiguraziorik behar ez badu.",
+"configValidationError": "Huts egin du konfigurazioaren balidazioak:",
+"schemaRenderError": "Ezin izan da konfigurazioaren formularioa bihurtu. Litekeena da pluginaren eskema baliozkoa ez izatea.",
+"clickPermissions": "Sakatu baimen batean xehetasunetarako",
+"noConfig": "Ez da konfiguraziorik ezarri",
+"allUsersHelp": "Gaituta dagoenean, pluginak erabiltzaile guztiak atzitu ditzazke, baita etorkizunean sortuko direnak ere.",
+"noUsers": "Ez da erabiltzailerik hautatu",
+"permissionReason": "Arrazoia",
+"usersRequired": "Plugin honek erabiltzaileen informaziora sarbidea behar du. Hautatu zein erabiltzaile atzitu dezakeen pluginak, edo gaitu 'Baimendu erabiltzaile guztiak'.",
+"allLibrariesHelp": "Gaituta dagoenean, pluginak liburutegi guztietara izango du sarbidea, baita etorkizunean sortuko direnetara ere.",
+"noLibraries": "Ez da liburutegirik hautatu",
+"librariesRequired": "Plugin honek liburutegien informaziora sarbidea behar du. Hautatu zein liburutegi atzitu dezakeen pluginak, edo gaitu 'Baimendu liburutegi guztiak'.",
+"requiredHosts": "Beharrezko ostatatzaileak"
+},
+"placeholders": {
+"configKey": "gakoa",
+"configValue": "balioa"
+}
+}
},
"ra": {
@@ -483,6 +559,7 @@
"transcodingEnabled": "Navidrome %{config}-ekin martxan dago eta, beraz, web-interfazeko transkodeketa-ataletik sistema-komandoak exekuta daitezke. Segurtasun arrazoiak tarteko, ezgaitzea gomendatzen dugu, eta transkodeketa-aukerak konfiguratzen ari zarenean bakarrik gaitzea.",
"songsAddedToPlaylist": "Abesti bat zerrendara gehitu da |||| %{smart_count} abesti zerrendara gehitu dira",
"noSimilarSongsFound": "Ez da antzeko abestirik aurkitu",
+"startingInstantMix": "Berehalako nahastea kargatzen…",
"noTopSongsFound": "Ez da aparteko abestirik aurkitu",
"noPlaylistsAvailable": "Ez dago zerrendarik erabilgarri",
"delete_user_title": "Ezabatu '%{name}' erabiltzailea",

@@ -10,6 +10,7 @@
"playCount": "Lejátszások",
"title": "Cím",
"artist": "Előadó",
+"composer": "Zeneszerző",
"album": "Album",
"path": "Elérési út",
"libraryName": "Könyvtár",
@@ -46,7 +47,8 @@
"shuffleAll": "Keverés",
"download": "Letöltés",
"playNext": "Lejátszás következőként",
-"info": "Részletek"
+"info": "Részletek",
+"instantMix": "Instant keverés"
}
},
"album": {
@@ -325,6 +327,80 @@
"scanInProgress": "Szkennelés folyamatban...",
"noLibrariesAssigned": "Ehhez a felhasználóhoz nincsenek könyvtárak adva"
}
},
+"plugin": {
+"name": "Kiegészítő |||| Kiegészítők",
+"fields": {
+"id": "ID",
+"name": "Név",
+"description": "Leírás",
+"version": "Verzió",
+"author": "Fejlesztő",
+"website": "Weboldal",
+"permissions": "Engedélyek",
+"enabled": "Engedélyezve",
+"status": "Státusz",
+"path": "Útvonal",
+"lastError": "Hiba",
+"hasError": "Hiba",
+"updatedAt": "Frissítve",
+"createdAt": "Telepítve",
+"configKey": "Kulcs",
+"configValue": "Érték",
+"allUsers": "Összes felhasználó engedélyezése",
+"selectedUsers": "Kiválasztott felhasználók engedélyezése",
+"allLibraries": "Összes könyvtár engedélyezése",
+"selectedLibraries": "Kiválasztott könyvtárak engedélyezése"
+},
+"sections": {
+"status": "Státusz",
+"info": "Kiegészítő információi",
+"configuration": "Konfiguráció",
+"manifest": "Manifest",
+"usersPermission": "Felhasználói engedélyek",
+"libraryPermission": "Könyvtári engedélyek"
+},
+"status": {
+"enabled": "Engedélyezve",
+"disabled": "Letiltva"
+},
+"actions": {
+"enable": "Engedélyezés",
+"disable": "Letiltás",
+"disabledDueToError": "Javítsd ki a kiegészítő hibáját",
+"disabledUsersRequired": "Válassz felhasználókat",
+"disabledLibrariesRequired": "Válassz könyvtárakat",
+"addConfig": "Konfiguráció hozzáadása",
+"rescan": "Újraszkennelés"
+},
+"notifications": {
+"enabled": "Kiegészítő engedélyezve",
+"disabled": "Kiegészítő letiltva",
+"updated": "Kiegészítő frissítve",
+"error": "Hiba történt a kiegészítő frissítése közben"
+},
+"validation": {
+"invalidJson": "A konfigurációs JSON érvénytelen"
+},
+"messages": {
+"configHelp": "Konfiguráld a kiegészítőt kulcs-érték párokkal. Hagyd a mezőt üresen, ha nincs szükség konfigurációra.",
+"configValidationError": "Helytelen konfiguráció:",
+"schemaRenderError": "Nem sikerült megjeleníteni a konfigurációs űrlapot. A bővítmény sémája érvénytelen lehet.",
+"clickPermissions": "Kattints egy engedélyre a részletekért",
+"noConfig": "Nincs konfiguráció beállítva",
+"allUsersHelp": "Engedélyezés esetén ez a kiegészítő hozzá fog férni minden jelenlegi és jövőben létrehozott felhasználóhoz.",
+"noUsers": "Nincsenek kiválasztott felhasználók",
+"permissionReason": "Indok",
+"usersRequired": "Ez a kiegészítő hozzáférést kér felhasználói információkhoz. Válaszd ki, melyik felhasználókat érheti el, vagy az 'Összes felhasználó engedélyezése' opciót.",
+"allLibrariesHelp": "Engedélyezés esetén ez a kiegészítő hozzá fog férni minden jelenlegi és jövőben létrehozott könyvtárhoz.",
+"noLibraries": "Nincs kiválasztott könyvtár",
+"librariesRequired": "Ez a kiegészítő hozzáférést kér könyvtárinformációkhoz. Válaszd ki, melyik könyvtárakat érheti el, vagy az 'Összes könyvtár engedélyezése' opciót.",
+"requiredHosts": "Szükséges hostok"
+},
+"placeholders": {
+"configKey": "kulcs",
+"configValue": "érték"
+}
+}
},
"ra": {
@@ -402,7 +478,7 @@
"loading": "Betöltés",
"not_found": "Nem található",
"show": "%{name} #%{id}",
-"empty": "Nincs %{name} még.",
+"empty": "Nincsenek %{name}.",
"invite": "Szeretnél egyet hozzáadni?"
},
"input": {
@@ -478,6 +554,7 @@
"transcodingEnabled": "A Navidrome jelenleg a következőkkel fut %{config}, ez lehetővé teszi a rendszerparancsok futtatását az átkódolási beállításokból a webes felület segítségével. Javasoljuk, hogy biztonsági okokból tiltsd ezt le, és csak az átkódolási beállítások konfigurálásának idejére kapcsold be.",
"songsAddedToPlaylist": "1 szám hozzáadva a lejátszási listához |||| %{smart_count} szám hozzáadva a lejátszási listához",
"noSimilarSongsFound": "Nem találhatóak hasonló számok",
+"startingInstantMix": "Instant keverés töltődik...",
"noTopSongsFound": "Nincsenek top számok",
"noPlaylistsAvailable": "Nem áll rendelkezésre",
"delete_user_title": "Felhasználó törlése '%{name}'",
@@ -591,6 +668,7 @@
"currentValue": "Jelenlegi érték",
"configurationFile": "Konfigurációs fájl",
"exportToml": "Konfiguráció exportálása (TOML)",
+"downloadToml": "Konfiguráció letöltése (TOML)",
"exportSuccess": "Konfiguráció kiexportálva a vágólapra, TOML formában",
"exportFailed": "Nem sikerült kimásolni a konfigurációt",
"devFlagsHeader": "Fejlesztői beállítások (változások/eltávolítás jogát fenntartjuk)",

@@ -674,7 +674,8 @@
"exportSuccess": "Configuração exportada para o clipboard em formato TOML",
"exportFailed": "Falha ao copiar configuração",
"devFlagsHeader": "Flags de Desenvolvimento (sujeitas a mudança/remoção)",
-"devFlagsComment": "Estas são configurações experimentais e podem ser removidas em versões futuras"
+"devFlagsComment": "Estas são configurações experimentais e podem ser removidas em versões futuras",
+"downloadToml": "Baixar configuração (TOML)"
}
},
"activity": {
File diff suppressed because it is too large
@@ -29,21 +29,22 @@ var (
func New(rootCtx context.Context, ds model.DataStore, cw artwork.CacheWarmer, broker events.Broker,
pls core.Playlists, m metrics.Metrics) model.Scanner {
c := &controller{
-rootCtx: rootCtx,
-ds: ds,
-cw: cw,
-broker: broker,
-pls: pls,
-metrics: m,
+rootCtx: rootCtx,
+ds: ds,
+cw: cw,
+broker: broker,
+pls: pls,
+metrics: m,
+devExternalScanner: conf.Server.DevExternalScanner,
}
-if !conf.Server.DevExternalScanner {
+if !c.devExternalScanner {
c.limiter = P(rate.Sometimes{Interval: conf.Server.DevActivityPanelUpdateRate})
}
return c
}

func (s *controller) getScanner() scanner {
-if conf.Server.DevExternalScanner {
+if s.devExternalScanner {
return &scannerExternal{}
}
return &scannerImpl{ds: s.ds, cw: s.cw, pls: s.pls}
@@ -92,16 +93,17 @@ type scanner interface {
}

type controller struct {
-rootCtx context.Context
-ds model.DataStore
-cw artwork.CacheWarmer
-broker events.Broker
-metrics metrics.Metrics
-pls core.Playlists
-limiter *rate.Sometimes
-count atomic.Uint32
-folderCount atomic.Uint32
-changesDetected bool
+rootCtx context.Context
+ds model.DataStore
+cw artwork.CacheWarmer
+broker events.Broker
+metrics metrics.Metrics
+pls core.Playlists
+limiter *rate.Sometimes
+devExternalScanner bool
+count atomic.Uint32
+folderCount atomic.Uint32
+changesDetected bool
}

// getLastScanTime returns the most recent scan time across all libraries
@@ -158,7 +158,7 @@ func writeTargetsToFile(targets []model.ScanTarget) (string, error) {

for _, target := range targets {
if _, err := fmt.Fprintln(tmpFile, target.String()); err != nil {
-os.Remove(tmpFile.Name())
+os.Remove(tmpFile.Name()) //nolint:gosec
return "", fmt.Errorf("failed to write to temp file: %w", err)
}
}
@@ -65,8 +65,8 @@ func (ic *IgnoreChecker) PushAllParents(ctx context.Context, targetPath string)

// Load patterns for each parent directory
currentPath := "."
-parts := strings.Split(path.Clean(targetPath), "/")
-for _, part := range parts {
+parts := strings.SplitSeq(path.Clean(targetPath), "/")
+for part := range parts {
if part == "." || part == "" {
continue
}
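strings.SplitSeq, used above, was added in Go 1.24. Unlike strings.Split it returns an iterator rather than allocating a slice, so the parts can be ranged over directly. A standalone sketch with a made-up path:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Yields "music", "library", "album" one at a time, without building a slice.
	for part := range strings.SplitSeq("music/library/album", "/") {
		fmt.Println(part)
	}
}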
@@ -215,8 +215,8 @@ func (t Tags) Lyrics() string {
}

for tag, value := range t.Tags {
-if strings.HasPrefix(tag, "lyrics-") {
-language := strings.TrimSpace(strings.TrimPrefix(tag, "lyrics-"))
+if after, ok := strings.CutPrefix(tag, "lyrics-"); ok {
+language := strings.TrimSpace(after)

if language == "" {
language = "xxx"
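strings.CutPrefix (Go 1.20), used in the hunk above, combines the HasPrefix check and the TrimPrefix call into one operation: it returns the string with the prefix removed plus a boolean reporting whether the prefix was present. For example:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// after == "eng", ok == true; for a tag without the prefix, ok would be false.
	if after, ok := strings.CutPrefix("lyrics-eng", "lyrics-"); ok {
		fmt.Println(strings.TrimSpace(after))
	}
}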
@@ -2,6 +2,7 @@ package scanner

import (
"context"
+"errors"
"fmt"
"sync"
"sync/atomic"
@@ -267,6 +268,10 @@ func (p *phaseMissingTracks) moveMatched(target, missing model.MediaFile) error
oldAlbumID := missing.AlbumID
newAlbumID := target.AlbumID

+// Preserve the original created_at from the missing file, so moved tracks
+// don't appear in "Recently Added"
+target.CreatedAt = missing.CreatedAt

// Update the target media file with the missing file's ID. This effectively "moves" the track
// to the new location while keeping its annotations and references intact.
target.ID = missing.ID
@@ -298,6 +303,14 @@ func (p *phaseMissingTracks) moveMatched(target, missing model.MediaFile) error
log.Warn(p.ctx, "Scanner: Could not reassign album annotations", "from", oldAlbumID, "to", newAlbumID, err)
}

+// Keep created_at field from previous instance of the album, so moved albums
+// don't appear in "Recently Added"
+if err := tx.Album(p.ctx).CopyAttributes(oldAlbumID, newAlbumID, "created_at"); err != nil {
+if !errors.Is(err, model.ErrNotFound) {
+log.Warn(p.ctx, "Scanner: Could not copy album created_at", "from", oldAlbumID, "to", newAlbumID, err)
+}
+}

// Note: RefreshPlayCounts will be called in later phases, so we don't need to call it here
p.processedAlbumAnnotations[newAlbumID] = true
}

@@ -724,6 +724,120 @@ var _ = Describe("phaseMissingTracks", func() {
}) // End of Context "with multiple libraries"
})

+Describe("CreatedAt preservation (#5050)", func() {
+var albumRepo *tests.MockAlbumRepo
+
+BeforeEach(func() {
+albumRepo = ds.Album(ctx).(*tests.MockAlbumRepo)
+albumRepo.ReassignAnnotationCalls = make(map[string]string)
+albumRepo.CopyAttributesCalls = make(map[string]string)
+})
+
+It("should preserve the missing track's created_at when moving within a library", func() {
+originalTime := time.Date(2020, 3, 15, 10, 0, 0, 0, time.UTC)
+missingTrack := model.MediaFile{
+ID: "1", PID: "A", Path: "old/song.mp3",
+AlbumID: "album-1",
+LibraryID: 1,
+CreatedAt: originalTime,
+Tags: model.Tags{"title": []string{"My Song"}},
+Size: 100,
+}
+matchedTrack := model.MediaFile{
+ID: "2", PID: "A", Path: "new/song.mp3",
+AlbumID: "album-1", // Same album
+LibraryID: 1,
+CreatedAt: time.Now(), // Much newer
+Tags: model.Tags{"title": []string{"My Song"}},
+Size: 100,
+}
+
+_ = ds.MediaFile(ctx).Put(&missingTrack)
+_ = ds.MediaFile(ctx).Put(&matchedTrack)
+
+in := &missingTracks{
+missing: []model.MediaFile{missingTrack},
+matched: []model.MediaFile{matchedTrack},
+}
+
+_, err := phase.processMissingTracks(in)
+Expect(err).ToNot(HaveOccurred())
+
+movedTrack, _ := ds.MediaFile(ctx).Get("1")
+Expect(movedTrack.Path).To(Equal("new/song.mp3"))
+Expect(movedTrack.CreatedAt).To(Equal(originalTime))
+})
+
+It("should preserve created_at during cross-library moves with album change", func() {
+originalTime := time.Date(2019, 6, 1, 12, 0, 0, 0, time.UTC)
+missingTrack := model.MediaFile{
+ID: "missing-ca", PID: "B", Path: "lib1/song.mp3",
+AlbumID: "old-album",
+LibraryID: 1,
+CreatedAt: originalTime,
+}
+matchedTrack := model.MediaFile{
+ID: "matched-ca", PID: "B", Path: "lib2/song.mp3",
+AlbumID: "new-album",
+LibraryID: 2,
+CreatedAt: time.Now(),
+}
+
+// Set up albums so CopyAttributes can find them
+albumRepo.SetData(model.Albums{
+{ID: "old-album", LibraryID: 1, CreatedAt: originalTime},
+{ID: "new-album", LibraryID: 2, CreatedAt: time.Now()},
+})
+
+_ = ds.MediaFile(ctx).Put(&missingTrack)
+_ = ds.MediaFile(ctx).Put(&matchedTrack)
+
+err := phase.moveMatched(matchedTrack, missingTrack)
+Expect(err).ToNot(HaveOccurred())
+
+// Track's created_at should be preserved from the missing file
+movedTrack, _ := ds.MediaFile(ctx).Get("missing-ca")
+Expect(movedTrack.CreatedAt).To(Equal(originalTime))
+
+// Album's created_at should be copied from old to new
+Expect(albumRepo.CopyAttributesCalls).To(HaveKeyWithValue("old-album", "new-album"))
+
+// Verify the new album's CreatedAt was actually updated
+newAlbum, err := albumRepo.Get("new-album")
+Expect(err).ToNot(HaveOccurred())
+Expect(newAlbum.CreatedAt).To(Equal(originalTime))
+})
+
+It("should not copy album created_at when album ID does not change", func() {
+originalTime := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC)
+missingTrack := model.MediaFile{
+ID: "missing-same", PID: "C", Path: "dir1/song.mp3",
+AlbumID: "same-album",
+LibraryID: 1,
+CreatedAt: originalTime,
+}
+matchedTrack := model.MediaFile{
+ID: "matched-same", PID: "C", Path: "dir2/song.mp3",
+AlbumID: "same-album", // Same album
+LibraryID: 1,
+CreatedAt: time.Now(),
+}
+
+_ = ds.MediaFile(ctx).Put(&missingTrack)
+_ = ds.MediaFile(ctx).Put(&matchedTrack)
+
+err := phase.moveMatched(matchedTrack, missingTrack)
+Expect(err).ToNot(HaveOccurred())
+
+// Track's created_at should still be preserved
+movedTrack, _ := ds.MediaFile(ctx).Get("missing-same")
+Expect(movedTrack.CreatedAt).To(Equal(originalTime))
+
+// CopyAttributes should NOT have been called (same album)
+Expect(albumRepo.CopyAttributesCalls).To(BeEmpty())
+})
+})

Describe("Album Annotation Reassignment", func() {
var (
albumRepo *tests.MockAlbumRepo
@@ -6,16 +6,16 @@ import (

type logger struct{}

-func (l *logger) Info(msg string, keysAndValues ...interface{}) {
-args := []interface{}{
+func (l *logger) Info(msg string, keysAndValues ...any) {
+args := []any{
"Scheduler: " + msg,
}
args = append(args, keysAndValues...)
log.Debug(args...)
}

-func (l *logger) Error(err error, msg string, keysAndValues ...interface{}) {
-args := []interface{}{
+func (l *logger) Error(err error, msg string, keysAndValues ...any) {
+args := []any{
"Scheduler: " + msg,
}
args = append(args, keysAndValues...)

@@ -68,8 +68,8 @@ func doLogin(ds model.DataStore, username string, password string, w http.Respon
_ = rest.RespondWithJSON(w, http.StatusOK, payload)
}

-func buildAuthPayload(user *model.User) map[string]interface{} {
-payload := map[string]interface{}{
+func buildAuthPayload(user *model.User) map[string]any {
+payload := map[string]any{
"id": user.ID,
"name": user.Name,
"username": user.UserName,
@@ -288,7 +288,7 @@ func JWTRefresher(next http.Handler) http.Handler {
})
}

-func handleLoginFromHeaders(ds model.DataStore, r *http.Request) map[string]interface{} {
+func handleLoginFromHeaders(ds model.DataStore, r *http.Request) map[string]any {
username := UsernameFromConfig(r)
if username == "" {
username = UsernameFromExtAuthHeader(r)

@@ -53,7 +53,7 @@ var _ = Describe("Auth", func() {

It("returns the expected payload", func() {
Expect(resp.Code).To(Equal(http.StatusOK))
-var parsed map[string]interface{}
+var parsed map[string]any
Expect(json.Unmarshal(resp.Body.Bytes(), &parsed)).To(BeNil())
Expect(parsed["isAdmin"]).To(Equal(true))
Expect(parsed["username"]).To(Equal("johndoe"))
@@ -88,7 +88,7 @@ var _ = Describe("Auth", func() {
serveIndex(ds, fs, nil)(resp, req)

config := extractAppConfig(resp.Body.String())
-parsed := config["auth"].(map[string]interface{})
+parsed := config["auth"].(map[string]any)

Expect(parsed["id"]).To(Equal("111"))
})
@@ -106,7 +106,7 @@ var _ = Describe("Auth", func() {
serveIndex(ds, fs, nil)(resp, req)

config := extractAppConfig(resp.Body.String())
-parsed := config["auth"].(map[string]interface{})
+parsed := config["auth"].(map[string]any)

Expect(parsed["id"]).To(Equal("111"))
})
@@ -127,7 +127,7 @@ var _ = Describe("Auth", func() {
serveIndex(ds, fs, nil)(resp, req)

config := extractAppConfig(resp.Body.String())
-parsed := config["auth"].(map[string]interface{})
+parsed := config["auth"].(map[string]any)

Expect(parsed["username"]).To(Equal(newUser))
})
@@ -137,7 +137,7 @@ var _ = Describe("Auth", func() {
serveIndex(ds, fs, nil)(resp, req)

config := extractAppConfig(resp.Body.String())
-parsed := config["auth"].(map[string]interface{})
+parsed := config["auth"].(map[string]any)

Expect(parsed["id"]).To(Equal("111"))
Expect(parsed["isAdmin"]).To(BeFalse())
@@ -182,7 +182,7 @@ var _ = Describe("Auth", func() {
serveIndex(ds, fs, nil)(resp, req)

config := extractAppConfig(resp.Body.String())
-parsed := config["auth"].(map[string]interface{})
+parsed := config["auth"].(map[string]any)

Expect(parsed["id"]).To(Equal("111"))
})
@@ -206,7 +206,7 @@ var _ = Describe("Auth", func() {
login(ds)(resp, req)
Expect(resp.Code).To(Equal(http.StatusOK))

-var parsed map[string]interface{}
+var parsed map[string]any
Expect(json.Unmarshal(resp.Body.Bytes(), &parsed)).To(BeNil())
Expect(parsed["isAdmin"]).To(Equal(false))
Expect(parsed["username"]).To(Equal("janedoe"))
Some files were not shown because too many files have changed in this diff