Compare commits

..

21 Commits

Author SHA1 Message Date
Deluan
b33d831a1d fix: small issues
Updated mock AAC transcoding command to use the new default (ipod with
fragmented MP4) matching the migration, ensuring tests exercise the same
buildDynamicArgs code path as production. Improved archiver test mock to
match on the whole StreamRequest struct instead of decomposing fields,
making it resilient to future field additions. Added named constants for
JWT claim keys in the transcode token and wrapped ParseTranscodeParams
errors with ErrTokenInvalid for consistency. Documented the IsLossless
BitDepth fallback heuristic as temporary until Codec column is populated.

Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 20:17:31 -05:00
Deluan
3b1bd2c265 feat(transcoding): add sourceUpdatedAt to decision and validate transcode parameters
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
0c55c7ce89 fix: address review findings for OpenSubsonic transcoding PR
Fix multiple issues identified during code review of the transcoding
extension: add missing return after error in shared stream handler
preventing nil pointer panic, replace dead r.Body nil check with
MaxBytesReader size limit, distinguish not-found from other DB errors,
fix bpsToKbps integer truncation with rounding, add "pcm" to
isLosslessFormat for consistency with model.IsLossless(), add
sampleRate/bitDepth/channels to streaming log, fix outdated test
comment, and add tests for conversion functions and GetTranscodeStream
parameter passing.
2026-02-09 16:46:46 -05:00
Deluan
fab2acfe36 fix: implement noopDecider for transcoding decision handling in tests
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
22dba77509 refactor(transcoding): update default command handling and add codec support for transcoding
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
5107492059 refactor(transcoding): streamline transcoding logic by consolidating stream parameter handling and enhancing alias mapping
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
01b1fc90a9 refactor(transcoding): enhance AAC command handling and support for audio channels in streaming
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
4a50142dd6 refactor(transcoding): add bit depth support for audio transcoding and enhance related logic
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
e843b918b2 refactor(transcoding): enhance transcoding options with sample rate support and improve command handling
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
39e341e863 refactor(transcoding): enhance transcoding config lookup logic for audio codecs
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
7ca0eade80 refactor(transcoding): rename TranscodeDecision to Decider and update related methods for clarity
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
2e02e92cc4 refactor(transcoding): enhance logging for transcode decision process and client info conversion
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
216d0c6c6c refactor(transcoding): rename token methods to CreateTranscodeParams and ParseTranscodeParams for clarity
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
c26cc0f5b9 refactor(transcoding): replace strings.EqualFold with direct comparison for protocol and limitation checks
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
4bb6802922 refactor(transcoding): streamline limitation checks and applyLimitation logic for improved readability and maintainability
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
2e00479a8b feat(transcoding): add enums for protocol, comparison operators, limitations, and codec profiles in transcode decision logic
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
07e2f699da fix(transcoding): enforce POST method for GetTranscodeDecision and handle non-POST requests
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:46 -05:00
Deluan
ff57efa170 refactor(transcoding): simplify container alias handling in matchesContainer function
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:45 -05:00
Deluan
0658e1f824 fix(transcoding): update bitrate handling to use kilobits per second (kbps) across transcode decision logic
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:45 -05:00
Deluan
a88ab9f16c fix(subsonic): update codec limitation structure and decision logic for improved clarity
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:45 -05:00
Deluan
b5621b9784 feat(subsonic): implement transcode decision logic and codec handling for media files
Signed-off-by: Deluan <deluan@navidrome.org>
2026-02-09 16:46:45 -05:00
108 changed files with 4571 additions and 4465 deletions

View File

@@ -1,138 +0,0 @@
#!/bin/sh
set -e
I18N_DIR=resources/i18n
# Normalize JSON for deterministic comparison:
# remove empty/null attributes, sort keys alphabetically
process_json() {
    # Normalize a JSON file for deterministic comparison: drop null/empty
    # attributes everywhere, then sort every object's keys alphabetically.
    # The program is kept in a named variable so the normalization logic
    # is self-describing at the call site.
    normalize='walk(if type == "object" then with_entries(select(.value != null and .value != "" and .value != [] and .value != {})) | to_entries | sort_by(.key) | from_entries else . end)'
    jq "$normalize" "$1"
}
# Get list of all languages configured in the POEditor project
get_language_list() {
    # Fetch the full list of languages configured in the POEditor project
    # (v2 API). Prints the raw JSON response to stdout.
    curl -s -X POST https://api.poeditor.com/v2/languages/list \
        -d id="${POEDITOR_PROJECTID}" \
        -d api_token="${POEDITOR_APIKEY}"
}
# Extract language name from the language list JSON given a language code
get_language_name() {
    # Look up the human-readable language name for a language code in the
    # languages/list JSON ($2). Prints nothing if the code is unknown.
    code="$1"
    list="$2"
    # Pass the code via --arg instead of interpolating it into the jq
    # program text, so quotes or other special characters in the code
    # cannot break (or inject into) the filter.
    echo "$list" | jq -r --arg code "$code" '.result.languages[] | select(.code == $code) | .name'
}
# Extract language code from a file path (e.g., "resources/i18n/fr.json" -> "fr")
get_lang_code() {
    # Derive the language code from a translation file path using pure
    # parameter expansion (e.g. "resources/i18n/fr.json" -> "fr"):
    # strip the directory prefix, then the extension.
    base="${1##*/}"
    echo "${base%.*}"
}
# Export the current translation for a language from POEditor (v2 API)
export_language() {
    # Ask POEditor (v2 API) to export the current translation for one
    # language as key/value JSON. On success, prints the download URL;
    # on failure, logs the raw response to stderr and returns 1.
    code="$1"
    resp=$(curl -s -X POST https://api.poeditor.com/v2/projects/export \
        -d api_token="${POEDITOR_APIKEY}" \
        -d id="${POEDITOR_PROJECTID}" \
        -d language="$code" \
        -d type="key_value_json")
    dl_url=$(echo "$resp" | jq -r '.result.url')
    case "$dl_url" in
    "" | null)
        echo "Failed to export $code: $resp" >&2
        return 1
        ;;
    esac
    echo "$dl_url"
}
# Flatten nested JSON to POEditor languages/update format.
# POEditor uses term + context pairs, where:
# term = the leaf key name
# context = the parent path as "key1"."key2"."key3" (empty for root keys)
flatten_to_poeditor() {
# Emit one {term, context, translation} object per scalar leaf of the nested
# JSON file ($1), in the shape POEditor's languages/update API expects:
#   term    = the leaf key name
#   context = the parent path rendered as "key1"."key2" (empty for root keys)
#   translation.content = the leaf value
# Output is a single compact JSON array (-c).
jq -c '[paths(scalars) as $p |
{
"term": ($p | last | tostring),
"context": (if ($p | length) > 1 then ($p[:-1] | map("\"" + tostring + "\"") | join(".")) else "" end),
"translation": {"content": getpath($p)}
}
]' "$1"
}
# Update translations for a language in POEditor via languages/update API
update_language() {
    # Push the translations in file ($2) for language ($1) to POEditor via
    # the languages/update API. Prints a summary line on success; on failure,
    # logs the raw response to stderr and returns 1.
    lang_code="$1"
    file="$2"
    # Use a unique temp file instead of the fixed /tmp/poeditor_data.json:
    # a predictable path is a collision hazard between concurrent runs and a
    # symlink/hijack hazard on shared machines.
    data_file=$(mktemp)
    flatten_to_poeditor "$file" > "$data_file"
    response=$(curl -s -X POST https://api.poeditor.com/v2/languages/update \
        -d api_token="${POEDITOR_APIKEY}" \
        -d id="${POEDITOR_PROJECTID}" \
        -d language="$lang_code" \
        --data-urlencode data@"$data_file")
    rm -f "$data_file"
    status=$(echo "$response" | jq -r '.response.status')
    if [ "$status" != "success" ]; then
        echo "Failed to update $lang_code: $response" >&2
        return 1
    fi
    parsed=$(echo "$response" | jq -r '.result.translations.parsed')
    added=$(echo "$response" | jq -r '.result.translations.added')
    updated=$(echo "$response" | jq -r '.result.translations.updated')
    echo " Translations - parsed: $parsed, added: $added, updated: $updated"
}
# --- Main ---
if [ $# -eq 0 ]; then
    echo "Usage: $0 <file1> [file2] ..."
    echo "No files specified. Nothing to do."
    exit 0
fi
# Fetch the project's language list once; each file below is matched against it.
lang_list=$(get_language_list)
upload_count=0
for file in "$@"; do
    if [ ! -f "$file" ]; then
        echo "Warning: File not found: $file, skipping"
        continue
    fi
    lang_code=$(get_lang_code "$file")
    lang_name=$(get_language_name "$lang_code" "$lang_list")
    if [ -z "$lang_name" ]; then
        echo "Warning: Language code '$lang_code' not found in POEditor, skipping $file"
        continue
    fi
    echo "Processing $lang_name ($lang_code)..."
    # Export current state from POEditor. export_language returns non-zero on
    # failure; without this guard, `set -e` would abort the entire run on a
    # bare `url=$(export_language ...)` instead of skipping just this file.
    if ! url=$(export_language "$lang_code"); then
        echo "Warning: export failed for $lang_code, skipping $file"
        continue
    fi
    curl -sSL "$url" -o poeditor_export.json
    # Normalize both sides so key order and empty values don't count as diffs.
    process_json "$file" > local_normalized.json
    process_json poeditor_export.json > remote_normalized.json
    # Only upload when the normalized versions actually differ.
    if diff -q local_normalized.json remote_normalized.json > /dev/null 2>&1; then
        echo " No differences, skipping"
    else
        echo " Differences found, updating POEditor..."
        update_language "$lang_code" "$file"
        upload_count=$((upload_count + 1))
    fi
    rm -f poeditor_export.json local_normalized.json remote_normalized.json
done
echo ""
echo "Done. Updated $upload_count translation(s) in POEditor."

View File

@@ -1,32 +0,0 @@
# Pushes changed translation files to POEditor whenever resources/i18n/*.json
# files change on master. (Indentation reconstructed — the extracted source
# had all YAML structure flattened.)
name: POEditor export
on:
  push:
    branches:
      - master
    paths:
      - 'resources/i18n/*.json'
jobs:
  push-translations:
    runs-on: ubuntu-latest
    # Only run on the canonical repo; forks don't have the POEditor secrets.
    if: ${{ github.repository_owner == 'navidrome' }}
    steps:
      - uses: actions/checkout@v6
        with:
          # Need the previous commit so the diff below can detect changed files.
          fetch-depth: 2
      - name: Detect changed translation files
        id: changed
        run: |
          CHANGED_FILES=$(git diff --name-only HEAD~1 HEAD -- 'resources/i18n/*.json' | tr '\n' ' ')
          echo "files=$CHANGED_FILES" >> $GITHUB_OUTPUT
          echo "Changed translation files: $CHANGED_FILES"
      - name: Push translations to POEditor
        # Skip the push entirely when no translation files changed.
        if: ${{ steps.changed.outputs.files != '' }}
        env:
          POEDITOR_APIKEY: ${{ secrets.POEDITOR_APIKEY }}
          POEDITOR_PROJECTID: ${{ secrets.POEDITOR_PROJECTID }}
        run: |
          .github/workflows/push-translations.sh ${{ steps.changed.outputs.files }}

View File

@@ -20,7 +20,7 @@ DOCKER_TAG ?= deluan/navidrome:develop
# Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib # Taglib version to use in cross-compilation, from https://github.com/navidrome/cross-taglib
CROSS_TAGLIB_VERSION ?= 2.1.1-2 CROSS_TAGLIB_VERSION ?= 2.1.1-2
GOLANGCI_LINT_VERSION ?= v2.9.0 GOLANGCI_LINT_VERSION ?= v2.8.0
UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*") UI_SRC_FILES := $(shell find ui -type f -not -path "ui/build/*" -not -path "ui/node_modules/*")

View File

@@ -65,6 +65,7 @@ func (e extractor) extractMetadata(filePath string) (*metadata.Info, error) {
Channels: int(props.Channels), Channels: int(props.Channels),
SampleRate: int(props.SampleRate), SampleRate: int(props.SampleRate),
BitDepth: int(props.BitsPerSample), BitDepth: int(props.BitsPerSample),
Codec: props.Codec,
} }
// Convert normalized tags to lowercase keys (go-taglib returns UPPERCASE keys) // Convert normalized tags to lowercase keys (go-taglib returns UPPERCASE keys)

View File

@@ -14,13 +14,10 @@ import (
"github.com/navidrome/navidrome/db" "github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log" "github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model" "github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/plugins"
"github.com/navidrome/navidrome/resources" "github.com/navidrome/navidrome/resources"
"github.com/navidrome/navidrome/scanner" "github.com/navidrome/navidrome/scanner"
"github.com/navidrome/navidrome/scheduler" "github.com/navidrome/navidrome/scheduler"
"github.com/navidrome/navidrome/server"
"github.com/navidrome/navidrome/server/backgrounds" "github.com/navidrome/navidrome/server/backgrounds"
"github.com/navidrome/navidrome/server/subsonic"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/viper" "github.com/spf13/viper"
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
@@ -141,13 +138,6 @@ func startServer(ctx context.Context) func() error {
if strings.HasPrefix(conf.Server.UILoginBackgroundURL, "/") { if strings.HasPrefix(conf.Server.UILoginBackgroundURL, "/") {
a.MountRouter("Background images", conf.Server.UILoginBackgroundURL, backgrounds.NewHandler()) a.MountRouter("Background images", conf.Server.UILoginBackgroundURL, backgrounds.NewHandler())
} }
if conf.Server.Plugins.Enabled {
manager := GetPluginManager(ctx)
ds := CreateDataStore()
endpointRouter := plugins.NewEndpointRouter(manager, ds, subsonic.ValidateAuth, server.Authenticator)
a.MountRouter("Plugin Endpoints", consts.URLPathPluginEndpoints, endpointRouter)
a.MountRouter("Plugin Subsonic Endpoints", consts.URLPathPluginSubsonicEndpoints, endpointRouter)
}
return a.Run(ctx, conf.Server.Address, conf.Server.Port, conf.Server.TLSCert, conf.Server.TLSKey) return a.Run(ctx, conf.Server.Address, conf.Server.Port, conf.Server.TLSCert, conf.Server.TLSKey)
} }
} }

View File

@@ -19,6 +19,7 @@ import (
"github.com/navidrome/navidrome/core/metrics" "github.com/navidrome/navidrome/core/metrics"
"github.com/navidrome/navidrome/core/playback" "github.com/navidrome/navidrome/core/playback"
"github.com/navidrome/navidrome/core/scrobbler" "github.com/navidrome/navidrome/core/scrobbler"
"github.com/navidrome/navidrome/core/transcode"
"github.com/navidrome/navidrome/db" "github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/model" "github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/persistence" "github.com/navidrome/navidrome/persistence"
@@ -102,7 +103,8 @@ func CreateSubsonicAPIRouter(ctx context.Context) *subsonic.Router {
modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics) modelScanner := scanner.New(ctx, dataStore, cacheWarmer, broker, playlists, metricsMetrics)
playTracker := scrobbler.GetPlayTracker(dataStore, broker, manager) playTracker := scrobbler.GetPlayTracker(dataStore, broker, manager)
playbackServer := playback.GetInstance(dataStore) playbackServer := playback.GetInstance(dataStore)
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, modelScanner, broker, playlists, playTracker, share, playbackServer, metricsMetrics) decider := transcode.NewDecider(dataStore)
router := subsonic.New(dataStore, artworkArtwork, mediaStreamer, archiver, players, provider, modelScanner, broker, playlists, playTracker, share, playbackServer, metricsMetrics, decider)
return router return router
} }

View File

@@ -239,13 +239,11 @@ type inspectOptions struct {
} }
type pluginsOptions struct { type pluginsOptions struct {
Enabled bool Enabled bool
Folder string Folder string
CacheSize string CacheSize string
AutoReload bool AutoReload bool
LogLevel string LogLevel string
EndpointRequestLimit int
EndpointRequestWindow time.Duration
} }
type extAuthOptions struct { type extAuthOptions struct {
@@ -673,8 +671,6 @@ func setViperDefaults() {
viper.SetDefault("plugins.enabled", true) viper.SetDefault("plugins.enabled", true)
viper.SetDefault("plugins.cachesize", "200MB") viper.SetDefault("plugins.cachesize", "200MB")
viper.SetDefault("plugins.autoreload", false) viper.SetDefault("plugins.autoreload", false)
viper.SetDefault("plugins.endpointrequestlimit", 60)
viper.SetDefault("plugins.endpointrequestwindow", time.Minute)
// DevFlags. These are used to enable/disable debugging and incomplete features // DevFlags. These are used to enable/disable debugging and incomplete features
viper.SetDefault("devlogsourceline", false) viper.SetDefault("devlogsourceline", false)

View File

@@ -36,13 +36,11 @@ const (
DevInitialUserName = "admin" DevInitialUserName = "admin"
DevInitialName = "Dev Admin" DevInitialName = "Dev Admin"
URLPathUI = "/app" URLPathUI = "/app"
URLPathNativeAPI = "/api" URLPathNativeAPI = "/api"
URLPathSubsonicAPI = "/rest" URLPathSubsonicAPI = "/rest"
URLPathPluginEndpoints = "/ext" URLPathPublic = "/share"
URLPathPluginSubsonicEndpoints = "/rest/ext" URLPathPublicImages = URLPathPublic + "/img"
URLPathPublic = "/share"
URLPathPublicImages = URLPathPublic + "/img"
// DefaultUILoginBackgroundURL uses Navidrome curated background images collection, // DefaultUILoginBackgroundURL uses Navidrome curated background images collection,
// available at https://unsplash.com/collections/20072696/navidrome // available at https://unsplash.com/collections/20072696/navidrome
@@ -153,7 +151,13 @@ var (
Name: "aac audio", Name: "aac audio",
TargetFormat: "aac", TargetFormat: "aac",
DefaultBitRate: 256, DefaultBitRate: 256,
Command: "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a aac -f adts -", Command: "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a aac -f ipod -movflags frag_keyframe+empty_moov -",
},
{
Name: "flac audio",
TargetFormat: "flac",
DefaultBitRate: 0,
Command: "ffmpeg -i %s -ss %t -map 0:a:0 -v 0 -c:a flac -f flac -",
}, },
} }
) )

View File

@@ -176,7 +176,7 @@ func (a *archiver) addFileToZip(ctx context.Context, z *zip.Writer, mf model.Med
var r io.ReadCloser var r io.ReadCloser
if format != "raw" && format != "" { if format != "raw" && format != "" {
r, err = a.ms.DoStream(ctx, &mf, format, bitrate, 0) r, err = a.ms.DoStream(ctx, &mf, StreamRequest{Format: format, BitRate: bitrate})
} else { } else {
r, err = os.Open(path) r, err = os.Open(path)
} }

View File

@@ -44,7 +44,7 @@ var _ = Describe("Archiver", func() {
}}).Return(mfs, nil) }}).Return(mfs, nil)
ds.On("MediaFile", mock.Anything).Return(mfRepo) ds.On("MediaFile", mock.Anything).Return(mfRepo)
ms.On("DoStream", mock.Anything, mock.Anything, "mp3", 128, 0).Return(io.NopCloser(strings.NewReader("test")), nil).Times(3) ms.On("DoStream", mock.Anything, mock.Anything, core.StreamRequest{Format: "mp3", BitRate: 128}).Return(io.NopCloser(strings.NewReader("test")), nil).Times(3)
out := new(bytes.Buffer) out := new(bytes.Buffer)
err := arch.ZipAlbum(context.Background(), "1", "mp3", 128, out) err := arch.ZipAlbum(context.Background(), "1", "mp3", 128, out)
@@ -73,7 +73,7 @@ var _ = Describe("Archiver", func() {
}}).Return(mfs, nil) }}).Return(mfs, nil)
ds.On("MediaFile", mock.Anything).Return(mfRepo) ds.On("MediaFile", mock.Anything).Return(mfRepo)
ms.On("DoStream", mock.Anything, mock.Anything, "mp3", 128, 0).Return(io.NopCloser(strings.NewReader("test")), nil).Times(2) ms.On("DoStream", mock.Anything, mock.Anything, core.StreamRequest{Format: "mp3", BitRate: 128}).Return(io.NopCloser(strings.NewReader("test")), nil).Times(2)
out := new(bytes.Buffer) out := new(bytes.Buffer)
err := arch.ZipArtist(context.Background(), "1", "mp3", 128, out) err := arch.ZipArtist(context.Background(), "1", "mp3", 128, out)
@@ -104,7 +104,7 @@ var _ = Describe("Archiver", func() {
} }
sh.On("Load", mock.Anything, "1").Return(share, nil) sh.On("Load", mock.Anything, "1").Return(share, nil)
ms.On("DoStream", mock.Anything, mock.Anything, "mp3", 128, 0).Return(io.NopCloser(strings.NewReader("test")), nil).Times(2) ms.On("DoStream", mock.Anything, mock.Anything, core.StreamRequest{Format: "mp3", BitRate: 128}).Return(io.NopCloser(strings.NewReader("test")), nil).Times(2)
out := new(bytes.Buffer) out := new(bytes.Buffer)
err := arch.ZipShare(context.Background(), "1", out) err := arch.ZipShare(context.Background(), "1", out)
@@ -136,7 +136,7 @@ var _ = Describe("Archiver", func() {
plRepo := &mockPlaylistRepository{} plRepo := &mockPlaylistRepository{}
plRepo.On("GetWithTracks", "1", true, false).Return(pls, nil) plRepo.On("GetWithTracks", "1", true, false).Return(pls, nil)
ds.On("Playlist", mock.Anything).Return(plRepo) ds.On("Playlist", mock.Anything).Return(plRepo)
ms.On("DoStream", mock.Anything, mock.Anything, "mp3", 128, 0).Return(io.NopCloser(strings.NewReader("test")), nil).Times(2) ms.On("DoStream", mock.Anything, mock.Anything, core.StreamRequest{Format: "mp3", BitRate: 128}).Return(io.NopCloser(strings.NewReader("test")), nil).Times(2)
out := new(bytes.Buffer) out := new(bytes.Buffer)
err := arch.ZipPlaylist(context.Background(), "1", "mp3", 128, out) err := arch.ZipPlaylist(context.Background(), "1", "mp3", 128, out)
@@ -217,8 +217,8 @@ type mockMediaStreamer struct {
core.MediaStreamer core.MediaStreamer
} }
func (m *mockMediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqFormat string, reqBitRate int, reqOffset int) (*core.Stream, error) { func (m *mockMediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, req core.StreamRequest) (*core.Stream, error) {
args := m.Called(ctx, mf, reqFormat, reqBitRate, reqOffset) args := m.Called(ctx, mf, req)
if args.Error(1) != nil { if args.Error(1) != nil {
return nil, args.Error(1) return nil, args.Error(1)
} }

View File

@@ -12,11 +12,24 @@ import (
"sync" "sync"
"github.com/navidrome/navidrome/conf" "github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/log" "github.com/navidrome/navidrome/log"
) )
// TranscodeOptions contains all parameters for a transcoding operation.
type TranscodeOptions struct {
Command string // DB command template (used to detect custom vs default)
Format string // Target format (mp3, opus, aac, flac)
FilePath string
BitRate int // kbps, 0 = codec default
SampleRate int // 0 = no constraint
Channels int // 0 = no constraint
BitDepth int // 0 = no constraint; valid values: 16, 24, 32
Offset int // seconds
}
type FFmpeg interface { type FFmpeg interface {
Transcode(ctx context.Context, command, path string, maxBitRate, offset int) (io.ReadCloser, error) Transcode(ctx context.Context, opts TranscodeOptions) (io.ReadCloser, error)
ExtractImage(ctx context.Context, path string) (io.ReadCloser, error) ExtractImage(ctx context.Context, path string) (io.ReadCloser, error)
Probe(ctx context.Context, files []string) (string, error) Probe(ctx context.Context, files []string) (string, error)
CmdPath() (string, error) CmdPath() (string, error)
@@ -35,15 +48,19 @@ const (
type ffmpeg struct{} type ffmpeg struct{}
func (e *ffmpeg) Transcode(ctx context.Context, command, path string, maxBitRate, offset int) (io.ReadCloser, error) { func (e *ffmpeg) Transcode(ctx context.Context, opts TranscodeOptions) (io.ReadCloser, error) {
if _, err := ffmpegCmd(); err != nil { if _, err := ffmpegCmd(); err != nil {
return nil, err return nil, err
} }
// First make sure the file exists if err := fileExists(opts.FilePath); err != nil {
if err := fileExists(path); err != nil {
return nil, err return nil, err
} }
args := createFFmpegCommand(command, path, maxBitRate, offset) var args []string
if isDefaultCommand(opts.Format, opts.Command) {
args = buildDynamicArgs(opts)
} else {
args = buildTemplateArgs(opts)
}
return e.start(ctx, args) return e.start(ctx, args)
} }
@@ -51,7 +68,6 @@ func (e *ffmpeg) ExtractImage(ctx context.Context, path string) (io.ReadCloser,
if _, err := ffmpegCmd(); err != nil { if _, err := ffmpegCmd(); err != nil {
return nil, err return nil, err
} }
// First make sure the file exists
if err := fileExists(path); err != nil { if err := fileExists(path); err != nil {
return nil, err return nil, err
} }
@@ -156,6 +172,139 @@ func (j *ffCmd) wait() {
_ = j.out.Close() _ = j.out.Close()
} }
// formatCodecMap maps target format to ffmpeg codec flag.
var formatCodecMap = map[string]string{
"mp3": "libmp3lame",
"opus": "libopus",
"aac": "aac",
"flac": "flac",
}
// formatOutputMap maps target format to ffmpeg output format flag (-f).
var formatOutputMap = map[string]string{
"mp3": "mp3",
"opus": "opus",
"aac": "ipod",
"flac": "flac",
}
// defaultCommands is used to detect whether a user has customized their transcoding command.
var defaultCommands = func() map[string]string {
m := make(map[string]string, len(consts.DefaultTranscodings))
for _, t := range consts.DefaultTranscodings {
m[t.TargetFormat] = t.Command
}
return m
}()
// isDefaultCommand returns true if the command matches the known default for this format.
func isDefaultCommand(format, command string) bool {
return defaultCommands[format] == command
}
// buildDynamicArgs programmatically constructs ffmpeg arguments for known formats,
// including all transcoding parameters (bitrate, sample rate, channels).
func buildDynamicArgs(opts TranscodeOptions) []string {
cmdPath, _ := ffmpegCmd()
args := []string{cmdPath, "-i", opts.FilePath}
if opts.Offset > 0 {
args = append(args, "-ss", strconv.Itoa(opts.Offset))
}
args = append(args, "-map", "0:a:0")
if codec, ok := formatCodecMap[opts.Format]; ok {
args = append(args, "-c:a", codec)
}
if opts.BitRate > 0 {
args = append(args, "-b:a", strconv.Itoa(opts.BitRate)+"k")
}
if opts.SampleRate > 0 {
args = append(args, "-ar", strconv.Itoa(opts.SampleRate))
}
if opts.Channels > 0 {
args = append(args, "-ac", strconv.Itoa(opts.Channels))
}
// Only pass -sample_fmt for lossless output formats where bit depth matters.
// Lossy codecs (mp3, aac, opus) handle sample format conversion internally,
// and passing interleaved formats like "s16" causes silent failures.
if opts.BitDepth >= 16 && isLosslessOutputFormat(opts.Format) {
args = append(args, "-sample_fmt", bitDepthToSampleFmt(opts.BitDepth))
}
args = append(args, "-v", "0")
if outputFmt, ok := formatOutputMap[opts.Format]; ok {
args = append(args, "-f", outputFmt)
}
// For AAC in MP4 container, enable fragmented MP4 for pipe-safe streaming
if opts.Format == "aac" {
args = append(args, "-movflags", "frag_keyframe+empty_moov")
}
args = append(args, "-")
return args
}
// buildTemplateArgs handles user-customized command templates, with dynamic injection
// of sample rate and channels when the template doesn't already include them.
func buildTemplateArgs(opts TranscodeOptions) []string {
args := createFFmpegCommand(opts.Command, opts.FilePath, opts.BitRate, opts.Offset)
// Dynamically inject -ar, -ac, and -sample_fmt for custom templates that don't include them
if opts.SampleRate > 0 {
args = injectBeforeOutput(args, "-ar", strconv.Itoa(opts.SampleRate))
}
if opts.Channels > 0 {
args = injectBeforeOutput(args, "-ac", strconv.Itoa(opts.Channels))
}
if opts.BitDepth >= 16 && isLosslessOutputFormat(opts.Format) {
args = injectBeforeOutput(args, "-sample_fmt", bitDepthToSampleFmt(opts.BitDepth))
}
return args
}
// injectBeforeOutput inserts a flag and value before the trailing "-" (stdout output).
func injectBeforeOutput(args []string, flag, value string) []string {
if len(args) > 0 && args[len(args)-1] == "-" {
result := make([]string, 0, len(args)+2)
result = append(result, args[:len(args)-1]...)
result = append(result, flag, value, "-")
return result
}
return append(args, flag, value)
}
// isLosslessOutputFormat returns true if the format is a lossless audio format
// where preserving bit depth via -sample_fmt is meaningful.
// Note: this covers only formats ffmpeg can produce as output. For the full set of
// lossless formats used in transcoding decisions, see core/transcode/codec.go:isLosslessFormat.
func isLosslessOutputFormat(format string) bool {
switch strings.ToLower(format) {
case "flac", "alac", "wav", "aiff":
return true
}
return false
}
// bitDepthToSampleFmt converts a bit depth value to the ffmpeg sample_fmt string.
// FLAC only supports s16 and s32; for 24-bit sources, s32 is the correct format
// (ffmpeg packs 24-bit samples into 32-bit containers).
func bitDepthToSampleFmt(bitDepth int) string {
switch bitDepth {
case 16:
return "s16"
case 32:
return "s32"
default:
// 24-bit and other depths: use s32 (the next valid container size)
return "s32"
}
}
// Path will always be an absolute path // Path will always be an absolute path
func createFFmpegCommand(cmd, path string, maxBitRate, offset int) []string { func createFFmpegCommand(cmd, path string, maxBitRate, offset int) []string {
var args []string var args []string

View File

@@ -2,19 +2,27 @@ package ffmpeg
import ( import (
"context" "context"
"os"
"path/filepath"
"runtime" "runtime"
sync "sync" sync "sync"
"testing" "testing"
"time" "time"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/log" "github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2" . "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega" . "github.com/onsi/gomega"
) )
func TestFFmpeg(t *testing.T) { func TestFFmpeg(t *testing.T) {
tests.Init(t, false) // Inline test init to avoid import cycle with tests package
//nolint:dogsled
_, file, _, _ := runtime.Caller(0)
appPath, _ := filepath.Abs(filepath.Join(filepath.Dir(file), "..", ".."))
confPath := filepath.Join(appPath, "tests", "navidrome-test.toml")
_ = os.Chdir(appPath)
conf.LoadFromFile(confPath)
log.SetLevel(log.LevelFatal) log.SetLevel(log.LevelFatal)
RegisterFailHandler(Fail) RegisterFailHandler(Fail)
RunSpecs(t, "FFmpeg Suite") RunSpecs(t, "FFmpeg Suite")
@@ -70,6 +78,286 @@ var _ = Describe("ffmpeg", func() {
}) })
}) })
Describe("isDefaultCommand", func() {
It("returns true for known default mp3 command", func() {
Expect(isDefaultCommand("mp3", "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -f mp3 -")).To(BeTrue())
})
It("returns true for known default opus command", func() {
Expect(isDefaultCommand("opus", "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a libopus -f opus -")).To(BeTrue())
})
It("returns true for known default aac command", func() {
Expect(isDefaultCommand("aac", "ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a aac -f ipod -movflags frag_keyframe+empty_moov -")).To(BeTrue())
})
It("returns true for known default flac command", func() {
Expect(isDefaultCommand("flac", "ffmpeg -i %s -ss %t -map 0:a:0 -v 0 -c:a flac -f flac -")).To(BeTrue())
})
It("returns false for a custom command", func() {
Expect(isDefaultCommand("mp3", "ffmpeg -i %s -b:a %bk -custom-flag -f mp3 -")).To(BeFalse())
})
It("returns false for unknown format", func() {
Expect(isDefaultCommand("wav", "ffmpeg -i %s -f wav -")).To(BeFalse())
})
})
Describe("buildDynamicArgs", func() {
It("builds mp3 args with bitrate, samplerate, and channels", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "mp3",
FilePath: "/music/file.flac",
BitRate: 256,
SampleRate: 48000,
Channels: 2,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.flac",
"-map", "0:a:0",
"-c:a", "libmp3lame",
"-b:a", "256k",
"-ar", "48000",
"-ac", "2",
"-v", "0",
"-f", "mp3",
"-",
}))
})
It("builds flac args without bitrate", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "flac",
FilePath: "/music/file.dsf",
SampleRate: 48000,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.dsf",
"-map", "0:a:0",
"-c:a", "flac",
"-ar", "48000",
"-v", "0",
"-f", "flac",
"-",
}))
})
It("builds opus args with bitrate only", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "opus",
FilePath: "/music/file.flac",
BitRate: 128,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.flac",
"-map", "0:a:0",
"-c:a", "libopus",
"-b:a", "128k",
"-v", "0",
"-f", "opus",
"-",
}))
})
It("includes offset when specified", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "mp3",
FilePath: "/music/file.mp3",
BitRate: 192,
Offset: 30,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.mp3",
"-ss", "30",
"-map", "0:a:0",
"-c:a", "libmp3lame",
"-b:a", "192k",
"-v", "0",
"-f", "mp3",
"-",
}))
})
It("builds aac args with fragmented MP4 container", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "aac",
FilePath: "/music/file.flac",
BitRate: 256,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.flac",
"-map", "0:a:0",
"-c:a", "aac",
"-b:a", "256k",
"-v", "0",
"-f", "ipod",
"-movflags", "frag_keyframe+empty_moov",
"-",
}))
})
It("builds flac args with bit depth", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "flac",
FilePath: "/music/file.dsf",
BitDepth: 24,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.dsf",
"-map", "0:a:0",
"-c:a", "flac",
"-sample_fmt", "s32",
"-v", "0",
"-f", "flac",
"-",
}))
})
It("omits -sample_fmt when bit depth is 0", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "flac",
FilePath: "/music/file.flac",
BitDepth: 0,
})
Expect(args).ToNot(ContainElement("-sample_fmt"))
})
It("omits -sample_fmt when bit depth is too low (DSD)", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "flac",
FilePath: "/music/file.dsf",
BitDepth: 1,
})
Expect(args).ToNot(ContainElement("-sample_fmt"))
})
It("omits -sample_fmt for mp3 even when bit depth >= 16", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "mp3",
FilePath: "/music/file.flac",
BitRate: 256,
BitDepth: 16,
})
Expect(args).ToNot(ContainElement("-sample_fmt"))
})
It("omits -sample_fmt for aac even when bit depth >= 16", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "aac",
FilePath: "/music/file.flac",
BitRate: 256,
BitDepth: 16,
})
Expect(args).ToNot(ContainElement("-sample_fmt"))
})
It("omits -sample_fmt for opus even when bit depth >= 16", func() {
args := buildDynamicArgs(TranscodeOptions{
Format: "opus",
FilePath: "/music/file.flac",
BitRate: 128,
BitDepth: 16,
})
Expect(args).ToNot(ContainElement("-sample_fmt"))
})
})
Describe("bitDepthToSampleFmt", func() {
It("converts 16-bit", func() {
Expect(bitDepthToSampleFmt(16)).To(Equal("s16"))
})
It("converts 24-bit to s32 (FLAC only supports s16/s32)", func() {
Expect(bitDepthToSampleFmt(24)).To(Equal("s32"))
})
It("converts 32-bit", func() {
Expect(bitDepthToSampleFmt(32)).To(Equal("s32"))
})
})
Describe("buildTemplateArgs", func() {
It("injects -ar and -ac into custom template", func() {
args := buildTemplateArgs(TranscodeOptions{
Command: "ffmpeg -i %s -b:a %bk -v 0 -f mp3 -",
FilePath: "/music/file.flac",
BitRate: 192,
SampleRate: 44100,
Channels: 2,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.flac",
"-b:a", "192k", "-v", "0", "-f", "mp3",
"-ar", "44100", "-ac", "2",
"-",
}))
})
It("injects only -ar when channels is 0", func() {
args := buildTemplateArgs(TranscodeOptions{
Command: "ffmpeg -i %s -b:a %bk -v 0 -f mp3 -",
FilePath: "/music/file.flac",
BitRate: 192,
SampleRate: 48000,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.flac",
"-b:a", "192k", "-v", "0", "-f", "mp3",
"-ar", "48000",
"-",
}))
})
It("does not inject anything when sample rate and channels are 0", func() {
args := buildTemplateArgs(TranscodeOptions{
Command: "ffmpeg -i %s -b:a %bk -v 0 -f mp3 -",
FilePath: "/music/file.flac",
BitRate: 192,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.flac",
"-b:a", "192k", "-v", "0", "-f", "mp3",
"-",
}))
})
It("injects -sample_fmt for lossless output format with bit depth", func() {
args := buildTemplateArgs(TranscodeOptions{
Command: "ffmpeg -i %s -v 0 -c:a flac -f flac -",
Format: "flac",
FilePath: "/music/file.dsf",
BitDepth: 24,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.dsf",
"-v", "0", "-c:a", "flac", "-f", "flac",
"-sample_fmt", "s32",
"-",
}))
})
It("does not inject -sample_fmt for lossy output format even with bit depth", func() {
args := buildTemplateArgs(TranscodeOptions{
Command: "ffmpeg -i %s -b:a %bk -v 0 -f mp3 -",
Format: "mp3",
FilePath: "/music/file.flac",
BitRate: 192,
BitDepth: 16,
})
Expect(args).To(Equal([]string{
"ffmpeg", "-i", "/music/file.flac",
"-b:a", "192k", "-v", "0", "-f", "mp3",
"-",
}))
})
})
Describe("injectBeforeOutput", func() {
It("inserts flag before trailing dash", func() {
args := injectBeforeOutput([]string{"ffmpeg", "-i", "file.mp3", "-f", "mp3", "-"}, "-ar", "48000")
Expect(args).To(Equal([]string{"ffmpeg", "-i", "file.mp3", "-f", "mp3", "-ar", "48000", "-"}))
})
It("appends when no trailing dash", func() {
args := injectBeforeOutput([]string{"ffmpeg", "-i", "file.mp3"}, "-ar", "48000")
Expect(args).To(Equal([]string{"ffmpeg", "-i", "file.mp3", "-ar", "48000"}))
})
})
Describe("FFmpeg", func() { Describe("FFmpeg", func() {
Context("when FFmpeg is available", func() { Context("when FFmpeg is available", func() {
var ff FFmpeg var ff FFmpeg
@@ -93,7 +381,12 @@ var _ = Describe("ffmpeg", func() {
command := "ffmpeg -f lavfi -i sine=frequency=1000:duration=0 -f mp3 -" command := "ffmpeg -f lavfi -i sine=frequency=1000:duration=0 -f mp3 -"
// The input file is not used here, but we need to provide a valid path to the Transcode function // The input file is not used here, but we need to provide a valid path to the Transcode function
stream, err := ff.Transcode(ctx, command, "tests/fixtures/test.mp3", 128, 0) stream, err := ff.Transcode(ctx, TranscodeOptions{
Command: command,
Format: "mp3",
FilePath: "tests/fixtures/test.mp3",
BitRate: 128,
})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
defer stream.Close() defer stream.Close()
@@ -115,7 +408,12 @@ var _ = Describe("ffmpeg", func() {
cancel() // Cancel immediately cancel() // Cancel immediately
// This should fail immediately // This should fail immediately
_, err := ff.Transcode(ctx, "ffmpeg -i %s -f mp3 -", "tests/fixtures/test.mp3", 128, 0) _, err := ff.Transcode(ctx, TranscodeOptions{
Command: "ffmpeg -i %s -f mp3 -",
Format: "mp3",
FilePath: "tests/fixtures/test.mp3",
BitRate: 128,
})
Expect(err).To(MatchError(context.Canceled)) Expect(err).To(MatchError(context.Canceled))
}) })
}) })
@@ -142,7 +440,10 @@ var _ = Describe("ffmpeg", func() {
defer cancel() defer cancel()
// Start a process that will run for a while // Start a process that will run for a while
stream, err := ff.Transcode(ctx, longRunningCmd, "tests/fixtures/test.mp3", 0, 0) stream, err := ff.Transcode(ctx, TranscodeOptions{
Command: longRunningCmd,
FilePath: "tests/fixtures/test.mp3",
})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
defer stream.Close() defer stream.Close()

View File

@@ -18,9 +18,20 @@ import (
"github.com/navidrome/navidrome/utils/cache" "github.com/navidrome/navidrome/utils/cache"
) )
// StreamRequest contains all parameters for creating a media stream.
type StreamRequest struct {
ID string
Format string
BitRate int // kbps
SampleRate int
BitDepth int
Channels int
Offset int // seconds
}
type MediaStreamer interface { type MediaStreamer interface {
NewStream(ctx context.Context, id string, reqFormat string, reqBitRate int, offset int) (*Stream, error) NewStream(ctx context.Context, req StreamRequest) (*Stream, error)
DoStream(ctx context.Context, mf *model.MediaFile, reqFormat string, reqBitRate int, reqOffset int) (*Stream, error) DoStream(ctx context.Context, mf *model.MediaFile, req StreamRequest) (*Stream, error)
} }
type TranscodingCache cache.FileCache type TranscodingCache cache.FileCache
@@ -36,44 +47,48 @@ type mediaStreamer struct {
} }
type streamJob struct { type streamJob struct {
ms *mediaStreamer ms *mediaStreamer
mf *model.MediaFile mf *model.MediaFile
filePath string filePath string
format string format string
bitRate int bitRate int
offset int sampleRate int
bitDepth int
channels int
offset int
} }
func (j *streamJob) Key() string { func (j *streamJob) Key() string {
return fmt.Sprintf("%s.%s.%d.%s.%d", j.mf.ID, j.mf.UpdatedAt.Format(time.RFC3339Nano), j.bitRate, j.format, j.offset) return fmt.Sprintf("%s.%s.%d.%d.%d.%d.%s.%d", j.mf.ID, j.mf.UpdatedAt.Format(time.RFC3339Nano), j.bitRate, j.sampleRate, j.bitDepth, j.channels, j.format, j.offset)
} }
func (ms *mediaStreamer) NewStream(ctx context.Context, id string, reqFormat string, reqBitRate int, reqOffset int) (*Stream, error) { func (ms *mediaStreamer) NewStream(ctx context.Context, req StreamRequest) (*Stream, error) {
mf, err := ms.ds.MediaFile(ctx).Get(id) mf, err := ms.ds.MediaFile(ctx).Get(req.ID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return ms.DoStream(ctx, mf, reqFormat, reqBitRate, reqOffset) return ms.DoStream(ctx, mf, req)
} }
func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqFormat string, reqBitRate int, reqOffset int) (*Stream, error) { func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, req StreamRequest) (*Stream, error) {
var format string var format string
var bitRate int var bitRate int
var cached bool var cached bool
defer func() { defer func() {
log.Info(ctx, "Streaming file", "title", mf.Title, "artist", mf.Artist, "format", format, "cached", cached, log.Info(ctx, "Streaming file", "title", mf.Title, "artist", mf.Artist, "format", format, "cached", cached,
"bitRate", bitRate, "user", userName(ctx), "transcoding", format != "raw", "bitRate", bitRate, "sampleRate", req.SampleRate, "bitDepth", req.BitDepth, "channels", req.Channels,
"user", userName(ctx), "transcoding", format != "raw",
"originalFormat", mf.Suffix, "originalBitRate", mf.BitRate) "originalFormat", mf.Suffix, "originalBitRate", mf.BitRate)
}() }()
format, bitRate = selectTranscodingOptions(ctx, ms.ds, mf, reqFormat, reqBitRate) format, bitRate = selectTranscodingOptions(ctx, ms.ds, mf, req.Format, req.BitRate, req.SampleRate)
s := &Stream{ctx: ctx, mf: mf, format: format, bitRate: bitRate} s := &Stream{ctx: ctx, mf: mf, format: format, bitRate: bitRate}
filePath := mf.AbsolutePath() filePath := mf.AbsolutePath()
if format == "raw" { if format == "raw" {
log.Debug(ctx, "Streaming RAW file", "id", mf.ID, "path", filePath, log.Debug(ctx, "Streaming RAW file", "id", mf.ID, "path", filePath,
"requestBitrate", reqBitRate, "requestFormat", reqFormat, "requestOffset", reqOffset, "requestBitrate", req.BitRate, "requestFormat", req.Format, "requestOffset", req.Offset,
"originalBitrate", mf.BitRate, "originalFormat", mf.Suffix, "originalBitrate", mf.BitRate, "originalFormat", mf.Suffix,
"selectedBitrate", bitRate, "selectedFormat", format) "selectedBitrate", bitRate, "selectedFormat", format)
f, err := os.Open(filePath) f, err := os.Open(filePath)
@@ -87,12 +102,15 @@ func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqF
} }
job := &streamJob{ job := &streamJob{
ms: ms, ms: ms,
mf: mf, mf: mf,
filePath: filePath, filePath: filePath,
format: format, format: format,
bitRate: bitRate, bitRate: bitRate,
offset: reqOffset, sampleRate: req.SampleRate,
bitDepth: req.BitDepth,
channels: req.Channels,
offset: req.Offset,
} }
r, err := ms.cache.Get(ctx, job) r, err := ms.cache.Get(ctx, job)
if err != nil { if err != nil {
@@ -105,7 +123,7 @@ func (ms *mediaStreamer) DoStream(ctx context.Context, mf *model.MediaFile, reqF
s.Seeker = r.Seeker s.Seeker = r.Seeker
log.Debug(ctx, "Streaming TRANSCODED file", "id", mf.ID, "path", filePath, log.Debug(ctx, "Streaming TRANSCODED file", "id", mf.ID, "path", filePath,
"requestBitrate", reqBitRate, "requestFormat", reqFormat, "requestOffset", reqOffset, "requestBitrate", req.BitRate, "requestFormat", req.Format, "requestOffset", req.Offset,
"originalBitrate", mf.BitRate, "originalFormat", mf.Suffix, "originalBitrate", mf.BitRate, "originalFormat", mf.Suffix,
"selectedBitrate", bitRate, "selectedFormat", format, "cached", cached, "seekable", s.Seekable()) "selectedBitrate", bitRate, "selectedFormat", format, "cached", cached, "seekable", s.Seekable())
@@ -131,12 +149,13 @@ func (s *Stream) EstimatedContentLength() int {
} }
// TODO This function deserves some love (refactoring) // TODO This function deserves some love (refactoring)
func selectTranscodingOptions(ctx context.Context, ds model.DataStore, mf *model.MediaFile, reqFormat string, reqBitRate int) (format string, bitRate int) { func selectTranscodingOptions(ctx context.Context, ds model.DataStore, mf *model.MediaFile, reqFormat string, reqBitRate int, reqSampleRate int) (format string, bitRate int) {
format = "raw" format = "raw"
if reqFormat == "raw" { if reqFormat == "raw" {
return format, 0 return format, 0
} }
if reqFormat == mf.Suffix && reqBitRate == 0 { needsResample := reqSampleRate > 0 && reqSampleRate < mf.SampleRate
if reqFormat == mf.Suffix && reqBitRate == 0 && !needsResample {
bitRate = mf.BitRate bitRate = mf.BitRate
return format, bitRate return format, bitRate
} }
@@ -175,7 +194,7 @@ func selectTranscodingOptions(ctx context.Context, ds model.DataStore, mf *model
bitRate = t.DefaultBitRate bitRate = t.DefaultBitRate
} }
} }
if format == mf.Suffix && bitRate >= mf.BitRate { if format == mf.Suffix && bitRate >= mf.BitRate && !needsResample {
format = "raw" format = "raw"
bitRate = 0 bitRate = 0
} }
@@ -217,7 +236,16 @@ func NewTranscodingCache() TranscodingCache {
transcodingCtx = request.AddValues(context.Background(), ctx) transcodingCtx = request.AddValues(context.Background(), ctx)
} }
out, err := job.ms.transcoder.Transcode(transcodingCtx, t.Command, job.filePath, job.bitRate, job.offset) out, err := job.ms.transcoder.Transcode(transcodingCtx, ffmpeg.TranscodeOptions{
Command: t.Command,
Format: job.format,
FilePath: job.filePath,
BitRate: job.bitRate,
SampleRate: job.sampleRate,
BitDepth: job.bitDepth,
Channels: job.channels,
Offset: job.offset,
})
if err != nil { if err != nil {
log.Error(ctx, "Error starting transcoder", "id", job.mf.ID, err) log.Error(ctx, "Error starting transcoder", "id", job.mf.ID, err)
return nil, os.ErrInvalid return nil, os.ErrInvalid

View File

@@ -26,42 +26,64 @@ var _ = Describe("MediaStreamer", func() {
It("returns raw if raw is requested", func() { It("returns raw if raw is requested", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, _ := selectTranscodingOptions(ctx, ds, mf, "raw", 0) format, _ := selectTranscodingOptions(ctx, ds, mf, "raw", 0, 0)
Expect(format).To(Equal("raw")) Expect(format).To(Equal("raw"))
}) })
It("returns raw if a transcoder does not exists", func() { It("returns raw if a transcoder does not exists", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, _ := selectTranscodingOptions(ctx, ds, mf, "m4a", 0) format, _ := selectTranscodingOptions(ctx, ds, mf, "m4a", 0, 0)
Expect(format).To(Equal("raw")) Expect(format).To(Equal("raw"))
}) })
It("returns the requested format if a transcoder exists", func() { It("returns the requested format if a transcoder exists", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 0) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 0, 0)
Expect(format).To(Equal("mp3")) Expect(format).To(Equal("mp3"))
Expect(bitRate).To(Equal(160)) // Default Bit Rate Expect(bitRate).To(Equal(160)) // Default Bit Rate
}) })
It("returns raw if requested format is the same as the original and it is not necessary to downsample", func() { It("returns raw if requested format is the same as the original and it is not necessary to downsample", func() {
mf.Suffix = "mp3" mf.Suffix = "mp3"
mf.BitRate = 112 mf.BitRate = 112
format, _ := selectTranscodingOptions(ctx, ds, mf, "mp3", 128) format, _ := selectTranscodingOptions(ctx, ds, mf, "mp3", 128, 0)
Expect(format).To(Equal("raw")) Expect(format).To(Equal("raw"))
}) })
It("returns the requested format if requested BitRate is lower than original", func() { It("returns the requested format if requested BitRate is lower than original", func() {
mf.Suffix = "mp3" mf.Suffix = "mp3"
mf.BitRate = 320 mf.BitRate = 320
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 192) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 192, 0)
Expect(format).To(Equal("mp3")) Expect(format).To(Equal("mp3"))
Expect(bitRate).To(Equal(192)) Expect(bitRate).To(Equal(192))
}) })
It("returns raw if requested format is the same as the original, but requested BitRate is 0", func() { It("returns raw if requested format is the same as the original, but requested BitRate is 0", func() {
mf.Suffix = "mp3" mf.Suffix = "mp3"
mf.BitRate = 320 mf.BitRate = 320
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 0) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 0, 0)
Expect(format).To(Equal("raw")) Expect(format).To(Equal("raw"))
Expect(bitRate).To(Equal(320)) Expect(bitRate).To(Equal(320))
}) })
It("returns the format when same format is requested but with a lower sample rate", func() {
mf.Suffix = "flac"
mf.BitRate = 2118
mf.SampleRate = 96000
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "flac", 0, 48000)
Expect(format).To(Equal("flac"))
Expect(bitRate).To(Equal(0))
})
It("returns raw when same format is requested with same sample rate", func() {
mf.Suffix = "flac"
mf.BitRate = 1000
mf.SampleRate = 48000
format, _ := selectTranscodingOptions(ctx, ds, mf, "flac", 0, 48000)
Expect(format).To(Equal("raw"))
})
It("returns raw when same format is requested with no sample rate constraint", func() {
mf.Suffix = "flac"
mf.BitRate = 1000
mf.SampleRate = 96000
format, _ := selectTranscodingOptions(ctx, ds, mf, "flac", 0, 0)
Expect(format).To(Equal("raw"))
})
Context("Downsampling", func() { Context("Downsampling", func() {
BeforeEach(func() { BeforeEach(func() {
conf.Server.DefaultDownsamplingFormat = "opus" conf.Server.DefaultDownsamplingFormat = "opus"
@@ -69,13 +91,13 @@ var _ = Describe("MediaStreamer", func() {
mf.BitRate = 960 mf.BitRate = 960
}) })
It("returns the DefaultDownsamplingFormat if a maxBitrate is requested but not the format", func() { It("returns the DefaultDownsamplingFormat if a maxBitrate is requested but not the format", func() {
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 128) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 128, 0)
Expect(format).To(Equal("opus")) Expect(format).To(Equal("opus"))
Expect(bitRate).To(Equal(128)) Expect(bitRate).To(Equal(128))
}) })
It("returns raw if maxBitrate is equal or greater than original", func() { It("returns raw if maxBitrate is equal or greater than original", func() {
// This happens with DSub (and maybe other clients?). See https://github.com/navidrome/navidrome/issues/2066 // This happens with DSub (and maybe other clients?). See https://github.com/navidrome/navidrome/issues/2066
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 960) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 960, 0)
Expect(format).To(Equal("raw")) Expect(format).To(Equal("raw"))
Expect(bitRate).To(Equal(0)) Expect(bitRate).To(Equal(0))
}) })
@@ -90,34 +112,34 @@ var _ = Describe("MediaStreamer", func() {
It("returns raw if raw is requested", func() { It("returns raw if raw is requested", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, _ := selectTranscodingOptions(ctx, ds, mf, "raw", 0) format, _ := selectTranscodingOptions(ctx, ds, mf, "raw", 0, 0)
Expect(format).To(Equal("raw")) Expect(format).To(Equal("raw"))
}) })
It("returns configured format/bitrate as default", func() { It("returns configured format/bitrate as default", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 0) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 0, 0)
Expect(format).To(Equal("oga")) Expect(format).To(Equal("oga"))
Expect(bitRate).To(Equal(96)) Expect(bitRate).To(Equal(96))
}) })
It("returns requested format", func() { It("returns requested format", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 0) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 0, 0)
Expect(format).To(Equal("mp3")) Expect(format).To(Equal("mp3"))
Expect(bitRate).To(Equal(160)) // Default Bit Rate Expect(bitRate).To(Equal(160)) // Default Bit Rate
}) })
It("returns requested bitrate", func() { It("returns requested bitrate", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 80) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 80, 0)
Expect(format).To(Equal("oga")) Expect(format).To(Equal("oga"))
Expect(bitRate).To(Equal(80)) Expect(bitRate).To(Equal(80))
}) })
It("returns raw if selected bitrate and format is the same as original", func() { It("returns raw if selected bitrate and format is the same as original", func() {
mf.Suffix = "mp3" mf.Suffix = "mp3"
mf.BitRate = 192 mf.BitRate = 192
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 192) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 192, 0)
Expect(format).To(Equal("raw")) Expect(format).To(Equal("raw"))
Expect(bitRate).To(Equal(0)) Expect(bitRate).To(Equal(0))
}) })
@@ -133,27 +155,27 @@ var _ = Describe("MediaStreamer", func() {
It("returns raw if raw is requested", func() { It("returns raw if raw is requested", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, _ := selectTranscodingOptions(ctx, ds, mf, "raw", 0) format, _ := selectTranscodingOptions(ctx, ds, mf, "raw", 0, 0)
Expect(format).To(Equal("raw")) Expect(format).To(Equal("raw"))
}) })
It("returns configured format/bitrate as default", func() { It("returns configured format/bitrate as default", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 0) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 0, 0)
Expect(format).To(Equal("oga")) Expect(format).To(Equal("oga"))
Expect(bitRate).To(Equal(192)) Expect(bitRate).To(Equal(192))
}) })
It("returns requested format", func() { It("returns requested format", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 0) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "mp3", 0, 0)
Expect(format).To(Equal("mp3")) Expect(format).To(Equal("mp3"))
Expect(bitRate).To(Equal(160)) // Default Bit Rate Expect(bitRate).To(Equal(160)) // Default Bit Rate
}) })
It("returns requested bitrate", func() { It("returns requested bitrate", func() {
mf.Suffix = "flac" mf.Suffix = "flac"
mf.BitRate = 1000 mf.BitRate = 1000
format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 160) format, bitRate := selectTranscodingOptions(ctx, ds, mf, "", 160, 0)
Expect(format).To(Equal("oga")) Expect(format).To(Equal("oga"))
Expect(bitRate).To(Equal(160)) Expect(bitRate).To(Equal(160))
}) })

View File

@@ -39,34 +39,34 @@ var _ = Describe("MediaStreamer", func() {
Context("NewStream", func() { Context("NewStream", func() {
It("returns a seekable stream if format is 'raw'", func() { It("returns a seekable stream if format is 'raw'", func() {
s, err := streamer.NewStream(ctx, "123", "raw", 0, 0) s, err := streamer.NewStream(ctx, core.StreamRequest{ID: "123", Format: "raw"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
Expect(s.Seekable()).To(BeTrue()) Expect(s.Seekable()).To(BeTrue())
}) })
It("returns a seekable stream if maxBitRate is 0", func() { It("returns a seekable stream if maxBitRate is 0", func() {
s, err := streamer.NewStream(ctx, "123", "mp3", 0, 0) s, err := streamer.NewStream(ctx, core.StreamRequest{ID: "123", Format: "mp3"})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
Expect(s.Seekable()).To(BeTrue()) Expect(s.Seekable()).To(BeTrue())
}) })
It("returns a seekable stream if maxBitRate is higher than file bitRate", func() { It("returns a seekable stream if maxBitRate is higher than file bitRate", func() {
s, err := streamer.NewStream(ctx, "123", "mp3", 320, 0) s, err := streamer.NewStream(ctx, core.StreamRequest{ID: "123", Format: "mp3", BitRate: 320})
Expect(err).ToNot(HaveOccurred()) Expect(err).ToNot(HaveOccurred())
Expect(s.Seekable()).To(BeTrue()) Expect(s.Seekable()).To(BeTrue())
}) })
It("returns a NON seekable stream if transcode is required", func() { It("returns a NON seekable stream if transcode is required", func() {
s, err := streamer.NewStream(ctx, "123", "mp3", 64, 0) s, err := streamer.NewStream(ctx, core.StreamRequest{ID: "123", Format: "mp3", BitRate: 64})
Expect(err).To(BeNil()) Expect(err).To(BeNil())
Expect(s.Seekable()).To(BeFalse()) Expect(s.Seekable()).To(BeFalse())
Expect(s.Duration()).To(Equal(float32(257.0))) Expect(s.Duration()).To(Equal(float32(257.0)))
}) })
It("returns a seekable stream if the file is complete in the cache", func() { It("returns a seekable stream if the file is complete in the cache", func() {
s, err := streamer.NewStream(ctx, "123", "mp3", 32, 0) s, err := streamer.NewStream(ctx, core.StreamRequest{ID: "123", Format: "mp3", BitRate: 32})
Expect(err).To(BeNil()) Expect(err).To(BeNil())
_, _ = io.ReadAll(s) _, _ = io.ReadAll(s)
_ = s.Close() _ = s.Close()
Eventually(func() bool { return ffmpeg.IsClosed() }, "3s").Should(BeTrue()) Eventually(func() bool { return ffmpeg.IsClosed() }, "3s").Should(BeTrue())
s, err = streamer.NewStream(ctx, "123", "mp3", 32, 0) s, err = streamer.NewStream(ctx, core.StreamRequest{ID: "123", Format: "mp3", BitRate: 32})
Expect(err).To(BeNil()) Expect(err).To(BeNil())
Expect(s.Seekable()).To(BeTrue()) Expect(s.Seekable()).To(BeTrue())
}) })

View File

@@ -220,7 +220,7 @@ var staticData = sync.OnceValue(func() insights.Data {
data.Config.ScanWatcherWait = uint64(math.Trunc(conf.Server.Scanner.WatcherWait.Seconds())) data.Config.ScanWatcherWait = uint64(math.Trunc(conf.Server.Scanner.WatcherWait.Seconds()))
data.Config.ScanOnStartup = conf.Server.Scanner.ScanOnStartup data.Config.ScanOnStartup = conf.Server.Scanner.ScanOnStartup
data.Config.ReverseProxyConfigured = conf.Server.ExtAuth.TrustedSources != "" data.Config.ReverseProxyConfigured = conf.Server.ExtAuth.TrustedSources != ""
data.Config.HasCustomPID = conf.Server.PID.Track != consts.DefaultTrackPID || conf.Server.PID.Album != consts.DefaultAlbumPID data.Config.HasCustomPID = conf.Server.PID.Track != "" || conf.Server.PID.Album != ""
data.Config.HasCustomTags = len(conf.Server.Tags) > 0 data.Config.HasCustomTags = len(conf.Server.Tags) > 0
return data return data

87
core/transcode/aliases.go Normal file
View File

@@ -0,0 +1,87 @@
package transcode
import (
"slices"
"strings"
)
// containerAliasGroups maps every known container alias to the canonical name
// of its group (the first entry of each alias set). Containers in the same
// group are treated as interchangeable when matching client profiles.
var containerAliasGroups = func() map[string]string {
	aliasSets := [][]string{
		{"aac", "adts", "m4a", "mp4", "m4b", "m4p"},
		{"mpeg", "mp3", "mp2"},
		{"ogg", "oga"},
		{"aif", "aiff"},
		{"asf", "wma"},
		{"mpc", "mpp"},
		{"wv"},
	}
	result := map[string]string{}
	for _, set := range aliasSets {
		canonical := set[0] // first entry names the group
		for _, alias := range set {
			result[alias] = canonical
		}
	}
	return result
}()
// codecAliasGroups maps every known codec alias to the canonical name of its
// group (the first entry of each alias set). Codecs within the same group are
// considered equivalent when matching client profiles.
var codecAliasGroups = func() map[string]string {
	aliasSets := [][]string{
		{"aac", "adts"},
		{"ac3", "ac-3"},
		{"eac3", "e-ac3", "e-ac-3", "eac-3"},
		{"mpc7", "musepack7"},
		{"mpc8", "musepack8"},
		{"wma1", "wmav1"},
		{"wma2", "wmav2"},
		{"wmalossless", "wma9lossless"},
		{"wmapro", "wma9pro"},
		{"shn", "shorten"},
		{"mp4als", "als"},
	}
	result := map[string]string{}
	for _, set := range aliasSets {
		canonical := set[0] // first entry names the group
		for _, alias := range set {
			result[alias] = canonical
		}
	}
	return result
}()
// matchesWithAliases reports whether value matches any entry in candidates.
// Matching is case-insensitive and also succeeds when value and a candidate
// resolve to the same canonical name in the aliases map.
func matchesWithAliases(value string, candidates []string, aliases map[string]string) bool {
	lowered := strings.ToLower(value)
	canonical := aliases[lowered]
	for _, candidate := range candidates {
		candidate = strings.ToLower(candidate)
		if candidate == lowered {
			return true // exact (case-insensitive) match
		}
		if canonical != "" && aliases[candidate] == canonical {
			return true // both names belong to the same alias group
		}
	}
	return false
}
// matchesContainer checks if a file suffix matches any of the container names,
// including common aliases (e.g. "m4a" is in the same alias group as "mp4").
func matchesContainer(suffix string, containers []string) bool {
	return matchesWithAliases(suffix, containers, containerAliasGroups)
}
// matchesCodec checks if a codec matches any of the codec names,
// including common aliases (e.g. "adts" is in the same alias group as "aac").
func matchesCodec(codec string, codecs []string) bool {
	return matchesWithAliases(codec, codecs, codecAliasGroups)
}
// containsIgnoreCase reports whether s appears in slice, using a
// case-insensitive (Unicode case-folding) comparison.
func containsIgnoreCase(slice []string, s string) bool {
	for _, entry := range slice {
		if strings.EqualFold(entry, s) {
			return true
		}
	}
	return false
}

59
core/transcode/codec.go Normal file
View File

@@ -0,0 +1,59 @@
package transcode
import "strings"
// isLosslessFormat returns true if the format is a lossless audio codec/format.
// Note: core/ffmpeg has a separate isLosslessOutputFormat that covers only
// formats ffmpeg can produce as output (a smaller set). This function covers
// all known lossless formats for transcoding decision purposes.
func isLosslessFormat(format string) bool {
	known := []string{"flac", "alac", "wav", "aiff", "ape", "wv", "tta", "tak", "shn", "dsd", "pcm"}
	normalized := strings.ToLower(format)
	for _, name := range known {
		if normalized == name {
			return true
		}
	}
	return false
}
// normalizeSourceSampleRate adjusts the source sample rate for codecs that
// store it differently than PCM. Currently handles DSD (÷8):
// DSD64=2822400→352800, DSD128=5644800→705600, etc.
// For any other codec (or a non-positive rate) the value is returned unchanged.
func normalizeSourceSampleRate(sampleRate int, codec string) int {
	if sampleRate <= 0 || !strings.EqualFold(codec, "dsd") {
		return sampleRate
	}
	// DSD advertises its 1-bit rate; divide by 8 for the equivalent PCM rate.
	return sampleRate / 8
}
// normalizeSourceBitDepth adjusts the source bit depth for codecs that use
// non-standard bit depths. Currently handles DSD (1-bit → 24-bit PCM, which is
// what ffmpeg produces). For other codecs, returns the depth unchanged.
func normalizeSourceBitDepth(bitDepth int, codec string) int {
	isOneBitDSD := bitDepth == 1 && strings.EqualFold(codec, "dsd")
	if isOneBitDSD {
		// ffmpeg renders 1-bit DSD streams as 24-bit PCM.
		return 24
	}
	return bitDepth
}
// codecFixedOutputSampleRate returns the mandatory output sample rate for
// codecs that always resample regardless of input (e.g., Opus always outputs
// 48000Hz). Returns 0 if the codec has no fixed output rate.
func codecFixedOutputSampleRate(codec string) int {
	// A missing map entry yields the zero value, i.e. "no fixed rate".
	fixedRates := map[string]int{
		"opus": 48000,
	}
	return fixedRates[strings.ToLower(codec)]
}
// codecMaxSampleRate returns the hard maximum output sample rate for a codec.
// Returns 0 if the codec has no hard limit.
func codecMaxSampleRate(codec string) int {
	// A missing map entry yields the zero value, i.e. "no hard limit".
	limits := map[string]int{
		"mp3": 48000,
		"aac": 96000,
	}
	return limits[strings.ToLower(codec)]
}

View File

@@ -0,0 +1,206 @@
package transcode
import (
"strconv"
"strings"
"github.com/navidrome/navidrome/model"
)
// adjustResult represents the outcome of applying a limitation to a transcoded
// stream value. Callers use it to decide whether a transcoding profile can be
// used as-is, used with adjusted parameters, or must be rejected.
type adjustResult int
const (
	adjustNone adjustResult = iota // Value already satisfies the limitation
	adjustAdjusted // Value was changed (via setter) to fit the limitation
	adjustCannotFit // Cannot satisfy the limitation (reject this profile)
)
// checkLimitations checks codec profile limitations against source media.
// Returns "" if all limitations pass, or a typed reason string for the first
// failure of a Required limitation. Non-required failures are ignored.
func checkLimitations(mf *model.MediaFile, sourceBitrate int, limitations []Limitation) string {
	for _, lim := range limitations {
		var passed bool
		var failReason string
		switch lim.Name {
		case LimitationAudioChannels:
			passed = checkIntLimitation(mf.Channels, lim.Comparison, lim.Values)
			failReason = "audio channels not supported"
		case LimitationAudioSamplerate:
			passed = checkIntLimitation(mf.SampleRate, lim.Comparison, lim.Values)
			failReason = "audio samplerate not supported"
		case LimitationAudioBitrate:
			passed = checkIntLimitation(sourceBitrate, lim.Comparison, lim.Values)
			failReason = "audio bitrate not supported"
		case LimitationAudioBitdepth:
			passed = checkIntLimitation(mf.BitDepth, lim.Comparison, lim.Values)
			failReason = "audio bitdepth not supported"
		case LimitationAudioProfile:
			// TODO: populate source profile when MediaFile has audio profile info
			passed = checkStringLimitation("", lim.Comparison, lim.Values)
			failReason = "audio profile not supported"
		default:
			// Unknown limitation names are skipped rather than rejected.
			continue
		}
		if lim.Required && !passed {
			return failReason
		}
	}
	return ""
}
// applyLimitation adjusts a transcoded stream parameter to satisfy the limitation.
// Returns the adjustment result: adjustNone if the value already fits (or the
// limitation is not applicable), adjustAdjusted if a StreamDetails field was
// modified via the setter, or adjustCannotFit if the profile must be rejected.
func applyLimitation(sourceBitrate int, lim *Limitation, ts *StreamDetails) adjustResult {
	switch lim.Name {
	case LimitationAudioChannels:
		return applyIntLimitation(lim.Comparison, lim.Values, ts.Channels, func(v int) { ts.Channels = v })
	case LimitationAudioBitrate:
		// A zero target bitrate means "unset"; compare against the source
		// bitrate instead so the limitation operates on a meaningful value.
		current := ts.Bitrate
		if current == 0 {
			current = sourceBitrate
		}
		return applyIntLimitation(lim.Comparison, lim.Values, current, func(v int) { ts.Bitrate = v })
	case LimitationAudioSamplerate:
		return applyIntLimitation(lim.Comparison, lim.Values, ts.SampleRate, func(v int) { ts.SampleRate = v })
	case LimitationAudioBitdepth:
		// Only constrain bit depth when the target stream declares one
		// (BitDepth > 0); otherwise fall through and report no adjustment.
		if ts.BitDepth > 0 {
			return applyIntLimitation(lim.Comparison, lim.Values, ts.BitDepth, func(v int) { ts.BitDepth = v })
		}
	case LimitationAudioProfile:
		// TODO: implement when audio profile data is available
	}
	return adjustNone
}
// applyIntLimitation applies a limitation comparison to an integer value.
// When the value must change to satisfy the limitation, the setter is invoked
// with the new value and adjustAdjusted is returned. Values are never raised
// (no upscaling): limitations that would require it yield adjustCannotFit.
// Unparseable limits and unknown comparisons are treated as "no constraint".
func applyIntLimitation(comparison string, values []string, current int, setter func(int)) adjustResult {
	if len(values) == 0 {
		return adjustNone
	}
	switch comparison {
	case ComparisonLessThanEqual:
		// Cap the value at the limit when it is too high.
		if limit, ok := parseInt(values[0]); ok && current > limit {
			setter(limit)
			return adjustAdjusted
		}
		return adjustNone
	case ComparisonGreaterThanEqual:
		// Cannot upscale a value to reach the required minimum.
		if limit, ok := parseInt(values[0]); ok && current < limit {
			return adjustCannotFit
		}
		return adjustNone
	case ComparisonEquals:
		// Accept the value if it is one of the allowed values; otherwise fall
		// back to the largest allowed value below it (never upscale).
		bestBelow, haveBest := 0, false
		for _, raw := range values {
			limit, ok := parseInt(raw)
			if !ok {
				continue
			}
			if limit == current {
				return adjustNone
			}
			if limit < current && (!haveBest || limit > bestBelow) {
				bestBelow, haveBest = limit, true
			}
		}
		if haveBest {
			setter(bestBelow)
			return adjustAdjusted
		}
		return adjustCannotFit
	case ComparisonNotEquals:
		// A forbidden value cannot be adjusted to a known-good one.
		for _, raw := range values {
			if limit, ok := parseInt(raw); ok && limit == current {
				return adjustCannotFit
			}
		}
		return adjustNone
	}
	return adjustNone
}
// checkIntLimitation reports whether an integer value satisfies the given
// comparison against the limitation values. Empty value lists, unparseable
// limits, and unknown comparisons all pass (treated as unconstrained).
func checkIntLimitation(value int, comparison string, values []string) bool {
	if len(values) == 0 {
		return true
	}
	switch comparison {
	case ComparisonLessThanEqual:
		if limit, ok := parseInt(values[0]); ok {
			return value <= limit
		}
	case ComparisonGreaterThanEqual:
		if limit, ok := parseInt(values[0]); ok {
			return value >= limit
		}
	case ComparisonEquals:
		// Passes only when the value matches one of the allowed values.
		for _, raw := range values {
			if limit, ok := parseInt(raw); ok && value == limit {
				return true
			}
		}
		return false
	case ComparisonNotEquals:
		// Fails when the value matches any forbidden value.
		for _, raw := range values {
			if limit, ok := parseInt(raw); ok && value == limit {
				return false
			}
		}
	}
	return true
}
// checkStringLimitation checks a string value against a limitation using
// case-insensitive comparison. Only Equals and NotEquals comparisons are
// meaningful for strings; LessThanEqual/GreaterThanEqual (and any other
// comparison) are not applicable and always pass.
func checkStringLimitation(value string, comparison string, values []string) bool {
	if comparison != ComparisonEquals && comparison != ComparisonNotEquals {
		return true
	}
	for _, candidate := range values {
		if strings.EqualFold(value, candidate) {
			// Found in the list: pass for Equals, fail for NotEquals.
			return comparison == ComparisonEquals
		}
	}
	// Not found in the list: fail for Equals, pass for NotEquals.
	return comparison == ComparisonNotEquals
}
// parseInt parses a non-negative integer from s. It reports (0, false) for
// malformed input or negative numbers, so callers can treat bad limitation
// values as "no constraint".
func parseInt(s string) (int, bool) {
	if n, err := strconv.Atoi(s); err == nil && n >= 0 {
		return n, true
	}
	return 0, false
}

400
core/transcode/transcode.go Normal file
View File

@@ -0,0 +1,400 @@
package transcode
import (
"context"
"errors"
"fmt"
"strings"
"time"
"github.com/navidrome/navidrome/core/auth"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
)
const (
	// tokenTTL is the validity window for transcode params tokens.
	tokenTTL = 12 * time.Hour
	// defaultBitrate is the fallback target bitrate in kbps.
	defaultBitrate = 256 // kbps
	// JWT claim keys for transcode params tokens. Kept short to minimize
	// token size.
	claimMediaID = "mid" // Media file ID
	claimDirectPlay = "dp" // Direct play flag (bool)
	claimUpdatedAt = "ua" // Source file updated-at (Unix seconds)
	claimFormat = "fmt" // Target transcoding format
	claimBitrate = "br" // Target bitrate (kbps)
	claimChannels = "ch" // Target channels
	claimSampleRate = "sr" // Target sample rate (Hz)
	claimBitDepth = "bd" // Target bit depth
)
// NewDecider creates the default Decider implementation, backed by the given
// DataStore.
func NewDecider(ds model.DataStore) Decider {
	return &deciderService{
		ds: ds,
	}
}
// deciderService implements Decider using media metadata from the DataStore.
type deciderService struct {
	ds model.DataStore
}
// MakeDecision evaluates the media file's stream properties against the
// client's declared capabilities and decides between direct play,
// transcoding, or failure (ErrorReason set, no error returned).
// All bitrate values are in kbps. The returned Decision always carries the
// source stream details and the source file's UpdatedAt, which is later
// embedded in tokens for staleness checks.
func (s *deciderService) MakeDecision(ctx context.Context, mf *model.MediaFile, clientInfo *ClientInfo) (*Decision, error) {
	decision := &Decision{
		MediaID:         mf.ID,
		SourceUpdatedAt: mf.UpdatedAt,
	}
	sourceBitrate := mf.BitRate // kbps
	log.Trace(ctx, "Making transcode decision", "mediaID", mf.ID, "container", mf.Suffix,
		"codec", mf.AudioCodec(), "bitrate", sourceBitrate, "channels", mf.Channels,
		"sampleRate", mf.SampleRate, "lossless", mf.IsLossless(), "client", clientInfo.Name)
	// Build source stream details
	decision.SourceStream = buildSourceStream(mf)
	// Check global bitrate constraint first: if the source exceeds the
	// client's overall cap, direct play is impossible regardless of profiles.
	if clientInfo.MaxAudioBitrate > 0 && sourceBitrate > clientInfo.MaxAudioBitrate {
		log.Trace(ctx, "Global bitrate constraint exceeded, skipping direct play",
			"sourceBitrate", sourceBitrate, "maxAudioBitrate", clientInfo.MaxAudioBitrate)
		decision.TranscodeReasons = append(decision.TranscodeReasons, "audio bitrate not supported")
		// Skip direct play profiles entirely — global constraint fails
	} else {
		// Try direct play profiles, collecting reasons for each failure
		for _, profile := range clientInfo.DirectPlayProfiles {
			if reason := s.checkDirectPlayProfile(mf, sourceBitrate, &profile, clientInfo); reason == "" {
				decision.CanDirectPlay = true
				decision.TranscodeReasons = nil // Clear any previously collected reasons
				break
			} else {
				decision.TranscodeReasons = append(decision.TranscodeReasons, reason)
			}
		}
	}
	// If direct play is possible, we're done
	if decision.CanDirectPlay {
		log.Debug(ctx, "Transcode decision: direct play", "mediaID", mf.ID, "container", mf.Suffix, "codec", mf.AudioCodec())
		return decision, nil
	}
	// Try transcoding profiles (in order of preference); the first profile
	// that yields a valid stream wins.
	for _, profile := range clientInfo.TranscodingProfiles {
		if ts, transcodeFormat := s.computeTranscodedStream(ctx, mf, sourceBitrate, &profile, clientInfo); ts != nil {
			decision.CanTranscode = true
			decision.TargetFormat = transcodeFormat
			decision.TargetBitrate = ts.Bitrate
			decision.TargetChannels = ts.Channels
			decision.TargetSampleRate = ts.SampleRate
			decision.TargetBitDepth = ts.BitDepth
			decision.TranscodeStream = ts
			break
		}
	}
	if decision.CanTranscode {
		log.Debug(ctx, "Transcode decision: transcode", "mediaID", mf.ID,
			"targetFormat", decision.TargetFormat, "targetBitrate", decision.TargetBitrate,
			"targetChannels", decision.TargetChannels, "reasons", decision.TranscodeReasons)
	}
	// If neither direct play nor transcode is possible
	if !decision.CanDirectPlay && !decision.CanTranscode {
		decision.ErrorReason = "no compatible playback profile found"
		log.Warn(ctx, "Transcode decision: no compatible profile", "mediaID", mf.ID,
			"container", mf.Suffix, "codec", mf.AudioCodec(), "reasons", decision.TranscodeReasons)
	}
	return decision, nil
}
// buildSourceStream captures the source file's audio stream properties into
// a StreamDetails value (bitrate in kbps).
func buildSourceStream(mf *model.MediaFile) StreamDetails {
	details := StreamDetails{
		Container:  mf.Suffix,
		Codec:      mf.AudioCodec(),
		IsLossless: mf.IsLossless(),
	}
	details.Bitrate = mf.BitRate
	details.SampleRate = mf.SampleRate
	details.BitDepth = mf.BitDepth
	details.Channels = mf.Channels
	details.Duration = mf.Duration
	details.Size = mf.Size
	return details
}
// checkDirectPlayProfile returns "" if the profile matches (direct play OK),
// or a typed reason string if it doesn't match. An empty list for any
// dimension (Protocols, Containers, AudioCodecs) means "no restriction"
// for that dimension.
func (s *deciderService) checkDirectPlayProfile(mf *model.MediaFile, sourceBitrate int, profile *DirectPlayProfile, clientInfo *ClientInfo) string {
	// Check protocol (only http for now)
	if len(profile.Protocols) > 0 && !containsIgnoreCase(profile.Protocols, ProtocolHTTP) {
		return "protocol not supported"
	}
	// Check container
	if len(profile.Containers) > 0 && !matchesContainer(mf.Suffix, profile.Containers) {
		return "container not supported"
	}
	// Check codec
	if len(profile.AudioCodecs) > 0 && !matchesCodec(mf.AudioCodec(), profile.AudioCodecs) {
		return "audio codec not supported"
	}
	// Check channels
	if profile.MaxAudioChannels > 0 && mf.Channels > profile.MaxAudioChannels {
		return "audio channels not supported"
	}
	// Check codec-specific limitations declared for the SOURCE codec
	// (checkLimitations is defined elsewhere in this package).
	for _, codecProfile := range clientInfo.CodecProfiles {
		if strings.EqualFold(codecProfile.Type, CodecProfileTypeAudio) && matchesCodec(mf.AudioCodec(), []string{codecProfile.Name}) {
			if reason := checkLimitations(mf, sourceBitrate, codecProfile.Limitations); reason != "" {
				return reason
			}
		}
	}
	return ""
}
// computeTranscodedStream attempts to build a valid transcoded stream for the given profile.
// Returns the stream details and the internal transcoding format (which may differ from the
// response container when a codec fallback occurs, e.g., "mp4"→"aac").
// Returns nil, "" if the profile cannot produce a valid output.
//
// The steps are order-dependent: format resolution, lossy→lossless rejection,
// codec-intrinsic sample rate adjustments, bitrate computation, channel cap,
// and finally codec profile limitations on the TARGET codec.
func (s *deciderService) computeTranscodedStream(ctx context.Context, mf *model.MediaFile, sourceBitrate int, profile *Profile, clientInfo *ClientInfo) (*StreamDetails, string) {
	// Check protocol (only http for now)
	if profile.Protocol != "" && !strings.EqualFold(profile.Protocol, ProtocolHTTP) {
		log.Trace(ctx, "Skipping transcoding profile: unsupported protocol", "protocol", profile.Protocol)
		return nil, ""
	}
	responseContainer, targetFormat := s.resolveTargetFormat(ctx, profile)
	if targetFormat == "" {
		return nil, ""
	}
	targetIsLossless := isLosslessFormat(targetFormat)
	// Reject lossy to lossless conversion
	if !mf.IsLossless() && targetIsLossless {
		log.Trace(ctx, "Skipping transcoding profile: lossy to lossless not allowed", "targetFormat", targetFormat)
		return nil, ""
	}
	// Start from the source's properties, normalized for the target codec
	// (normalizeSourceSampleRate/normalizeSourceBitDepth are defined elsewhere).
	ts := &StreamDetails{
		Container:  responseContainer,
		Codec:      strings.ToLower(profile.AudioCodec),
		SampleRate: normalizeSourceSampleRate(mf.SampleRate, mf.AudioCodec()),
		Channels:   mf.Channels,
		BitDepth:   normalizeSourceBitDepth(mf.BitDepth, mf.AudioCodec()),
		IsLossless: targetIsLossless,
	}
	// When the profile declares no explicit codec, the format doubles as the codec.
	if ts.Codec == "" {
		ts.Codec = targetFormat
	}
	// Apply codec-intrinsic sample rate adjustments before codec profile limitations
	if fixedRate := codecFixedOutputSampleRate(ts.Codec); fixedRate > 0 {
		ts.SampleRate = fixedRate
	}
	if maxRate := codecMaxSampleRate(ts.Codec); maxRate > 0 && ts.SampleRate > maxRate {
		ts.SampleRate = maxRate
	}
	// Determine target bitrate (all in kbps)
	if ok := s.computeBitrate(ctx, mf, sourceBitrate, targetFormat, targetIsLossless, clientInfo, ts); !ok {
		return nil, ""
	}
	// Apply MaxAudioChannels from the transcoding profile
	if profile.MaxAudioChannels > 0 && mf.Channels > profile.MaxAudioChannels {
		ts.Channels = profile.MaxAudioChannels
	}
	// Apply codec profile limitations to the TARGET codec
	if ok := s.applyCodecLimitations(ctx, sourceBitrate, targetFormat, targetIsLossless, clientInfo, ts); !ok {
		return nil, ""
	}
	return ts, targetFormat
}
// resolveTargetFormat determines the response container and internal target format
// by looking up transcoding configs. Returns ("", "") if no config found.
// The response container keeps the profile's requested container even when the
// internal format falls back to the audioCodec (e.g. "mp4" container → "aac" format).
func (s *deciderService) resolveTargetFormat(ctx context.Context, profile *Profile) (responseContainer, targetFormat string) {
	responseContainer = strings.ToLower(profile.Container)
	targetFormat = responseContainer
	// Profile with only a codec and no container: use the codec for both.
	if targetFormat == "" {
		targetFormat = strings.ToLower(profile.AudioCodec)
		responseContainer = targetFormat
	}
	// Try the container first, then fall back to the audioCodec (e.g. "ogg" → "opus", "mp4" → "aac").
	_, err := s.ds.Transcoding(ctx).FindByFormat(targetFormat)
	if errors.Is(err, model.ErrNotFound) && profile.AudioCodec != "" && !strings.EqualFold(targetFormat, profile.AudioCodec) {
		codec := strings.ToLower(profile.AudioCodec)
		log.Trace(ctx, "No transcoding config for container, trying audioCodec", "container", targetFormat, "audioCodec", codec)
		_, err = s.ds.Transcoding(ctx).FindByFormat(codec)
		if err == nil {
			targetFormat = codec
		}
	}
	// err here reflects the last lookup attempted (container or codec fallback).
	if err != nil {
		if !errors.Is(err, model.ErrNotFound) {
			log.Error(ctx, "Error looking up transcoding config", "format", targetFormat, err)
		} else {
			log.Trace(ctx, "Skipping transcoding profile: no transcoding config", "targetFormat", targetFormat)
		}
		return "", ""
	}
	return responseContainer, targetFormat
}
// computeBitrate determines the target bitrate (kbps) for the transcoded
// stream, writing it into ts. Returns false if the profile should be
// rejected (lossless target over the client's global bitrate cap).
func (s *deciderService) computeBitrate(ctx context.Context, mf *model.MediaFile, sourceBitrate int, targetFormat string, targetIsLossless bool, clientInfo *ClientInfo, ts *StreamDetails) bool {
	switch {
	case !mf.IsLossless():
		// Lossy source: keep the source's bitrate as the transcode target.
		ts.Bitrate = sourceBitrate
	case targetIsLossless:
		// Lossless -> lossless: bitrate is not adjustable, so reject the
		// profile outright when the source exceeds the client's global cap.
		if clientInfo.MaxAudioBitrate > 0 && sourceBitrate > clientInfo.MaxAudioBitrate {
			log.Trace(ctx, "Skipping transcoding profile: lossless target exceeds bitrate limit",
				"targetFormat", targetFormat, "sourceBitrate", sourceBitrate, "maxAudioBitrate", clientInfo.MaxAudioBitrate)
			return false
		}
	default:
		// Lossless -> lossy: use the client's transcoding cap when declared,
		// otherwise fall back to our default.
		if br := clientInfo.MaxTranscodingAudioBitrate; br > 0 {
			ts.Bitrate = br
		} else {
			ts.Bitrate = defaultBitrate
		}
	}
	// Apply maxAudioBitrate as final cap
	if clientInfo.MaxAudioBitrate > 0 && ts.Bitrate > 0 && ts.Bitrate > clientInfo.MaxAudioBitrate {
		ts.Bitrate = clientInfo.MaxAudioBitrate
	}
	return true
}
// applyCodecLimitations applies codec profile limitations to the transcoded stream.
// Returns false if the profile should be rejected.
// applyLimitation (defined elsewhere in this package) presumably adjusts ts
// in place when possible and reports the outcome via the adjust* results —
// confirm against its definition.
func (s *deciderService) applyCodecLimitations(ctx context.Context, sourceBitrate int, targetFormat string, targetIsLossless bool, clientInfo *ClientInfo, ts *StreamDetails) bool {
	targetCodec := ts.Codec
	for _, codecProfile := range clientInfo.CodecProfiles {
		if !strings.EqualFold(codecProfile.Type, CodecProfileTypeAudio) {
			continue
		}
		if !matchesCodec(targetCodec, []string{codecProfile.Name}) {
			continue
		}
		for _, lim := range codecProfile.Limitations {
			result := applyLimitation(sourceBitrate, &lim, ts)
			// A lossless target has no adjustable bitrate, so an "adjusted"
			// bitrate limitation means the profile cannot be honored.
			if strings.EqualFold(lim.Name, LimitationAudioBitrate) && targetIsLossless && result == adjustAdjusted {
				log.Trace(ctx, "Skipping transcoding profile: cannot adjust bitrate for lossless target",
					"targetFormat", targetFormat, "codec", targetCodec, "limitation", lim.Name)
				return false
			}
			if result == adjustCannotFit {
				log.Trace(ctx, "Skipping transcoding profile: codec limitation cannot be satisfied",
					"targetFormat", targetFormat, "codec", targetCodec, "limitation", lim.Name,
					"comparison", lim.Comparison, "values", lim.Values)
				return false
			}
		}
	}
	return true
}
// CreateTranscodeParams serializes the decision into a signed, expiring
// public token. Transcode target claims are only embedded when the decision
// allows transcoding; direct-play tokens carry just the media ID, flag, and
// source timestamp.
func (s *deciderService) CreateTranscodeParams(decision *Decision) (string, error) {
	exp := time.Now().Add(tokenTTL)
	claims := map[string]any{
		claimMediaID:    decision.MediaID,
		claimDirectPlay: decision.CanDirectPlay,
		// Truncate to whole seconds so the staleness comparison in
		// ValidateTranscodeParams (which truncates the same way) matches.
		claimUpdatedAt: decision.SourceUpdatedAt.Truncate(time.Second).Unix(),
	}
	if decision.CanTranscode && decision.TargetFormat != "" {
		claims[claimFormat] = decision.TargetFormat
		claims[claimBitrate] = decision.TargetBitrate
		// Zero-valued targets are omitted; ParseTranscodeParams treats them as absent.
		if decision.TargetChannels > 0 {
			claims[claimChannels] = decision.TargetChannels
		}
		if decision.TargetSampleRate > 0 {
			claims[claimSampleRate] = decision.TargetSampleRate
		}
		if decision.TargetBitDepth > 0 {
			claims[claimBitDepth] = decision.TargetBitDepth
		}
	}
	return auth.CreateExpiringPublicToken(exp, claims)
}
// ParseTranscodeParams validates the token's signature/expiry and extracts
// the transcode parameters from its claims. Missing required claims are
// reported wrapped in ErrTokenInvalid.
// Note: numeric claims arrive as float64 (JSON number decoding), hence the
// float64 type assertions below.
func (s *deciderService) ParseTranscodeParams(token string) (*Params, error) {
	claims, err := auth.Validate(token)
	if err != nil {
		return nil, err
	}
	params := &Params{}
	// Required claims
	mid, ok := claims[claimMediaID].(string)
	if !ok || mid == "" {
		return nil, fmt.Errorf("%w: invalid transcode token: missing media ID", ErrTokenInvalid)
	}
	params.MediaID = mid
	dp, ok := claims[claimDirectPlay].(bool)
	if !ok {
		return nil, fmt.Errorf("%w: invalid transcode token: missing direct play flag", ErrTokenInvalid)
	}
	params.DirectPlay = dp
	// Optional claims (legitimately absent for direct-play tokens)
	if f, ok := claims[claimFormat].(string); ok {
		params.TargetFormat = f
	}
	if br, ok := claims[claimBitrate].(float64); ok {
		params.TargetBitrate = int(br)
	}
	if ch, ok := claims[claimChannels].(float64); ok {
		params.TargetChannels = int(ch)
	}
	if sr, ok := claims[claimSampleRate].(float64); ok {
		params.TargetSampleRate = int(sr)
	}
	if bd, ok := claims[claimBitDepth].(float64); ok {
		params.TargetBitDepth = int(bd)
	}
	// Required: source timestamp, used for staleness detection downstream.
	ua, ok := claims[claimUpdatedAt].(float64)
	if !ok {
		return nil, fmt.Errorf("%w: invalid transcode token: missing source timestamp", ErrTokenInvalid)
	}
	params.SourceUpdatedAt = time.Unix(int64(ua), 0)
	return params, nil
}
// ValidateTranscodeParams parses the token, verifies it was issued for
// mediaID, loads the media file, and rejects tokens issued for an older
// version of the file (ErrTokenStale). Returns the parsed params together
// with the freshly loaded media file.
func (s *deciderService) ValidateTranscodeParams(ctx context.Context, token string, mediaID string) (*Params, *model.MediaFile, error) {
	params, err := s.ParseTranscodeParams(token)
	if err != nil {
		// Join so callers can match ErrTokenInvalid regardless of the
		// underlying parse/validation failure.
		return nil, nil, errors.Join(ErrTokenInvalid, err)
	}
	if params.MediaID != mediaID {
		return nil, nil, fmt.Errorf("%w: token mediaID %q does not match %q", ErrTokenInvalid, params.MediaID, mediaID)
	}
	mf, err := s.ds.MediaFile(ctx).Get(mediaID)
	if err != nil {
		if errors.Is(err, model.ErrNotFound) {
			return nil, nil, ErrMediaNotFound
		}
		return nil, nil, err
	}
	// Truncate to seconds to mirror the precision used when the token was
	// created (see CreateTranscodeParams).
	if !mf.UpdatedAt.Truncate(time.Second).Equal(params.SourceUpdatedAt) {
		log.Info(ctx, "Transcode token is stale", "mediaID", mediaID,
			"tokenUpdatedAt", params.SourceUpdatedAt, "fileUpdatedAt", mf.UpdatedAt)
		return nil, nil, ErrTokenStale
	}
	return params, mf, nil
}

View File

@@ -0,0 +1,17 @@
package transcode
import (
"testing"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// TestTranscode bootstraps the Ginkgo test runner for this package's specs.
func TestTranscode(t *testing.T) {
	tests.Init(t, false)
	// Keep spec output quiet: only fatal log entries are printed.
	log.SetLevel(log.LevelFatal)
	RegisterFailHandler(Fail)
	RunSpecs(t, "Transcode Suite")
}

View File

File diff suppressed because it is too large Load Diff

140
core/transcode/types.go Normal file
View File

@@ -0,0 +1,140 @@
package transcode
import (
"context"
"errors"
"time"
"github.com/navidrome/navidrome/model"
)
// Sentinel errors returned by token parsing/validation; match with errors.Is.
var (
	ErrTokenInvalid  = errors.New("invalid or expired transcode token")
	ErrMediaNotFound = errors.New("media file not found")
	ErrTokenStale    = errors.New("transcode token is stale: media file has changed")
)

// Decider is the core service interface for making transcoding decisions
// and for creating/parsing the signed tokens that carry them between requests.
type Decider interface {
	MakeDecision(ctx context.Context, mf *model.MediaFile, clientInfo *ClientInfo) (*Decision, error)
	CreateTranscodeParams(decision *Decision) (string, error)
	ParseTranscodeParams(token string) (*Params, error)
	ValidateTranscodeParams(ctx context.Context, token string, mediaID string) (*Params, *model.MediaFile, error)
}

// ClientInfo represents client playback capabilities.
// All bitrate values are in kilobits per second (kbps).
type ClientInfo struct {
	Name                       string
	Platform                   string
	MaxAudioBitrate            int // Global cap on any stream; 0 means unlimited
	MaxTranscodingAudioBitrate int // Cap used when transcoding; 0 means unset
	DirectPlayProfiles         []DirectPlayProfile
	TranscodingProfiles        []Profile
	CodecProfiles              []CodecProfile
}

// DirectPlayProfile describes a format the client can play directly.
// Empty lists mean "no restriction" for that dimension.
type DirectPlayProfile struct {
	Containers       []string
	AudioCodecs      []string
	Protocols        []string
	MaxAudioChannels int
}

// Profile describes a transcoding target the client supports.
type Profile struct {
	Container        string
	AudioCodec       string
	Protocol         string
	MaxAudioChannels int
}

// CodecProfile describes codec-specific limitations.
type CodecProfile struct {
	Type        string // Only CodecProfileTypeAudio is recognized
	Name        string // Codec the limitations apply to
	Limitations []Limitation
}

// Limitation describes a specific codec limitation.
type Limitation struct {
	Name       string // One of the Limitation* constants
	Comparison string // One of the Comparison* constants
	Values     []string
	Required   bool
}

// Protocol values (OpenSubsonic spec enum)
const (
	ProtocolHTTP = "http"
	ProtocolHLS  = "hls"
)

// Comparison operators (OpenSubsonic spec enum)
const (
	ComparisonEquals           = "Equals"
	ComparisonNotEquals        = "NotEquals"
	ComparisonLessThanEqual    = "LessThanEqual"
	ComparisonGreaterThanEqual = "GreaterThanEqual"
)

// Limitation names (OpenSubsonic spec enum)
const (
	LimitationAudioChannels   = "audioChannels"
	LimitationAudioBitrate    = "audioBitrate"
	LimitationAudioProfile    = "audioProfile"
	LimitationAudioSamplerate = "audioSamplerate"
	LimitationAudioBitdepth   = "audioBitdepth"
)

// Codec profile types (OpenSubsonic spec enum)
const (
	CodecProfileTypeAudio = "AudioCodec"
)

// Decision represents the internal decision result.
// All bitrate values are in kilobits per second (kbps).
type Decision struct {
	MediaID          string
	CanDirectPlay    bool
	CanTranscode     bool
	TranscodeReasons []string // Why each direct-play profile was rejected
	ErrorReason      string   // Set when neither direct play nor transcode is possible
	TargetFormat     string
	TargetBitrate    int
	TargetChannels   int
	TargetSampleRate int
	TargetBitDepth   int
	SourceStream     StreamDetails
	SourceUpdatedAt  time.Time // Embedded in tokens for staleness checks
	TranscodeStream  *StreamDetails
}

// StreamDetails describes audio stream properties.
// Bitrate is in kilobits per second (kbps).
type StreamDetails struct {
	Container  string
	Codec      string
	Profile    string // Audio profile (e.g., "LC", "HE-AAC"). Empty until scanner support is added.
	Bitrate    int
	SampleRate int
	BitDepth   int
	Channels   int
	Duration   float32
	Size       int64
	IsLossless bool
}

// Params contains the parameters extracted from a transcode token.
// TargetBitrate is in kilobits per second (kbps).
type Params struct {
	MediaID          string
	DirectPlay       bool
	TargetFormat     string
	TargetBitrate    int
	TargetChannels   int
	TargetSampleRate int
	TargetBitDepth   int
	SourceUpdatedAt  time.Time
}

View File

@@ -8,6 +8,7 @@ import (
"github.com/navidrome/navidrome/core/metrics" "github.com/navidrome/navidrome/core/metrics"
"github.com/navidrome/navidrome/core/playback" "github.com/navidrome/navidrome/core/playback"
"github.com/navidrome/navidrome/core/scrobbler" "github.com/navidrome/navidrome/core/scrobbler"
"github.com/navidrome/navidrome/core/transcode"
) )
var Set = wire.NewSet( var Set = wire.NewSet(
@@ -20,6 +21,7 @@ var Set = wire.NewSet(
NewLibrary, NewLibrary,
NewUser, NewUser,
NewMaintenance, NewMaintenance,
transcode.NewDecider,
agents.GetAgents, agents.GetAgents,
external.NewProvider, external.NewProvider,
wire.Bind(new(external.Agents), new(*agents.Agents)), wire.Bind(new(external.Agents), new(*agents.Agents)),

View File

@@ -0,0 +1,63 @@
package migrations
import (
"context"
"database/sql"
"github.com/navidrome/navidrome/model/id"
"github.com/pressly/goose/v3"
)
// init registers this migration's up/down functions with goose.
func init() {
	goose.AddMigrationContext(upAddCodecAndUpdateTranscodings, downAddCodecAndUpdateTranscodings)
}
// upAddCodecAndUpdateTranscodings adds the media_file.codec column (with an
// index), migrates the old default AAC transcoding command to the new
// fragmented-MP4 form, and seeds a FLAC transcoding entry where missing.
func upAddCodecAndUpdateTranscodings(_ context.Context, tx *sql.Tx) error {
	// Add codec column to media_file. It stays '' until a rescan populates it;
	// until then, model code infers the codec from the file suffix.
	_, err := tx.Exec(`ALTER TABLE media_file ADD COLUMN codec VARCHAR(255) DEFAULT '' NOT NULL`)
	if err != nil {
		return err
	}
	_, err = tx.Exec(`CREATE INDEX IF NOT EXISTS media_file_codec ON media_file(codec)`)
	if err != nil {
		return err
	}
	// Update old AAC default (adts) to new default (ipod with fragmented MP4).
	// Only affects users who still have the unmodified old default command —
	// the WHERE clause matches the exact previous default string.
	_, err = tx.Exec(
		`UPDATE transcoding SET command = ? WHERE target_format = 'aac' AND command = ?`,
		"ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a aac -f ipod -movflags frag_keyframe+empty_moov -",
		"ffmpeg -i %s -ss %t -map 0:a:0 -b:a %bk -v 0 -c:a aac -f adts -",
	)
	if err != nil {
		return err
	}
	// Add FLAC transcoding for existing installations that were seeded before FLAC was added.
	var count int
	err = tx.QueryRow("SELECT COUNT(*) FROM transcoding WHERE target_format = 'flac'").Scan(&count)
	if err != nil {
		return err
	}
	if count == 0 {
		_, err = tx.Exec(
			"INSERT INTO transcoding (id, name, target_format, default_bit_rate, command) VALUES (?, ?, ?, ?, ?)",
			id.NewRandom(), "flac audio", "flac", 0,
			"ffmpeg -i %s -ss %t -map 0:a:0 -v 0 -c:a flac -f flac -",
		)
		if err != nil {
			return err
		}
	}
	return nil
}
// downAddCodecAndUpdateTranscodings reverses the schema change: drops the
// codec index and then the column. The AAC command update and FLAC seed are
// intentionally left in place.
func downAddCodecAndUpdateTranscodings(_ context.Context, tx *sql.Tx) error {
	if _, err := tx.Exec(`DROP INDEX IF EXISTS media_file_codec`); err != nil {
		return err
	}
	_, err := tx.Exec(`ALTER TABLE media_file DROP COLUMN codec`)
	return err
}

2
go.mod
View File

@@ -7,7 +7,7 @@ replace (
github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 => github.com/deluan/tag v0.0.0-20241002021117-dfe5e6ea396d github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 => github.com/deluan/tag v0.0.0-20241002021117-dfe5e6ea396d
// Fork to implement raw tags support // Fork to implement raw tags support
go.senan.xyz/taglib => github.com/deluan/go-taglib v0.0.0-20260212150743-3f1b97cb0d1e go.senan.xyz/taglib => github.com/deluan/go-taglib v0.0.0-20260209170351-c057626454d0
) )
require ( require (

4
go.sum
View File

@@ -36,8 +36,8 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40=
github.com/deluan/go-taglib v0.0.0-20260212150743-3f1b97cb0d1e h1:pwx3kmHzl1N28coJV2C1zfm2ZF0qkQcGX+Z6BvXteB4= github.com/deluan/go-taglib v0.0.0-20260209170351-c057626454d0 h1:R8fMzz++cqdQ3DVjzrmAKmZFr2PT8vT8pQEfRzxms00=
github.com/deluan/go-taglib v0.0.0-20260212150743-3f1b97cb0d1e/go.mod h1:sKDN0U4qXDlq6LFK+aOAkDH4Me5nDV1V/A4B+B69xBA= github.com/deluan/go-taglib v0.0.0-20260209170351-c057626454d0/go.mod h1:sKDN0U4qXDlq6LFK+aOAkDH4Me5nDV1V/A4B+B69xBA=
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf h1:tb246l2Zmpt/GpF9EcHCKTtwzrd0HGfEmoODFA/qnk4= github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf h1:tb246l2Zmpt/GpF9EcHCKTtwzrd0HGfEmoODFA/qnk4=
github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf/go.mod h1:tSgDythFsl0QgS/PFWfIZqcJKnkADWneY80jaVRlqK8= github.com/deluan/rest v0.0.0-20211102003136-6260bc399cbf/go.mod h1:tSgDythFsl0QgS/PFWfIZqcJKnkADWneY80jaVRlqK8=
github.com/deluan/sanitize v0.0.0-20241120162836-fdfd8fdfaa55 h1:wSCnggTs2f2ji6nFwQmfwgINcmSMj0xF0oHnoyRSPe4= github.com/deluan/sanitize v0.0.0-20241120162836-fdfd8fdfaa55 h1:wSCnggTs2f2ji6nFwQmfwgINcmSMj0xF0oHnoyRSPe4=

View File

@@ -23,7 +23,6 @@ var fieldMap = map[string]*mappedField{
"releasedate": {field: "media_file.release_date"}, "releasedate": {field: "media_file.release_date"},
"size": {field: "media_file.size"}, "size": {field: "media_file.size"},
"compilation": {field: "media_file.compilation"}, "compilation": {field: "media_file.compilation"},
"explicitstatus": {field: "media_file.explicit_status"},
"dateadded": {field: "media_file.created_at"}, "dateadded": {field: "media_file.created_at"},
"datemodified": {field: "media_file.updated_at"}, "datemodified": {field: "media_file.updated_at"},
"discsubtitle": {field: "media_file.disc_subtitle"}, "discsubtitle": {field: "media_file.disc_subtitle"},

View File

@@ -14,6 +14,7 @@ import (
"github.com/gohugoio/hashstructure" "github.com/gohugoio/hashstructure"
"github.com/navidrome/navidrome/conf" "github.com/navidrome/navidrome/conf"
confmime "github.com/navidrome/navidrome/conf/mime"
"github.com/navidrome/navidrome/consts" "github.com/navidrome/navidrome/consts"
"github.com/navidrome/navidrome/utils" "github.com/navidrome/navidrome/utils"
"github.com/navidrome/navidrome/utils/slice" "github.com/navidrome/navidrome/utils/slice"
@@ -56,6 +57,7 @@ type MediaFile struct {
SampleRate int `structs:"sample_rate" json:"sampleRate"` SampleRate int `structs:"sample_rate" json:"sampleRate"`
BitDepth int `structs:"bit_depth" json:"bitDepth"` BitDepth int `structs:"bit_depth" json:"bitDepth"`
Channels int `structs:"channels" json:"channels"` Channels int `structs:"channels" json:"channels"`
Codec string `structs:"codec" json:"codec"`
Genre string `structs:"genre" json:"genre"` Genre string `structs:"genre" json:"genre"`
Genres Genres `structs:"-" json:"genres,omitempty"` Genres Genres `structs:"-" json:"genres,omitempty"`
SortTitle string `structs:"sort_title" json:"sortTitle,omitempty"` SortTitle string `structs:"sort_title" json:"sortTitle,omitempty"`
@@ -161,6 +163,81 @@ func (mf MediaFile) AbsolutePath() string {
return filepath.Join(mf.LibraryPath, mf.Path) return filepath.Join(mf.LibraryPath, mf.Path)
} }
// AudioCodec returns the audio codec for this file. The scanner-populated
// Codec field wins when present (normalized to lowercase); otherwise the
// codec is inferred from the file extension and audio properties.
func (mf MediaFile) AudioCodec() string {
	if c := mf.Codec; c != "" {
		return strings.ToLower(c)
	}
	return mf.inferCodecFromSuffix()
}
// inferCodecFromSuffix derives the codec from the file extension when the
// Codec field is empty. Returns "" for unrecognized extensions.
func (mf MediaFile) inferCodecFromSuffix() string {
	suffix := strings.ToLower(mf.Suffix)
	switch suffix {
	case "mp3", "mpga":
		return "mp3"
	case "mp2":
		return "mp2"
	case "ogg", "oga":
		return "vorbis"
	case "opus":
		return "opus"
	case "mpc":
		return "mpc"
	case "wma":
		return "wma"
	case "flac":
		return "flac"
	case "wav", "aif", "aiff", "aifc":
		return "pcm"
	case "ape":
		return "ape"
	case "wv", "wvp":
		return "wv"
	case "tta":
		return "tta"
	case "tak":
		return "tak"
	case "shn":
		return "shn"
	case "dsf", "dff":
		return "dsd"
	case "m4a":
		// The m4a container holds either lossy AAC or lossless ALAC; a
		// non-zero bit depth indicates ALAC.
		if mf.BitDepth > 0 {
			return "alac"
		}
		return "aac"
	case "m4b", "m4p", "m4r":
		return "aac"
	}
	return ""
}
// IsLossless returns true if this file uses a lossless codec.
func (mf MediaFile) IsLossless() bool {
	codec := mf.AudioCodec()
	// Primary: codec-based check (most accurate for containers like M4A)
	switch codec {
	case "flac", "alac", "pcm", "ape", "wv", "tta", "tak", "shn", "dsd":
		return true
	}
	// Secondary: suffix-based check using configurable list from YAML.
	// NOTE(review): this comparison is exact (no case folding) — assumes
	// Suffix is stored lowercase; confirm against the scanner.
	if slices.Contains(confmime.LosslessFormats, mf.Suffix) {
		return true
	}
	// Fallback heuristic: if BitDepth is set, it's likely lossless.
	// This may produce false positives for lossy formats that report bit depth,
	// but it becomes irrelevant once the Codec column is populated after a full rescan.
	return mf.BitDepth > 0
}
type MediaFiles []MediaFile type MediaFiles []MediaFile
// ToAlbum creates an Album object based on the attributes of this MediaFiles collection. // ToAlbum creates an Album object based on the attributes of this MediaFiles collection.

View File

@@ -475,7 +475,7 @@ var _ = Describe("MediaFile", func() {
DeferCleanup(configtest.SetupConfig()) DeferCleanup(configtest.SetupConfig())
conf.Server.EnableMediaFileCoverArt = true conf.Server.EnableMediaFileCoverArt = true
}) })
Describe(".CoverArtId()", func() { Describe("CoverArtId", func() {
It("returns its own id if it HasCoverArt", func() { It("returns its own id if it HasCoverArt", func() {
mf := MediaFile{ID: "111", AlbumID: "1", HasCoverArt: true} mf := MediaFile{ID: "111", AlbumID: "1", HasCoverArt: true}
id := mf.CoverArtID() id := mf.CoverArtID()
@@ -496,6 +496,94 @@ var _ = Describe("MediaFile", func() {
Expect(id.ID).To(Equal(mf.AlbumID)) Expect(id.ID).To(Equal(mf.AlbumID))
}) })
}) })
Describe("AudioCodec", func() {
It("returns normalized stored codec when available", func() {
mf := MediaFile{Codec: "AAC", Suffix: "m4a"}
Expect(mf.AudioCodec()).To(Equal("aac"))
})
It("returns stored codec lowercased", func() {
mf := MediaFile{Codec: "ALAC", Suffix: "m4a"}
Expect(mf.AudioCodec()).To(Equal("alac"))
})
DescribeTable("infers codec from suffix when Codec field is empty",
func(suffix string, bitDepth int, expected string) {
mf := MediaFile{Suffix: suffix, BitDepth: bitDepth}
Expect(mf.AudioCodec()).To(Equal(expected))
},
Entry("mp3", "mp3", 0, "mp3"),
Entry("mpga", "mpga", 0, "mp3"),
Entry("mp2", "mp2", 0, "mp2"),
Entry("ogg", "ogg", 0, "vorbis"),
Entry("oga", "oga", 0, "vorbis"),
Entry("opus", "opus", 0, "opus"),
Entry("mpc", "mpc", 0, "mpc"),
Entry("wma", "wma", 0, "wma"),
Entry("flac", "flac", 0, "flac"),
Entry("wav", "wav", 0, "pcm"),
Entry("aif", "aif", 0, "pcm"),
Entry("aiff", "aiff", 0, "pcm"),
Entry("aifc", "aifc", 0, "pcm"),
Entry("ape", "ape", 0, "ape"),
Entry("wv", "wv", 0, "wv"),
Entry("wvp", "wvp", 0, "wv"),
Entry("tta", "tta", 0, "tta"),
Entry("tak", "tak", 0, "tak"),
Entry("shn", "shn", 0, "shn"),
Entry("dsf", "dsf", 0, "dsd"),
Entry("dff", "dff", 0, "dsd"),
Entry("m4a with BitDepth=0 (AAC)", "m4a", 0, "aac"),
Entry("m4a with BitDepth>0 (ALAC)", "m4a", 16, "alac"),
Entry("m4b", "m4b", 0, "aac"),
Entry("m4p", "m4p", 0, "aac"),
Entry("m4r", "m4r", 0, "aac"),
Entry("unknown suffix", "xyz", 0, ""),
)
It("prefers stored codec over suffix inference", func() {
mf := MediaFile{Codec: "ALAC", Suffix: "m4a", BitDepth: 0}
Expect(mf.AudioCodec()).To(Equal("alac"))
})
})
Describe("IsLossless", func() {
BeforeEach(func() {
DeferCleanup(configtest.SetupConfig())
})
DescribeTable("detects lossless codecs",
func(codec string, suffix string, bitDepth int, expected bool) {
mf := MediaFile{Codec: codec, Suffix: suffix, BitDepth: bitDepth}
Expect(mf.IsLossless()).To(Equal(expected))
},
Entry("flac", "FLAC", "flac", 16, true),
Entry("alac", "ALAC", "m4a", 24, true),
Entry("pcm via wav", "", "wav", 16, true),
Entry("pcm via aiff", "", "aiff", 24, true),
Entry("ape", "", "ape", 16, true),
Entry("wv", "", "wv", 0, true),
Entry("tta", "", "tta", 0, true),
Entry("tak", "", "tak", 0, true),
Entry("shn", "", "shn", 0, true),
Entry("dsd", "", "dsf", 0, true),
Entry("mp3 is lossy", "MP3", "mp3", 0, false),
Entry("aac is lossy", "AAC", "m4a", 0, false),
Entry("vorbis is lossy", "", "ogg", 0, false),
Entry("opus is lossy", "", "opus", 0, false),
)
It("detects lossless via BitDepth fallback when codec is unknown", func() {
mf := MediaFile{Suffix: "xyz", BitDepth: 24}
Expect(mf.IsLossless()).To(BeTrue())
})
It("returns false for unknown with no BitDepth", func() {
mf := MediaFile{Suffix: "xyz", BitDepth: 0}
Expect(mf.IsLossless()).To(BeFalse())
})
})
}) })
func t(v string) time.Time { func t(v string) time.Time {

View File

@@ -65,6 +65,7 @@ func (md Metadata) ToMediaFile(libID int, folderID string) model.MediaFile {
mf.SampleRate = md.AudioProperties().SampleRate mf.SampleRate = md.AudioProperties().SampleRate
mf.BitDepth = md.AudioProperties().BitDepth mf.BitDepth = md.AudioProperties().BitDepth
mf.Channels = md.AudioProperties().Channels mf.Channels = md.AudioProperties().Channels
mf.Codec = md.AudioProperties().Codec
mf.Path = md.FilePath() mf.Path = md.FilePath()
mf.Suffix = md.Suffix() mf.Suffix = md.Suffix()
mf.Size = md.Size() mf.Size = md.Size()

View File

@@ -35,6 +35,7 @@ type AudioProperties struct {
BitDepth int BitDepth int
SampleRate int SampleRate int
Channels int Channels int
Codec string
} }
type Date string type Date string

View File

@@ -1,52 +0,0 @@
package capabilities
// HTTPEndpoint allows plugins to handle incoming HTTP requests.
// Plugins that declare the 'endpoints' permission must implement this capability.
// The host dispatches incoming HTTP requests to the plugin's HandleRequest function.
//
//nd:capability name=httpendpoint required=true
type HTTPEndpoint interface {
// HandleRequest processes an incoming HTTP request and returns a response.
//nd:export name=nd_http_handle_request raw=true
HandleRequest(HTTPHandleRequest) (HTTPHandleResponse, error)
}
// HTTPHandleRequest is the input provided when an HTTP request is dispatched to a plugin.
type HTTPHandleRequest struct {
// Method is the HTTP method (GET, POST, PUT, DELETE, PATCH, etc.).
Method string `json:"method"`
// Path is the request path relative to the plugin's base URL.
// For example, if the full URL is /ext/my-plugin/webhook, Path is "/webhook".
// Both /ext/my-plugin and /ext/my-plugin/ are normalized to Path = "".
Path string `json:"path"`
// Query is the raw query string without the leading '?'.
Query string `json:"query,omitempty"`
// Headers contains the HTTP request headers.
Headers map[string][]string `json:"headers,omitempty"`
// Body is the request body content.
Body []byte `json:"body,omitempty"`
// User contains the authenticated user information. Nil for auth:"none" endpoints.
User *HTTPUser `json:"user,omitempty"`
}
// HTTPUser contains authenticated user information passed to the plugin.
type HTTPUser struct {
// ID is the internal Navidrome user ID.
ID string `json:"id"`
// Username is the user's login name.
Username string `json:"username"`
// Name is the user's display name.
Name string `json:"name"`
// IsAdmin indicates whether the user has admin privileges.
IsAdmin bool `json:"isAdmin"`
}
// HTTPHandleResponse is the response returned by the plugin's HandleRequest function.
type HTTPHandleResponse struct {
// Status is the HTTP status code. Defaults to 200 if zero or not set.
Status int32 `json:"status,omitempty"`
// Headers contains the HTTP response headers to set.
Headers map[string][]string `json:"headers,omitempty"`
// Body is the response body content.
Body []byte `json:"body,omitempty"`
}

View File

@@ -1,81 +0,0 @@
version: v1-draft
exports:
nd_http_handle_request:
description: HandleRequest processes an incoming HTTP request and returns a response.
input:
$ref: '#/components/schemas/HTTPHandleRequest'
contentType: application/json
output:
$ref: '#/components/schemas/HTTPHandleResponse'
contentType: application/json
components:
schemas:
HTTPHandleRequest:
description: HTTPHandleRequest is the input provided when an HTTP request is dispatched to a plugin.
properties:
method:
type: string
description: Method is the HTTP method (GET, POST, PUT, DELETE, PATCH, etc.).
path:
type: string
description: |-
Path is the request path relative to the plugin's base URL.
For example, if the full URL is /ext/my-plugin/webhook, Path is "/webhook".
Both /ext/my-plugin and /ext/my-plugin/ are normalized to Path = "".
query:
type: string
description: Query is the raw query string without the leading '?'.
headers:
type: object
description: Headers contains the HTTP request headers.
additionalProperties:
type: array
items:
type: string
body:
type: buffer
description: Body is the request body content.
user:
$ref: '#/components/schemas/HTTPUser'
description: User contains the authenticated user information. Nil for auth:"none" endpoints.
nullable: true
required:
- method
- path
HTTPHandleResponse:
description: HTTPHandleResponse is the response returned by the plugin's HandleRequest function.
properties:
status:
type: integer
format: int32
description: Status is the HTTP status code. Defaults to 200 if zero or not set.
headers:
type: object
description: Headers contains the HTTP response headers to set.
additionalProperties:
type: array
items:
type: string
body:
type: buffer
description: Body is the response body content.
HTTPUser:
description: HTTPUser contains authenticated user information passed to the plugin.
properties:
id:
type: string
description: ID is the internal Navidrome user ID.
username:
type: string
description: Username is the user's login name.
name:
type: string
description: Name is the user's display name.
isAdmin:
type: boolean
description: IsAdmin indicates whether the user has admin privileges.
required:
- id
- username
- name
- isAdmin

View File

@@ -1,14 +0,0 @@
package plugins
// CapabilityHTTPEndpoint indicates the plugin can handle incoming HTTP requests.
// Detected when the plugin exports the nd_http_handle_request function.
const CapabilityHTTPEndpoint Capability = "HTTPEndpoint"
const FuncHTTPHandleRequest = "nd_http_handle_request"
func init() {
registerCapability(
CapabilityHTTPEndpoint,
FuncHTTPHandleRequest,
)
}

View File

@@ -364,27 +364,6 @@ func capabilityFuncMap(cap Capability) template.FuncMap {
"providerInterface": func(e Export) string { return e.ProviderInterfaceName() }, "providerInterface": func(e Export) string { return e.ProviderInterfaceName() },
"implVar": func(e Export) string { return e.ImplVarName() }, "implVar": func(e Export) string { return e.ImplVarName() },
"exportFunc": func(e Export) string { return e.ExportFuncName() }, "exportFunc": func(e Export) string { return e.ExportFuncName() },
"rawFieldName": rawFieldName(cap),
}
}
// rawFieldName returns a template function that finds the first []byte field name
// in a struct by type name. This is used by raw export templates to generate
// field-specific binary frame code.
func rawFieldName(cap Capability) func(string) string {
structMap := make(map[string]StructDef)
for _, s := range cap.Structs {
structMap[s.Name] = s
}
return func(typeName string) string {
if s, ok := structMap[typeName]; ok {
for _, f := range s.Fields {
if f.Type == "[]byte" {
return f.Name
}
}
}
return ""
} }
} }
@@ -487,7 +466,6 @@ func rustCapabilityFuncMap(cap Capability) template.FuncMap {
"providerInterface": func(e Export) string { return e.ProviderInterfaceName() }, "providerInterface": func(e Export) string { return e.ProviderInterfaceName() },
"registerMacroName": func(name string) string { return registerMacroName(cap.Name, name) }, "registerMacroName": func(name string) string { return registerMacroName(cap.Name, name) },
"snakeCase": ToSnakeCase, "snakeCase": ToSnakeCase,
"rawFieldName": rawFieldName(cap),
"indent": func(spaces int, s string) string { "indent": func(spaces int, s string) string {
indent := strings.Repeat(" ", spaces) indent := strings.Repeat(" ", spaces)
lines := strings.Split(s, "\n") lines := strings.Split(s, "\n")
@@ -582,15 +560,9 @@ func rustConstName(name string) string {
// skipSerializingFunc returns the appropriate skip_serializing_if function name. // skipSerializingFunc returns the appropriate skip_serializing_if function name.
func skipSerializingFunc(goType string) string { func skipSerializingFunc(goType string) string {
if goType == "[]byte" { if strings.HasPrefix(goType, "*") || strings.HasPrefix(goType, "[]") || strings.HasPrefix(goType, "map[") {
return "Vec::is_empty"
}
if strings.HasPrefix(goType, "*") || strings.HasPrefix(goType, "[]") {
return "Option::is_none" return "Option::is_none"
} }
if strings.HasPrefix(goType, "map[") {
return "HashMap::is_empty"
}
switch goType { switch goType {
case "string": case "string":
return "String::is_empty" return "String::is_empty"

View File

@@ -1432,20 +1432,12 @@ type OnInitOutput struct {
var _ = Describe("Rust Generation", func() { var _ = Describe("Rust Generation", func() {
Describe("skipSerializingFunc", func() { Describe("skipSerializingFunc", func() {
It("should return Vec::is_empty for []byte type", func() { It("should return Option::is_none for pointer, slice, and map types", func() {
Expect(skipSerializingFunc("[]byte")).To(Equal("Vec::is_empty"))
})
It("should return Option::is_none for pointer and slice types", func() {
Expect(skipSerializingFunc("*string")).To(Equal("Option::is_none")) Expect(skipSerializingFunc("*string")).To(Equal("Option::is_none"))
Expect(skipSerializingFunc("*MyStruct")).To(Equal("Option::is_none")) Expect(skipSerializingFunc("*MyStruct")).To(Equal("Option::is_none"))
Expect(skipSerializingFunc("[]string")).To(Equal("Option::is_none")) Expect(skipSerializingFunc("[]string")).To(Equal("Option::is_none"))
Expect(skipSerializingFunc("[]int32")).To(Equal("Option::is_none")) Expect(skipSerializingFunc("[]int32")).To(Equal("Option::is_none"))
}) Expect(skipSerializingFunc("map[string]int")).To(Equal("Option::is_none"))
It("should return HashMap::is_empty for map types", func() {
Expect(skipSerializingFunc("map[string]int")).To(Equal("HashMap::is_empty"))
Expect(skipSerializingFunc("map[string]string")).To(Equal("HashMap::is_empty"))
}) })
It("should return String::is_empty for string type", func() { It("should return String::is_empty for string type", func() {

View File

@@ -269,7 +269,6 @@ func parseExport(name string, funcType *ast.FuncType, annotation map[string]stri
Name: name, Name: name,
ExportName: annotation["name"], ExportName: annotation["name"],
Doc: doc, Doc: doc,
Raw: annotation["raw"] == "true",
} }
// Capability exports have exactly one input parameter (the struct type) // Capability exports have exactly one input parameter (the struct type)

View File

@@ -635,68 +635,6 @@ type Output struct {
}) })
}) })
Describe("ParseCapabilities raw=true", func() {
It("should parse raw=true export annotation", func() {
src := `package capabilities
//nd:capability name=httpendpoint required=true
type HTTPEndpoint interface {
//nd:export name=nd_http_handle_request raw=true
HandleRequest(HTTPHandleRequest) (HTTPHandleResponse, error)
}
type HTTPHandleRequest struct {
Method string ` + "`json:\"method\"`" + `
Body []byte ` + "`json:\"body,omitempty\"`" + `
}
type HTTPHandleResponse struct {
Status int32 ` + "`json:\"status,omitempty\"`" + `
Body []byte ` + "`json:\"body,omitempty\"`" + `
}
`
err := os.WriteFile(filepath.Join(tmpDir, "http_endpoint.go"), []byte(src), 0600)
Expect(err).NotTo(HaveOccurred())
capabilities, err := ParseCapabilities(tmpDir)
Expect(err).NotTo(HaveOccurred())
Expect(capabilities).To(HaveLen(1))
cap := capabilities[0]
Expect(cap.Methods).To(HaveLen(1))
Expect(cap.Methods[0].Raw).To(BeTrue())
Expect(cap.HasRawMethods()).To(BeTrue())
})
It("should default Raw to false for export annotations without raw", func() {
src := `package capabilities
//nd:capability name=test required=true
type TestCapability interface {
//nd:export name=nd_test
Test(TestInput) (TestOutput, error)
}
type TestInput struct {
Value string ` + "`json:\"value\"`" + `
}
type TestOutput struct {
Result string ` + "`json:\"result\"`" + `
}
`
err := os.WriteFile(filepath.Join(tmpDir, "test.go"), []byte(src), 0600)
Expect(err).NotTo(HaveOccurred())
capabilities, err := ParseCapabilities(tmpDir)
Expect(err).NotTo(HaveOccurred())
Expect(capabilities).To(HaveLen(1))
Expect(capabilities[0].Methods[0].Raw).To(BeFalse())
Expect(capabilities[0].HasRawMethods()).To(BeFalse())
})
})
Describe("Export helpers", func() { Describe("Export helpers", func() {
It("should generate correct provider interface name", func() { It("should generate correct provider interface name", func() {
e := Export{Name: "GetArtistBiography"} e := Export{Name: "GetArtistBiography"}

View File

@@ -9,10 +9,6 @@ package {{.Package}}
import ( import (
"github.com/navidrome/navidrome/plugins/pdk/go/pdk" "github.com/navidrome/navidrome/plugins/pdk/go/pdk"
{{- if .Capability.HasRawMethods}}
"encoding/binary"
"encoding/json"
{{- end}}
) )
{{- /* Generate type alias definitions */ -}} {{- /* Generate type alias definitions */ -}}
@@ -60,7 +56,6 @@ func (e {{$typeName}}) Error() string { return string(e) }
{{- end}} {{- end}}
{{- /* Generate struct definitions */ -}} {{- /* Generate struct definitions */ -}}
{{- $capability := .Capability}}
{{- range .Capability.Structs}} {{- range .Capability.Structs}}
{{- if .Doc}} {{- if .Doc}}
@@ -73,12 +68,8 @@ type {{.Name}} struct {
{{- if .Doc}} {{- if .Doc}}
{{formatDoc .Doc | indent 1}} {{formatDoc .Doc | indent 1}}
{{- end}} {{- end}}
{{- if and (eq .Type "[]byte") $capability.HasRawMethods}}
{{.Name}} {{.Type}} `json:"-"`
{{- else}}
{{.Name}} {{.Type}} `json:"{{.JSONTag}}{{if .OmitEmpty}},omitempty{{end}}"` {{.Name}} {{.Type}} `json:"{{.JSONTag}}{{if .OmitEmpty}},omitempty{{end}}"`
{{- end}} {{- end}}
{{- end}}
} }
{{- end}} {{- end}}
@@ -181,53 +172,6 @@ func {{exportFunc .}}() int32 {
// Return standard code - host will skip this plugin gracefully // Return standard code - host will skip this plugin gracefully
return NotImplementedCode return NotImplementedCode
} }
{{- if .Raw}}
{{- /* Raw binary frame input/output */ -}}
{{- if .HasInput}}
// Parse input frame: [json_len:4B][JSON without []byte field][raw bytes]
raw := pdk.Input()
if len(raw) < 4 {
pdk.SetErrorString("malformed input frame")
return -1
}
jsonLen := binary.BigEndian.Uint32(raw[:4])
if uint32(len(raw)-4) < jsonLen {
pdk.SetErrorString("invalid json length in input frame")
return -1
}
var input {{.Input.Type}}
if err := json.Unmarshal(raw[4:4+jsonLen], &input); err != nil {
pdk.SetError(err)
return -1
}
input.{{rawFieldName .Input.Type}} = raw[4+jsonLen:]
{{- end}}
{{- if and .HasInput .HasOutput}}
output, err := {{implVar .}}(input)
if err != nil {
// Error frame: [0x01][UTF-8 error message]
errMsg := []byte(err.Error())
errFrame := make([]byte, 1+len(errMsg))
errFrame[0] = 0x01
copy(errFrame[1:], errMsg)
pdk.Output(errFrame)
return 0
}
// Success frame: [0x00][json_len:4B][JSON without []byte field][raw bytes]
jsonBytes, _ := json.Marshal(output)
rawBytes := output.{{rawFieldName .Output.Type}}
frame := make([]byte, 1+4+len(jsonBytes)+len(rawBytes))
frame[0] = 0x00
binary.BigEndian.PutUint32(frame[1:5], uint32(len(jsonBytes)))
copy(frame[5:5+len(jsonBytes)], jsonBytes)
copy(frame[5+len(jsonBytes):], rawBytes)
pdk.Output(frame)
{{- end}}
{{- else}}
{{- /* Standard JSON input/output */ -}}
{{- if .HasInput}} {{- if .HasInput}}
var input {{.Input.Type}} var input {{.Input.Type}}
@@ -272,7 +216,6 @@ func {{exportFunc .}}() int32 {
pdk.SetError(err) pdk.SetError(err)
return -1 return -1
} }
{{- end}}
{{- end}} {{- end}}
return 0 return 0

View File

@@ -52,7 +52,6 @@ pub const {{rustConstName $v.Name}}: &'static str = {{$v.Value}};
{{- end}} {{- end}}
{{- /* Generate struct definitions */ -}} {{- /* Generate struct definitions */ -}}
{{- $capability := .Capability}}
{{- range .Capability.Structs}} {{- range .Capability.Structs}}
{{- if .Doc}} {{- if .Doc}}
@@ -67,16 +66,12 @@ pub struct {{.Name}} {
{{- if .Doc}} {{- if .Doc}}
{{rustDocComment .Doc | indent 4}} {{rustDocComment .Doc | indent 4}}
{{- end}} {{- end}}
{{- if and (eq .Type "[]byte") $capability.HasRawMethods}} {{- if .OmitEmpty}}
#[serde(skip)]
pub {{rustFieldName .Name}}: {{fieldRustType .}},
{{- else if .OmitEmpty}}
#[serde(default, skip_serializing_if = "{{skipSerializingFunc .Type}}")] #[serde(default, skip_serializing_if = "{{skipSerializingFunc .Type}}")]
pub {{rustFieldName .Name}}: {{fieldRustType .}},
{{- else}} {{- else}}
#[serde(default)] #[serde(default)]
pub {{rustFieldName .Name}}: {{fieldRustType .}},
{{- end}} {{- end}}
pub {{rustFieldName .Name}}: {{fieldRustType .}},
{{- end}} {{- end}}
} }
{{- end}} {{- end}}
@@ -129,56 +124,6 @@ pub trait {{agentName .Capability}} {
macro_rules! register_{{snakeCase .Package}} { macro_rules! register_{{snakeCase .Package}} {
($plugin_type:ty) => { ($plugin_type:ty) => {
{{- range .Capability.Methods}} {{- range .Capability.Methods}}
{{- if .Raw}}
#[extism_pdk::plugin_fn]
pub fn {{.ExportName}}(
{{- if .HasInput}}
_raw_input: extism_pdk::Raw<Vec<u8>>
{{- end}}
) -> extism_pdk::FnResult<extism_pdk::Raw<Vec<u8>>> {
let plugin = <$plugin_type>::default();
{{- if .HasInput}}
// Parse input frame: [json_len:4B][JSON without []byte field][raw bytes]
let raw_bytes = _raw_input.0;
if raw_bytes.len() < 4 {
let mut err_frame = vec![0x01u8];
err_frame.extend_from_slice(b"malformed input frame");
return Ok(extism_pdk::Raw(err_frame));
}
let json_len = u32::from_be_bytes([raw_bytes[0], raw_bytes[1], raw_bytes[2], raw_bytes[3]]) as usize;
if json_len > raw_bytes.len() - 4 {
let mut err_frame = vec![0x01u8];
err_frame.extend_from_slice(b"invalid json length in input frame");
return Ok(extism_pdk::Raw(err_frame));
}
let mut req: $crate::{{snakeCase $.Package}}::{{rustOutputType .Input.Type}} = serde_json::from_slice(&raw_bytes[4..4+json_len])
.map_err(|e| extism_pdk::Error::msg(e.to_string()))?;
req.{{rustFieldName (rawFieldName .Input.Type)}} = raw_bytes[4+json_len..].to_vec();
{{- end}}
{{- if and .HasInput .HasOutput}}
match $crate::{{snakeCase $.Package}}::{{agentName $.Capability}}::{{rustMethodName .Name}}(&plugin, req) {
Ok(output) => {
// Success frame: [0x00][json_len:4B][JSON without []byte field][raw bytes]
let json_bytes = serde_json::to_vec(&output)
.map_err(|e| extism_pdk::Error::msg(e.to_string()))?;
let raw_field = &output.{{rustFieldName (rawFieldName .Output.Type)}};
let mut frame = Vec::with_capacity(1 + 4 + json_bytes.len() + raw_field.len());
frame.push(0x00);
frame.extend_from_slice(&(json_bytes.len() as u32).to_be_bytes());
frame.extend_from_slice(&json_bytes);
frame.extend_from_slice(raw_field);
Ok(extism_pdk::Raw(frame))
}
Err(e) => {
// Error frame: [0x01][UTF-8 error message]
let mut err_frame = vec![0x01u8];
err_frame.extend_from_slice(e.message.as_bytes());
Ok(extism_pdk::Raw(err_frame))
}
}
{{- end}}
}
{{- else}}
#[extism_pdk::plugin_fn] #[extism_pdk::plugin_fn]
pub fn {{.ExportName}}( pub fn {{.ExportName}}(
{{- if .HasInput}} {{- if .HasInput}}
@@ -201,7 +146,6 @@ macro_rules! register_{{snakeCase .Package}} {
{{- end}} {{- end}}
} }
{{- end}} {{- end}}
{{- end}}
}; };
} }
{{- else}} {{- else}}
@@ -227,56 +171,6 @@ pub trait {{providerInterface .}} {
#[macro_export] #[macro_export]
macro_rules! {{registerMacroName .Name}} { macro_rules! {{registerMacroName .Name}} {
($plugin_type:ty) => { ($plugin_type:ty) => {
{{- if .Raw}}
#[extism_pdk::plugin_fn]
pub fn {{.ExportName}}(
{{- if .HasInput}}
_raw_input: extism_pdk::Raw<Vec<u8>>
{{- end}}
) -> extism_pdk::FnResult<extism_pdk::Raw<Vec<u8>>> {
let plugin = <$plugin_type>::default();
{{- if .HasInput}}
// Parse input frame: [json_len:4B][JSON without []byte field][raw bytes]
let raw_bytes = _raw_input.0;
if raw_bytes.len() < 4 {
let mut err_frame = vec![0x01u8];
err_frame.extend_from_slice(b"malformed input frame");
return Ok(extism_pdk::Raw(err_frame));
}
let json_len = u32::from_be_bytes([raw_bytes[0], raw_bytes[1], raw_bytes[2], raw_bytes[3]]) as usize;
if json_len > raw_bytes.len() - 4 {
let mut err_frame = vec![0x01u8];
err_frame.extend_from_slice(b"invalid json length in input frame");
return Ok(extism_pdk::Raw(err_frame));
}
let mut req: $crate::{{snakeCase $.Package}}::{{rustOutputType .Input.Type}} = serde_json::from_slice(&raw_bytes[4..4+json_len])
.map_err(|e| extism_pdk::Error::msg(e.to_string()))?;
req.{{rustFieldName (rawFieldName .Input.Type)}} = raw_bytes[4+json_len..].to_vec();
{{- end}}
{{- if and .HasInput .HasOutput}}
match $crate::{{snakeCase $.Package}}::{{providerInterface .}}::{{rustMethodName .Name}}(&plugin, req) {
Ok(output) => {
// Success frame: [0x00][json_len:4B][JSON without []byte field][raw bytes]
let json_bytes = serde_json::to_vec(&output)
.map_err(|e| extism_pdk::Error::msg(e.to_string()))?;
let raw_field = &output.{{rustFieldName (rawFieldName .Output.Type)}};
let mut frame = Vec::with_capacity(1 + 4 + json_bytes.len() + raw_field.len());
frame.push(0x00);
frame.extend_from_slice(&(json_bytes.len() as u32).to_be_bytes());
frame.extend_from_slice(&json_bytes);
frame.extend_from_slice(raw_field);
Ok(extism_pdk::Raw(frame))
}
Err(e) => {
// Error frame: [0x01][UTF-8 error message]
let mut err_frame = vec![0x01u8];
err_frame.extend_from_slice(e.message.as_bytes());
Ok(extism_pdk::Raw(err_frame))
}
}
{{- end}}
}
{{- else}}
#[extism_pdk::plugin_fn] #[extism_pdk::plugin_fn]
pub fn {{.ExportName}}( pub fn {{.ExportName}}(
{{- if .HasInput}} {{- if .HasInput}}
@@ -298,7 +192,6 @@ macro_rules! {{registerMacroName .Name}} {
Ok(()) Ok(())
{{- end}} {{- end}}
} }
{{- end}}
}; };
} }
{{- end}} {{- end}}

View File

@@ -53,7 +53,6 @@ func (e {{$typeName}}) Error() string { return string(e) }
{{- end}} {{- end}}
{{- /* Generate struct definitions */ -}} {{- /* Generate struct definitions */ -}}
{{- $capability := .Capability}}
{{- range .Capability.Structs}} {{- range .Capability.Structs}}
{{- if .Doc}} {{- if .Doc}}
@@ -66,12 +65,8 @@ type {{.Name}} struct {
{{- if .Doc}} {{- if .Doc}}
{{formatDoc .Doc | indent 1}} {{formatDoc .Doc | indent 1}}
{{- end}} {{- end}}
{{- if and (eq .Type "[]byte") $capability.HasRawMethods}}
{{.Name}} {{.Type}} `json:"-"`
{{- else}}
{{.Name}} {{.Type}} `json:"{{.JSONTag}}{{if .OmitEmpty}},omitempty{{end}}"` {{.Name}} {{.Type}} `json:"{{.JSONTag}}{{if .OmitEmpty}},omitempty{{end}}"`
{{- end}} {{- end}}
{{- end}}
} }
{{- end}} {{- end}}

View File

@@ -48,16 +48,6 @@ type ConstDef struct {
Doc string // Documentation comment Doc string // Documentation comment
} }
// HasRawMethods returns true if any export in the capability uses raw binary framing.
func (c Capability) HasRawMethods() bool {
for _, m := range c.Methods {
if m.Raw {
return true
}
}
return false
}
// KnownStructs returns a map of struct names defined in this capability. // KnownStructs returns a map of struct names defined in this capability.
func (c Capability) KnownStructs() map[string]bool { func (c Capability) KnownStructs() map[string]bool {
result := make(map[string]bool) result := make(map[string]bool)
@@ -74,7 +64,6 @@ type Export struct {
Input Param // Single input parameter (the struct type) Input Param // Single input parameter (the struct type)
Output Param // Single output return value (the struct type) Output Param // Single output return value (the struct type)
Doc string // Documentation comment for the method Doc string // Documentation comment for the method
Raw bool // If true, uses binary framing instead of JSON for []byte fields
} }
// ProviderInterfaceName returns the optional provider interface name. // ProviderInterfaceName returns the optional provider interface name.

View File

@@ -54,14 +54,6 @@ type (
Nullable bool `yaml:"nullable,omitempty"` Nullable bool `yaml:"nullable,omitempty"`
Items *xtpProperty `yaml:"items,omitempty"` Items *xtpProperty `yaml:"items,omitempty"`
} }
// xtpMapProperty represents a map property in XTP (type: object with additionalProperties).
xtpMapProperty struct {
Type string `yaml:"type"`
Description string `yaml:"description,omitempty"`
Nullable bool `yaml:"nullable,omitempty"`
AdditionalProperties *xtpProperty `yaml:"additionalProperties"`
}
) )
// GenerateSchema generates an XTP YAML schema from a capability. // GenerateSchema generates an XTP YAML schema from a capability.
@@ -214,12 +206,7 @@ func buildObjectSchema(st StructDef, knownTypes map[string]bool) xtpObjectSchema
for _, field := range st.Fields { for _, field := range st.Fields {
propName := getJSONFieldName(field) propName := getJSONFieldName(field)
goType := strings.TrimPrefix(field.Type, "*") addToMap(&schema.Properties, propName, buildProperty(field, knownTypes))
if strings.HasPrefix(goType, "map[") {
addToMap(&schema.Properties, propName, buildMapProperty(goType, field.Doc, strings.HasPrefix(field.Type, "*"), knownTypes))
} else {
addToMap(&schema.Properties, propName, buildProperty(field, knownTypes))
}
if !strings.HasPrefix(field.Type, "*") && !field.OmitEmpty { if !strings.HasPrefix(field.Type, "*") && !field.OmitEmpty {
schema.Required = append(schema.Required, propName) schema.Required = append(schema.Required, propName)
@@ -259,12 +246,6 @@ func buildProperty(field FieldDef, knownTypes map[string]bool) xtpProperty {
return prop return prop
} }
// Handle []byte as buffer type (must be checked before generic slice handling)
if goType == "[]byte" {
prop.Type = "buffer"
return prop
}
// Handle slice types // Handle slice types
if strings.HasPrefix(goType, "[]") { if strings.HasPrefix(goType, "[]") {
elemType := goType[2:] elemType := goType[2:]
@@ -283,55 +264,6 @@ func buildProperty(field FieldDef, knownTypes map[string]bool) xtpProperty {
return prop return prop
} }
// buildMapProperty builds an XTP MapProperty for a Go map type.
// It parses map[K]V and generates additionalProperties describing V.
func buildMapProperty(goType, doc string, isPointer bool, knownTypes map[string]bool) xtpMapProperty {
prop := xtpMapProperty{
Type: "object",
Description: cleanDocForYAML(doc),
Nullable: isPointer,
}
// Parse value type from map[K]V
valueType := parseMapValueType(goType)
valProp := &xtpProperty{}
if strings.HasPrefix(valueType, "[]") {
elemType := valueType[2:]
valProp.Type = "array"
valProp.Items = &xtpProperty{}
if isKnownType(elemType, knownTypes) {
valProp.Items.Ref = "#/components/schemas/" + elemType
} else {
valProp.Items.Type = goTypeToXTPType(elemType)
}
} else if isKnownType(valueType, knownTypes) {
valProp.Ref = "#/components/schemas/" + valueType
} else {
valProp.Type, valProp.Format = goTypeToXTPTypeAndFormat(valueType)
}
prop.AdditionalProperties = valProp
return prop
}
// parseMapValueType extracts the value type from a Go map type string like "map[string][]string".
func parseMapValueType(goType string) string {
// Find the closing bracket of the key type
depth := 0
for i, ch := range goType {
if ch == '[' {
depth++
} else if ch == ']' {
depth--
if depth == 0 {
return goType[i+1:]
}
}
}
return "object" // fallback
}
// addToMap adds a key-value pair to a yaml.Node map, preserving insertion order. // addToMap adds a key-value pair to a yaml.Node map, preserving insertion order.
func addToMap[T any](node *yaml.Node, key string, value T) { func addToMap[T any](node *yaml.Node, key string, value T) {
var valNode yaml.Node var valNode yaml.Node

View File

@@ -719,139 +719,4 @@ var _ = Describe("XTP Schema Generation", func() {
Expect(schemas).NotTo(HaveKey("UnusedStatus")) Expect(schemas).NotTo(HaveKey("UnusedStatus"))
}) })
}) })
Describe("GenerateSchema with []byte fields", func() {
It("should render []byte as buffer type and validate against XTP JSONSchema", func() {
capability := Capability{
Name: "buffer_test",
SourceFile: "buffer_test",
Methods: []Export{
{ExportName: "test", Input: NewParam("input", "Input"), Output: NewParam("output", "Output")},
},
Structs: []StructDef{
{
Name: "Input",
Fields: []FieldDef{
{Name: "Name", Type: "string", JSONTag: "name"},
{Name: "Data", Type: "[]byte", JSONTag: "data,omitempty", OmitEmpty: true},
},
},
{
Name: "Output",
Fields: []FieldDef{
{Name: "Body", Type: "[]byte", JSONTag: "body,omitempty", OmitEmpty: true},
},
},
},
}
schema, err := GenerateSchema(capability)
Expect(err).NotTo(HaveOccurred())
Expect(ValidateXTPSchema(schema)).To(Succeed())
doc := parseSchema(schema)
components := doc["components"].(map[string]any)
schemas := components["schemas"].(map[string]any)
input := schemas["Input"].(map[string]any)
props := input["properties"].(map[string]any)
data := props["data"].(map[string]any)
Expect(data["type"]).To(Equal("buffer"))
Expect(data).NotTo(HaveKey("items"))
Expect(data).NotTo(HaveKey("format"))
output := schemas["Output"].(map[string]any)
outProps := output["properties"].(map[string]any)
body := outProps["body"].(map[string]any)
Expect(body["type"]).To(Equal("buffer"))
})
})
Describe("GenerateSchema with map fields", func() {
It("should render map[string][]string as object with additionalProperties and validate", func() {
capability := Capability{
Name: "map_test",
SourceFile: "map_test",
Methods: []Export{
{ExportName: "test", Input: NewParam("input", "Input"), Output: NewParam("output", "Output")},
},
Structs: []StructDef{
{
Name: "Input",
Fields: []FieldDef{
{Name: "Headers", Type: "map[string][]string", JSONTag: "headers,omitempty", OmitEmpty: true},
},
},
{
Name: "Output",
Fields: []FieldDef{
{Name: "Value", Type: "string", JSONTag: "value"},
},
},
},
}
schema, err := GenerateSchema(capability)
Expect(err).NotTo(HaveOccurred())
Expect(ValidateXTPSchema(schema)).To(Succeed())
doc := parseSchema(schema)
components := doc["components"].(map[string]any)
schemas := components["schemas"].(map[string]any)
input := schemas["Input"].(map[string]any)
props := input["properties"].(map[string]any)
headers := props["headers"].(map[string]any)
Expect(headers).To(HaveKey("additionalProperties"))
addlProps := headers["additionalProperties"].(map[string]any)
Expect(addlProps["type"]).To(Equal("array"))
items := addlProps["items"].(map[string]any)
Expect(items["type"]).To(Equal("string"))
})
It("should render map[string]string as object with string additionalProperties", func() {
capability := Capability{
Name: "map_string_test",
SourceFile: "map_string_test",
Methods: []Export{
{ExportName: "test", Input: NewParam("input", "Input"), Output: NewParam("output", "Output")},
},
Structs: []StructDef{
{
Name: "Input",
Fields: []FieldDef{
{Name: "Metadata", Type: "map[string]string", JSONTag: "metadata,omitempty", OmitEmpty: true},
},
},
{
Name: "Output",
Fields: []FieldDef{
{Name: "Value", Type: "string", JSONTag: "value"},
},
},
},
}
schema, err := GenerateSchema(capability)
Expect(err).NotTo(HaveOccurred())
Expect(ValidateXTPSchema(schema)).To(Succeed())
doc := parseSchema(schema)
components := doc["components"].(map[string]any)
schemas := components["schemas"].(map[string]any)
input := schemas["Input"].(map[string]any)
props := input["properties"].(map[string]any)
metadata := props["metadata"].(map[string]any)
Expect(metadata).To(HaveKey("additionalProperties"))
addlProps := metadata["additionalProperties"].(map[string]any)
Expect(addlProps["type"]).To(Equal("string"))
})
})
Describe("parseMapValueType", func() {
DescribeTable("should extract value type from Go map types",
func(goType, wantValue string) {
Expect(parseMapValueType(goType)).To(Equal(wantValue))
},
Entry("map[string]string", "map[string]string", "string"),
Entry("map[string]int", "map[string]int", "int"),
Entry("map[string][]string", "map[string][]string", "[]string"),
Entry("map[string][]byte", "map[string][]byte", "[]byte"),
)
})
}) })

View File

@@ -26,19 +26,27 @@ const subsonicAPIVersion = "1.16.1"
// URL Format: Only the path and query parameters are used - host/protocol are ignored. // URL Format: Only the path and query parameters are used - host/protocol are ignored.
// Automatic Parameters: The service adds 'c' (client), 'v' (version), and optionally 'f' (format). // Automatic Parameters: The service adds 'c' (client), 'v' (version), and optionally 'f' (format).
type subsonicAPIServiceImpl struct { type subsonicAPIServiceImpl struct {
pluginName string pluginID string
router SubsonicRouter router SubsonicRouter
ds model.DataStore ds model.DataStore
userAccess UserAccess allowedUserIDs []string // User IDs this plugin can access (from DB configuration)
allUsers bool // If true, plugin can access all users
userIDMap map[string]struct{}
} }
// newSubsonicAPIService creates a new SubsonicAPIService for a plugin. // newSubsonicAPIService creates a new SubsonicAPIService for a plugin.
func newSubsonicAPIService(pluginName string, router SubsonicRouter, ds model.DataStore, userAccess UserAccess) host.SubsonicAPIService { func newSubsonicAPIService(pluginID string, router SubsonicRouter, ds model.DataStore, allowedUserIDs []string, allUsers bool) host.SubsonicAPIService {
userIDMap := make(map[string]struct{})
for _, id := range allowedUserIDs {
userIDMap[id] = struct{}{}
}
return &subsonicAPIServiceImpl{ return &subsonicAPIServiceImpl{
pluginName: pluginName, pluginID: pluginID,
router: router, router: router,
ds: ds, ds: ds,
userAccess: userAccess, allowedUserIDs: allowedUserIDs,
allUsers: allUsers,
userIDMap: userIDMap,
} }
} }
@@ -66,12 +74,12 @@ func (s *subsonicAPIServiceImpl) executeRequest(ctx context.Context, uri string,
} }
if err := s.checkPermissions(ctx, username); err != nil { if err := s.checkPermissions(ctx, username); err != nil {
log.Warn(ctx, "SubsonicAPI call blocked by permissions", "plugin", s.pluginName, "user", username, err) log.Warn(ctx, "SubsonicAPI call blocked by permissions", "plugin", s.pluginID, "user", username, err)
return nil, err return nil, err
} }
// Add required Subsonic API parameters // Add required Subsonic API parameters
query.Set("c", s.pluginName) // Client name (plugin ID) query.Set("c", s.pluginID) // Client name (plugin ID)
query.Set("v", subsonicAPIVersion) // API version query.Set("v", subsonicAPIVersion) // API version
if setJSON { if setJSON {
query.Set("f", "json") // Response format query.Set("f", "json") // Response format
@@ -86,8 +94,11 @@ func (s *subsonicAPIServiceImpl) executeRequest(ctx context.Context, uri string,
RawQuery: query.Encode(), RawQuery: query.Encode(),
} }
// Use http.NewRequest (not WithContext) to avoid inheriting Chi RouteContext; // Create HTTP request with a fresh context to avoid Chi RouteContext pollution.
// auth context is set explicitly below via request.WithInternalAuth. // Using http.NewRequest (instead of http.NewRequestWithContext) ensures the internal
// SubsonicAPI call doesn't inherit routing information from the parent handler,
// which would cause Chi to invoke the wrong handler. Authentication context is
// explicitly added in the next step via request.WithInternalAuth.
httpReq, err := http.NewRequest("GET", finalURL.String(), nil) httpReq, err := http.NewRequest("GET", finalURL.String(), nil)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to create HTTP request: %w", err) return nil, fmt.Errorf("failed to create HTTP request: %w", err)
@@ -124,13 +135,14 @@ func (s *subsonicAPIServiceImpl) CallRaw(ctx context.Context, uri string) (strin
} }
func (s *subsonicAPIServiceImpl) checkPermissions(ctx context.Context, username string) error { func (s *subsonicAPIServiceImpl) checkPermissions(ctx context.Context, username string) error {
if s.userAccess.allUsers { // If allUsers is true, allow any user
if s.allUsers {
return nil return nil
} }
// Must have at least one allowed user configured // Must have at least one allowed user ID configured
if !s.userAccess.HasConfiguredUsers() { if len(s.allowedUserIDs) == 0 {
return fmt.Errorf("no users configured for plugin %s", s.pluginName) return fmt.Errorf("no users configured for plugin %s", s.pluginID)
} }
// Look up the user by username to get their ID // Look up the user by username to get their ID
@@ -143,7 +155,7 @@ func (s *subsonicAPIServiceImpl) checkPermissions(ctx context.Context, username
} }
// Check if the user's ID is in the allowed list // Check if the user's ID is in the allowed list
if !s.userAccess.IsAllowed(usr.ID) { if _, ok := s.userIDMap[usr.ID]; !ok {
return fmt.Errorf("user %s is not authorized for this plugin", username) return fmt.Errorf("user %s is not authorized for this plugin", username)
} }

View File

@@ -268,7 +268,7 @@ var _ = Describe("SubsonicAPIService", func() {
Context("with specific user IDs allowed", func() { Context("with specific user IDs allowed", func() {
It("blocks users not in the allowed list", func() { It("blocks users not in the allowed list", func() {
// allowedUserIDs contains "user2", but testuser is "user1" // allowedUserIDs contains "user2", but testuser is "user1"
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(false, []string{"user2"})) service := newSubsonicAPIService("test-plugin", router, dataStore, []string{"user2"}, false)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, err := service.Call(ctx, "/ping?u=testuser") _, err := service.Call(ctx, "/ping?u=testuser")
@@ -278,7 +278,7 @@ var _ = Describe("SubsonicAPIService", func() {
It("allows users in the allowed list", func() { It("allows users in the allowed list", func() {
// allowedUserIDs contains "user2" which is "alloweduser" // allowedUserIDs contains "user2" which is "alloweduser"
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(false, []string{"user2"})) service := newSubsonicAPIService("test-plugin", router, dataStore, []string{"user2"}, false)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
response, err := service.Call(ctx, "/ping?u=alloweduser") response, err := service.Call(ctx, "/ping?u=alloweduser")
@@ -288,7 +288,7 @@ var _ = Describe("SubsonicAPIService", func() {
It("blocks admin users when not in allowed list", func() { It("blocks admin users when not in allowed list", func() {
// allowedUserIDs only contains "user1" (testuser), not "admin1" // allowedUserIDs only contains "user1" (testuser), not "admin1"
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(false, []string{"user1"})) service := newSubsonicAPIService("test-plugin", router, dataStore, []string{"user1"}, false)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, err := service.Call(ctx, "/ping?u=adminuser") _, err := service.Call(ctx, "/ping?u=adminuser")
@@ -298,7 +298,7 @@ var _ = Describe("SubsonicAPIService", func() {
It("allows admin users when in allowed list", func() { It("allows admin users when in allowed list", func() {
// allowedUserIDs contains "admin1" // allowedUserIDs contains "admin1"
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(false, []string{"admin1"})) service := newSubsonicAPIService("test-plugin", router, dataStore, []string{"admin1"}, false)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
response, err := service.Call(ctx, "/ping?u=adminuser") response, err := service.Call(ctx, "/ping?u=adminuser")
@@ -309,7 +309,7 @@ var _ = Describe("SubsonicAPIService", func() {
Context("with allUsers=true", func() { Context("with allUsers=true", func() {
It("allows all users regardless of allowed list", func() { It("allows all users regardless of allowed list", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", router, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
response, err := service.Call(ctx, "/ping?u=testuser") response, err := service.Call(ctx, "/ping?u=testuser")
@@ -318,7 +318,7 @@ var _ = Describe("SubsonicAPIService", func() {
}) })
It("allows admin users when allUsers is true", func() { It("allows admin users when allUsers is true", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", router, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
response, err := service.Call(ctx, "/ping?u=adminuser") response, err := service.Call(ctx, "/ping?u=adminuser")
@@ -329,7 +329,7 @@ var _ = Describe("SubsonicAPIService", func() {
Context("with no users configured", func() { Context("with no users configured", func() {
It("returns error when no users are configured", func() { It("returns error when no users are configured", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(false, nil)) service := newSubsonicAPIService("test-plugin", router, dataStore, nil, false)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, err := service.Call(ctx, "/ping?u=testuser") _, err := service.Call(ctx, "/ping?u=testuser")
@@ -338,7 +338,7 @@ var _ = Describe("SubsonicAPIService", func() {
}) })
It("returns error for empty user list", func() { It("returns error for empty user list", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(false, []string{})) service := newSubsonicAPIService("test-plugin", router, dataStore, []string{}, false)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, err := service.Call(ctx, "/ping?u=testuser") _, err := service.Call(ctx, "/ping?u=testuser")
@@ -350,7 +350,7 @@ var _ = Describe("SubsonicAPIService", func() {
Describe("URL Handling", func() { Describe("URL Handling", func() {
It("returns error for missing username parameter", func() { It("returns error for missing username parameter", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", router, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, err := service.Call(ctx, "/ping") _, err := service.Call(ctx, "/ping")
@@ -359,7 +359,7 @@ var _ = Describe("SubsonicAPIService", func() {
}) })
It("returns error for invalid URL", func() { It("returns error for invalid URL", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", router, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, err := service.Call(ctx, "://invalid") _, err := service.Call(ctx, "://invalid")
@@ -368,7 +368,7 @@ var _ = Describe("SubsonicAPIService", func() {
}) })
It("extracts endpoint from path correctly", func() { It("extracts endpoint from path correctly", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(false, []string{"user1"})) service := newSubsonicAPIService("test-plugin", router, dataStore, []string{"user1"}, false)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, err := service.Call(ctx, "/rest/ping.view?u=testuser") _, err := service.Call(ctx, "/rest/ping.view?u=testuser")
@@ -381,7 +381,7 @@ var _ = Describe("SubsonicAPIService", func() {
Describe("CallRaw", func() { Describe("CallRaw", func() {
It("returns binary data and content-type", func() { It("returns binary data and content-type", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", router, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
contentType, data, err := service.CallRaw(ctx, "/getCoverArt?u=testuser&id=al-1") contentType, data, err := service.CallRaw(ctx, "/getCoverArt?u=testuser&id=al-1")
@@ -391,7 +391,7 @@ var _ = Describe("SubsonicAPIService", func() {
}) })
It("does not set f=json parameter", func() { It("does not set f=json parameter", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", router, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, _, err := service.CallRaw(ctx, "/getCoverArt?u=testuser&id=al-1") _, _, err := service.CallRaw(ctx, "/getCoverArt?u=testuser&id=al-1")
@@ -403,7 +403,7 @@ var _ = Describe("SubsonicAPIService", func() {
}) })
It("enforces permission checks", func() { It("enforces permission checks", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(false, []string{"user2"})) service := newSubsonicAPIService("test-plugin", router, dataStore, []string{"user2"}, false)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, _, err := service.CallRaw(ctx, "/getCoverArt?u=testuser&id=al-1") _, _, err := service.CallRaw(ctx, "/getCoverArt?u=testuser&id=al-1")
@@ -412,7 +412,7 @@ var _ = Describe("SubsonicAPIService", func() {
}) })
It("returns error when username is missing", func() { It("returns error when username is missing", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", router, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, _, err := service.CallRaw(ctx, "/getCoverArt") _, _, err := service.CallRaw(ctx, "/getCoverArt")
@@ -421,7 +421,7 @@ var _ = Describe("SubsonicAPIService", func() {
}) })
It("returns error when router is nil", func() { It("returns error when router is nil", func() {
service := newSubsonicAPIService("test-plugin", nil, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", nil, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, _, err := service.CallRaw(ctx, "/getCoverArt?u=testuser") _, _, err := service.CallRaw(ctx, "/getCoverArt?u=testuser")
@@ -430,7 +430,7 @@ var _ = Describe("SubsonicAPIService", func() {
}) })
It("returns error for invalid URL", func() { It("returns error for invalid URL", func() {
service := newSubsonicAPIService("test-plugin", router, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", router, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, _, err := service.CallRaw(ctx, "://invalid") _, _, err := service.CallRaw(ctx, "://invalid")
@@ -441,7 +441,7 @@ var _ = Describe("SubsonicAPIService", func() {
Describe("Router Availability", func() { Describe("Router Availability", func() {
It("returns error when router is nil", func() { It("returns error when router is nil", func() {
service := newSubsonicAPIService("test-plugin", nil, dataStore, NewUserAccess(true, nil)) service := newSubsonicAPIService("test-plugin", nil, dataStore, nil, true)
ctx := GinkgoT().Context() ctx := GinkgoT().Context()
_, err := service.Call(ctx, "/ping?u=testuser") _, err := service.Call(ctx, "/ping?u=testuser")

View File

@@ -9,14 +9,16 @@ import (
) )
type usersServiceImpl struct { type usersServiceImpl struct {
ds model.DataStore ds model.DataStore
userAccess UserAccess allowedUsers []string // User IDs this plugin can access
allUsers bool // If true, plugin can access all users
} }
func newUsersService(ds model.DataStore, userAccess UserAccess) host.UsersService { func newUsersService(ds model.DataStore, allowedUsers []string, allUsers bool) host.UsersService {
return &usersServiceImpl{ return &usersServiceImpl{
ds: ds, ds: ds,
userAccess: userAccess, allowedUsers: allowedUsers,
allUsers: allUsers,
} }
} }
@@ -26,9 +28,17 @@ func (s *usersServiceImpl) GetUsers(ctx context.Context) ([]host.User, error) {
return nil, err return nil, err
} }
// Build allowed users map for efficient lookup
allowedMap := make(map[string]bool, len(s.allowedUsers))
for _, id := range s.allowedUsers {
allowedMap[id] = true
}
var result []host.User var result []host.User
for _, u := range users { for _, u := range users {
if s.userAccess.IsAllowed(u.ID) { // If allUsers is true, include all users
// Otherwise, only include users in the allowed list
if s.allUsers || allowedMap[u.ID] {
result = append(result, host.User{ result = append(result, host.User{
UserName: u.UserName, UserName: u.UserName,
Name: u.Name, Name: u.Name,

View File

@@ -61,7 +61,7 @@ var _ = Describe("UsersService", Ordered, func() {
Context("with allUsers=true", func() { Context("with allUsers=true", func() {
BeforeEach(func() { BeforeEach(func() {
service = newUsersService(ds, NewUserAccess(true, nil)) service = newUsersService(ds, nil, true)
}) })
It("should return all users", func() { It("should return all users", func() {
@@ -100,7 +100,7 @@ var _ = Describe("UsersService", Ordered, func() {
Context("with specific allowed users", func() { Context("with specific allowed users", func() {
BeforeEach(func() { BeforeEach(func() {
// Only allow access to user1 and user3 // Only allow access to user1 and user3
service = newUsersService(ds, NewUserAccess(false, []string{"user1", "user3"})) service = newUsersService(ds, []string{"user1", "user3"}, false)
}) })
It("should return only allowed users", func() { It("should return only allowed users", func() {
@@ -119,7 +119,7 @@ var _ = Describe("UsersService", Ordered, func() {
Context("with empty allowed users and allUsers=false", func() { Context("with empty allowed users and allUsers=false", func() {
BeforeEach(func() { BeforeEach(func() {
service = newUsersService(ds, NewUserAccess(false, []string{})) service = newUsersService(ds, []string{}, false)
}) })
It("should return no users", func() { It("should return no users", func() {
@@ -132,7 +132,7 @@ var _ = Describe("UsersService", Ordered, func() {
Context("when datastore returns error", func() { Context("when datastore returns error", func() {
BeforeEach(func() { BeforeEach(func() {
mockUserRepo.Error = model.ErrNotFound mockUserRepo.Error = model.ErrNotFound
service = newUsersService(ds, NewUserAccess(true, nil)) service = newUsersService(ds, nil, true)
}) })
It("should propagate the error", func() { It("should propagate the error", func() {
@@ -170,7 +170,7 @@ var _ = Describe("UsersService", Ordered, func() {
Context("with allUsers=true", func() { Context("with allUsers=true", func() {
BeforeEach(func() { BeforeEach(func() {
service = newUsersService(ds, NewUserAccess(true, nil)) service = newUsersService(ds, nil, true)
}) })
It("should return only admin users", func() { It("should return only admin users", func() {
@@ -185,7 +185,7 @@ var _ = Describe("UsersService", Ordered, func() {
Context("with specific allowed users including admin", func() { Context("with specific allowed users including admin", func() {
BeforeEach(func() { BeforeEach(func() {
// Allow access to user1 (admin) and user2 (non-admin) // Allow access to user1 (admin) and user2 (non-admin)
service = newUsersService(ds, NewUserAccess(false, []string{"user1", "user2"})) service = newUsersService(ds, []string{"user1", "user2"}, false)
}) })
It("should return only admin users from allowed list", func() { It("should return only admin users from allowed list", func() {
@@ -199,7 +199,7 @@ var _ = Describe("UsersService", Ordered, func() {
Context("with specific allowed users excluding admin", func() { Context("with specific allowed users excluding admin", func() {
BeforeEach(func() { BeforeEach(func() {
// Only allow access to non-admin users // Only allow access to non-admin users
service = newUsersService(ds, NewUserAccess(false, []string{"user2", "user3"})) service = newUsersService(ds, []string{"user2", "user3"}, false)
}) })
It("should return empty when no admins in allowed list", func() { It("should return empty when no admins in allowed list", func() {
@@ -212,7 +212,7 @@ var _ = Describe("UsersService", Ordered, func() {
Context("when datastore returns error", func() { Context("when datastore returns error", func() {
BeforeEach(func() { BeforeEach(func() {
mockUserRepo.Error = model.ErrNotFound mockUserRepo.Error = model.ErrNotFound
service = newUsersService(ds, NewUserAccess(true, nil)) service = newUsersService(ds, nil, true)
}) })
It("should propagate the error", func() { It("should propagate the error", func() {

View File

@@ -1,189 +0,0 @@
package plugins
import (
"io"
"net/http"
"github.com/go-chi/chi/v5"
"github.com/go-chi/httprate"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/plugins/capabilities"
)
// maxEndpointBodySize caps how much of a plugin endpoint request body is read.
const maxEndpointBodySize = 1 << 20 // 1MB

// SubsonicAuthValidator validates Subsonic authentication and returns the user.
// This is set by the cmd/ package to avoid import cycles (plugins -> server/subsonic).
type SubsonicAuthValidator func(ds model.DataStore, r *http.Request) (*model.User, error)

// NativeAuthMiddleware is an HTTP middleware factory that authenticates using JWT tokens.
// This is set by the cmd/ package to avoid import cycles (plugins -> server).
type NativeAuthMiddleware func(ds model.DataStore) func(next http.Handler) http.Handler
// NewEndpointRouter creates an HTTP handler that dispatches requests to plugin endpoints.
// It should be mounted at both /ext and /rest/ext. The handler uses a catch-all pattern
// because Chi does not support adding routes after startup, and plugins can be loaded/unloaded
// at runtime. Plugin lookup happens per-request under RLock.
func NewEndpointRouter(manager *Manager, ds model.DataStore, subsonicAuth SubsonicAuthValidator, nativeAuth NativeAuthMiddleware) http.Handler {
	router := chi.NewRouter()

	// Optional per-IP rate limiting, driven by server configuration.
	if limit := conf.Server.Plugins.EndpointRequestLimit; limit > 0 {
		router.Use(httprate.LimitByIP(limit, conf.Server.Plugins.EndpointRequestWindow))
	}

	handler := &endpointHandler{
		manager:      manager,
		ds:           ds,
		subsonicAuth: subsonicAuth,
		nativeAuth:   nativeAuth,
	}

	// Both patterns funnel into the same handler: the bare pattern matches the
	// plugin root, the trailing wildcard captures any sub-path.
	router.HandleFunc("/{pluginID}", handler.ServeHTTP)
	router.HandleFunc("/{pluginID}/*", handler.ServeHTTP)
	return router
}
// endpointHandler dispatches HTTP requests to plugin endpoints. It looks up the
// target plugin per request (under the manager's RLock) and applies the auth
// scheme declared in the plugin's manifest before invoking the plugin.
type endpointHandler struct {
	manager      *Manager              // plugin registry; consulted per request under RLock
	ds           model.DataStore       // passed to both auth validators
	subsonicAuth SubsonicAuthValidator // validates Subsonic credentials (injected by cmd/)
	nativeAuth   NativeAuthMiddleware  // JWT auth middleware factory (injected by cmd/)
}
// ServeHTTP resolves the target plugin from the URL, verifies that it exposes an
// HTTP endpoint, and routes the request through the auth scheme declared in its
// manifest.
func (h *endpointHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	id := chi.URLParam(r, "pluginID")

	h.manager.mu.RLock()
	p, found := h.manager.plugins[id]
	h.manager.mu.RUnlock()

	// Unknown plugin, missing capability, or missing endpoint permissions all
	// present as a plain 404, so callers cannot probe which plugins exist.
	if !found || !hasCapability(p.capabilities, CapabilityHTTPEndpoint) {
		http.NotFound(w, r)
		return
	}
	if p.manifest.Permissions == nil || p.manifest.Permissions.Endpoints == nil {
		http.NotFound(w, r)
		return
	}

	switch p.manifest.Permissions.Endpoints.Auth {
	case EndpointsPermissionAuthSubsonic:
		h.serveWithSubsonicAuth(w, r, p)
	case EndpointsPermissionAuthNative:
		h.serveWithNativeAuth(w, r, p)
	case EndpointsPermissionAuthNone:
		h.dispatch(w, r, p)
	default:
		http.Error(w, "Unknown auth type", http.StatusInternalServerError)
	}
}
// serveWithSubsonicAuth authenticates the request using Subsonic credentials
// and, on success, forwards it to dispatch with the user attached to the context.
func (h *endpointHandler) serveWithSubsonicAuth(w http.ResponseWriter, r *http.Request, p *plugin) {
	usr, err := h.subsonicAuth(h.ds, r)
	if err != nil {
		log.Warn(r.Context(), "Plugin endpoint auth failed", "plugin", p.name, "auth", "subsonic", err)
		http.Error(w, "Unauthorized", http.StatusUnauthorized)
		return
	}
	h.dispatch(w, r.WithContext(request.WithUser(r.Context(), *usr)), p)
}
// serveWithNativeAuth wraps dispatch in the native (JWT) auth middleware; the
// middleware writes the 401 response itself when authentication fails.
func (h *endpointHandler) serveWithNativeAuth(w http.ResponseWriter, r *http.Request, p *plugin) {
	next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		h.dispatch(w, r, p)
	})
	h.nativeAuth(h.ds)(next).ServeHTTP(w, r)
}
// dispatch forwards the (already authenticated, when required) request to the
// plugin's HTTPHandleRequest function and relays the plugin's response back to
// the client.
//
// For auth schemes other than "none" it enforces the plugin's per-user access
// list and passes the caller's identity to the plugin; public ("none")
// endpoints never receive user information.
func (h *endpointHandler) dispatch(w http.ResponseWriter, r *http.Request, p *plugin) {
	ctx := r.Context()
	// Check user authorization and extract user info (skip for auth:"none")
	var httpUser *capabilities.HTTPUser
	if p.manifest.Permissions.Endpoints.Auth != EndpointsPermissionAuthNone {
		user, ok := request.UserFrom(ctx)
		if !ok {
			http.Error(w, "Unauthorized", http.StatusUnauthorized)
			return
		}
		if !p.userAccess.IsAllowed(user.ID) {
			log.Warn(ctx, "Plugin endpoint access denied", "plugin", p.name, "user", user.UserName)
			http.Error(w, "Forbidden", http.StatusForbidden)
			return
		}
		httpUser = &capabilities.HTTPUser{
			ID:       user.ID,
			Username: user.UserName,
			Name:     user.Name,
			IsAdmin:  user.IsAdmin,
		}
	}
	// Read the request body with a hard size limit. MaxBytesReader (unlike a
	// plain LimitReader) returns an error on oversized payloads instead of
	// silently truncating them, so the plugin never sees partial data.
	body, err := io.ReadAll(http.MaxBytesReader(w, r.Body, maxEndpointBodySize))
	if err != nil {
		log.Error(ctx, "Failed to read request body", "plugin", p.name, err)
		http.Error(w, "Failed to read request body", http.StatusBadRequest)
		return
	}
	// Build the plugin request
	// Normalize path: both /ext/plugin and /ext/plugin/ map to ""
	rawPath := chi.URLParam(r, "*")
	relPath := ""
	if rawPath != "" {
		relPath = "/" + rawPath
	}
	pluginReq := capabilities.HTTPHandleRequest{
		Method:  r.Method,
		Path:    relPath,
		Query:   r.URL.RawQuery,
		Headers: r.Header,
		Body:    body,
		User:    httpUser,
	}
	// Call the plugin using binary framing for []byte Body fields
	resp, err := callPluginFunctionRaw(
		ctx, p, FuncHTTPHandleRequest,
		pluginReq, pluginReq.Body,
		func(r *capabilities.HTTPHandleResponse, raw []byte) { r.Body = raw },
	)
	if err != nil {
		log.Error(ctx, "Plugin endpoint call failed", "plugin", p.name, "path", relPath, err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return
	}
	// Write response headers from plugin
	for key, values := range resp.Headers {
		for _, v := range values {
			w.Header().Add(key, v)
		}
	}
	// Security hardening: override any plugin-set security headers
	w.Header().Set("X-Content-Type-Options", "nosniff")
	w.Header().Set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'; img-src data:; sandbox")
	// Write status code (default to 200)
	status := int(resp.Status)
	if status == 0 {
		status = http.StatusOK
	}
	w.WriteHeader(status)
	// Write response body
	if len(resp.Body) > 0 {
		if _, err := w.Write(resp.Body); err != nil {
			log.Error(ctx, "Failed to write plugin endpoint response", "plugin", p.name, err)
		}
	}
}

View File

@@ -1,480 +0,0 @@
//go:build !windows
package plugins
import (
"crypto/sha256"
"encoding/hex"
"encoding/json"
"io"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"strings"
"github.com/navidrome/navidrome/conf"
"github.com/navidrome/navidrome/conf/configtest"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/tests"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// fakeNativeAuth is a mock native auth middleware that authenticates by looking up
// the "X-Test-User" header and setting the user in the context.
func fakeNativeAuth(ds model.DataStore) func(next http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			name := r.Header.Get("X-Test-User")
			if name == "" {
				http.Error(w, "Not authenticated", http.StatusUnauthorized)
				return
			}
			usr, err := ds.User(r.Context()).FindByUsername(name)
			if err != nil {
				http.Error(w, "Not authenticated", http.StatusUnauthorized)
				return
			}
			ctx := request.WithUsername(request.WithUser(r.Context(), *usr), usr.UserName)
			next.ServeHTTP(w, r.WithContext(ctx))
		})
	}
}
// fakeSubsonicAuth is a mock subsonic auth that validates by looking up
// the "u" query parameter.
func fakeSubsonicAuth(ds model.DataStore, r *http.Request) (*model.User, error) {
	name := r.URL.Query().Get("u")
	if name == "" {
		return nil, model.ErrInvalidAuth
	}
	usr, lookupErr := ds.User(r.Context()).FindByUsername(name)
	if lookupErr != nil {
		return nil, model.ErrInvalidAuth
	}
	return usr, nil
}
var _ = Describe("HTTP Endpoint Handler", Ordered, func() {
var (
manager *Manager
tmpDir string
userRepo *tests.MockedUserRepo
dataStore *tests.MockDataStore
router http.Handler
)
BeforeAll(func() {
var err error
tmpDir, err = os.MkdirTemp("", "http-endpoint-test-*")
Expect(err).ToNot(HaveOccurred())
// Copy all test plugins
for _, pluginName := range []string{"test-http-endpoint", "test-http-endpoint-public", "test-http-endpoint-native"} {
srcPath := filepath.Join(testdataDir, pluginName+PackageExtension)
destPath := filepath.Join(tmpDir, pluginName+PackageExtension)
data, err := os.ReadFile(srcPath)
Expect(err).ToNot(HaveOccurred())
err = os.WriteFile(destPath, data, 0600)
Expect(err).ToNot(HaveOccurred())
}
// Setup config
DeferCleanup(configtest.SetupConfig())
conf.Server.Plugins.Enabled = true
conf.Server.Plugins.Folder = tmpDir
conf.Server.Plugins.AutoReload = false
conf.Server.CacheFolder = filepath.Join(tmpDir, "cache")
// Setup mock data store
userRepo = tests.CreateMockUserRepo()
dataStore = &tests.MockDataStore{MockedUser: userRepo}
// Add test users
_ = userRepo.Put(&model.User{
ID: "user1",
UserName: "testuser",
Name: "Test User",
IsAdmin: false,
})
_ = userRepo.Put(&model.User{
ID: "admin1",
UserName: "adminuser",
Name: "Admin User",
IsAdmin: true,
})
// Build enabled plugins list
var enabledPlugins model.Plugins
for _, pluginName := range []string{"test-http-endpoint", "test-http-endpoint-public", "test-http-endpoint-native"} {
pluginPath := filepath.Join(tmpDir, pluginName+PackageExtension)
data, err := os.ReadFile(pluginPath)
Expect(err).ToNot(HaveOccurred())
hash := sha256.Sum256(data)
hashHex := hex.EncodeToString(hash[:])
enabledPlugins = append(enabledPlugins, model.Plugin{
ID: pluginName,
Path: pluginPath,
SHA256: hashHex,
Enabled: true,
AllUsers: true,
})
}
// Setup mock plugin repo
mockPluginRepo := dataStore.Plugin(GinkgoT().Context()).(*tests.MockPluginRepo)
mockPluginRepo.Permitted = true
mockPluginRepo.SetData(enabledPlugins)
// Create and start manager
manager = &Manager{
plugins: make(map[string]*plugin),
ds: dataStore,
metrics: noopMetricsRecorder{},
subsonicRouter: http.NotFoundHandler(),
}
err = manager.Start(GinkgoT().Context())
Expect(err).ToNot(HaveOccurred())
// Create the endpoint router with fake auth functions
router = NewEndpointRouter(manager, dataStore, fakeSubsonicAuth, fakeNativeAuth)
DeferCleanup(func() {
_ = manager.Stop()
_ = os.RemoveAll(tmpDir)
})
})
Describe("Plugin Loading", func() {
It("loads the authenticated endpoint plugin", func() {
manager.mu.RLock()
p := manager.plugins["test-http-endpoint"]
manager.mu.RUnlock()
Expect(p).ToNot(BeNil())
Expect(p.manifest.Name).To(Equal("Test HTTP Endpoint Plugin"))
Expect(p.manifest.Permissions.Endpoints).ToNot(BeNil())
Expect(string(p.manifest.Permissions.Endpoints.Auth)).To(Equal("subsonic"))
Expect(hasCapability(p.capabilities, CapabilityHTTPEndpoint)).To(BeTrue())
})
It("loads the native auth endpoint plugin", func() {
manager.mu.RLock()
p := manager.plugins["test-http-endpoint-native"]
manager.mu.RUnlock()
Expect(p).ToNot(BeNil())
Expect(p.manifest.Name).To(Equal("Test HTTP Endpoint Native Plugin"))
Expect(p.manifest.Permissions.Endpoints).ToNot(BeNil())
Expect(string(p.manifest.Permissions.Endpoints.Auth)).To(Equal("native"))
Expect(hasCapability(p.capabilities, CapabilityHTTPEndpoint)).To(BeTrue())
})
It("loads the public endpoint plugin", func() {
manager.mu.RLock()
p := manager.plugins["test-http-endpoint-public"]
manager.mu.RUnlock()
Expect(p).ToNot(BeNil())
Expect(p.manifest.Name).To(Equal("Test HTTP Endpoint Public Plugin"))
Expect(p.manifest.Permissions.Endpoints).ToNot(BeNil())
Expect(string(p.manifest.Permissions.Endpoints.Auth)).To(Equal("none"))
Expect(hasCapability(p.capabilities, CapabilityHTTPEndpoint)).To(BeTrue())
})
})
Describe("Subsonic Auth Endpoints", func() {
It("returns hello response with valid auth", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint/hello?u=testuser", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Body.String()).To(Equal("Hello from plugin!"))
Expect(w.Header().Get("Content-Type")).To(Equal("text/plain"))
})
It("returns echo response with request details", func() {
req := httptest.NewRequest("POST", "/test-http-endpoint/echo?u=testuser&foo=bar", strings.NewReader("test body"))
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Header().Get("Content-Type")).To(Equal("application/json"))
var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
Expect(err).ToNot(HaveOccurred())
Expect(resp["method"]).To(Equal("POST"))
Expect(resp["path"]).To(Equal("/echo"))
Expect(resp["body"]).To(Equal("test body"))
Expect(resp["hasUser"]).To(BeTrue())
Expect(resp["username"]).To(Equal("testuser"))
})
It("returns plugin-defined error status", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint/error?u=testuser", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusInternalServerError))
Expect(w.Body.String()).To(Equal("Something went wrong"))
})
It("returns plugin 404 for unknown paths", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint/unknown?u=testuser", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusNotFound))
Expect(w.Body.String()).To(Equal("Not found: /unknown"))
})
It("returns 401 without auth credentials", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint/hello", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusUnauthorized))
})
It("returns 401 with invalid auth credentials", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint/hello?u=nonexistent", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusUnauthorized))
})
})
Describe("Native Auth Endpoints", func() {
It("returns hello response with valid native auth", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint-native/hello", nil)
req.Header.Set("X-Test-User", "testuser")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Body.String()).To(Equal("Hello from native auth plugin!"))
Expect(w.Header().Get("Content-Type")).To(Equal("text/plain"))
})
It("returns echo response with user details", func() {
req := httptest.NewRequest("POST", "/test-http-endpoint-native/echo?foo=bar", strings.NewReader("native body"))
req.Header.Set("X-Test-User", "adminuser")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Header().Get("Content-Type")).To(Equal("application/json"))
var resp map[string]any
err := json.Unmarshal(w.Body.Bytes(), &resp)
Expect(err).ToNot(HaveOccurred())
Expect(resp["method"]).To(Equal("POST"))
Expect(resp["path"]).To(Equal("/echo"))
Expect(resp["body"]).To(Equal("native body"))
Expect(resp["hasUser"]).To(BeTrue())
Expect(resp["username"]).To(Equal("adminuser"))
})
It("returns 401 without auth header", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint-native/hello", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusUnauthorized))
})
It("returns 401 with invalid auth header", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint-native/hello", nil)
req.Header.Set("X-Test-User", "nonexistent")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusUnauthorized))
})
})
Describe("Public Endpoints (auth: none)", func() {
It("returns webhook response without auth", func() {
req := httptest.NewRequest("POST", "/test-http-endpoint-public/webhook", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Body.String()).To(Equal("webhook received"))
})
It("does not pass user info to public endpoints", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint-public/check-no-user", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Body.String()).To(Equal("hasUser=false"))
})
})
Describe("Security Headers", func() {
It("includes security headers in authenticated endpoint responses", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint/hello?u=testuser", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Header().Get("X-Content-Type-Options")).To(Equal("nosniff"))
Expect(w.Header().Get("Content-Security-Policy")).To(Equal("default-src 'none'; style-src 'unsafe-inline'; img-src data:; sandbox"))
})
It("includes security headers in public endpoint responses", func() {
req := httptest.NewRequest("POST", "/test-http-endpoint-public/webhook", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Header().Get("X-Content-Type-Options")).To(Equal("nosniff"))
Expect(w.Header().Get("Content-Security-Policy")).To(Equal("default-src 'none'; style-src 'unsafe-inline'; img-src data:; sandbox"))
})
It("overrides plugin-set security headers", func() {
req := httptest.NewRequest("POST", "/test-http-endpoint/echo?u=testuser", strings.NewReader("body"))
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Header().Get("X-Content-Type-Options")).To(Equal("nosniff"))
Expect(w.Header().Get("Content-Security-Policy")).To(Equal("default-src 'none'; style-src 'unsafe-inline'; img-src data:; sandbox"))
})
})
Describe("Unknown Plugin", func() {
It("returns 404 for nonexistent plugin", func() {
req := httptest.NewRequest("GET", "/nonexistent-plugin/hello", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusNotFound))
})
})
Describe("User Authorization", func() {
var restrictedRouter http.Handler
BeforeAll(func() {
// Create a manager with a plugin restricted to specific users
restrictedTmpDir, err := os.MkdirTemp("", "http-endpoint-restricted-test-*")
Expect(err).ToNot(HaveOccurred())
srcPath := filepath.Join(testdataDir, "test-http-endpoint"+PackageExtension)
destPath := filepath.Join(restrictedTmpDir, "test-http-endpoint"+PackageExtension)
data, err := os.ReadFile(srcPath)
Expect(err).ToNot(HaveOccurred())
err = os.WriteFile(destPath, data, 0600)
Expect(err).ToNot(HaveOccurred())
hash := sha256.Sum256(data)
hashHex := hex.EncodeToString(hash[:])
DeferCleanup(configtest.SetupConfig())
conf.Server.Plugins.Enabled = true
conf.Server.Plugins.Folder = restrictedTmpDir
conf.Server.Plugins.AutoReload = false
conf.Server.CacheFolder = filepath.Join(restrictedTmpDir, "cache")
restrictedPluginRepo := tests.CreateMockPluginRepo()
restrictedPluginRepo.Permitted = true
restrictedPluginRepo.SetData(model.Plugins{{
ID: "test-http-endpoint",
Path: destPath,
SHA256: hashHex,
Enabled: true,
AllUsers: false,
Users: `["admin1"]`, // Only admin1 is allowed
}})
restrictedDS := &tests.MockDataStore{
MockedPlugin: restrictedPluginRepo,
MockedUser: userRepo,
}
restrictedManager := &Manager{
plugins: make(map[string]*plugin),
ds: restrictedDS,
metrics: noopMetricsRecorder{},
subsonicRouter: http.NotFoundHandler(),
}
err = restrictedManager.Start(GinkgoT().Context())
Expect(err).ToNot(HaveOccurred())
restrictedRouter = NewEndpointRouter(restrictedManager, restrictedDS, fakeSubsonicAuth, fakeNativeAuth)
DeferCleanup(func() {
_ = restrictedManager.Stop()
_ = os.RemoveAll(restrictedTmpDir)
})
})
It("allows authorized users", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint/hello?u=adminuser", nil)
w := httptest.NewRecorder()
restrictedRouter.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Body.String()).To(Equal("Hello from plugin!"))
})
It("denies unauthorized users", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint/hello?u=testuser", nil)
w := httptest.NewRecorder()
restrictedRouter.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusForbidden))
})
})
Describe("Request without trailing path", func() {
It("handles requests to plugin root", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint-public/webhook", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
})
})
Describe("Binary Response", func() {
It("returns raw binary data intact", func() {
req := httptest.NewRequest("GET", "/test-http-endpoint/binary?u=testuser", nil)
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
Expect(w.Header().Get("Content-Type")).To(Equal("image/png"))
// PNG header bytes
Expect(w.Body.Bytes()).To(Equal([]byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A}))
})
})
Describe("Request body handling", func() {
It("passes request body to the plugin", func() {
body := `{"event":"push","ref":"refs/heads/main"}`
req := httptest.NewRequest("POST", "/test-http-endpoint/echo?u=testuser", strings.NewReader(body))
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
Expect(w.Code).To(Equal(http.StatusOK))
respBody, err := io.ReadAll(w.Body)
Expect(err).ToNot(HaveOccurred())
var resp map[string]any
err = json.Unmarshal(respBody, &resp)
Expect(err).ToNot(HaveOccurred())
Expect(resp["body"]).To(Equal(body))
})
})
})

View File

@@ -260,10 +260,19 @@ func (m *Manager) LoadScrobbler(name string) (scrobbler.Scrobbler, bool) {
return nil, false return nil, false
} }
// Create a new scrobbler adapter for this plugin // Build user ID map for fast lookups
userIDMap := make(map[string]struct{})
for _, id := range plugin.allowedUserIDs {
userIDMap[id] = struct{}{}
}
// Create a new scrobbler adapter for this plugin with user authorization config
return &ScrobblerPlugin{ return &ScrobblerPlugin{
name: plugin.name, name: plugin.name,
plugin: plugin, plugin: plugin,
allowedUserIDs: plugin.allowedUserIDs,
allUsers: plugin.allUsers,
userIDMap: userIDMap,
}, true }, true
} }

View File

@@ -2,7 +2,6 @@ package plugins
import ( import (
"context" "context"
"encoding/binary"
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
@@ -61,147 +60,39 @@ func callPluginFunction[I any, O any](ctx context.Context, plugin *plugin, funcN
startCall := time.Now() startCall := time.Now()
exit, output, err := p.CallWithContext(ctx, funcName, inputBytes) exit, output, err := p.CallWithContext(ctx, funcName, inputBytes)
elapsed := time.Since(startCall) elapsed := time.Since(startCall)
success := false
skipMetrics := false
defer func() {
if !skipMetrics {
plugin.metrics.RecordPluginRequest(ctx, plugin.name, funcName, success, elapsed.Milliseconds())
}
}()
if err != nil { if err != nil {
// If context was cancelled, return that error instead of the plugin error // If context was cancelled, return that error instead of the plugin error
if ctx.Err() != nil { if ctx.Err() != nil {
skipMetrics = true
log.Debug(ctx, "Plugin call cancelled", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed) log.Debug(ctx, "Plugin call cancelled", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed)
return result, ctx.Err() return result, ctx.Err()
} }
plugin.metrics.RecordPluginRequest(ctx, plugin.name, funcName, false, elapsed.Milliseconds())
log.Trace(ctx, "Plugin call failed", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed, "navidromeDuration", startCall.Sub(start), err) log.Trace(ctx, "Plugin call failed", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed, "navidromeDuration", startCall.Sub(start), err)
return result, fmt.Errorf("plugin call failed: %w", err) return result, fmt.Errorf("plugin call failed: %w", err)
} }
if exit != 0 { if exit != 0 {
if exit == notImplementedCode { if exit == notImplementedCode {
skipMetrics = true
log.Trace(ctx, "Plugin function not implemented", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed, "navidromeDuration", startCall.Sub(start)) log.Trace(ctx, "Plugin function not implemented", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed, "navidromeDuration", startCall.Sub(start))
// TODO Should we record metrics for not implemented calls?
//plugin.metrics.RecordPluginRequest(ctx, plugin.name, funcName, true, elapsed.Milliseconds())
return result, fmt.Errorf("%w: %s", errNotImplemented, funcName) return result, fmt.Errorf("%w: %s", errNotImplemented, funcName)
} }
plugin.metrics.RecordPluginRequest(ctx, plugin.name, funcName, false, elapsed.Milliseconds())
return result, fmt.Errorf("plugin call exited with code %d", exit) return result, fmt.Errorf("plugin call exited with code %d", exit)
} }
if len(output) > 0 { if len(output) > 0 {
if err = json.Unmarshal(output, &result); err != nil { err = json.Unmarshal(output, &result)
if err != nil {
log.Trace(ctx, "Plugin call failed", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed, "navidromeDuration", startCall.Sub(start), err) log.Trace(ctx, "Plugin call failed", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed, "navidromeDuration", startCall.Sub(start), err)
return result, err
} }
} }
success = true // Record metrics for successful calls (or JSON unmarshal failures)
plugin.metrics.RecordPluginRequest(ctx, plugin.name, funcName, err == nil, elapsed.Milliseconds())
log.Trace(ctx, "Plugin call succeeded", "plugin", plugin.name, "function", funcName, "pluginDuration", time.Since(startCall), "navidromeDuration", startCall.Sub(start)) log.Trace(ctx, "Plugin call succeeded", "plugin", plugin.name, "function", funcName, "pluginDuration", time.Since(startCall), "navidromeDuration", startCall.Sub(start))
return result, nil return result, err
}
// callPluginFunctionRaw calls a plugin function using binary framing for []byte fields.
// The input is JSON-encoded (with []byte field excluded via json:"-"), followed by raw bytes.
// The output frame is: [status:1B][json_len:4B][JSON][raw bytes] for success (0x00),
// or [0x01][UTF-8 error message] for errors.
func callPluginFunctionRaw[I any, O any](
ctx context.Context, plugin *plugin, funcName string,
input I, rawInputBytes []byte,
setRawOutput func(*O, []byte),
) (O, error) {
start := time.Now()
var result O
p, err := plugin.instance(ctx)
if err != nil {
return result, fmt.Errorf("failed to create plugin: %w", err)
}
defer p.Close(ctx)
if !p.FunctionExists(funcName) {
log.Trace(ctx, "Plugin function not found", "plugin", plugin.name, "function", funcName)
return result, fmt.Errorf("%w: %s", errFunctionNotFound, funcName)
}
// Build input frame: [json_len:4B][JSON][raw bytes]
jsonBytes, err := json.Marshal(input)
if err != nil {
return result, fmt.Errorf("failed to marshal input: %w", err)
}
const maxFrameSize = 2 << 20 // 2 MiB
if len(jsonBytes) > maxFrameSize || len(rawInputBytes) > maxFrameSize {
return result, fmt.Errorf("input frame too large")
}
frame := make([]byte, 4+len(jsonBytes)+len(rawInputBytes))
binary.BigEndian.PutUint32(frame[:4], uint32(len(jsonBytes)))
copy(frame[4:4+len(jsonBytes)], jsonBytes)
copy(frame[4+len(jsonBytes):], rawInputBytes)
startCall := time.Now()
exit, output, err := p.CallWithContext(ctx, funcName, frame)
elapsed := time.Since(startCall)
success := false
skipMetrics := false
defer func() {
if !skipMetrics {
plugin.metrics.RecordPluginRequest(ctx, plugin.name, funcName, success, elapsed.Milliseconds())
}
}()
if err != nil {
if ctx.Err() != nil {
skipMetrics = true
log.Debug(ctx, "Plugin call cancelled", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed)
return result, ctx.Err()
}
log.Trace(ctx, "Plugin call failed", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed, "navidromeDuration", startCall.Sub(start), err)
return result, fmt.Errorf("plugin call failed: %w", err)
}
if exit != 0 {
if exit == notImplementedCode {
skipMetrics = true
log.Trace(ctx, "Plugin function not implemented", "plugin", plugin.name, "function", funcName, "pluginDuration", elapsed, "navidromeDuration", startCall.Sub(start))
return result, fmt.Errorf("%w: %s", errNotImplemented, funcName)
}
return result, fmt.Errorf("plugin call exited with code %d", exit)
}
// Parse output frame
if len(output) < 1 {
return result, fmt.Errorf("empty response from plugin")
}
statusByte := output[0]
if statusByte == 0x01 {
return result, fmt.Errorf("plugin error: %s", string(output[1:]))
}
if statusByte != 0x00 {
return result, fmt.Errorf("unknown response status byte: 0x%02x", statusByte)
}
// Success frame: [0x00][json_len:4B][JSON][raw bytes]
if len(output) < 5 {
return result, fmt.Errorf("malformed success response from plugin")
}
jsonLen := binary.BigEndian.Uint32(output[1:5])
if uint32(len(output)-5) < jsonLen {
return result, fmt.Errorf("invalid json length in response frame: %d exceeds available %d bytes", jsonLen, len(output)-5)
}
jsonData := output[5 : 5+jsonLen]
rawData := output[5+jsonLen:]
if err := json.Unmarshal(jsonData, &result); err != nil {
return result, fmt.Errorf("failed to unmarshal response: %w", err)
}
setRawOutput(&result, rawData)
success = true
log.Trace(ctx, "Plugin call succeeded", "plugin", plugin.name, "function", funcName, "pluginDuration", time.Since(startCall), "navidromeDuration", startCall.Sub(start))
return result, nil
} }
// extismLogger is a helper to log messages from Extism plugins // extismLogger is a helper to log messages from Extism plugins

View File

@@ -24,9 +24,10 @@ type serviceContext struct {
manager *Manager manager *Manager
permissions *Permissions permissions *Permissions
config map[string]string config map[string]string
userAccess UserAccess // User authorization for this plugin allowedUsers []string // User IDs this plugin can access
allowedLibraries []int // Library IDs this plugin can access allUsers bool // If true, plugin can access all users
allLibraries bool // If true, plugin can access all libraries allowedLibraries []int // Library IDs this plugin can access
allLibraries bool // If true, plugin can access all libraries
} }
// hostServiceEntry defines a host service for table-driven registration. // hostServiceEntry defines a host service for table-driven registration.
@@ -51,7 +52,7 @@ var hostServices = []hostServiceEntry{
name: "SubsonicAPI", name: "SubsonicAPI",
hasPermission: func(p *Permissions) bool { return p != nil && p.Subsonicapi != nil }, hasPermission: func(p *Permissions) bool { return p != nil && p.Subsonicapi != nil },
create: func(ctx *serviceContext) ([]extism.HostFunction, io.Closer) { create: func(ctx *serviceContext) ([]extism.HostFunction, io.Closer) {
service := newSubsonicAPIService(ctx.pluginName, ctx.manager.subsonicRouter, ctx.manager.ds, ctx.userAccess) service := newSubsonicAPIService(ctx.pluginName, ctx.manager.subsonicRouter, ctx.manager.ds, ctx.allowedUsers, ctx.allUsers)
return host.RegisterSubsonicAPIHostFunctions(service), nil return host.RegisterSubsonicAPIHostFunctions(service), nil
}, },
}, },
@@ -114,7 +115,7 @@ var hostServices = []hostServiceEntry{
name: "Users", name: "Users",
hasPermission: func(p *Permissions) bool { return p != nil && p.Users != nil }, hasPermission: func(p *Permissions) bool { return p != nil && p.Users != nil },
create: func(ctx *serviceContext) ([]extism.HostFunction, io.Closer) { create: func(ctx *serviceContext) ([]extism.HostFunction, io.Closer) {
service := newUsersService(ctx.manager.ds, ctx.userAccess) service := newUsersService(ctx.manager.ds, ctx.allowedUsers, ctx.allUsers)
return host.RegisterUsersHostFunctions(service), nil return host.RegisterUsersHostFunctions(service), nil
}, },
}, },
@@ -301,14 +302,13 @@ func (m *Manager) loadPluginWithConfig(p *model.Plugin) error {
var hostFunctions []extism.HostFunction var hostFunctions []extism.HostFunction
var closers []io.Closer var closers []io.Closer
userAccess := NewUserAccess(p.AllUsers, allowedUsers)
svcCtx := &serviceContext{ svcCtx := &serviceContext{
pluginName: p.ID, pluginName: p.ID,
manager: m, manager: m,
permissions: pkg.Manifest.Permissions, permissions: pkg.Manifest.Permissions,
config: pluginConfig, config: pluginConfig,
userAccess: userAccess, allowedUsers: allowedUsers,
allUsers: p.AllUsers,
allowedLibraries: allowedLibraries, allowedLibraries: allowedLibraries,
allLibraries: p.AllLibraries, allLibraries: p.AllLibraries,
} }
@@ -361,14 +361,15 @@ func (m *Manager) loadPluginWithConfig(p *model.Plugin) error {
m.mu.Lock() m.mu.Lock()
m.plugins[p.ID] = &plugin{ m.plugins[p.ID] = &plugin{
name: p.ID, name: p.ID,
path: p.Path, path: p.Path,
manifest: pkg.Manifest, manifest: pkg.Manifest,
compiled: compiled, compiled: compiled,
capabilities: capabilities, capabilities: capabilities,
closers: closers, closers: closers,
metrics: m.metrics, metrics: m.metrics,
userAccess: userAccess, allowedUserIDs: allowedUsers,
allUsers: p.AllUsers,
} }
m.mu.Unlock() m.mu.Unlock()

View File

@@ -12,14 +12,15 @@ import (
// plugin represents a loaded plugin // plugin represents a loaded plugin
type plugin struct { type plugin struct {
name string // Plugin name (from filename) name string // Plugin name (from filename)
path string // Path to the wasm file path string // Path to the wasm file
manifest *Manifest manifest *Manifest
compiled *extism.CompiledPlugin compiled *extism.CompiledPlugin
capabilities []Capability // Auto-detected capabilities based on exported functions capabilities []Capability // Auto-detected capabilities based on exported functions
closers []io.Closer // Cleanup functions to call on unload closers []io.Closer // Cleanup functions to call on unload
metrics PluginMetricsRecorder metrics PluginMetricsRecorder
userAccess UserAccess // User authorization for this plugin allowedUserIDs []string // User IDs this plugin can access (from DB configuration)
allUsers bool // If true, plugin can access all users
} }
// instance creates a new plugin instance for the given context. // instance creates a new plugin instance for the given context.

View File

@@ -110,33 +110,6 @@
}, },
"users": { "users": {
"$ref": "#/$defs/UsersPermission" "$ref": "#/$defs/UsersPermission"
},
"endpoints": {
"$ref": "#/$defs/EndpointsPermission"
}
}
},
"EndpointsPermission": {
"type": "object",
"description": "HTTP endpoint permissions for registering custom HTTP endpoints on the Navidrome server. Requires 'users' permission when auth is 'native' or 'subsonic'.",
"additionalProperties": false,
"required": ["auth"],
"properties": {
"reason": {
"type": "string",
"description": "Explanation for why HTTP endpoint registration is needed"
},
"auth": {
"type": "string",
"enum": ["native", "subsonic", "none"],
"description": "Authentication type for plugin endpoints: 'native' (JWT), 'subsonic' (params), or 'none' (public/unauthenticated)"
},
"paths": {
"type": "array",
"description": "Declared endpoint paths (informational, for admin UI display). Relative to plugin base URL.",
"items": {
"type": "string"
}
} }
} }
}, },

View File

@@ -32,15 +32,6 @@ func (m *Manifest) Validate() error {
} }
} }
// Endpoints permission with auth 'native' or 'subsonic' requires users permission
if m.Permissions != nil && m.Permissions.Endpoints != nil {
if m.Permissions.Endpoints.Auth != EndpointsPermissionAuthNone {
if m.Permissions.Users == nil {
return fmt.Errorf("'endpoints' permission with auth '%s' requires 'users' permission to be declared", m.Permissions.Endpoints.Auth)
}
}
}
// Validate config schema if present // Validate config schema if present
if m.Config != nil && m.Config.Schema != nil { if m.Config != nil && m.Config.Schema != nil {
if err := validateConfigSchema(m.Config.Schema); err != nil { if err := validateConfigSchema(m.Config.Schema); err != nil {
@@ -73,14 +64,6 @@ func ValidateWithCapabilities(m *Manifest, capabilities []Capability) error {
return fmt.Errorf("scrobbler capability requires 'users' permission to be declared in manifest") return fmt.Errorf("scrobbler capability requires 'users' permission to be declared in manifest")
} }
} }
// HTTPEndpoint capability requires endpoints permission
if hasCapability(capabilities, CapabilityHTTPEndpoint) {
if m.Permissions == nil || m.Permissions.Endpoints == nil {
return fmt.Errorf("HTTP endpoint capability requires 'endpoints' permission to be declared in manifest")
}
}
return nil return nil
} }

View File

@@ -4,7 +4,6 @@ package plugins
import "encoding/json" import "encoding/json"
import "fmt" import "fmt"
import "reflect"
// Artwork service permissions for generating artwork URLs // Artwork service permissions for generating artwork URLs
type ArtworkPermission struct { type ArtworkPermission struct {
@@ -46,71 +45,6 @@ func (j *ConfigDefinition) UnmarshalJSON(value []byte) error {
return nil return nil
} }
// HTTP endpoint permissions for registering custom HTTP endpoints on the Navidrome
// server. Requires 'users' permission when auth is 'native' or 'subsonic'.
type EndpointsPermission struct {
// Authentication type for plugin endpoints: 'native' (JWT), 'subsonic' (params),
// or 'none' (public/unauthenticated)
Auth EndpointsPermissionAuth `json:"auth" yaml:"auth" mapstructure:"auth"`
// Declared endpoint paths (informational, for admin UI display). Relative to
// plugin base URL.
Paths []string `json:"paths,omitempty" yaml:"paths,omitempty" mapstructure:"paths,omitempty"`
// Explanation for why HTTP endpoint registration is needed
Reason *string `json:"reason,omitempty" yaml:"reason,omitempty" mapstructure:"reason,omitempty"`
}
type EndpointsPermissionAuth string
const EndpointsPermissionAuthNative EndpointsPermissionAuth = "native"
const EndpointsPermissionAuthNone EndpointsPermissionAuth = "none"
const EndpointsPermissionAuthSubsonic EndpointsPermissionAuth = "subsonic"
var enumValues_EndpointsPermissionAuth = []interface{}{
"native",
"subsonic",
"none",
}
// UnmarshalJSON implements json.Unmarshaler.
func (j *EndpointsPermissionAuth) UnmarshalJSON(value []byte) error {
var v string
if err := json.Unmarshal(value, &v); err != nil {
return err
}
var ok bool
for _, expected := range enumValues_EndpointsPermissionAuth {
if reflect.DeepEqual(v, expected) {
ok = true
break
}
}
if !ok {
return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_EndpointsPermissionAuth, v)
}
*j = EndpointsPermissionAuth(v)
return nil
}
// UnmarshalJSON implements json.Unmarshaler.
func (j *EndpointsPermission) UnmarshalJSON(value []byte) error {
var raw map[string]interface{}
if err := json.Unmarshal(value, &raw); err != nil {
return err
}
if _, ok := raw["auth"]; raw != nil && !ok {
return fmt.Errorf("field auth in EndpointsPermission: required")
}
type Plain EndpointsPermission
var plain Plain
if err := json.Unmarshal(value, &plain); err != nil {
return err
}
*j = EndpointsPermission(plain)
return nil
}
// Experimental features that may change or be removed in future versions // Experimental features that may change or be removed in future versions
type Experimental struct { type Experimental struct {
// Threads corresponds to the JSON schema field "threads". // Threads corresponds to the JSON schema field "threads".
@@ -232,9 +166,6 @@ type Permissions struct {
// Cache corresponds to the JSON schema field "cache". // Cache corresponds to the JSON schema field "cache".
Cache *CachePermission `json:"cache,omitempty" yaml:"cache,omitempty" mapstructure:"cache,omitempty"` Cache *CachePermission `json:"cache,omitempty" yaml:"cache,omitempty" mapstructure:"cache,omitempty"`
// Endpoints corresponds to the JSON schema field "endpoints".
Endpoints *EndpointsPermission `json:"endpoints,omitempty" yaml:"endpoints,omitempty" mapstructure:"endpoints,omitempty"`
// Http corresponds to the JSON schema field "http". // Http corresponds to the JSON schema field "http".
Http *HTTPPermission `json:"http,omitempty" yaml:"http,omitempty" mapstructure:"http,omitempty"` Http *HTTPPermission `json:"http,omitempty" yaml:"http,omitempty" mapstructure:"http,omitempty"`

View File

@@ -6,10 +6,3 @@ require (
github.com/extism/go-pdk v1.1.3 github.com/extism/go-pdk v1.1.3
github.com/stretchr/testify v1.11.1 github.com/stretchr/testify v1.11.1
) )
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/stretchr/objx v0.5.2 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

View File

@@ -1,126 +0,0 @@
// Code generated by ndpgen. DO NOT EDIT.
//
// This file contains export wrappers for the HTTPEndpoint capability.
// It is intended for use in Navidrome plugins built with TinyGo.
//
//go:build wasip1
package httpendpoint
import (
"encoding/binary"
"encoding/json"
"github.com/navidrome/navidrome/plugins/pdk/go/pdk"
)
// HTTPHandleRequest is the input provided when an HTTP request is dispatched to a plugin.
type HTTPHandleRequest struct {
// Method is the HTTP method (GET, POST, PUT, DELETE, PATCH, etc.).
Method string `json:"method"`
// Path is the request path relative to the plugin's base URL.
// For example, if the full URL is /ext/my-plugin/webhook, Path is "/webhook".
// Both /ext/my-plugin and /ext/my-plugin/ are normalized to Path = "".
Path string `json:"path"`
// Query is the raw query string without the leading '?'.
Query string `json:"query,omitempty"`
// Headers contains the HTTP request headers.
Headers map[string][]string `json:"headers,omitempty"`
// Body is the request body content.
Body []byte `json:"-"`
// User contains the authenticated user information. Nil for auth:"none" endpoints.
User *HTTPUser `json:"user,omitempty"`
}
// HTTPHandleResponse is the response returned by the plugin's HandleRequest function.
type HTTPHandleResponse struct {
// Status is the HTTP status code. Defaults to 200 if zero or not set.
Status int32 `json:"status,omitempty"`
// Headers contains the HTTP response headers to set.
Headers map[string][]string `json:"headers,omitempty"`
// Body is the response body content.
Body []byte `json:"-"`
}
// HTTPUser contains authenticated user information passed to the plugin.
type HTTPUser struct {
// ID is the internal Navidrome user ID.
ID string `json:"id"`
// Username is the user's login name.
Username string `json:"username"`
// Name is the user's display name.
Name string `json:"name"`
// IsAdmin indicates whether the user has admin privileges.
IsAdmin bool `json:"isAdmin"`
}
// HTTPEndpoint requires all methods to be implemented.
// HTTPEndpoint allows plugins to handle incoming HTTP requests.
// Plugins that declare the 'endpoints' permission must implement this capability.
// The host dispatches incoming HTTP requests to the plugin's HandleRequest function.
type HTTPEndpoint interface {
// HandleRequest - HandleRequest processes an incoming HTTP request and returns a response.
HandleRequest(HTTPHandleRequest) (HTTPHandleResponse, error)
} // Internal implementation holders
var (
handleRequestImpl func(HTTPHandleRequest) (HTTPHandleResponse, error)
)
// Register registers a httpendpoint implementation.
// All methods are required.
func Register(impl HTTPEndpoint) {
handleRequestImpl = impl.HandleRequest
}
// NotImplementedCode is the standard return code for unimplemented functions.
// The host recognizes this and skips the plugin gracefully.
const NotImplementedCode int32 = -2
//go:wasmexport nd_http_handle_request
func _NdHttpHandleRequest() int32 {
if handleRequestImpl == nil {
// Return standard code - host will skip this plugin gracefully
return NotImplementedCode
}
// Parse input frame: [json_len:4B][JSON without []byte field][raw bytes]
raw := pdk.Input()
if len(raw) < 4 {
pdk.SetErrorString("malformed input frame")
return -1
}
jsonLen := binary.BigEndian.Uint32(raw[:4])
if uint32(len(raw)-4) < jsonLen {
pdk.SetErrorString("invalid json length in input frame")
return -1
}
var input HTTPHandleRequest
if err := json.Unmarshal(raw[4:4+jsonLen], &input); err != nil {
pdk.SetError(err)
return -1
}
input.Body = raw[4+jsonLen:]
output, err := handleRequestImpl(input)
if err != nil {
// Error frame: [0x01][UTF-8 error message]
errMsg := []byte(err.Error())
errFrame := make([]byte, 1+len(errMsg))
errFrame[0] = 0x01
copy(errFrame[1:], errMsg)
pdk.Output(errFrame)
return 0
}
// Success frame: [0x00][json_len:4B][JSON without []byte field][raw bytes]
jsonBytes, _ := json.Marshal(output)
rawBytes := output.Body
frame := make([]byte, 1+4+len(jsonBytes)+len(rawBytes))
frame[0] = 0x00
binary.BigEndian.PutUint32(frame[1:5], uint32(len(jsonBytes)))
copy(frame[5:5+len(jsonBytes)], jsonBytes)
copy(frame[5+len(jsonBytes):], rawBytes)
pdk.Output(frame)
return 0
}

View File

@@ -1,65 +0,0 @@
// Code generated by ndpgen. DO NOT EDIT.
//
// This file provides stub implementations for non-WASM platforms.
// It allows Go plugins to compile and run tests outside of WASM,
// but the actual functionality is only available in WASM builds.
//
//go:build !wasip1
package httpendpoint
// HTTPHandleRequest is the input provided when an HTTP request is dispatched to a plugin.
type HTTPHandleRequest struct {
// Method is the HTTP method (GET, POST, PUT, DELETE, PATCH, etc.).
Method string `json:"method"`
// Path is the request path relative to the plugin's base URL.
// For example, if the full URL is /ext/my-plugin/webhook, Path is "/webhook".
// Both /ext/my-plugin and /ext/my-plugin/ are normalized to Path = "".
Path string `json:"path"`
// Query is the raw query string without the leading '?'.
Query string `json:"query,omitempty"`
// Headers contains the HTTP request headers.
Headers map[string][]string `json:"headers,omitempty"`
// Body is the request body content.
Body []byte `json:"-"`
// User contains the authenticated user information. Nil for auth:"none" endpoints.
User *HTTPUser `json:"user,omitempty"`
}
// HTTPHandleResponse is the response returned by the plugin's HandleRequest function.
type HTTPHandleResponse struct {
// Status is the HTTP status code. Defaults to 200 if zero or not set.
Status int32 `json:"status,omitempty"`
// Headers contains the HTTP response headers to set.
Headers map[string][]string `json:"headers,omitempty"`
// Body is the response body content.
Body []byte `json:"-"`
}
// HTTPUser contains authenticated user information passed to the plugin.
type HTTPUser struct {
// ID is the internal Navidrome user ID.
ID string `json:"id"`
// Username is the user's login name.
Username string `json:"username"`
// Name is the user's display name.
Name string `json:"name"`
// IsAdmin indicates whether the user has admin privileges.
IsAdmin bool `json:"isAdmin"`
}
// HTTPEndpoint allows plugins to handle incoming HTTP requests; all of its
// methods must be implemented.
// Plugins that declare the 'endpoints' permission must implement this capability.
// The host dispatches incoming HTTP requests to the plugin's HandleRequest function.
type HTTPEndpoint interface {
	// HandleRequest processes an incoming HTTP request and returns a response.
	HandleRequest(HTTPHandleRequest) (HTTPHandleResponse, error)
}
// NotImplementedCode is the standard return code for unimplemented functions.
const NotImplementedCode int32 = -2

// Register is a no-op on non-WASM platforms (see the !wasip1 build tag).
// This stub only exists so plugin code can compile and be tooled outside of
// WASM; the functional registration lives in the wasip1 build of this package.
func Register(_ HTTPEndpoint) {}

View File

@@ -1,156 +0,0 @@
// Code generated by ndpgen. DO NOT EDIT.
//
// This file contains export wrappers for the HTTPEndpoint capability.
// It is intended for use in Navidrome plugins built with extism-pdk.
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
// Zero-value predicates for serde's `skip_serializing_if`, one per numeric
// type, so numeric fields holding their default are omitted from JSON.
#[allow(dead_code)]
fn is_zero_i32(v: &i32) -> bool {
    *v == 0
}
#[allow(dead_code)]
fn is_zero_u32(v: &u32) -> bool {
    *v == 0
}
#[allow(dead_code)]
fn is_zero_i64(v: &i64) -> bool {
    *v == 0
}
#[allow(dead_code)]
fn is_zero_u64(v: &u64) -> bool {
    *v == 0
}
#[allow(dead_code)]
fn is_zero_f32(v: &f32) -> bool {
    *v == 0.0
}
#[allow(dead_code)]
fn is_zero_f64(v: &f64) -> bool {
    *v == 0.0
}
/// HTTPHandleRequest is the input provided when an HTTP request is dispatched to a plugin.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HTTPHandleRequest {
    /// Method is the HTTP method (GET, POST, PUT, DELETE, PATCH, etc.).
    #[serde(default)]
    pub method: String,
    /// Path is the request path relative to the plugin's base URL.
    /// For example, if the full URL is /ext/my-plugin/webhook, Path is "/webhook".
    /// Both /ext/my-plugin and /ext/my-plugin/ are normalized to Path = "".
    #[serde(default)]
    pub path: String,
    /// Query is the raw query string without the leading '?'.
    #[serde(default, skip_serializing_if = "String::is_empty")]
    pub query: String,
    /// Headers contains the HTTP request headers.
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub headers: std::collections::HashMap<String, Vec<String>>,
    /// Body is the request body content. Skipped by serde: the raw bytes are
    /// delivered out-of-band by the frame parser in `register_httpendpoint!`.
    #[serde(skip)]
    pub body: Vec<u8>,
    /// User contains the authenticated user information. Nil for auth:"none" endpoints.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub user: Option<HTTPUser>,
}
/// HTTPHandleResponse is the response returned by the plugin's HandleRequest function.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HTTPHandleResponse {
    /// Status is the HTTP status code. Defaults to 200 if zero or not set.
    #[serde(default, skip_serializing_if = "is_zero_i32")]
    pub status: i32,
    /// Headers contains the HTTP response headers to set.
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub headers: std::collections::HashMap<String, Vec<String>>,
    /// Body is the response body content. Skipped by serde: the raw bytes are
    /// appended after the JSON in the success frame built by the export macro.
    #[serde(skip)]
    pub body: Vec<u8>,
}
/// HTTPUser contains authenticated user information passed to the plugin.
/// Only present for authenticated endpoints; public endpoints get `None`.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HTTPUser {
    /// ID is the internal Navidrome user ID.
    #[serde(default)]
    pub id: String,
    /// Username is the user's login name.
    #[serde(default)]
    pub username: String,
    /// Name is the user's display name.
    #[serde(default)]
    pub name: String,
    /// IsAdmin indicates whether the user has admin privileges.
    #[serde(default)]
    pub is_admin: bool,
}
/// Error represents an error from a capability method. It wraps a plain
/// message string and implements the standard error traits so it can be
/// returned from trait methods and formatted/propagated like any error.
#[derive(Debug)]
pub struct Error {
    pub message: String,
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Display is just the raw message, with no decoration.
        f.write_str(&self.message)
    }
}

impl std::error::Error for Error {}

impl Error {
    /// Build an Error from anything convertible into a String.
    pub fn new(message: impl Into<String>) -> Self {
        Error {
            message: message.into(),
        }
    }
}
/// HTTPEndpoint allows plugins to handle incoming HTTP requests; all of its
/// methods are required.
/// Plugins that declare the 'endpoints' permission must implement this capability.
/// The host dispatches incoming HTTP requests to the plugin's HandleRequest function.
pub trait HTTPEndpoint {
    /// HandleRequest processes an incoming HTTP request and returns a response.
    fn handle_request(&self, req: HTTPHandleRequest) -> Result<HTTPHandleResponse, Error>;
}
/// Register all exports for the HTTPEndpoint capability.
/// This macro generates the WASM export functions for all trait methods.
///
/// Wire format (both directions keep the `[]byte` field out of the JSON):
///   input:  [json_len:4B big-endian][JSON][raw body bytes]
///   output: [0x00][json_len:4B][JSON][raw body bytes] on success,
///           [0x01][UTF-8 error message] on failure.
#[macro_export]
macro_rules! register_httpendpoint {
    ($plugin_type:ty) => {
        #[extism_pdk::plugin_fn]
        pub fn nd_http_handle_request(
            _raw_input: extism_pdk::Raw<Vec<u8>>
        ) -> extism_pdk::FnResult<extism_pdk::Raw<Vec<u8>>> {
            let plugin = <$plugin_type>::default();
            // Parse input frame: [json_len:4B][JSON without []byte field][raw bytes]
            let raw_bytes = _raw_input.0;
            // A frame shorter than the 4-byte length prefix cannot be valid;
            // report it via an error frame rather than panicking.
            if raw_bytes.len() < 4 {
                let mut err_frame = vec![0x01u8];
                err_frame.extend_from_slice(b"malformed input frame");
                return Ok(extism_pdk::Raw(err_frame));
            }
            let json_len = u32::from_be_bytes([raw_bytes[0], raw_bytes[1], raw_bytes[2], raw_bytes[3]]) as usize;
            // Reject frames whose declared JSON length overruns the payload
            // (the earlier len>=4 check makes the subtraction safe).
            if json_len > raw_bytes.len() - 4 {
                let mut err_frame = vec![0x01u8];
                err_frame.extend_from_slice(b"invalid json length in input frame");
                return Ok(extism_pdk::Raw(err_frame));
            }
            let mut req: $crate::httpendpoint::HTTPHandleRequest = serde_json::from_slice(&raw_bytes[4..4+json_len])
                .map_err(|e| extism_pdk::Error::msg(e.to_string()))?;
            // Everything after the JSON section is the raw request body.
            req.body = raw_bytes[4+json_len..].to_vec();
            match $crate::httpendpoint::HTTPEndpoint::handle_request(&plugin, req) {
                Ok(output) => {
                    // Success frame: [0x00][json_len:4B][JSON without []byte field][raw bytes]
                    let json_bytes = serde_json::to_vec(&output)
                        .map_err(|e| extism_pdk::Error::msg(e.to_string()))?;
                    let raw_field = &output.body;
                    let mut frame = Vec::with_capacity(1 + 4 + json_bytes.len() + raw_field.len());
                    frame.push(0x00);
                    frame.extend_from_slice(&(json_bytes.len() as u32).to_be_bytes());
                    frame.extend_from_slice(&json_bytes);
                    frame.extend_from_slice(raw_field);
                    Ok(extism_pdk::Raw(frame))
                }
                Err(e) => {
                    // Error frame: [0x01][UTF-8 error message]
                    let mut err_frame = vec![0x01u8];
                    err_frame.extend_from_slice(e.message.as_bytes());
                    Ok(extism_pdk::Raw(err_frame))
                }
            }
        }
    };
}

View File

@@ -5,7 +5,6 @@
//! This crate provides type definitions, traits, and registration macros //! This crate provides type definitions, traits, and registration macros
//! for implementing Navidrome plugin capabilities in Rust. //! for implementing Navidrome plugin capabilities in Rust.
pub mod httpendpoint;
pub mod lifecycle; pub mod lifecycle;
pub mod metadata; pub mod metadata;
pub mod scheduler; pub mod scheduler;

View File

@@ -33,8 +33,11 @@ func init() {
// ScrobblerPlugin is an adapter that wraps an Extism plugin and implements // ScrobblerPlugin is an adapter that wraps an Extism plugin and implements
// the scrobbler.Scrobbler interface for scrobbling to external services. // the scrobbler.Scrobbler interface for scrobbling to external services.
type ScrobblerPlugin struct { type ScrobblerPlugin struct {
name string name string
plugin *plugin plugin *plugin
allowedUserIDs []string // User IDs this plugin can access (from DB configuration)
allUsers bool // If true, plugin can access all users
userIDMap map[string]struct{} // Cached map for fast lookups
} }
// IsAuthorized checks if the user is authorized with this scrobbler. // IsAuthorized checks if the user is authorized with this scrobbler.
@@ -42,7 +45,7 @@ type ScrobblerPlugin struct {
// then delegates to the plugin for service-specific authorization. // then delegates to the plugin for service-specific authorization.
func (s *ScrobblerPlugin) IsAuthorized(ctx context.Context, userId string) bool { func (s *ScrobblerPlugin) IsAuthorized(ctx context.Context, userId string) bool {
// First check server-side authorization based on plugin configuration // First check server-side authorization based on plugin configuration
if !s.plugin.userAccess.IsAllowed(userId) { if !s.isUserAllowed(userId) {
return false return false
} }
@@ -60,6 +63,18 @@ func (s *ScrobblerPlugin) IsAuthorized(ctx context.Context, userId string) bool
return result return result
} }
// isUserAllowed checks if the given user ID is allowed to use this plugin.
// Access is granted either globally (allUsers) or by membership in the
// configured allow-list; userIDMap mirrors allowedUserIDs for O(1) lookups.
func (s *ScrobblerPlugin) isUserAllowed(userId string) bool {
	switch {
	case s.allUsers:
		return true
	case len(s.allowedUserIDs) == 0:
		return false
	default:
		_, found := s.userIDMap[userId]
		return found
	}
}
// NowPlaying sends a now playing notification to the scrobbler // NowPlaying sends a now playing notification to the scrobbler
func (s *ScrobblerPlugin) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error { func (s *ScrobblerPlugin) NowPlaying(ctx context.Context, userId string, track *model.MediaFile, position int) error {
username := getUsernameFromContext(ctx) username := getUsernameFromContext(ctx)

View File

@@ -71,6 +71,41 @@ var _ = Describe("ScrobblerPlugin", Ordered, func() {
}) })
}) })
// Unit tests for ScrobblerPlugin.isUserAllowed, covering the allUsers
// bypass, empty/nil allow-lists, and membership lookups in userIDMap.
Describe("isUserAllowed", func() {
	It("returns true when allUsers is true", func() {
		sp := &ScrobblerPlugin{allUsers: true}
		Expect(sp.isUserAllowed("any-user")).To(BeTrue())
	})
	It("returns false when allowedUserIDs is empty and allUsers is false", func() {
		sp := &ScrobblerPlugin{allUsers: false, allowedUserIDs: []string{}}
		Expect(sp.isUserAllowed("user-1")).To(BeFalse())
	})
	It("returns false when allowedUserIDs is nil and allUsers is false", func() {
		sp := &ScrobblerPlugin{allUsers: false}
		Expect(sp.isUserAllowed("user-1")).To(BeFalse())
	})
	It("returns true when user is in allowedUserIDs", func() {
		sp := &ScrobblerPlugin{
			allUsers:       false,
			allowedUserIDs: []string{"user-1", "user-2"},
			userIDMap:      map[string]struct{}{"user-1": {}, "user-2": {}},
		}
		Expect(sp.isUserAllowed("user-1")).To(BeTrue())
	})
	It("returns false when user is not in allowedUserIDs", func() {
		sp := &ScrobblerPlugin{
			allUsers:       false,
			allowedUserIDs: []string{"user-1", "user-2"},
			userIDMap:      map[string]struct{}{"user-1": {}, "user-2": {}},
		}
		Expect(sp.isUserAllowed("user-3")).To(BeFalse())
	})
})
Describe("NowPlaying", func() { Describe("NowPlaying", func() {
It("successfully calls the plugin", func() { It("successfully calls the plugin", func() {
track := &model.MediaFile{ track := &model.MediaFile{

View File

@@ -1,16 +0,0 @@
module test-http-endpoint-native
go 1.25
require github.com/navidrome/navidrome/plugins/pdk/go v0.0.0
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/extism/go-pdk v1.1.3 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/stretchr/testify v1.11.1 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
replace github.com/navidrome/navidrome/plugins/pdk/go => ../../pdk/go

View File

@@ -1,14 +0,0 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/extism/go-pdk v1.1.3 h1:hfViMPWrqjN6u67cIYRALZTZLk/enSPpNKa+rZ9X2SQ=
github.com/extism/go-pdk v1.1.3/go.mod h1:Gz+LIU/YCKnKXhgge8yo5Yu1F/lbv7KtKFkiCSzW/P4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -1,61 +0,0 @@
// Test plugin for native auth (JWT) HTTP endpoint integration tests.
// Build with: tinygo build -o ../test-http-endpoint-native.wasm -target wasip1 -buildmode=c-shared .
package main
import (
"encoding/json"
"github.com/navidrome/navidrome/plugins/pdk/go/httpendpoint"
)
// init registers the endpoint implementation with the PDK so the host can
// dispatch HTTP requests to it once the plugin is loaded.
func init() {
	httpendpoint.Register(&testNativeEndpoint{})
}

// testNativeEndpoint is a stateless handler implementing httpendpoint.HTTPEndpoint.
type testNativeEndpoint struct{}
// HandleRequest serves the test routes for the native-auth plugin:
// "/hello" returns a static greeting, "/echo" reflects the request back as
// JSON, and any other path yields a 404 that names the offending path.
func (t *testNativeEndpoint) HandleRequest(req httpendpoint.HTTPHandleRequest) (httpendpoint.HTTPHandleResponse, error) {
	if req.Path == "/hello" {
		resp := httpendpoint.HTTPHandleResponse{
			Status:  200,
			Headers: map[string][]string{"Content-Type": {"text/plain"}},
			Body:    []byte("Hello from native auth plugin!"),
		}
		return resp, nil
	}
	if req.Path == "/echo" {
		// Reflect the request back to the caller as a JSON document.
		payload := map[string]any{
			"method":   req.Method,
			"path":     req.Path,
			"query":    req.Query,
			"body":     string(req.Body),
			"hasUser":  req.User != nil,
			"username": userName(req.User),
		}
		data, _ := json.Marshal(payload)
		resp := httpendpoint.HTTPHandleResponse{
			Status:  200,
			Headers: map[string][]string{"Content-Type": {"application/json"}},
			Body:    data,
		}
		return resp, nil
	}
	return httpendpoint.HTTPHandleResponse{
		Status: 404,
		Body:   []byte("Not found: " + req.Path),
	}, nil
}
// userName extracts the username from an optional user record, returning
// the empty string when no user is attached to the request.
func userName(u *httpendpoint.HTTPUser) string {
	if u != nil {
		return u.Username
	}
	return ""
}
func main() {}

View File

@@ -1,16 +0,0 @@
{
"name": "Test HTTP Endpoint Native Plugin",
"author": "Navidrome Test",
"version": "1.0.0",
"description": "Test plugin for native (JWT) HTTP endpoint integration testing",
"permissions": {
"endpoints": {
"auth": "native",
"paths": ["/hello", "/echo"],
"reason": "Testing native auth HTTP endpoint handling"
},
"users": {
"reason": "Authenticated endpoints require user access"
}
}
}

View File

@@ -1,16 +0,0 @@
module test-http-endpoint-public
go 1.25
require github.com/navidrome/navidrome/plugins/pdk/go v0.0.0
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/extism/go-pdk v1.1.3 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/stretchr/testify v1.11.1 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
replace github.com/navidrome/navidrome/plugins/pdk/go => ../../pdk/go

View File

@@ -1,14 +0,0 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/extism/go-pdk v1.1.3 h1:hfViMPWrqjN6u67cIYRALZTZLk/enSPpNKa+rZ9X2SQ=
github.com/extism/go-pdk v1.1.3/go.mod h1:Gz+LIU/YCKnKXhgge8yo5Yu1F/lbv7KtKFkiCSzW/P4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -1,45 +0,0 @@
// Test plugin for public (unauthenticated) HTTP endpoint integration tests.
// Build with: tinygo build -o ../test-http-endpoint-public.wasm -target wasip1 -buildmode=c-shared .
package main
import (
"github.com/navidrome/navidrome/plugins/pdk/go/httpendpoint"
)
// init registers the endpoint implementation with the PDK so the host can
// dispatch HTTP requests to it once the plugin is loaded.
func init() {
	httpendpoint.Register(&testPublicEndpoint{})
}

// testPublicEndpoint is a stateless handler implementing httpendpoint.HTTPEndpoint.
type testPublicEndpoint struct{}
// HandleRequest serves the test routes for the public (unauthenticated)
// plugin: "/webhook" acknowledges delivery, "/check-no-user" reports whether
// user info leaked into a public request, and anything else is a 404.
func (t *testPublicEndpoint) HandleRequest(req httpendpoint.HTTPHandleRequest) (httpendpoint.HTTPHandleResponse, error) {
	if req.Path == "/webhook" {
		resp := httpendpoint.HTTPHandleResponse{
			Status:  200,
			Headers: map[string][]string{"Content-Type": {"text/plain"}},
			Body:    []byte("webhook received"),
		}
		return resp, nil
	}
	if req.Path == "/check-no-user" {
		// Public endpoints must not receive user info; report what we got.
		userFlag := "false"
		if req.User != nil {
			userFlag = "true"
		}
		return httpendpoint.HTTPHandleResponse{
			Status: 200,
			Body:   []byte("hasUser=" + userFlag),
		}, nil
	}
	return httpendpoint.HTTPHandleResponse{
		Status: 404,
		Body:   []byte("Not found: " + req.Path),
	}, nil
}
func main() {}

View File

@@ -1,13 +0,0 @@
{
"name": "Test HTTP Endpoint Public Plugin",
"author": "Navidrome Test",
"version": "1.0.0",
"description": "Test plugin for public (unauthenticated) HTTP endpoint integration testing",
"permissions": {
"endpoints": {
"auth": "none",
"paths": ["/webhook"],
"reason": "Testing public HTTP endpoints"
}
}
}

View File

@@ -1,16 +0,0 @@
module test-http-endpoint
go 1.25
require github.com/navidrome/navidrome/plugins/pdk/go v0.0.0
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/extism/go-pdk v1.1.3 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/stretchr/testify v1.11.1 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
replace github.com/navidrome/navidrome/plugins/pdk/go => ../../pdk/go

View File

@@ -1,14 +0,0 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/extism/go-pdk v1.1.3 h1:hfViMPWrqjN6u67cIYRALZTZLk/enSPpNKa+rZ9X2SQ=
github.com/extism/go-pdk v1.1.3/go.mod h1:Gz+LIU/YCKnKXhgge8yo5Yu1F/lbv7KtKFkiCSzW/P4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -1,77 +0,0 @@
// Test plugin for HTTP endpoint integration tests.
// Build with: tinygo build -o ../test-http-endpoint.wasm -target wasip1 -buildmode=c-shared .
package main
import (
"encoding/json"
"github.com/navidrome/navidrome/plugins/pdk/go/httpendpoint"
)
// init registers the endpoint implementation with the PDK so the host can
// dispatch HTTP requests to it once the plugin is loaded.
func init() {
	httpendpoint.Register(&testEndpoint{})
}

// testEndpoint is a stateless handler implementing httpendpoint.HTTPEndpoint.
type testEndpoint struct{}
// HandleRequest serves the integration-test routes: "/hello" (static text),
// "/echo" (request reflected as JSON), "/binary" (raw PNG magic bytes),
// "/error" (forced 500), and a 404 fallback that names the path.
func (t *testEndpoint) HandleRequest(req httpendpoint.HTTPHandleRequest) (httpendpoint.HTTPHandleResponse, error) {
	var resp httpendpoint.HTTPHandleResponse
	switch req.Path {
	case "/hello":
		resp.Status = 200
		resp.Headers = map[string][]string{"Content-Type": {"text/plain"}}
		resp.Body = []byte("Hello from plugin!")
	case "/echo":
		// Reflect the request back to the caller as a JSON document.
		data, _ := json.Marshal(map[string]any{
			"method":   req.Method,
			"path":     req.Path,
			"query":    req.Query,
			"body":     string(req.Body),
			"hasUser":  req.User != nil,
			"username": userName(req.User),
		})
		resp.Status = 200
		resp.Headers = map[string][]string{"Content-Type": {"application/json"}}
		resp.Body = data
	case "/binary":
		// Raw binary payload (PNG file signature) to exercise byte passthrough.
		resp.Status = 200
		resp.Headers = map[string][]string{"Content-Type": {"image/png"}}
		resp.Body = []byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A}
	case "/error":
		resp.Status = 500
		resp.Body = []byte("Something went wrong")
	default:
		resp.Status = 404
		resp.Body = []byte("Not found: " + req.Path)
	}
	return resp, nil
}
// userName extracts the username from an optional user record, returning
// the empty string when no user is attached to the request.
func userName(u *httpendpoint.HTTPUser) string {
	if u != nil {
		return u.Username
	}
	return ""
}
func main() {}

View File

@@ -1,16 +0,0 @@
{
"name": "Test HTTP Endpoint Plugin",
"author": "Navidrome Test",
"version": "1.0.0",
"description": "Test plugin for HTTP endpoint integration testing",
"permissions": {
"endpoints": {
"auth": "subsonic",
"paths": ["/hello", "/echo"],
"reason": "Testing HTTP endpoint handling"
},
"users": {
"reason": "Authenticated endpoints require user access"
}
}
}

View File

@@ -1,35 +0,0 @@
package plugins
// UserAccess encapsulates user authorization for a plugin, determining
// which users are allowed to interact with it.
type UserAccess struct {
	allUsers  bool                // when set, every user is permitted
	userIDMap map[string]struct{} // set of explicitly permitted user IDs
}

// NewUserAccess creates a UserAccess from the plugin's configuration.
// When allUsers is true, the explicit ID list is irrelevant: everyone passes.
func NewUserAccess(allUsers bool, userIDs []string) UserAccess {
	ids := make(map[string]struct{}, len(userIDs))
	for _, userID := range userIDs {
		ids[userID] = struct{}{}
	}
	return UserAccess{allUsers: allUsers, userIDMap: ids}
}

// IsAllowed checks if the given user ID is permitted.
func (ua UserAccess) IsAllowed(userID string) bool {
	if ua.allUsers {
		return true
	}
	_, found := ua.userIDMap[userID]
	return found
}

// HasConfiguredUsers reports whether any specific user IDs have been
// configured (or all users are allowed wholesale).
func (ua UserAccess) HasConfiguredUsers() bool {
	if ua.allUsers {
		return true
	}
	return len(ua.userIDMap) > 0
}

View File

@@ -1,64 +0,0 @@
//go:build !windows
package plugins
import (
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
)
// Behavior of the UserAccess authorization helper: IsAllowed covers the
// allUsers bypass (including when an explicit list is also present) and
// nil/empty/member/non-member lists; HasConfiguredUsers covers whether any
// authorization has been configured at all.
var _ = Describe("UserAccess", func() {
	Describe("IsAllowed", func() {
		It("returns true when allUsers is true", func() {
			ua := NewUserAccess(true, nil)
			Expect(ua.IsAllowed("any-user")).To(BeTrue())
		})
		It("returns true when allUsers is true even with an explicit list", func() {
			ua := NewUserAccess(true, []string{"user-1"})
			Expect(ua.IsAllowed("other-user")).To(BeTrue())
		})
		It("returns false when userIDs is empty", func() {
			ua := NewUserAccess(false, []string{})
			Expect(ua.IsAllowed("user-1")).To(BeFalse())
		})
		It("returns false when userIDs is nil", func() {
			ua := NewUserAccess(false, nil)
			Expect(ua.IsAllowed("user-1")).To(BeFalse())
		})
		It("returns true when user is in the list", func() {
			ua := NewUserAccess(false, []string{"user-1", "user-2"})
			Expect(ua.IsAllowed("user-1")).To(BeTrue())
		})
		It("returns false when user is not in the list", func() {
			ua := NewUserAccess(false, []string{"user-1", "user-2"})
			Expect(ua.IsAllowed("user-3")).To(BeFalse())
		})
	})
	Describe("HasConfiguredUsers", func() {
		It("returns true when allUsers is true", func() {
			ua := NewUserAccess(true, nil)
			Expect(ua.HasConfiguredUsers()).To(BeTrue())
		})
		It("returns true when specific users are configured", func() {
			ua := NewUserAccess(false, []string{"user-1"})
			Expect(ua.HasConfiguredUsers()).To(BeTrue())
		})
		It("returns false when no users are configured", func() {
			ua := NewUserAccess(false, nil)
			Expect(ua.HasConfiguredUsers()).To(BeFalse())
		})
		It("returns false when user list is empty", func() {
			ua := NewUserAccess(false, []string{})
			Expect(ua.HasConfiguredUsers()).To(BeFalse())
		})
	})
})

View File

@@ -23,8 +23,6 @@ if [ ! -f "$postinstall_flag" ]; then
# and not by root # and not by root
chown navidrome:navidrome /var/lib/navidrome/cache chown navidrome:navidrome /var/lib/navidrome/cache
touch "$postinstall_flag" touch "$postinstall_flag"
else
navidrome service stop --configfile /etc/navidrome/navidrome.toml && navidrome service start --configfile /etc/navidrome/navidrome.toml
fi fi

View File

@@ -36,8 +36,7 @@
"bitDepth": "Bitdybde", "bitDepth": "Bitdybde",
"sampleRate": "Samplingfrekvens", "sampleRate": "Samplingfrekvens",
"missing": "Manglende", "missing": "Manglende",
"libraryName": "Bibliotek", "libraryName": "Bibliotek"
"composer": "Komponist"
}, },
"actions": { "actions": {
"addToQueue": "Afspil senere", "addToQueue": "Afspil senere",
@@ -47,8 +46,7 @@
"download": "Download", "download": "Download",
"playNext": "Afspil næste", "playNext": "Afspil næste",
"info": "Hent info", "info": "Hent info",
"showInPlaylist": "Vis i afspilningsliste", "showInPlaylist": "Vis i afspilningsliste"
"instantMix": "Instant Mix"
} }
}, },
"album": { "album": {
@@ -330,80 +328,6 @@
"scanInProgress": "Scanning i gang...", "scanInProgress": "Scanning i gang...",
"noLibrariesAssigned": "Ingen biblioteker tildelt denne bruger" "noLibrariesAssigned": "Ingen biblioteker tildelt denne bruger"
} }
},
"plugin": {
"name": "Plugin |||| Plugins",
"fields": {
"id": "ID",
"name": "Navn",
"description": "Beskrivelse",
"version": "Version",
"author": "Forfatter",
"website": "Hjemmeside",
"permissions": "Tilladelser",
"enabled": "Aktiveret",
"status": "Status",
"path": "Sti",
"lastError": "Fejl",
"hasError": "Fejl",
"updatedAt": "Opdateret",
"createdAt": "Installeret",
"configKey": "Nøgle",
"configValue": "Værdi",
"allUsers": "Tillad alle brugere",
"selectedUsers": "Valgte brugere",
"allLibraries": "Tillad alle biblioteker",
"selectedLibraries": "Valgte biblioteker"
},
"sections": {
"status": "Status",
"info": "Pluginoplysninger",
"configuration": "Konfiguration",
"manifest": "Manifest",
"usersPermission": "Brugertilladelse",
"libraryPermission": "Bibliotekstilladelse"
},
"status": {
"enabled": "Aktiveret",
"disabled": "Deaktiveret"
},
"actions": {
"enable": "Aktivér",
"disable": "Deaktivér",
"disabledDueToError": "Ret fejlen før aktivering",
"disabledUsersRequired": "Vælg brugere før aktivering",
"disabledLibrariesRequired": "Vælg biblioteker før aktivering",
"addConfig": "Tilføj konfiguration",
"rescan": "Genskan"
},
"notifications": {
"enabled": "Plugin aktiveret",
"disabled": "Plugin deaktiveret",
"updated": "Plugin opdateret",
"error": "Fejl ved opdatering af plugin"
},
"validation": {
"invalidJson": "Konfigurationen skal være gyldig JSON"
},
"messages": {
"configHelp": "Konfigurér pluginet med nøgle-værdi-par. Lad stå tomt, hvis pluginet ikke kræver konfiguration.",
"clickPermissions": "Klik på en tilladelse for detaljer",
"noConfig": "Ingen konfiguration angivet",
"allUsersHelp": "Når aktiveret, vil pluginet have adgang til alle brugere, inklusiv dem der oprettes i fremtiden.",
"noUsers": "Ingen brugere valgt",
"permissionReason": "Årsag",
"usersRequired": "Dette plugin kræver adgang til brugeroplysninger. Vælg hvilke brugere pluginet kan tilgå, eller aktivér 'Tillad alle brugere'.",
"allLibrariesHelp": "Når aktiveret, vil pluginet have adgang til alle biblioteker, inklusiv dem der oprettes i fremtiden.",
"noLibraries": "Ingen biblioteker valgt",
"librariesRequired": "Dette plugin kræver adgang til biblioteksoplysninger. Vælg hvilke biblioteker pluginet kan tilgå, eller aktivér 'Tillad alle biblioteker'.",
"requiredHosts": "Påkrævede hosts",
"configValidationError": "Konfigurationsvalidering mislykkedes:",
"schemaRenderError": "Kan ikke vise konfigurationsformularen. Pluginets skema er muligvis ugyldigt."
},
"placeholders": {
"configKey": "nøgle",
"configValue": "værdi"
}
} }
}, },
"ra": { "ra": {
@@ -587,8 +511,7 @@
"remove_all_missing_title": "Fjern alle manglende filer", "remove_all_missing_title": "Fjern alle manglende filer",
"remove_all_missing_content": "Er du sikker på, at du vil fjerne alle manglende filer fra databasen? Dét vil permanent fjerne alle referencer til dem, inklusive deres afspilningstællere og vurderinger.", "remove_all_missing_content": "Er du sikker på, at du vil fjerne alle manglende filer fra databasen? Dét vil permanent fjerne alle referencer til dem, inklusive deres afspilningstællere og vurderinger.",
"noSimilarSongsFound": "Ingen lignende sange fundet", "noSimilarSongsFound": "Ingen lignende sange fundet",
"noTopSongsFound": "Ingen topsange fundet", "noTopSongsFound": "Ingen topsange fundet"
"startingInstantMix": "Indlæser Instant Mix..."
}, },
"menu": { "menu": {
"library": "Bibliotek", "library": "Bibliotek",
@@ -674,8 +597,7 @@
"exportSuccess": "Konfigurationen eksporteret til udklipsholder i TOML-format", "exportSuccess": "Konfigurationen eksporteret til udklipsholder i TOML-format",
"exportFailed": "Kunne ikke kopiere konfigurationen", "exportFailed": "Kunne ikke kopiere konfigurationen",
"devFlagsHeader": "Udviklingsflagget (med forbehold for ændring/fjernelse)", "devFlagsHeader": "Udviklingsflagget (med forbehold for ændring/fjernelse)",
"devFlagsComment": "Disse er eksperimental-indstillinger og kan blive fjernet i fremtidige udgaver", "devFlagsComment": "Disse er eksperimental-indstillinger og kan blive fjernet i fremtidige udgaver"
"downloadToml": ""
} }
}, },
"activity": { "activity": {

View File

@@ -2,7 +2,7 @@
"languageName": "Euskara", "languageName": "Euskara",
"resources": { "resources": {
"song": { "song": {
"name": "Abestia |||| Abesti", "name": "Abestia |||| Abestiak",
"fields": { "fields": {
"albumArtist": "Albumaren artista", "albumArtist": "Albumaren artista",
"duration": "Iraupena", "duration": "Iraupena",
@@ -10,7 +10,6 @@
"playCount": "Erreprodukzioak", "playCount": "Erreprodukzioak",
"title": "Titulua", "title": "Titulua",
"artist": "Artista", "artist": "Artista",
"composer": "Konpositorea",
"album": "Albuma", "album": "Albuma",
"path": "Fitxategiaren bidea", "path": "Fitxategiaren bidea",
"libraryName": "Liburutegia", "libraryName": "Liburutegia",
@@ -34,9 +33,9 @@
"grouping": "Multzokatzea", "grouping": "Multzokatzea",
"mood": "Aldartea", "mood": "Aldartea",
"participants": "Partaide gehiago", "participants": "Partaide gehiago",
"tags": "Etiketa gehiago", "tags": "Traola gehiago",
"mappedTags": "Esleitutako etiketak", "mappedTags": "Esleitutako traolak",
"rawTags": "Etiketa gordinak", "rawTags": "Traola gordinak",
"missing": "Ez da aurkitu" "missing": "Ez da aurkitu"
}, },
"actions": { "actions": {
@@ -47,12 +46,11 @@
"shuffleAll": "Erreprodukzio aleatorioa", "shuffleAll": "Erreprodukzio aleatorioa",
"download": "Deskargatu", "download": "Deskargatu",
"playNext": "Hurrengoa", "playNext": "Hurrengoa",
"info": "Erakutsi informazioa", "info": "Erakutsi informazioa"
"instantMix": "Berehalako nahastea"
} }
}, },
"album": { "album": {
"name": "Albuma |||| Album", "name": "Albuma |||| Albumak",
"fields": { "fields": {
"albumArtist": "Albumaren artista", "albumArtist": "Albumaren artista",
"artist": "Artista", "artist": "Artista",
@@ -68,7 +66,7 @@
"date": "Recording Date", "date": "Recording Date",
"originalDate": "Jatorrizkoa", "originalDate": "Jatorrizkoa",
"releaseDate": "Argitaratze-data", "releaseDate": "Argitaratze-data",
"releases": "Argitaratzea |||| Argitaratze", "releases": "Argitaratzea |||| Argitaratzeak",
"released": "Argitaratua", "released": "Argitaratua",
"updatedAt": "Aktualizatze-data:", "updatedAt": "Aktualizatze-data:",
"comment": "Iruzkina", "comment": "Iruzkina",
@@ -103,7 +101,7 @@
} }
}, },
"artist": { "artist": {
"name": "Artista |||| Artista", "name": "Artista |||| Artistak",
"fields": { "fields": {
"name": "Izena", "name": "Izena",
"albumCount": "Album kopurua", "albumCount": "Album kopurua",
@@ -332,80 +330,6 @@
"scanInProgress": "Araketa abian da…", "scanInProgress": "Araketa abian da…",
"noLibrariesAssigned": "Ez da liburutegirik egokitu erabiltzaile honentzat" "noLibrariesAssigned": "Ez da liburutegirik egokitu erabiltzaile honentzat"
} }
},
"plugin": {
"name": "Plugina |||| Plugin",
"fields": {
"id": "IDa",
"name": "Izena",
"description": "Deskribapena",
"version": "Bertsioa",
"author": "Autorea",
"website": "Webgunea",
"permissions": "Baimenak",
"enabled": "Gaituta",
"status": "Egoera",
"path": "Bidea",
"lastError": "Errorea",
"hasError": "Errorea",
"updatedAt": "Eguneratuta",
"createdAt": "Instalatuta",
"configKey": "Gakoa",
"configValue": "Balioa",
"allUsers": "Baimendu erabiltzaile guztiak",
"selectedUsers": "Hautatutako erabiltzaileak",
"allLibraries": "Baimendu liburutegi guztiak",
"selectedLibraries": "Hautatutako liburutegiak"
},
"sections": {
"status": "Egoera",
"info": "Pluginaren informazioa",
"configuration": "Konfigurazioa",
"manifest": "Manifestua",
"usersPermission": "Erabiltzaileen baimenak",
"libraryPermission": "Liburutegien baimenak"
},
"status": {
"enabled": "Gaituta",
"disabled": "Ezgaituta"
},
"actions": {
"enable": "Gaitu",
"disable": "Ezgaitu",
"disabledDueToError": "Konpondu errorea gaitu baino lehen",
"disabledUsersRequired": "Hautatu erabiltzaileak gaitu baino lehen",
"disabledLibrariesRequired": "Hautatu liburutegiak gaitu baino lehen",
"addConfig": "Gehitu konfigurazioa",
"rescan": "Arakatu berriro"
},
"notifications": {
"enabled": "Plugina gaituta",
"disabled": "Plugina ezgaituta",
"updated": "Plugina eguneratuta",
"error": "Errorea plugina eguneratzean"
},
"validation": {
"invalidJson": "Konfigurazioa baliozko JSON-a izan behar da"
},
"messages": {
"configHelp": "Konfiguratu plugina gako-balio bikoteak erabiliz. Utzi hutsik pluginak konfiguraziorik behar ez badu.",
"configValidationError": "Huts egin du konfigurazioaren balidazioak:",
"schemaRenderError": "Ezin izan da konfigurazioaren formularioa bihurtu. Litekeena da pluginaren eskema baliozkoa ez izatea.",
"clickPermissions": "Sakatu baimen batean xehetasunetarako",
"noConfig": "Ez da konfiguraziorik ezarri",
"allUsersHelp": "Gaituta dagoenean, pluginak erabiltzaile guztiak atzitu ditzazke, baita etorkizunean sortuko direnak ere.",
"noUsers": "Ez da erabiltzailerik hautatu",
"permissionReason": "Arrazoia",
"usersRequired": "Plugin honek erabiltzaileen informaziora sarbidea behar du. Hautatu zein erabiltzaile atzitu dezakeen pluginak, edo gaitu 'Baimendu erabiltzaile guztiak'.",
"allLibrariesHelp": "Gaituta dagoenean, pluginak liburutegi guztietara izango du sarbidea, baita etorkizunean sortuko direnetara ere.",
"noLibraries": "Ez da liburutegirik hautatu",
"librariesRequired": "Plugin honek liburutegien informaziora sarbidea behar du. Hautatu zein liburutegi atzitu dezakeen pluginak, edo gaitu 'Baimendu liburutegi guztiak'.",
"requiredHosts": "Beharrezko ostatatzaileak"
},
"placeholders": {
"configKey": "gakoa",
"configValue": "balioa"
}
} }
}, },
"ra": { "ra": {
@@ -559,7 +483,6 @@
"transcodingEnabled": "Navidrome %{config}-ekin martxan dago eta, beraz, web-interfazeko transkodeketa-ataletik sistema-komandoak exekuta daitezke. Segurtasun arrazoiak tarteko, ezgaitzea gomendatzen dugu, eta transkodeketa-aukerak konfiguratzen ari zarenean bakarrik gaitzea.", "transcodingEnabled": "Navidrome %{config}-ekin martxan dago eta, beraz, web-interfazeko transkodeketa-ataletik sistema-komandoak exekuta daitezke. Segurtasun arrazoiak tarteko, ezgaitzea gomendatzen dugu, eta transkodeketa-aukerak konfiguratzen ari zarenean bakarrik gaitzea.",
"songsAddedToPlaylist": "Abesti bat zerrendara gehitu da |||| %{smart_count} abesti zerrendara gehitu dira", "songsAddedToPlaylist": "Abesti bat zerrendara gehitu da |||| %{smart_count} abesti zerrendara gehitu dira",
"noSimilarSongsFound": "Ez da antzeko abestirik aurkitu", "noSimilarSongsFound": "Ez da antzeko abestirik aurkitu",
"startingInstantMix": "Berehalako nahastea kargatzen…",
"noTopSongsFound": "Ez da aparteko abestirik aurkitu", "noTopSongsFound": "Ez da aparteko abestirik aurkitu",
"noPlaylistsAvailable": "Ez dago zerrendarik erabilgarri", "noPlaylistsAvailable": "Ez dago zerrendarik erabilgarri",
"delete_user_title": "Ezabatu '%{name}' erabiltzailea", "delete_user_title": "Ezabatu '%{name}' erabiltzailea",

View File

@@ -10,7 +10,6 @@
"playCount": "Lejátszások", "playCount": "Lejátszások",
"title": "Cím", "title": "Cím",
"artist": "Előadó", "artist": "Előadó",
"composer": "Zeneszerző",
"album": "Album", "album": "Album",
"path": "Elérési út", "path": "Elérési út",
"libraryName": "Könyvtár", "libraryName": "Könyvtár",
@@ -47,8 +46,7 @@
"shuffleAll": "Keverés", "shuffleAll": "Keverés",
"download": "Letöltés", "download": "Letöltés",
"playNext": "Lejátszás következőként", "playNext": "Lejátszás következőként",
"info": "Részletek", "info": "Részletek"
"instantMix": "Instant keverés"
} }
}, },
"album": { "album": {
@@ -327,80 +325,6 @@
"scanInProgress": "Szkennelés folyamatban...", "scanInProgress": "Szkennelés folyamatban...",
"noLibrariesAssigned": "Ehhez a felhasználóhoz nincsenek könyvtárak adva" "noLibrariesAssigned": "Ehhez a felhasználóhoz nincsenek könyvtárak adva"
} }
},
"plugin": {
"name": "Kiegészítő |||| Kiegészítők",
"fields": {
"id": "ID",
"name": "Név",
"description": "Leírás",
"version": "Verzió",
"author": "Fejlesztő",
"website": "Weboldal",
"permissions": "Engedélyek",
"enabled": "Engedélyezve",
"status": "Státusz",
"path": "Útvonal",
"lastError": "Hiba",
"hasError": "Hiba",
"updatedAt": "Frissítve",
"createdAt": "Telepítve",
"configKey": "Kulcs",
"configValue": "Érték",
"allUsers": "Összes felhasználó engedélyezése",
"selectedUsers": "Kiválasztott felhasználók engedélyezése",
"allLibraries": "Összes könyvtár engedélyezése",
"selectedLibraries": "Kiválasztott könyvtárak engedélyezése"
},
"sections": {
"status": "Státusz",
"info": "Kiegészítő információi",
"configuration": "Konfiguráció",
"manifest": "Manifest",
"usersPermission": "Felhasználói engedélyek",
"libraryPermission": "Könyvtári engedélyek"
},
"status": {
"enabled": "Engedélyezve",
"disabled": "Letiltva"
},
"actions": {
"enable": "Engedélyezés",
"disable": "Letiltás",
"disabledDueToError": "Javítsd ki a kiegészítő hibáját",
"disabledUsersRequired": "Válassz felhasználókat",
"disabledLibrariesRequired": "Válassz könyvtárakat",
"addConfig": "Konfiguráció hozzáadása",
"rescan": "Újraszkennelés"
},
"notifications": {
"enabled": "Kiegészítő engedélyezve",
"disabled": "Kiegészítő letiltva",
"updated": "Kiegészítő frissítve",
"error": "Hiba történt a kiegészítő frissítése közben"
},
"validation": {
"invalidJson": "A konfigurációs JSON érvénytelen"
},
"messages": {
"configHelp": "Konfiguráld a kiegészítőt kulcs-érték párokkal. Hagyd a mezőt üresen, ha nincs szükség konfigurációra.",
"configValidationError": "Helytelen konfiguráció:",
"schemaRenderError": "Nem sikerült megjeleníteni a konfigurációs űrlapot. A bővítmény sémája érvénytelen lehet.",
"clickPermissions": "Kattints egy engedélyre a részletekért",
"noConfig": "Nincs konfiguráció beállítva",
"allUsersHelp": "Engedélyezés esetén ez a kiegészítő hozzá fog férni minden jelenlegi és jövőben létrehozott felhasználóhoz.",
"noUsers": "Nincsenek kiválasztott felhasználók",
"permissionReason": "Indok",
"usersRequired": "Ez a kiegészítő hozzáférést kér felhasználói információkhoz. Válaszd ki, melyik felhasználókat érheti el, vagy az 'Összes felhasználó engedélyezése' opciót.",
"allLibrariesHelp": "Engedélyezés esetén ez a kiegészítő hozzá fog férni minden jelenlegi és jövőben létrehozott könyvtárhoz.",
"noLibraries": "Nincs kiválasztott könyvtár",
"librariesRequired": "Ez a kiegészítő hozzáférést kér könyvtárinformációkhoz. Válaszd ki, melyik könyvtárakat érheti el, vagy az 'Összes könyvtár engedélyezése' opciót.",
"requiredHosts": "Szükséges hostok"
},
"placeholders": {
"configKey": "kulcs",
"configValue": "érték"
}
} }
}, },
"ra": { "ra": {
@@ -478,7 +402,7 @@
"loading": "Betöltés", "loading": "Betöltés",
"not_found": "Nem található", "not_found": "Nem található",
"show": "%{name} #%{id}", "show": "%{name} #%{id}",
"empty": "Nincsenek %{name}.", "empty": "Nincs %{name} még.",
"invite": "Szeretnél egyet hozzáadni?" "invite": "Szeretnél egyet hozzáadni?"
}, },
"input": { "input": {
@@ -554,7 +478,6 @@
"transcodingEnabled": "A Navidrome jelenleg a következőkkel fut %{config}, ez lehetővé teszi a rendszerparancsok futtatását az átkódolási beállításokból a webes felület segítségével. Javasoljuk, hogy biztonsági okokból tiltsd ezt le, és csak az átkódolási beállítások konfigurálásának idejére kapcsold be.", "transcodingEnabled": "A Navidrome jelenleg a következőkkel fut %{config}, ez lehetővé teszi a rendszerparancsok futtatását az átkódolási beállításokból a webes felület segítségével. Javasoljuk, hogy biztonsági okokból tiltsd ezt le, és csak az átkódolási beállítások konfigurálásának idejére kapcsold be.",
"songsAddedToPlaylist": "1 szám hozzáadva a lejátszási listához |||| %{smart_count} szám hozzáadva a lejátszási listához", "songsAddedToPlaylist": "1 szám hozzáadva a lejátszási listához |||| %{smart_count} szám hozzáadva a lejátszási listához",
"noSimilarSongsFound": "Nem találhatóak hasonló számok", "noSimilarSongsFound": "Nem találhatóak hasonló számok",
"startingInstantMix": "Instant keverés töltődik...",
"noTopSongsFound": "Nincsenek top számok", "noTopSongsFound": "Nincsenek top számok",
"noPlaylistsAvailable": "Nem áll rendelkezésre", "noPlaylistsAvailable": "Nem áll rendelkezésre",
"delete_user_title": "Felhasználó törlése '%{name}'", "delete_user_title": "Felhasználó törlése '%{name}'",
@@ -668,7 +591,6 @@
"currentValue": "Jelenlegi érték", "currentValue": "Jelenlegi érték",
"configurationFile": "Konfigurációs fájl", "configurationFile": "Konfigurációs fájl",
"exportToml": "Konfiguráció exportálása (TOML)", "exportToml": "Konfiguráció exportálása (TOML)",
"downloadToml": "Konfiguráció letöltése (TOML)",
"exportSuccess": "Konfiguráció kiexportálva a vágólapra, TOML formában", "exportSuccess": "Konfiguráció kiexportálva a vágólapra, TOML formában",
"exportFailed": "Nem sikerült kimásolni a konfigurációt", "exportFailed": "Nem sikerült kimásolni a konfigurációt",
"devFlagsHeader": "Fejlesztői beállítások (változások/eltávolítás jogát fenntartjuk)", "devFlagsHeader": "Fejlesztői beállítások (változások/eltávolítás jogát fenntartjuk)",

View File

@@ -674,8 +674,7 @@
"exportSuccess": "Configuração exportada para o clipboard em formato TOML", "exportSuccess": "Configuração exportada para o clipboard em formato TOML",
"exportFailed": "Falha ao copiar configuração", "exportFailed": "Falha ao copiar configuração",
"devFlagsHeader": "Flags de Desenvolvimento (sujeitas a mudança/remoção)", "devFlagsHeader": "Flags de Desenvolvimento (sujeitas a mudança/remoção)",
"devFlagsComment": "Estas são configurações experimentais e podem ser removidas em versões futuras", "devFlagsComment": "Estas são configurações experimentais e podem ser removidas em versões futuras"
"downloadToml": "Baixar configuração (TOML)"
} }
}, },
"activity": { "activity": {

View File

File diff suppressed because it is too large Load Diff

View File

@@ -1,113 +0,0 @@
// Package e2e provides end-to-end integration tests for the Navidrome Subsonic API.
//
// These tests exercise the full HTTP request/response cycle through the Subsonic API router,
// using a real SQLite database and real repository implementations while stubbing out external
// services (artwork, streaming, scrobbling, etc.) with noop implementations.
//
// # Test Infrastructure
//
// The suite uses [Ginkgo] v2 as the test runner and [Gomega] for assertions. It is invoked
// through the standard Go test entry point [TestSubsonicE2E], which initializes the test
// environment, creates a temporary SQLite database, and runs the specs.
//
// # Setup and Teardown
//
// During [BeforeSuite], the test infrastructure:
//
// 1. Creates a temporary SQLite database with WAL journal mode.
// 2. Initializes the schema via [db.Init].
// 3. Creates two test users: an admin ("admin") and a regular user ("regular"),
// both with the password "password".
// 4. Creates a single library ("Music Library") backed by a fake in-memory filesystem
// (scheme "fake:///music") using the [storagetest] package.
// 5. Populates the filesystem with a set of test tracks spanning multiple artists,
// albums, genres, and years.
// 6. Runs the scanner to import all metadata into the database.
// 7. Takes a snapshot of the database to serve as a golden baseline for test isolation.
//
// # Test Data
//
// The fake filesystem contains the following music library structure:
//
// Rock/The Beatles/Abbey Road/
// 01 - Come Together.mp3 (1969, Rock)
// 02 - Something.mp3 (1969, Rock)
// Rock/The Beatles/Help!/
// 01 - Help.mp3 (1965, Rock)
// Rock/Led Zeppelin/IV/
// 01 - Stairway To Heaven.mp3 (1971, Rock)
// Jazz/Miles Davis/Kind of Blue/
// 01 - So What.mp3 (1959, Jazz)
// Pop/
// 01 - Standalone Track.mp3 (2020, Pop)
//
// # Database Isolation
//
// Before each top-level Describe block, the [setupTestDB] function restores the database
// to its golden snapshot state using SQLite's ATTACH DATABASE mechanism. This copies all
// table data from the snapshot back into the main database, providing each test group with
// a clean, consistent starting state without the overhead of re-scanning the filesystem.
//
// A fresh [subsonic.Router] is also created for each test group, wired with real data store
// repositories and noop stubs for external services:
//
// - noopArtwork: returns [model.ErrNotFound] for all artwork requests.
// - noopStreamer: returns [model.ErrNotFound] for all stream requests.
// - noopArchiver: returns [model.ErrNotFound] for all archive requests.
// - noopProvider: returns empty results for all external metadata lookups.
// - noopPlayTracker: silently discards all scrobble events.
//
// # Request Helpers
//
// Tests build HTTP requests using the [buildReq] helper, which constructs a Subsonic API
// request with authentication parameters (username, password, API version "1.16.1", client
// name "test-client", and JSON format). Convenience wrappers include:
//
// - [doReq]: sends a request as the admin user and returns the parsed JSON response.
// - [doReqWithUser]: sends a request as a specific user.
// - [doRawReq] / [doRawReqWithUser]: returns the raw [httptest.ResponseRecorder] for
// binary content or status code inspection.
//
// Responses are parsed via [parseJSONResponse], which unwraps the Subsonic JSON envelope
// and returns the inner response map.
//
// # Test Organization
//
// Each test file covers a logical group of Subsonic API endpoints:
//
// - subsonic_system_test.go: ping, getLicense, getOpenSubsonicExtensions
// - subsonic_browsing_test.go: getMusicFolders, getIndexes, getArtists, getMusicDirectory,
// getArtist, getAlbum, getSong, getGenres
// - subsonic_searching_test.go: search2, search3
// - subsonic_album_lists_test.go: getAlbumList, getAlbumList2
// - subsonic_playlists_test.go: createPlaylist, getPlaylist, getPlaylists,
// updatePlaylist, deletePlaylist
// - subsonic_media_annotation_test.go: star, unstar, getStarred, setRating, scrobble
// - subsonic_media_retrieval_test.go: stream, download, getCoverArt, getAvatar,
// getLyrics, getLyricsBySongId
// - subsonic_bookmarks_test.go: createBookmark, getBookmarks, deleteBookmark,
// savePlayQueue, getPlayQueue
// - subsonic_radio_test.go: getInternetRadioStations, createInternetRadioStation,
// updateInternetRadioStation, deleteInternetRadioStation
// - subsonic_sharing_test.go: createShare, getShares, updateShare, deleteShare
// - subsonic_users_test.go: getUser, getUsers
// - subsonic_scan_test.go: getScanStatus, startScan
// - subsonic_multiuser_test.go: multi-user isolation and permission enforcement
// - subsonic_multilibrary_test.go: multi-library access control and data isolation
//
// Some test groups use Ginkgo's Ordered decorator to run tests sequentially within a block,
// allowing later tests to depend on state created by earlier ones (e.g., creating a playlist
// and then verifying it can be retrieved).
//
// # Running
//
// The e2e tests are included in the standard test suite and can be run with:
//
// make test PKG=./server/e2e # Run only e2e tests
// make test # Run all tests including e2e
// make test-race # Run with race detector
//
// [Ginkgo]: https://onsi.github.io/ginkgo/
// [Gomega]: https://onsi.github.io/gomega/
// [storagetest]: /core/storage/storagetest
package e2e

View File

@@ -23,6 +23,7 @@ import (
"github.com/navidrome/navidrome/core/playback" "github.com/navidrome/navidrome/core/playback"
"github.com/navidrome/navidrome/core/scrobbler" "github.com/navidrome/navidrome/core/scrobbler"
"github.com/navidrome/navidrome/core/storage/storagetest" "github.com/navidrome/navidrome/core/storage/storagetest"
"github.com/navidrome/navidrome/core/transcode"
"github.com/navidrome/navidrome/db" "github.com/navidrome/navidrome/db"
"github.com/navidrome/navidrome/log" "github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model" "github.com/navidrome/navidrome/model"
@@ -189,14 +190,33 @@ func (n noopArtwork) GetOrPlaceholder(_ context.Context, _ string, _ int, _ bool
// noopStreamer implements core.MediaStreamer // noopStreamer implements core.MediaStreamer
type noopStreamer struct{} type noopStreamer struct{}
func (n noopStreamer) NewStream(context.Context, string, string, int, int) (*core.Stream, error) { func (n noopStreamer) NewStream(context.Context, core.StreamRequest) (*core.Stream, error) {
return nil, model.ErrNotFound return nil, model.ErrNotFound
} }
func (n noopStreamer) DoStream(context.Context, *model.MediaFile, string, int, int) (*core.Stream, error) { func (n noopStreamer) DoStream(context.Context, *model.MediaFile, core.StreamRequest) (*core.Stream, error) {
return nil, model.ErrNotFound return nil, model.ErrNotFound
} }
// noopDecider implements transcode.Decider
type noopDecider struct{}
func (n noopDecider) MakeDecision(context.Context, *model.MediaFile, *transcode.ClientInfo) (*transcode.Decision, error) {
return nil, nil
}
func (n noopDecider) CreateTranscodeParams(*transcode.Decision) (string, error) {
return "", nil
}
func (n noopDecider) ParseTranscodeParams(string) (*transcode.Params, error) {
return nil, nil
}
func (n noopDecider) ValidateTranscodeParams(context.Context, string, string) (*transcode.Params, *model.MediaFile, error) {
return nil, nil, nil
}
// noopArchiver implements core.Archiver // noopArchiver implements core.Archiver
type noopArchiver struct{} type noopArchiver struct{}
@@ -265,6 +285,7 @@ var (
_ core.Archiver = noopArchiver{} _ core.Archiver = noopArchiver{}
_ external.Provider = noopProvider{} _ external.Provider = noopProvider{}
_ scrobbler.PlayTracker = noopPlayTracker{} _ scrobbler.PlayTracker = noopPlayTracker{}
_ transcode.Decider = noopDecider{}
) )
var _ = BeforeSuite(func() { var _ = BeforeSuite(func() {
@@ -318,11 +339,6 @@ func setupTestDB() {
ctx = request.WithUser(GinkgoT().Context(), adminUser) ctx = request.WithUser(GinkgoT().Context(), adminUser)
DeferCleanup(configtest.SetupConfig()) DeferCleanup(configtest.SetupConfig())
DeferCleanup(func() {
// Wait for any background scan (e.g. from startScan endpoint) to finish
// before config cleanup runs, to avoid a data race on conf.Server.
Eventually(scanner.IsScanning).Should(BeFalse())
})
conf.Server.MusicFolder = "fake:///music" conf.Server.MusicFolder = "fake:///music"
conf.Server.DevExternalScanner = false conf.Server.DevExternalScanner = false
@@ -349,6 +365,7 @@ func setupTestDB() {
core.NewShare(ds), core.NewShare(ds),
playback.PlaybackServer(nil), playback.PlaybackServer(nil),
metrics.NewNoopInstance(), metrics.NewNoopInstance(),
noopDecider{},
) )
} }

View File

@@ -8,6 +8,7 @@ import (
"strconv" "strconv"
"github.com/lestrrat-go/jwx/v2/jwt" "github.com/lestrrat-go/jwx/v2/jwt"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/core/auth" "github.com/navidrome/navidrome/core/auth"
"github.com/navidrome/navidrome/log" "github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/utils/req" "github.com/navidrome/navidrome/utils/req"
@@ -24,10 +25,13 @@ func (pub *Router) handleStream(w http.ResponseWriter, r *http.Request) {
return return
} }
stream, err := pub.streamer.NewStream(ctx, info.id, info.format, info.bitrate, 0) stream, err := pub.streamer.NewStream(ctx, core.StreamRequest{
ID: info.id, Format: info.format, BitRate: info.bitrate,
})
if err != nil { if err != nil {
log.Error(ctx, "Error starting shared stream", err) log.Error(ctx, "Error starting shared stream", err)
http.Error(w, "invalid request", http.StatusInternalServerError) http.Error(w, "invalid request", http.StatusInternalServerError)
return
} }
// Make sure the stream will be closed at the end, to avoid leakage // Make sure the stream will be closed at the end, to avoid leakage

View File

@@ -27,7 +27,7 @@ var _ = Describe("Album Lists", func() {
ds = &tests.MockDataStore{} ds = &tests.MockDataStore{}
auth.Init(ds) auth.Init(ds)
mockRepo = ds.Album(ctx).(*tests.MockAlbumRepo) mockRepo = ds.Album(ctx).(*tests.MockAlbumRepo)
router = New(ds, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil) router = New(ds, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil)
w = httptest.NewRecorder() w = httptest.NewRecorder()
}) })

View File

@@ -17,6 +17,7 @@ import (
"github.com/navidrome/navidrome/core/metrics" "github.com/navidrome/navidrome/core/metrics"
"github.com/navidrome/navidrome/core/playback" "github.com/navidrome/navidrome/core/playback"
"github.com/navidrome/navidrome/core/scrobbler" "github.com/navidrome/navidrome/core/scrobbler"
"github.com/navidrome/navidrome/core/transcode"
"github.com/navidrome/navidrome/log" "github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model" "github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/server" "github.com/navidrome/navidrome/server"
@@ -34,40 +35,42 @@ type handlerRaw = func(http.ResponseWriter, *http.Request) (*responses.Subsonic,
type Router struct { type Router struct {
http.Handler http.Handler
ds model.DataStore ds model.DataStore
artwork artwork.Artwork artwork artwork.Artwork
streamer core.MediaStreamer streamer core.MediaStreamer
archiver core.Archiver archiver core.Archiver
players core.Players players core.Players
provider external.Provider provider external.Provider
playlists core.Playlists playlists core.Playlists
scanner model.Scanner scanner model.Scanner
broker events.Broker broker events.Broker
scrobbler scrobbler.PlayTracker scrobbler scrobbler.PlayTracker
share core.Share share core.Share
playback playback.PlaybackServer playback playback.PlaybackServer
metrics metrics.Metrics metrics metrics.Metrics
transcodeDecision transcode.Decider
} }
func New(ds model.DataStore, artwork artwork.Artwork, streamer core.MediaStreamer, archiver core.Archiver, func New(ds model.DataStore, artwork artwork.Artwork, streamer core.MediaStreamer, archiver core.Archiver,
players core.Players, provider external.Provider, scanner model.Scanner, broker events.Broker, players core.Players, provider external.Provider, scanner model.Scanner, broker events.Broker,
playlists core.Playlists, scrobbler scrobbler.PlayTracker, share core.Share, playback playback.PlaybackServer, playlists core.Playlists, scrobbler scrobbler.PlayTracker, share core.Share, playback playback.PlaybackServer,
metrics metrics.Metrics, metrics metrics.Metrics, transcodeDecision transcode.Decider,
) *Router { ) *Router {
r := &Router{ r := &Router{
ds: ds, ds: ds,
artwork: artwork, artwork: artwork,
streamer: streamer, streamer: streamer,
archiver: archiver, archiver: archiver,
players: players, players: players,
provider: provider, provider: provider,
playlists: playlists, playlists: playlists,
scanner: scanner, scanner: scanner,
broker: broker, broker: broker,
scrobbler: scrobbler, scrobbler: scrobbler,
share: share, share: share,
playback: playback, playback: playback,
metrics: metrics, metrics: metrics,
transcodeDecision: transcodeDecision,
} }
r.Handler = r.routes() r.Handler = r.routes()
return r return r
@@ -172,6 +175,8 @@ func (api *Router) routes() http.Handler {
h(r, "getLyricsBySongId", api.GetLyricsBySongId) h(r, "getLyricsBySongId", api.GetLyricsBySongId)
hr(r, "stream", api.Stream) hr(r, "stream", api.Stream)
hr(r, "download", api.Download) hr(r, "download", api.Download)
hr(r, "getTranscodeDecision", api.GetTranscodeDecision)
hr(r, "getTranscodeStream", api.GetTranscodeStream)
}) })
r.Group(func(r chi.Router) { r.Group(func(r chi.Router) {
// configure request throttling // configure request throttling

View File

@@ -27,7 +27,7 @@ var _ = Describe("MediaAnnotationController", func() {
ds = &tests.MockDataStore{} ds = &tests.MockDataStore{}
playTracker = &fakePlayTracker{} playTracker = &fakePlayTracker{}
eventBroker = &fakeEventBroker{} eventBroker = &fakeEventBroker{}
router = New(ds, nil, nil, nil, nil, nil, nil, eventBroker, nil, playTracker, nil, nil, nil) router = New(ds, nil, nil, nil, nil, nil, nil, eventBroker, nil, playTracker, nil, nil, nil, nil)
}) })
Describe("Scrobble", func() { Describe("Scrobble", func() {

View File

@@ -33,7 +33,7 @@ var _ = Describe("MediaRetrievalController", func() {
MockedMediaFile: mockRepo, MockedMediaFile: mockRepo,
} }
artwork = &fakeArtwork{data: "image data"} artwork = &fakeArtwork{data: "image data"}
router = New(ds, artwork, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil) router = New(ds, artwork, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil)
w = httptest.NewRecorder() w = httptest.NewRecorder()
DeferCleanup(configtest.SetupConfig()) DeferCleanup(configtest.SetupConfig())
conf.Server.LyricsPriority = "embedded,.lrc" conf.Server.LyricsPriority = "embedded,.lrc"

View File

@@ -25,32 +25,24 @@ import (
"github.com/navidrome/navidrome/model/request" "github.com/navidrome/navidrome/model/request"
"github.com/navidrome/navidrome/server" "github.com/navidrome/navidrome/server"
"github.com/navidrome/navidrome/server/subsonic/responses" "github.com/navidrome/navidrome/server/subsonic/responses"
. "github.com/navidrome/navidrome/utils/gg"
"github.com/navidrome/navidrome/utils/req" "github.com/navidrome/navidrome/utils/req"
) )
// mergeFormIntoQuery parses form data (both URL query params and POST body)
// and writes all values back into r.URL.RawQuery. This is needed because
// some Subsonic clients send parameters as form fields instead of query params.
// This support the OpenSubsonic `formPost` extension
func mergeFormIntoQuery(r *http.Request) error {
if err := r.ParseForm(); err != nil {
return err
}
var parts []string
for key, values := range r.Form {
for _, v := range values {
parts = append(parts, url.QueryEscape(key)+"="+url.QueryEscape(v))
}
}
r.URL.RawQuery = strings.Join(parts, "&")
return nil
}
func postFormToQueryParams(next http.Handler) http.Handler { func postFormToQueryParams(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if err := mergeFormIntoQuery(r); err != nil { err := r.ParseForm()
if err != nil {
sendError(w, r, newError(responses.ErrorGeneric, err.Error())) sendError(w, r, newError(responses.ErrorGeneric, err.Error()))
} }
var parts []string
for key, values := range r.Form {
for _, v := range values {
parts = append(parts, url.QueryEscape(key)+"="+url.QueryEscape(v))
}
}
r.URL.RawQuery = strings.Join(parts, "&")
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
}) })
} }
@@ -103,64 +95,54 @@ func checkRequiredParameters(next http.Handler) http.Handler {
}) })
} }
// authenticateRequest validates the authentication credentials in an HTTP request and returns
// the authenticated user. It supports internal auth, reverse proxy auth, and Subsonic classic
// auth (username + password/token/salt/jwt query params).
//
// Callers should handle specific error types as needed:
// - context.Canceled: request was canceled during authentication
// - model.ErrNotFound: username not found in database
// - model.ErrInvalidAuth: invalid credentials (wrong password, token, etc.)
func authenticateRequest(ds model.DataStore, r *http.Request) (*model.User, error) {
ctx := r.Context()
// Check internal auth or reverse proxy auth first
username, _ := fromInternalOrProxyAuth(r)
if username != "" {
return ds.User(ctx).FindByUsername(username)
}
// Fall back to Subsonic classic auth (query params)
p := req.Params(r)
username, _ = p.String("u")
if username == "" {
return nil, model.ErrInvalidAuth
}
pass, _ := p.String("p")
token, _ := p.String("t")
salt, _ := p.String("s")
jwt, _ := p.String("jwt")
usr, err := ds.User(ctx).FindByUsernameWithPassword(username)
if err != nil {
return nil, err
}
if err := validateCredentials(usr, pass, token, salt, jwt); err != nil {
return nil, err
}
return usr, nil
}
func authenticate(ds model.DataStore) func(next http.Handler) http.Handler { func authenticate(ds model.DataStore) func(next http.Handler) http.Handler {
return func(next http.Handler) http.Handler { return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context() ctx := r.Context()
usr, err := authenticateRequest(ds, r) var usr *model.User
if err != nil { var err error
username, _ := request.UsernameFrom(ctx)
switch { username, isInternalAuth := fromInternalOrProxyAuth(r)
case errors.Is(err, context.Canceled): if username != "" {
log.Debug(ctx, "API: Request canceled when authenticating", "username", username, "remoteAddr", r.RemoteAddr, err) authType := If(isInternalAuth, "internal", "reverse-proxy")
usr, err = ds.User(ctx).FindByUsername(username)
if errors.Is(err, context.Canceled) {
log.Debug(ctx, "API: Request canceled when authenticating", "auth", authType, "username", username, "remoteAddr", r.RemoteAddr, err)
return return
case errors.Is(err, model.ErrNotFound), errors.Is(err, model.ErrInvalidAuth):
log.Warn(ctx, "API: Invalid login", "username", username, "remoteAddr", r.RemoteAddr, err)
default:
log.Error(ctx, "API: Error authenticating", "username", username, "remoteAddr", r.RemoteAddr, err)
} }
if errors.Is(err, model.ErrNotFound) {
log.Warn(ctx, "API: Invalid login", "auth", authType, "username", username, "remoteAddr", r.RemoteAddr, err)
} else if err != nil {
log.Error(ctx, "API: Error authenticating username", "auth", authType, "username", username, "remoteAddr", r.RemoteAddr, err)
}
} else {
p := req.Params(r)
username, _ := p.String("u")
pass, _ := p.String("p")
token, _ := p.String("t")
salt, _ := p.String("s")
jwt, _ := p.String("jwt")
usr, err = ds.User(ctx).FindByUsernameWithPassword(username)
if errors.Is(err, context.Canceled) {
log.Debug(ctx, "API: Request canceled when authenticating", "auth", "subsonic", "username", username, "remoteAddr", r.RemoteAddr, err)
return
}
switch {
case errors.Is(err, model.ErrNotFound):
log.Warn(ctx, "API: Invalid login", "auth", "subsonic", "username", username, "remoteAddr", r.RemoteAddr, err)
case err != nil:
log.Error(ctx, "API: Error authenticating username", "auth", "subsonic", "username", username, "remoteAddr", r.RemoteAddr, err)
default:
err = validateCredentials(usr, pass, token, salt, jwt)
if err != nil {
log.Warn(ctx, "API: Invalid login", "auth", "subsonic", "username", username, "remoteAddr", r.RemoteAddr, err)
}
}
}
if err != nil {
sendError(w, r, newError(responses.ErrorAuthenticationFail)) sendError(w, r, newError(responses.ErrorAuthenticationFail))
return return
} }
@@ -171,19 +153,6 @@ func authenticate(ds model.DataStore) func(next http.Handler) http.Handler {
} }
} }
// ValidateAuth validates Subsonic authentication from an HTTP request and returns the authenticated user.
// Unlike the authenticate middleware, this function does not write any HTTP response, making it suitable
// for use by external consumers (e.g., plugin endpoints) that need Subsonic auth but want to handle
// errors themselves.
func ValidateAuth(ds model.DataStore, r *http.Request) (*model.User, error) {
// Parse form data into query params (same as postFormToQueryParams middleware,
// which is not in the call chain when ValidateAuth is used directly)
if err := mergeFormIntoQuery(r); err != nil {
return nil, fmt.Errorf("parsing form: %w", err)
}
return authenticateRequest(ds, r)
}
func validateCredentials(user *model.User, pass, token, salt, jwt string) error { func validateCredentials(user *model.User, pass, token, salt, jwt string) error {
valid := false valid := false

View File

@@ -13,6 +13,7 @@ func (api *Router) GetOpenSubsonicExtensions(_ *http.Request) (*responses.Subson
{Name: "formPost", Versions: []int32{1}}, {Name: "formPost", Versions: []int32{1}},
{Name: "songLyrics", Versions: []int32{1}}, {Name: "songLyrics", Versions: []int32{1}},
{Name: "indexBasedQueue", Versions: []int32{1}}, {Name: "indexBasedQueue", Versions: []int32{1}},
{Name: "transcoding", Versions: []int32{1}},
} }
return response, nil return response, nil
} }

View File

@@ -19,7 +19,7 @@ var _ = Describe("GetOpenSubsonicExtensions", func() {
) )
BeforeEach(func() { BeforeEach(func() {
router = subsonic.New(nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil) router = subsonic.New(nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil)
w = httptest.NewRecorder() w = httptest.NewRecorder()
r = httptest.NewRequest("GET", "/getOpenSubsonicExtensions?f=json", nil) r = httptest.NewRequest("GET", "/getOpenSubsonicExtensions?f=json", nil)
}) })
@@ -35,11 +35,12 @@ var _ = Describe("GetOpenSubsonicExtensions", func() {
err := json.Unmarshal(w.Body.Bytes(), &response) err := json.Unmarshal(w.Body.Bytes(), &response)
Expect(err).NotTo(HaveOccurred()) Expect(err).NotTo(HaveOccurred())
Expect(*response.Subsonic.OpenSubsonicExtensions).To(SatisfyAll( Expect(*response.Subsonic.OpenSubsonicExtensions).To(SatisfyAll(
HaveLen(4), HaveLen(5),
ContainElement(responses.OpenSubsonicExtension{Name: "transcodeOffset", Versions: []int32{1}}), ContainElement(responses.OpenSubsonicExtension{Name: "transcodeOffset", Versions: []int32{1}}),
ContainElement(responses.OpenSubsonicExtension{Name: "formPost", Versions: []int32{1}}), ContainElement(responses.OpenSubsonicExtension{Name: "formPost", Versions: []int32{1}}),
ContainElement(responses.OpenSubsonicExtension{Name: "songLyrics", Versions: []int32{1}}), ContainElement(responses.OpenSubsonicExtension{Name: "songLyrics", Versions: []int32{1}}),
ContainElement(responses.OpenSubsonicExtension{Name: "indexBasedQueue", Versions: []int32{1}}), ContainElement(responses.OpenSubsonicExtension{Name: "indexBasedQueue", Versions: []int32{1}}),
ContainElement(responses.OpenSubsonicExtension{Name: "transcoding", Versions: []int32{1}}),
)) ))
}) })
}) })

View File

@@ -24,7 +24,7 @@ var _ = Describe("buildPlaylist", func() {
BeforeEach(func() { BeforeEach(func() {
ds = &tests.MockDataStore{} ds = &tests.MockDataStore{}
router = New(ds, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil) router = New(ds, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil)
ctx = context.Background() ctx = context.Background()
}) })
@@ -224,7 +224,7 @@ var _ = Describe("UpdatePlaylist", func() {
BeforeEach(func() { BeforeEach(func() {
ds = &tests.MockDataStore{} ds = &tests.MockDataStore{}
playlists = &fakePlaylists{} playlists = &fakePlaylists{}
router = New(ds, nil, nil, nil, nil, nil, nil, nil, playlists, nil, nil, nil, nil) router = New(ds, nil, nil, nil, nil, nil, nil, nil, playlists, nil, nil, nil, nil, nil)
}) })
It("clears the comment when parameter is empty", func() { It("clears the comment when parameter is empty", func() {

View File

@@ -61,6 +61,7 @@ type Subsonic struct {
OpenSubsonicExtensions *OpenSubsonicExtensions `xml:"openSubsonicExtensions,omitempty" json:"openSubsonicExtensions,omitempty"` OpenSubsonicExtensions *OpenSubsonicExtensions `xml:"openSubsonicExtensions,omitempty" json:"openSubsonicExtensions,omitempty"`
LyricsList *LyricsList `xml:"lyricsList,omitempty" json:"lyricsList,omitempty"` LyricsList *LyricsList `xml:"lyricsList,omitempty" json:"lyricsList,omitempty"`
PlayQueueByIndex *PlayQueueByIndex `xml:"playQueueByIndex,omitempty" json:"playQueueByIndex,omitempty"` PlayQueueByIndex *PlayQueueByIndex `xml:"playQueueByIndex,omitempty" json:"playQueueByIndex,omitempty"`
TranscodeDecision *TranscodeDecision `xml:"transcodeDecision,omitempty" json:"transcodeDecision,omitempty"`
} }
const ( const (
@@ -617,3 +618,26 @@ func marshalJSONArray[T any](v []T) ([]byte, error) {
} }
return json.Marshal(v) return json.Marshal(v)
} }
// TranscodeDecision represents the response for getTranscodeDecision (OpenSubsonic transcoding extension)
type TranscodeDecision struct {
	CanDirectPlay    bool           `xml:"canDirectPlay,attr" json:"canDirectPlay"` // true when the client can play the source stream as-is
	CanTranscode     bool           `xml:"canTranscode,attr" json:"canTranscode"`   // true when the server can transcode to a client-supported target
	TranscodeReasons []string       `xml:"transcodeReason,omitempty" json:"transcodeReason,omitempty"` // reasons direct play was ruled out (emitted as repeated elements in XML)
	ErrorReason      string         `xml:"errorReason,attr,omitempty" json:"errorReason,omitempty"`    // set when no playback path exists
	TranscodeParams  string         `xml:"transcodeParams,attr,omitempty" json:"transcodeParams,omitempty"` // opaque token the client passes back to getTranscodeStream; empty when no playback path exists
	SourceStream     *StreamDetails `xml:"sourceStream,omitempty" json:"sourceStream,omitempty"`       // properties of the original media stream
	TranscodeStream  *StreamDetails `xml:"transcodeStream,omitempty" json:"transcodeStream,omitempty"` // properties of the transcoded stream; nil when not transcoding
}
// StreamDetails describes audio stream properties for transcoding decisions
type StreamDetails struct {
	Protocol        string `xml:"protocol,attr,omitempty" json:"protocol,omitempty"`   // streaming protocol (e.g. "http")
	Container       string `xml:"container,attr,omitempty" json:"container,omitempty"` // container format (file suffix / mime container)
	Codec           string `xml:"codec,attr,omitempty" json:"codec,omitempty"`         // audio codec identifier
	AudioChannels   int32  `xml:"audioChannels,attr,omitempty" json:"audioChannels,omitempty"`
	AudioBitrate    int32  `xml:"audioBitrate,attr,omitempty" json:"audioBitrate,omitempty"` // in bits per second (core kbps values are converted before filling this in)
	AudioProfile    string `xml:"audioProfile,attr,omitempty" json:"audioProfile,omitempty"`
	AudioSamplerate int32  `xml:"audioSamplerate,attr,omitempty" json:"audioSamplerate,omitempty"` // in Hz
	AudioBitdepth   int32  `xml:"audioBitdepth,attr,omitempty" json:"audioBitdepth,omitempty"`     // bits per sample; may be 0 for lossy sources
}

View File

@@ -21,7 +21,7 @@ var _ = Describe("Search", func() {
ds = &tests.MockDataStore{} ds = &tests.MockDataStore{}
auth.Init(ds) auth.Init(ds)
router = New(ds, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil) router = New(ds, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil)
// Get references to the mock repositories so we can inspect their Options // Get references to the mock repositories so we can inspect their Options
mockAlbumRepo = ds.Album(nil).(*tests.MockAlbumRepo) mockAlbumRepo = ds.Album(nil).(*tests.MockAlbumRepo)

View File

@@ -60,7 +60,9 @@ func (api *Router) Stream(w http.ResponseWriter, r *http.Request) (*responses.Su
format, _ := p.String("format") format, _ := p.String("format")
timeOffset := p.IntOr("timeOffset", 0) timeOffset := p.IntOr("timeOffset", 0)
stream, err := api.streamer.NewStream(ctx, id, format, maxBitRate, timeOffset) stream, err := api.streamer.NewStream(ctx, core.StreamRequest{
ID: id, Format: format, BitRate: maxBitRate, Offset: timeOffset,
})
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -129,7 +131,9 @@ func (api *Router) Download(w http.ResponseWriter, r *http.Request) (*responses.
switch v := entity.(type) { switch v := entity.(type) {
case *model.MediaFile: case *model.MediaFile:
stream, err := api.streamer.NewStream(ctx, id, format, maxBitRate, 0) stream, err := api.streamer.NewStream(ctx, core.StreamRequest{
ID: id, Format: format, BitRate: maxBitRate,
})
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@@ -0,0 +1,383 @@
package subsonic
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"slices"
"strconv"
"github.com/navidrome/navidrome/core"
"github.com/navidrome/navidrome/core/transcode"
"github.com/navidrome/navidrome/log"
"github.com/navidrome/navidrome/model"
"github.com/navidrome/navidrome/server/subsonic/responses"
"github.com/navidrome/navidrome/utils/req"
)
// API-layer request structs for JSON unmarshaling (decoupled from core structs)

// clientInfoRequest represents client playback capabilities from the request body.
// Bitrate values here are in bits per second, per the OpenSubsonic spec; they are
// converted to kbps when mapped to the core structs (see toCoreClientInfo).
type clientInfoRequest struct {
	Name                       string                      `json:"name,omitempty"`     // client application name
	Platform                   string                      `json:"platform,omitempty"` // client platform/OS
	MaxAudioBitrate            int                         `json:"maxAudioBitrate,omitempty"`            // max direct-play bitrate, in bps
	MaxTranscodingAudioBitrate int                         `json:"maxTranscodingAudioBitrate,omitempty"` // max transcoded bitrate, in bps
	DirectPlayProfiles         []directPlayProfileRequest  `json:"directPlayProfiles,omitempty"`
	TranscodingProfiles        []transcodingProfileRequest `json:"transcodingProfiles,omitempty"`
	CodecProfiles              []codecProfileRequest       `json:"codecProfiles,omitempty"`
}

// directPlayProfileRequest describes a format the client can play directly
type directPlayProfileRequest struct {
	Containers       []string `json:"containers,omitempty"`
	AudioCodecs      []string `json:"audioCodecs,omitempty"`
	Protocols        []string `json:"protocols,omitempty"` // each must be a valid protocol (see validate)
	MaxAudioChannels int      `json:"maxAudioChannels,omitempty"`
}

// transcodingProfileRequest describes a transcoding target the client supports
type transcodingProfileRequest struct {
	Container        string `json:"container,omitempty"`
	AudioCodec       string `json:"audioCodec,omitempty"`
	Protocol         string `json:"protocol,omitempty"` // optional; when set, must be a valid protocol
	MaxAudioChannels int    `json:"maxAudioChannels,omitempty"`
}

// codecProfileRequest describes codec-specific limitations
type codecProfileRequest struct {
	Type        string              `json:"type,omitempty"` // profile type; currently only "audio" is valid (see validate)
	Name        string              `json:"name,omitempty"` // codec name the limitations apply to
	Limitations []limitationRequest `json:"limitations,omitempty"`
}

// limitationRequest describes a specific codec limitation
type limitationRequest struct {
	Name       string   `json:"name,omitempty"`       // which property is limited (audioChannels, audioBitrate, ...)
	Comparison string   `json:"comparison,omitempty"` // comparison operator (equals, notEquals, <=, >= variants)
	Values     []string `json:"values,omitempty"`     // operand values; audioBitrate values are in bps
	Required   bool     `json:"required,omitempty"`
}
// toCoreClientInfo converts the API request struct to the transcode.ClientInfo struct.
// The OpenSubsonic spec uses bps for bitrate values; core uses kbps, so all bitrate
// fields — including values of audioBitrate limitations — are converted here.
func (r *clientInfoRequest) toCoreClientInfo() *transcode.ClientInfo {
	ci := &transcode.ClientInfo{
		Name:                       r.Name,
		Platform:                   r.Platform,
		MaxAudioBitrate:            bpsToKbps(r.MaxAudioBitrate),
		MaxTranscodingAudioBitrate: bpsToKbps(r.MaxTranscodingAudioBitrate),
	}
	// Pre-size destination slices (lengths are known up front), but only when
	// non-empty so empty inputs still map to nil slices, as before.
	if n := len(r.DirectPlayProfiles); n > 0 {
		ci.DirectPlayProfiles = make([]transcode.DirectPlayProfile, 0, n)
	}
	for _, dp := range r.DirectPlayProfiles {
		ci.DirectPlayProfiles = append(ci.DirectPlayProfiles, transcode.DirectPlayProfile{
			Containers:       dp.Containers,
			AudioCodecs:      dp.AudioCodecs,
			Protocols:        dp.Protocols,
			MaxAudioChannels: dp.MaxAudioChannels,
		})
	}
	if n := len(r.TranscodingProfiles); n > 0 {
		ci.TranscodingProfiles = make([]transcode.Profile, 0, n)
	}
	for _, tp := range r.TranscodingProfiles {
		ci.TranscodingProfiles = append(ci.TranscodingProfiles, transcode.Profile{
			Container:        tp.Container,
			AudioCodec:       tp.AudioCodec,
			Protocol:         tp.Protocol,
			MaxAudioChannels: tp.MaxAudioChannels,
		})
	}
	if n := len(r.CodecProfiles); n > 0 {
		ci.CodecProfiles = make([]transcode.CodecProfile, 0, n)
	}
	for _, cp := range r.CodecProfiles {
		coreCP := transcode.CodecProfile{
			Type: cp.Type,
			Name: cp.Name,
		}
		if n := len(cp.Limitations); n > 0 {
			coreCP.Limitations = make([]transcode.Limitation, 0, n)
		}
		for _, lim := range cp.Limitations {
			coreLim := transcode.Limitation{
				Name:       lim.Name,
				Comparison: lim.Comparison,
				Values:     lim.Values,
				Required:   lim.Required,
			}
			// Convert audioBitrate limitation values from bps to kbps
			if lim.Name == transcode.LimitationAudioBitrate {
				coreLim.Values = convertBitrateValues(lim.Values)
			}
			coreCP.Limitations = append(coreCP.Limitations, coreLim)
		}
		ci.CodecProfiles = append(ci.CodecProfiles, coreCP)
	}
	return ci
}
// bpsToKbps converts bits per second to kilobits per second (rounded).
func bpsToKbps(bps int) int {
	const bitsPerKilobit = 1000
	return (bps + bitsPerKilobit/2) / bitsPerKilobit
}

// kbpsToBps converts kilobits per second to bits per second.
func kbpsToBps(kbps int) int {
	return 1000 * kbps
}

// convertBitrateValues converts a slice of bps string values to kbps string values.
// Values that do not parse as integers are kept unchanged.
func convertBitrateValues(bpsValues []string) []string {
	converted := make([]string, len(bpsValues))
	for i, raw := range bpsValues {
		if n, err := strconv.Atoi(raw); err == nil {
			converted[i] = strconv.Itoa(bpsToKbps(n))
			continue
		}
		converted[i] = raw // preserve unparseable values as-is
	}
	return converted
}
// validate checks that all enum fields in the request contain valid values per the
// OpenSubsonic spec. It returns an error describing the first violation found, or
// nil when the request is well-formed.
func (r *clientInfoRequest) validate() error {
	for _, profile := range r.DirectPlayProfiles {
		for _, proto := range profile.Protocols {
			if !isValidProtocol(proto) {
				return fmt.Errorf("invalid protocol: %s", proto)
			}
		}
	}
	for _, profile := range r.TranscodingProfiles {
		// Protocol is optional on transcoding profiles; only validate when present.
		if profile.Protocol == "" {
			continue
		}
		if !isValidProtocol(profile.Protocol) {
			return fmt.Errorf("invalid protocol: %s", profile.Protocol)
		}
	}
	for _, profile := range r.CodecProfiles {
		if !isValidCodecProfileType(profile.Type) {
			return fmt.Errorf("invalid codec profile type: %s", profile.Type)
		}
		for _, limit := range profile.Limitations {
			if !isValidLimitationName(limit.Name) {
				return fmt.Errorf("invalid limitation name: %s", limit.Name)
			}
			if !isValidComparison(limit.Comparison) {
				return fmt.Errorf("invalid comparison: %s", limit.Comparison)
			}
		}
	}
	return nil
}
// Allowed enum values for the OpenSubsonic transcoding extension request fields.
var (
	validProtocols = []string{
		transcode.ProtocolHTTP,
		transcode.ProtocolHLS,
	}
	validCodecProfileTypes = []string{
		transcode.CodecProfileTypeAudio,
	}
	validLimitationNames = []string{
		transcode.LimitationAudioChannels,
		transcode.LimitationAudioBitrate,
		transcode.LimitationAudioProfile,
		transcode.LimitationAudioSamplerate,
		transcode.LimitationAudioBitdepth,
	}
	validComparisons = []string{
		transcode.ComparisonEquals,
		transcode.ComparisonNotEquals,
		transcode.ComparisonLessThanEqual,
		transcode.ComparisonGreaterThanEqual,
	}
)

// isValidProtocol reports whether p is an accepted streaming protocol.
func isValidProtocol(p string) bool { return slices.Contains(validProtocols, p) }

// isValidCodecProfileType reports whether t is an accepted codec profile type.
func isValidCodecProfileType(t string) bool { return slices.Contains(validCodecProfileTypes, t) }

// isValidLimitationName reports whether n is an accepted limitation name.
func isValidLimitationName(n string) bool { return slices.Contains(validLimitationNames, n) }

// isValidComparison reports whether c is an accepted comparison operator.
func isValidComparison(c string) bool { return slices.Contains(validComparisons, c) }
// GetTranscodeDecision handles the OpenSubsonic getTranscodeDecision endpoint.
// It receives client capabilities (as a JSON body) and returns a decision on
// whether the client should direct play or transcode the given media.
func (api *Router) GetTranscodeDecision(w http.ResponseWriter, r *http.Request) (*responses.Subsonic, error) {
	// The capabilities document comes in the body, so this endpoint is POST-only.
	if r.Method != http.MethodPost {
		w.Header().Set("Allow", "POST")
		http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed)
		return nil, nil
	}
	ctx := r.Context()
	params := req.Params(r)
	mediaID, err := params.String("mediaId")
	if err != nil {
		return nil, newError(responses.ErrorMissingParameter, "missing required parameter: mediaId")
	}
	mediaType, err := params.String("mediaType")
	if err != nil {
		return nil, newError(responses.ErrorMissingParameter, "missing required parameter: mediaType")
	}
	// Only support songs for now
	if mediaType != "song" {
		return nil, newError(responses.ErrorGeneric, "mediaType '%s' is not yet supported", mediaType)
	}

	// Parse and validate ClientInfo from request body (required per OpenSubsonic spec).
	// Cap the body size so a misbehaving client can't exhaust memory.
	r.Body = http.MaxBytesReader(w, r.Body, 1<<20) // 1 MB limit
	var infoReq clientInfoRequest
	if err := json.NewDecoder(r.Body).Decode(&infoReq); err != nil {
		return nil, newError(responses.ErrorGeneric, "invalid JSON request body")
	}
	if err := infoReq.validate(); err != nil {
		return nil, newError(responses.ErrorGeneric, "%v", err)
	}
	clientInfo := infoReq.toCoreClientInfo()

	// Look up the media file the decision is about.
	mf, err := api.ds.MediaFile(ctx).Get(mediaID)
	if errors.Is(err, model.ErrNotFound) {
		return nil, newError(responses.ErrorDataNotFound, "media file not found: %s", mediaID)
	}
	if err != nil {
		return nil, newError(responses.ErrorGeneric, "error retrieving media file: %v", err)
	}

	// Make the decision
	decision, err := api.transcodeDecision.MakeDecision(ctx, mf, clientInfo)
	if err != nil {
		return nil, newError(responses.ErrorGeneric, "failed to make transcode decision: %v", err)
	}

	// Only create a token when there is a valid playback path
	var transcodeParams string
	if decision.CanDirectPlay || decision.CanTranscode {
		if transcodeParams, err = api.transcodeDecision.CreateTranscodeParams(decision); err != nil {
			return nil, newError(responses.ErrorGeneric, "failed to create transcode token: %v", err)
		}
	}

	// Build response (convert kbps from core to bps for the API)
	response := newResponse()
	response.TranscodeDecision = &responses.TranscodeDecision{
		CanDirectPlay:    decision.CanDirectPlay,
		CanTranscode:     decision.CanTranscode,
		TranscodeReasons: decision.TranscodeReasons,
		ErrorReason:      decision.ErrorReason,
		TranscodeParams:  transcodeParams,
		SourceStream: &responses.StreamDetails{
			Protocol:        "http",
			Container:       decision.SourceStream.Container,
			Codec:           decision.SourceStream.Codec,
			AudioBitrate:    int32(kbpsToBps(decision.SourceStream.Bitrate)),
			AudioProfile:    decision.SourceStream.Profile,
			AudioSamplerate: int32(decision.SourceStream.SampleRate),
			AudioBitdepth:   int32(decision.SourceStream.BitDepth),
			AudioChannels:   int32(decision.SourceStream.Channels),
		},
	}
	if ts := decision.TranscodeStream; ts != nil {
		response.TranscodeDecision.TranscodeStream = &responses.StreamDetails{
			Protocol:        "http",
			Container:       ts.Container,
			Codec:           ts.Codec,
			AudioBitrate:    int32(kbpsToBps(ts.Bitrate)),
			AudioProfile:    ts.Profile,
			AudioSamplerate: int32(ts.SampleRate),
			AudioBitdepth:   int32(ts.BitDepth),
			AudioChannels:   int32(ts.Channels),
		}
	}
	return response, nil
}
// GetTranscodeStream handles the OpenSubsonic getTranscodeStream endpoint.
// It streams media using the decision encoded in the transcodeParams JWT token.
// All errors are returned as proper HTTP status codes (not Subsonic error responses).
func (api *Router) GetTranscodeStream(w http.ResponseWriter, r *http.Request) (*responses.Subsonic, error) {
	ctx := r.Context()
	params := req.Params(r)

	mediaID, idErr := params.String("mediaId")
	mediaType, typeErr := params.String("mediaType")
	token, tokenErr := params.String("transcodeParams")
	// All three parameters are required, and only songs are supported for now.
	if idErr != nil || typeErr != nil || tokenErr != nil || mediaType != "song" {
		http.Error(w, "Bad Request", http.StatusBadRequest)
		return nil, nil
	}

	// Validate the token, mediaID match, file existence, and freshness
	decoded, mf, err := api.transcodeDecision.ValidateTranscodeParams(ctx, token, mediaID)
	if err != nil {
		switch {
		case errors.Is(err, transcode.ErrMediaNotFound):
			http.Error(w, "Not Found", http.StatusNotFound)
		case errors.Is(err, transcode.ErrTokenInvalid), errors.Is(err, transcode.ErrTokenStale):
			http.Error(w, "Gone", http.StatusGone)
		default:
			log.Error(ctx, "Error validating transcode params", err)
			http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		}
		return nil, nil
	}

	// Build streaming parameters from the token; direct play streams the source untouched.
	streamReq := core.StreamRequest{ID: mediaID, Offset: params.IntOr("offset", 0)}
	if !decoded.DirectPlay && decoded.TargetFormat != "" {
		streamReq.Format = decoded.TargetFormat
		streamReq.BitRate = decoded.TargetBitrate // Already in kbps, matching the streamer
		streamReq.SampleRate = decoded.TargetSampleRate
		streamReq.BitDepth = decoded.TargetBitDepth
		streamReq.Channels = decoded.TargetChannels
	}

	// Create stream (use DoStream to avoid duplicate DB fetch)
	stream, err := api.streamer.DoStream(ctx, mf, streamReq)
	if err != nil {
		log.Error(ctx, "Error creating stream", "mediaID", mediaID, err)
		http.Error(w, "Internal Server Error", http.StatusInternalServerError)
		return nil, nil
	}
	// Make sure the stream will be closed at the end
	defer func() {
		if cerr := stream.Close(); cerr != nil && log.IsGreaterOrEqualTo(log.LevelDebug) {
			log.Error("Error closing stream", "id", mediaID, "file", stream.Name(), cerr)
		}
	}()

	w.Header().Set("X-Content-Type-Options", "nosniff")
	api.serveStream(ctx, w, r, stream, mediaID)
	return nil, nil
}

Some files were not shown because too many files have changed in this diff Show More